diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml index 5b2a2add5d..ab744149e7 100644 --- a/.mvn/extensions.xml +++ b/.mvn/extensions.xml @@ -27,4 +27,12 @@ <artifactId>maven-notifier</artifactId> <version>2.1.2</version> </extension> + + <extension> + <groupId>io.opentelemetry.contrib</groupId> + <artifactId>opentelemetry-maven-extension</artifactId> + <version>1.24.0-alpha</version> + </extension> </extensions> diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java index 732313c431..84d1e60d8d 100644 --- a/.mvn/wrapper/MavenWrapperDownloader.java +++ b/.mvn/wrapper/MavenWrapperDownloader.java @@ -23,85 +23,50 @@ import java.net.PasswordAuthentication; import java.net.URL; import java.nio.file.Files; -import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; -import java.nio.file.StandardOpenOption; -import java.util.Properties; public final class MavenWrapperDownloader { - private static final String WRAPPER_VERSION = "3.1.1"; + private static final String WRAPPER_VERSION = "3.2.0"; private static final boolean VERBOSE = Boolean.parseBoolean( System.getenv( "MVNW_VERBOSE" ) ); - /** - * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. - */ - private static final String DEFAULT_DOWNLOAD_URL = - "https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/" + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; - - /** - * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to use instead of the - * default one. - */ - private static final String MAVEN_WRAPPER_PROPERTIES_PATH = ".mvn/wrapper/maven-wrapper.properties"; - - /** - * Path where the maven-wrapper.jar will be saved to. - */ - private static final String MAVEN_WRAPPER_JAR_PATH = ".mvn/wrapper/maven-wrapper.jar"; - - /** - * Name of the property which should be used to override the default download url for the wrapper. - */ - private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; - public static void main( String[] args ) { - if ( args.length == 0 ) - { - System.err.println( " - ERROR projectBasedir parameter missing" ); - System.exit( 1 ); - } + log( "Apache Maven Wrapper Downloader " + WRAPPER_VERSION ); - log( " - Downloader started" ); - final String dir = args[0].replace( "..", "" ); // Sanitize path - final Path projectBasedir = Paths.get( dir ).toAbsolutePath().normalize(); - if ( !Files.isDirectory( projectBasedir, LinkOption.NOFOLLOW_LINKS ) ) + if ( args.length != 2 ) { - System.err.println( " - ERROR projectBasedir not exists: " + projectBasedir ); + System.err.println( " - ERROR wrapperUrl or wrapperJarPath parameter missing" ); System.exit( 1 ); } - log( " - Using base directory: " + projectBasedir ); - - // If the maven-wrapper.properties exists, read it and check if it contains a custom // wrapperUrl parameter.
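(For reference: the rewritten 3.2.0-style downloader above reduces to the following flow. This is a minimal, self-contained sketch assembled from the hunks in this file; the class name WrapperDownloadSketch is illustrative, and in the real wrapper it is the mvnw script that supplies the two arguments, taking the URL from maven-wrapper.properties.)

```java
import java.io.IOException;
import java.io.InputStream;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public final class WrapperDownloadSketch {
    public static void main(String[] args) throws IOException {
        // The new contract: exactly two arguments, the wrapper URL and the jar path.
        if (args.length != 2) {
            System.err.println("usage: WrapperDownloadSketch <wrapperUrl> <wrapperJarPath>");
            System.exit(1);
        }
        URL wrapperUrl = new URL(args[0]);
        Path jarPath = Paths.get(args[1].replace("..", "")).toAbsolutePath().normalize();

        // Optional HTTP basic-auth hook, driven by the same environment
        // variables the downloader reads (MVNW_USERNAME / MVNW_PASSWORD).
        String user = System.getenv("MVNW_USERNAME");
        String password = System.getenv("MVNW_PASSWORD");
        if (user != null && password != null) {
            Authenticator.setDefault(new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(user, password.toCharArray());
                }
            });
        }

        // Stream the jar to disk, overwriting any stale copy.
        try (InputStream in = wrapperUrl.openStream()) {
            Files.copy(in, jarPath, StandardCopyOption.REPLACE_EXISTING);
        }
    }
}
```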
- Path mavenWrapperPropertyFile = projectBasedir.resolve( MAVEN_WRAPPER_PROPERTIES_PATH ); - String url = readWrapperUrl( mavenWrapperPropertyFile ); - try { - Path outputFile = projectBasedir.resolve( MAVEN_WRAPPER_JAR_PATH ); - createDirectories( outputFile.getParent() ); - downloadFileFromURL( url, outputFile ); + log( " - Downloader started" ); + final URL wrapperUrl = new URL( args[0] ); + final String jarPath = args[1].replace( "..", "" ); // Sanitize path + final Path wrapperJarPath = Paths.get( jarPath ).toAbsolutePath().normalize(); + downloadFileFromURL( wrapperUrl, wrapperJarPath ); log( "Done" ); - System.exit( 0 ); } catch ( IOException e ) { - System.err.println( "- Error downloading" ); - e.printStackTrace(); + System.err.println( "- Error downloading: " + e.getMessage() ); + if ( VERBOSE ) + { + e.printStackTrace(); + } System.exit( 1 ); } } - private static void downloadFileFromURL( String urlString, Path destination ) throws IOException + private static void downloadFileFromURL( URL wrapperUrl, Path wrapperJarPath ) + throws IOException { - log( " - Downloading to: " + destination ); + log( " - Downloading to: " + wrapperJarPath ); if ( System.getenv( "MVNW_USERNAME" ) != null && System.getenv( "MVNW_PASSWORD" ) != null ) { final String username = System.getenv( "MVNW_USERNAME" ); @@ -115,40 +80,11 @@ protected PasswordAuthentication getPasswordAuthentication() } } ); } - URL website = new URL( urlString ); - try ( InputStream inStream = website.openStream() ) { - Files.copy( inStream, destination, StandardCopyOption.REPLACE_EXISTING ); - } - log( " - Downloader complete" ); - } - - private static void createDirectories(Path outputPath) throws IOException - { - if ( !Files.isDirectory( outputPath, LinkOption.NOFOLLOW_LINKS ) ) { - Path createDirectories = Files.createDirectories( outputPath ); - log( " - Directories created: " + createDirectories ); - } - } - - private static String readWrapperUrl( Path mavenWrapperPropertyFile ) - { - String url = DEFAULT_DOWNLOAD_URL; - if ( Files.exists( mavenWrapperPropertyFile, LinkOption.NOFOLLOW_LINKS ) ) + try ( InputStream inStream = wrapperUrl.openStream() ) { - log( " - Reading property file: " + mavenWrapperPropertyFile ); - try ( InputStream in = Files.newInputStream( mavenWrapperPropertyFile, StandardOpenOption.READ ) ) - { - Properties mavenWrapperProperties = new Properties(); - mavenWrapperProperties.load( in ); - url = mavenWrapperProperties.getProperty( PROPERTY_NAME_WRAPPER_URL, DEFAULT_DOWNLOAD_URL ); - } - catch ( IOException e ) - { - System.err.println( " - ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'" ); - } + Files.copy( inStream, wrapperJarPath, StandardCopyOption.REPLACE_EXISTING ); } - log( " - Downloading from: " + url ); - return url; + log( " - Downloader complete" ); } private static void log( String msg ) diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index dc3affce3d..d8b2495a1e 100644 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -6,7 +6,7 @@ # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an @@ -14,5 +14,5 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. -distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip -wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar diff --git a/README.md b/README.md index 90fe547da0..e80fb73961 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Documentation is available at https://docs.dremio.com. * JDK 8 or 11 (OpenJDK or Oracle) as the default JDK (`JAVA_HOME` set to it) * JDK 8 (OpenJDK or Oracle) in Maven toolchain, required to run certain integration tests -* (Optional) Maven 3.3.9 or later (using Homebrew: `brew install maven`) +* (Optional) Maven 3.9.1 or later (using Homebrew: `brew install maven`) Run the following commands to verify that you have the correct versions of Maven and JDK installed: diff --git a/client/base/pom.xml b/client/base/pom.xml index c9380e8658..be2da7417d 100644 --- a/client/base/pom.xml +++ b/client/base/pom.xml @@ -22,7 +22,7 @@ <groupId>com.dremio.client</groupId> <artifactId>dremio-client-parent</artifactId> - <version>24.0.0-202302100528110223-3a169b7c</version> + <version>24.1.0-202306130653310132-d30779f6</version> <artifactId>dremio-client-base</artifactId> diff --git a/client/base/src/main/java/com/dremio/exec/client/DremioClient.java b/client/base/src/main/java/com/dremio/exec/client/DremioClient.java index 50c4a23d7d..1bb2da2344 100644 --- a/client/base/src/main/java/com/dremio/exec/client/DremioClient.java +++ b/client/base/src/main/java/com/dremio/exec/client/DremioClient.java @@ -148,6 +148,11 @@ public ServiceSet getOrCreateServiceSet(String serviceName) { return clusterCoordinator.getOrCreateServiceSet(serviceName); } + @Override + public void deleteServiceSet(String serviceName) { + clusterCoordinator.deleteServiceSet(serviceName); + } + @Override public Iterable<String> getServiceNames() throws Exception { return clusterCoordinator.getServiceNames(); @@ -852,6 +857,7 @@ void cleanUpResources() { resources.add(clusterCoordinator); resources.add(new AutoCloseable() { + @Override public void close() throws Exception { try { eventLoopGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS).sync(); diff --git a/client/jdbc/pom.xml b/client/jdbc/pom.xml index 035d0c4236..292ef6fba8 100644 --- a/client/jdbc/pom.xml +++ b/client/jdbc/pom.xml @@ -21,7 +21,7 @@ <groupId>com.dremio.client</groupId> <artifactId>dremio-client-parent</artifactId> - <version>24.0.0-202302100528110223-3a169b7c</version> + <version>24.1.0-202306130653310132-d30779f6</version> <artifactId>dremio-client-jdbc</artifactId> <name>Client - JDBC Driver</name> diff --git a/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java b/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java index e7f8b08a53..ac881bf9c4 100644 --- a/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java +++ b/client/jdbc/src/main/java/com/dremio/exec/vector/accessor/BitAccessor.java @@ -59,6 +59,7 @@ public int getInt(int index) { return ac.get(index); } + @Override public boolean getBoolean(int index) { if (ac.isNull(index)) { return false; diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java index ec955a2177..f199b1d97d 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java +++ 
b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioConnectionImpl.java @@ -94,7 +94,8 @@ protected DremioConnectionImpl(DriverImpl driver, AvaticaFactory factory, this.client = new DremioClient(driver.getSabotConfig(), config.isDirect()); final String connect = config.getZookeeperConnectionString(); - this.client.setClientName("Dremio JDBC Driver"); + this.client.setClientName(isDremioToDremio(info) ? "Dremio-to-Dremio" : "Dremio JDBC Driver"); + this.client.connect(connect, info); } catch (OutOfMemoryException e) { throw new SQLException("Failure creating root allocator", e); @@ -106,6 +107,10 @@ protected DremioConnectionImpl(DriverImpl driver, AvaticaFactory factory, } } + private boolean isDremioToDremio(Properties info) { + return Boolean.parseBoolean((String) info.get("D2D")); + } + @Override protected AvaticaStatement lookupStatement(StatementHandle h) throws SQLException { return super.lookupStatement(h); @@ -159,8 +164,7 @@ public void commit() throws SQLException { throwIfClosed(); if ( getAutoCommit() ) { throw new JdbcApiSqlException( "Can't call commit() in auto-commit mode." ); - } - else { + } else { // (Currently not reachable.) throw new SQLFeatureNotSupportedException( "Connection.commit() is not supported. (Dremio is not transactional.)" ); @@ -172,8 +176,7 @@ public void rollback() throws SQLException { throwIfClosed(); if ( getAutoCommit() ) { throw new JdbcApiSqlException( "Can't call rollback() in auto-commit mode." ); - } - else { + } else { // (Currently not reachable.) throw new SQLFeatureNotSupportedException( "Connection.rollback() is not supported. (Dremio is not transactional.)" ); @@ -256,13 +259,11 @@ public void setNetworkTimeout( Executor executor, int milliseconds ) if ( null == executor ) { throw new InvalidParameterSqlException( "Invalid (null) \"executor\" parameter to setNetworkTimeout(...)" ); - } - else if ( milliseconds < 0 ) { + } else if ( milliseconds < 0 ) { throw new InvalidParameterSqlException( "Invalid (negative) \"milliseconds\" parameter to" + " setNetworkTimeout(...) (" + milliseconds + ")" ); - } - else { + } else { if ( 0 != milliseconds ) { throw new SQLFeatureNotSupportedException( "Setting network timeout is not supported." 
); @@ -336,8 +337,7 @@ public CallableStatement prepareCall(String sql) throws SQLException { throwIfClosed(); try { return super.prepareCall(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -347,8 +347,7 @@ public String nativeSQL(String sql) throws SQLException { throwIfClosed(); try { return super.nativeSQL(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -442,8 +441,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, throwIfClosed(); try { return super.prepareCall(sql, resultSetType, resultSetConcurrency); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -453,8 +451,7 @@ public Map> getTypeMap() throws SQLException { throwIfClosed(); try { return super.getTypeMap(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -464,8 +461,7 @@ public void setTypeMap(Map> map) throws SQLException { throwIfClosed(); try { super.setTypeMap(map); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -490,8 +486,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, try { return super.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -502,8 +497,7 @@ public PreparedStatement prepareStatement(String sql, throwIfClosed(); try { return super.prepareStatement(sql, autoGeneratedKeys); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -514,8 +508,7 @@ public PreparedStatement prepareStatement(String sql, throwIfClosed(); try { return super.prepareStatement(sql, columnIndexes); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -526,8 +519,7 @@ public PreparedStatement prepareStatement(String sql, throwIfClosed(); try { return super.prepareStatement(sql, columnNames); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -537,8 +529,7 @@ public Clob createClob() throws SQLException { throwIfClosed(); try { return super.createClob(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -548,8 +539,7 @@ public Blob createBlob() throws SQLException { throwIfClosed(); try { return super.createBlob(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -559,8 +549,7 @@ public NClob createNClob() throws SQLException { throwIfClosed(); try { return super.createNClob(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new 
SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -570,8 +559,7 @@ public SQLXML createSQLXML() throws SQLException { throwIfClosed(); try { return super.createSQLXML(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -590,8 +578,7 @@ public void setClientInfo(String name, String value) throws SQLClientInfoExcepti } try { super.setClientInfo(name, value); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { SQLFeatureNotSupportedException intended = new SQLFeatureNotSupportedException(e.getMessage(), e); throw new SQLClientInfoException(e.getMessage(), null, intended); @@ -607,8 +594,7 @@ public void setClientInfo(Properties properties) throws SQLClientInfoException { } try { super.setClientInfo(properties); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { SQLFeatureNotSupportedException intended = new SQLFeatureNotSupportedException(e.getMessage(), e); throw new SQLClientInfoException(e.getMessage(), null, intended); @@ -620,8 +606,7 @@ public String getClientInfo(String name) throws SQLException { throwIfClosed(); try { return super.getClientInfo(name); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -631,8 +616,7 @@ public Properties getClientInfo() throws SQLException { throwIfClosed(); try { return super.getClientInfo(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -642,8 +626,7 @@ public Array createArrayOf(String typeName, Object[] elements) throws SQLExcepti throwIfClosed(); try { return super.createArrayOf(typeName, elements); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -653,8 +636,7 @@ public Struct createStruct(String typeName, Object[] attributes) throws SQLExcep throwIfClosed(); try { return super.createStruct(typeName, attributes); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -676,8 +658,7 @@ public void abort(Executor executor) throws SQLException { throwIfClosed(); try { super.abort(executor); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java index 5ab1b2487b..031c7cf65b 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioCursor.java @@ -518,8 +518,7 @@ private boolean nextRowInternally() throws SQLException { try { schemaChanged = currentBatchHolder.load(qrb.getHeader().getDef(), qrb.getData()); - } - finally { + } finally { qrb.release(); } schema = currentBatchHolder.getSchema(); @@ -533,32 +532,27 @@ private boolean nextRowInternally() throws SQLException { } return true; } - } - catch ( UserException e ) { + } catch ( UserException e ) { // A normally expected case--for any server-side error (e.g., syntax // error in SQL statement). 
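(Aside from the exception translation in DremioCursor here, the bulk of the remaining JDBC hunks in this patch are a single mechanical style change: the split `}` / `catch` and `}` / `else` layout is collapsed onto one line, with no behavioral change. A minimal self-contained sketch of the wrapper idiom being reformatted; the class and method names below are illustrative, not taken from the patch:)

```java
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;

public final class CatchStyleSketch {
    // Stand-in for an Avatica super-call that the driver does not support.
    private static String unsupportedOperation() {
        throw new UnsupportedOperationException("nativeSQL");
    }

    // The recurring idiom: forward to the underlying implementation and
    // translate UnsupportedOperationException into the standard JDBC
    // SQLFeatureNotSupportedException, now in the one-line "} catch" form.
    public static String nativeSql(String sql) throws SQLException {
        try {
            return unsupportedOperation();
        } catch (UnsupportedOperationException e) {
            throw new SQLFeatureNotSupportedException(e.getMessage(), e);
        }
    }

    public static void main(String[] args) {
        try {
            nativeSql("SELECT 1");
        } catch (SQLException e) {
            // Prints: SQLFeatureNotSupportedException: nativeSQL
            System.out.println(e.getClass().getSimpleName() + ": " + e.getMessage());
        }
    }
}
```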
// Construct SQLException with message text from the UserException. // TODO: Map UserException error type to SQLException subclass (once // error type is accessible, of course. :-( ) throw new SQLException( e.getMessage(), e ); - } - catch ( TimeoutException e ) { + } catch ( TimeoutException e ) { throw new SqlTimeoutException( String.format("Cancelled after expiration of timeout of %d seconds.", statement.getQueryTimeout()), e); - } - catch ( InterruptedException e ) { + } catch ( InterruptedException e ) { // Not normally expected--Dremio doesn't interrupt in this area (right?)-- // but JDBC client certainly could. throw new SQLException( "Interrupted.", e ); - } - catch ( SchemaChangeException e ) { + } catch ( SchemaChangeException e ) { // TODO: Clean: DRILL-2933: RecordBatchLoader.load(...) no longer // throws SchemaChangeException, so check/clean catch clause. throw new SQLException( "Unexpected SchemaChangeException from RecordBatchLoader.load(...)" ); - } - catch ( RuntimeException e ) { + } catch ( RuntimeException e ) { throw new SQLException( "Unexpected RuntimeException: " + e.toString(), e ); } @@ -649,14 +643,12 @@ public boolean next() throws SQLException { if ( afterLastRow ) { // We're already after end of rows/records--just report that after end. return false; - } - else if ( returnTrueForNextCallToNext ) { + } else if ( returnTrueForNextCallToNext ) { ++currentRowNumber; // We have a deferred "not after end" to report--reset and report that. returnTrueForNextCallToNext = false; return true; - } - else { + } else { accessors.clearLastColumnIndexedInRow(); boolean res = nextRowInternally(); if (res) { ++ currentRowNumber; } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java index 9ad9c8ca13..3970732e2d 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioDatabaseMetaDataImpl.java @@ -1277,13 +1277,11 @@ public boolean ownUpdatesAreVisible(int type) throws SQLException { throwIfClosed(); try { return super.ownUpdatesAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "ownUpdatesAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1294,13 +1292,11 @@ public boolean ownDeletesAreVisible(int type) throws SQLException { throwIfClosed(); try { return super.ownDeletesAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "ownDeletesAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1311,13 +1307,11 @@ public boolean ownInsertsAreVisible(int type) throws SQLException { throwIfClosed(); try { return super.ownInsertsAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "ownInsertsAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1328,13 +1322,11 @@ public boolean othersUpdatesAreVisible(int type) throws SQLException { throwIfClosed(); try { return 
super.othersUpdatesAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "othersUpdatesAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1345,13 +1337,11 @@ public boolean othersDeletesAreVisible(int type) throws SQLException { throwIfClosed(); try { return super.othersDeletesAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "othersDeletesAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1362,13 +1352,11 @@ public boolean othersInsertsAreVisible(int type) throws SQLException { throwIfClosed(); try { return super.othersInsertsAreVisible(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "othersInsertsAreVisible(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1379,13 +1367,11 @@ public boolean updatesAreDetected(int type) throws SQLException { throwIfClosed(); try { return super.updatesAreDetected(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "updatesAreDetected(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1396,13 +1382,11 @@ public boolean deletesAreDetected(int type) throws SQLException { throwIfClosed(); try { return super.deletesAreDetected(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "deletesAreDetected(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1413,13 +1397,11 @@ public boolean insertsAreDetected(int type) throws SQLException { throwIfClosed(); try { return super.insertsAreDetected(type); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "insertsAreDetected(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } @@ -1497,13 +1479,11 @@ public boolean supportsResultSetHoldability(int holdability) throws SQLException throwIfClosed(); try { return super.supportsResultSetHoldability(holdability); - } - catch (RuntimeException e) { + } catch (RuntimeException e) { if ("todo: implement this method".equals(e.getMessage())) { throw new SQLFeatureNotSupportedException( "supportsResultSetHoldability(int) is not supported", e); - } - else { + } else { throw new SQLException(e.getMessage(), e); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java index af39e77e5a..4b1c77b8d4 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioMetaImpl.java @@ -75,8 +75,7 @@ private DremioMeta getDelegate() throws SQLException { if (connection.getConfig().isServerMetadataDisabled() || ! 
connection.getClient().getSupportedMethods().containsAll(requiredMetaMethods)) { delegate = new DremioMetaClientImpl(connection); - } - else { + } else { delegate = new DremioMetaServerImpl(connection); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java index d0a6871dff..74d6585291 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioPreparedStatementImpl.java @@ -121,8 +121,7 @@ public ResultSet executeQuery(String sql) throws SQLException { throwIfClosed(); try { return super.executeQuery(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -132,8 +131,7 @@ public long executeLargeUpdate(String sql) throws SQLException { throwIfClosed(); try { return super.executeLargeUpdate(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -145,8 +143,7 @@ public int getMaxFieldSize() throws SQLException { throwIfClosed(); try { return super.getMaxFieldSize(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -156,8 +153,7 @@ public void setMaxFieldSize(int max) throws SQLException { throwIfClosed(); try { super.setMaxFieldSize(max); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -185,8 +181,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException { throwIfClosed(); try { super.setEscapeProcessing(enable); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -226,8 +221,7 @@ public void setCursorName(String name) throws SQLException { throwIfClosed(); try { super.setCursorName(name); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -255,8 +249,7 @@ public boolean getMoreResults() throws SQLException { throwIfClosed(); try { return super.getMoreResults(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -290,8 +283,7 @@ public int getResultSetConcurrency() throws SQLException { throwIfClosed(); try { return super.getResultSetConcurrency(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -301,8 +293,7 @@ public int getResultSetType() throws SQLException { throwIfClosed(); try { return super.getResultSetType(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -312,8 +303,7 @@ public void addBatch(String sql) throws SQLException { throwIfClosed(); try { super.addBatch(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -329,8 +319,7 @@ public 
int[] executeBatch() throws SQLException { throwIfClosed(); try { return super.executeBatch(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -340,8 +329,7 @@ public boolean getMoreResults(int current) throws SQLException { throwIfClosed(); try { return super.getMoreResults(current); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -351,8 +339,7 @@ public ResultSet getGeneratedKeys() throws SQLException { throwIfClosed(); try { return super.getGeneratedKeys(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -362,8 +349,7 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException throwIfClosed(); try { return super.executeUpdate(sql, autoGeneratedKeys); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -373,8 +359,7 @@ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { throwIfClosed(); try { return super.executeUpdate(sql, columnIndexes); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -384,8 +369,7 @@ public int executeUpdate(String sql, String[] columnNames) throws SQLException { throwIfClosed(); try { return super.executeUpdate(sql, columnNames); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -395,8 +379,7 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { throwIfClosed(); try { return super.execute(sql, autoGeneratedKeys); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -406,8 +389,7 @@ public boolean execute(String sql, int[] columnIndexes) throws SQLException { throwIfClosed(); try { return super.execute(sql, columnIndexes); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -417,8 +399,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { throwIfClosed(); try { return super.execute(sql, columnNames); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -428,8 +409,7 @@ public int getResultSetHoldability() throws SQLException { throwIfClosed(); try { return super.getResultSetHoldability(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -449,8 +429,7 @@ public void setPoolable(boolean poolable) throws SQLException { throwIfClosed(); try { super.setPoolable(poolable); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -460,8 +439,7 @@ public boolean isPoolable() throws SQLException { throwIfClosed(); try { return super.isPoolable(); - } - catch 
(UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -489,8 +467,7 @@ public long executeLargeUpdate() throws SQLException { throwIfClosed(); try { return super.executeLargeUpdate(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -519,8 +496,7 @@ public void clearParameters() throws SQLException { throwIfClosed(); try { super.clearParameters(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -534,8 +510,7 @@ public boolean execute() throws SQLException { throwIfClosed(); try { return super.execute(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -545,8 +520,7 @@ public void addBatch() throws SQLException { throwIfClosed(); try { super.addBatch(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java index b581ddc95c..ba7884737a 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioResultSetImpl.java @@ -94,8 +94,7 @@ private void throwIfClosed() throws AlreadyClosedSqlException, hasPendingCancelationNotification = false; throw new ExecutionCanceledSqlException( "SQL statement execution canceled; ResultSet now closed." ); - } - else { + } else { throw new AlreadyClosedSqlException( "ResultSet is already closed." 
); } } @@ -376,8 +375,7 @@ public String getCursorName() throws SQLException { throwIfClosed(); try { return super.getCursorName(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -472,8 +470,7 @@ public boolean isLast() throws SQLException { throwIfClosed(); try { return super.isLast(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -483,8 +480,7 @@ public void beforeFirst() throws SQLException { throwIfClosed(); try { super.beforeFirst(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -494,8 +490,7 @@ public void afterLast() throws SQLException { throwIfClosed(); try { super.afterLast(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -505,8 +500,7 @@ public boolean first() throws SQLException { throwIfClosed(); try { return super.first(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -516,8 +510,7 @@ public boolean last() throws SQLException { throwIfClosed(); try { return super.last(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -535,8 +528,7 @@ public boolean absolute( int row ) throws SQLException { throwIfClosed(); try { return super.absolute( row ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -546,8 +538,7 @@ public boolean relative( int rows ) throws SQLException { throwIfClosed(); try { return super.relative( rows ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -557,8 +548,7 @@ public boolean previous() throws SQLException { throwIfClosed(); try { return super.previous(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -629,8 +619,7 @@ public void updateNull( int columnIndex ) throws SQLException { throwIfClosed(); try { super.updateNull( columnIndex ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -640,8 +629,7 @@ public void updateBoolean( int columnIndex, boolean x ) throws SQLException { throwIfClosed(); try { super.updateBoolean( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -651,8 +639,7 @@ public void updateByte( int columnIndex, byte x ) throws SQLException { throwIfClosed(); try { super.updateByte( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -662,8 +649,7 @@ public void updateShort( int columnIndex, short x ) throws SQLException { throwIfClosed(); try { 
super.updateShort( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -673,8 +659,7 @@ public void updateInt( int columnIndex, int x ) throws SQLException { throwIfClosed(); try { super.updateInt( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -684,8 +669,7 @@ public void updateLong( int columnIndex, long x ) throws SQLException { throwIfClosed(); try { super.updateLong( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -695,8 +679,7 @@ public void updateFloat( int columnIndex, float x ) throws SQLException { throwIfClosed(); try { super.updateFloat( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -706,8 +689,7 @@ public void updateDouble( int columnIndex, double x ) throws SQLException { throwIfClosed(); try { super.updateDouble( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -718,8 +700,7 @@ public void updateBigDecimal( int columnIndex, throwIfClosed(); try { super.updateBigDecimal( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -729,8 +710,7 @@ public void updateString( int columnIndex, String x ) throws SQLException { throwIfClosed(); try { super.updateString( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -740,8 +720,7 @@ public void updateBytes( int columnIndex, byte[] x ) throws SQLException { throwIfClosed(); try { super.updateBytes( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -751,8 +730,7 @@ public void updateDate( int columnIndex, Date x ) throws SQLException { throwIfClosed(); try { super.updateDate( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -762,8 +740,7 @@ public void updateTime( int columnIndex, Time x ) throws SQLException { throwIfClosed(); try { super.updateTime( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -773,8 +750,7 @@ public void updateTimestamp( int columnIndex, Timestamp x ) throws SQLException throwIfClosed(); try { super.updateTimestamp( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -785,8 +761,7 @@ public void updateAsciiStream( int columnIndex, InputStream x, throwIfClosed(); try { super.updateAsciiStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch 
(UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -797,8 +772,7 @@ public void updateBinaryStream( int columnIndex, InputStream x, throwIfClosed(); try { super.updateBinaryStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -809,8 +783,7 @@ public void updateCharacterStream( int columnIndex, Reader x, throwIfClosed(); try { super.updateCharacterStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -821,8 +794,7 @@ public void updateObject( int columnIndex, Object x, throwIfClosed(); try { super.updateObject( columnIndex, x, scaleOrLength ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -832,8 +804,7 @@ public void updateObject( int columnIndex, Object x ) throws SQLException { throwIfClosed(); try { super.updateObject( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -843,8 +814,7 @@ public void updateNull( String columnLabel ) throws SQLException { throwIfClosed(); try { super.updateNull( columnLabel ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -854,8 +824,7 @@ public void updateBoolean( String columnLabel, boolean x ) throws SQLException { throwIfClosed(); try { super.updateBoolean( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -865,8 +834,7 @@ public void updateByte( String columnLabel, byte x ) throws SQLException { throwIfClosed(); try { super.updateByte( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -876,8 +844,7 @@ public void updateShort( String columnLabel, short x ) throws SQLException { throwIfClosed(); try { super.updateShort( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -887,8 +854,7 @@ public void updateInt( String columnLabel, int x ) throws SQLException { throwIfClosed(); try { super.updateInt( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -898,8 +864,7 @@ public void updateLong( String columnLabel, long x ) throws SQLException { throwIfClosed(); try { super.updateLong( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -909,8 +874,7 @@ public void updateFloat( String columnLabel, float x ) throws SQLException { throwIfClosed(); try { super.updateFloat( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new 
SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -920,8 +884,7 @@ public void updateDouble( String columnLabel, double x ) throws SQLException { throwIfClosed(); try { super.updateDouble( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -932,8 +895,7 @@ public void updateBigDecimal( String columnLabel, throwIfClosed(); try { super.updateBigDecimal( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -943,8 +905,7 @@ public void updateString( String columnLabel, String x ) throws SQLException { throwIfClosed(); try { super.updateString( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -954,8 +915,7 @@ public void updateBytes( String columnLabel, byte[] x ) throws SQLException { throwIfClosed(); try { super.updateBytes( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -965,8 +925,7 @@ public void updateDate( String columnLabel, Date x ) throws SQLException { throwIfClosed(); try { super.updateDate( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -976,8 +935,7 @@ public void updateTime( String columnLabel, Time x ) throws SQLException { throwIfClosed(); try { super.updateTime( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -987,8 +945,7 @@ public void updateTimestamp( String columnLabel, Timestamp x ) throws SQLExcepti throwIfClosed(); try { super.updateTimestamp( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -999,8 +956,7 @@ public void updateAsciiStream( String columnLabel, InputStream x, throwIfClosed(); try { super.updateAsciiStream( columnLabel, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1011,8 +967,7 @@ public void updateBinaryStream( String columnLabel, InputStream x, throwIfClosed(); try { super.updateBinaryStream( columnLabel, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1023,8 +978,7 @@ public void updateCharacterStream( String columnLabel, Reader reader, throwIfClosed(); try { super.updateCharacterStream( columnLabel, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1035,8 +989,7 @@ public void updateObject( String columnLabel, Object x, throwIfClosed(); try { super.updateObject( columnLabel, x, scaleOrLength ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ 
-1046,8 +999,7 @@ public void updateObject( String columnLabel, Object x ) throws SQLException { throwIfClosed(); try { super.updateObject( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1057,8 +1009,7 @@ public void insertRow() throws SQLException { throwIfClosed(); try { super.insertRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1068,8 +1019,7 @@ public void updateRow() throws SQLException { throwIfClosed(); try { super.updateRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1079,8 +1029,7 @@ public void deleteRow() throws SQLException { throwIfClosed(); try { super.deleteRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1090,8 +1039,7 @@ public void refreshRow() throws SQLException { throwIfClosed(); try { super.refreshRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1101,8 +1049,7 @@ public void cancelRowUpdates() throws SQLException { throwIfClosed(); try { super.cancelRowUpdates(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1112,8 +1059,7 @@ public void moveToInsertRow() throws SQLException { throwIfClosed(); try { super.moveToInsertRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1123,8 +1069,7 @@ public void moveToCurrentRow() throws SQLException { throwIfClosed(); try { super.moveToCurrentRow(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1253,8 +1198,7 @@ public void updateRef( int columnIndex, Ref x ) throws SQLException { throwIfClosed(); try { super.updateRef( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1264,8 +1208,7 @@ public void updateRef( String columnLabel, Ref x ) throws SQLException { throwIfClosed(); try { super.updateRef( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1275,8 +1218,7 @@ public void updateBlob( int columnIndex, Blob x ) throws SQLException { throwIfClosed(); try { super.updateBlob( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1286,8 +1228,7 @@ public void updateBlob( String columnLabel, Blob x ) throws SQLException { throwIfClosed(); try { super.updateBlob( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1297,8 +1238,7 @@ public void updateClob( int 
columnIndex, Clob x ) throws SQLException { throwIfClosed(); try { super.updateClob( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1308,8 +1248,7 @@ public void updateClob( String columnLabel, Clob x ) throws SQLException { throwIfClosed(); try { super.updateClob( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1319,8 +1258,7 @@ public void updateArray( int columnIndex, Array x ) throws SQLException { throwIfClosed(); try { super.updateArray( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1330,8 +1268,7 @@ public void updateArray( String columnLabel, Array x ) throws SQLException { throwIfClosed(); try { super.updateArray( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1342,8 +1279,7 @@ public RowId getRowId( int columnIndex ) throws SQLException { throwIfClosed(); try { return super.getRowId( columnIndex ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1353,8 +1289,7 @@ public RowId getRowId( String columnLabel ) throws SQLException { throwIfClosed(); try { return super.getRowId( columnLabel ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1364,8 +1299,7 @@ public void updateRowId( int columnIndex, RowId x ) throws SQLException { throwIfClosed(); try { super.updateRowId( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1375,8 +1309,7 @@ public void updateRowId( String columnLabel, RowId x ) throws SQLException { throwIfClosed(); try { super.updateRowId( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1398,8 +1331,7 @@ public void updateNString( int columnIndex, String nString ) throws SQLException throwIfClosed(); try { super.updateNString( columnIndex, nString ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1410,8 +1342,7 @@ public void updateNString( String columnLabel, throwIfClosed(); try { super.updateNString( columnLabel, nString ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1421,8 +1352,7 @@ public void updateNClob( int columnIndex, NClob nClob ) throws SQLException { throwIfClosed(); try { super.updateNClob( columnIndex, nClob ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1432,8 +1362,7 @@ public void updateNClob( String columnLabel, NClob nClob ) throws SQLException { throwIfClosed(); try { 
super.updateNClob( columnLabel, nClob ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1468,8 +1397,7 @@ public void updateSQLXML( int columnIndex, throwIfClosed(); try { super.updateSQLXML( columnIndex, xmlObject ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1480,8 +1408,7 @@ public void updateSQLXML( String columnLabel, throwIfClosed(); try { super.updateSQLXML( columnLabel, xmlObject ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1516,8 +1443,7 @@ public void updateNCharacterStream( int columnIndex, Reader x, throwIfClosed(); try { super.updateNCharacterStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1528,8 +1454,7 @@ public void updateNCharacterStream( String columnLabel, Reader reader, throwIfClosed(); try { super.updateNCharacterStream( columnLabel, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1540,8 +1465,7 @@ public void updateAsciiStream( int columnIndex, InputStream x, throwIfClosed(); try { super.updateAsciiStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1552,8 +1476,7 @@ public void updateBinaryStream( int columnIndex, InputStream x, throwIfClosed(); try { super.updateBinaryStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1564,8 +1487,7 @@ public void updateCharacterStream( int columnIndex, Reader x, throwIfClosed(); try { super.updateCharacterStream( columnIndex, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1576,8 +1498,7 @@ public void updateAsciiStream( String columnLabel, InputStream x, throwIfClosed(); try { super.updateAsciiStream( columnLabel, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1588,8 +1509,7 @@ public void updateBinaryStream( String columnLabel, InputStream x, throwIfClosed(); try { super.updateBinaryStream( columnLabel, x, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1600,8 +1520,7 @@ public void updateCharacterStream( String columnLabel, Reader reader, throwIfClosed(); try { super.updateCharacterStream( columnLabel, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1612,8 +1531,7 @@ public void updateBlob( int columnIndex, InputStream inputStream, throwIfClosed(); try { super.updateBlob( columnIndex, inputStream, 
length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1624,8 +1542,7 @@ public void updateBlob( String columnLabel, InputStream inputStream, throwIfClosed(); try { super.updateBlob( columnLabel, inputStream, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1636,8 +1553,7 @@ public void updateClob( int columnIndex, Reader reader, throwIfClosed(); try { super.updateClob( columnIndex, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1648,8 +1564,7 @@ public void updateClob( String columnLabel, Reader reader, throwIfClosed(); try { super.updateClob( columnLabel, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1660,8 +1575,7 @@ public void updateNClob( int columnIndex, Reader reader, throwIfClosed(); try { super.updateNClob( columnIndex, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1672,8 +1586,7 @@ public void updateNClob( String columnLabel, Reader reader, throwIfClosed(); try { super.updateNClob( columnLabel, reader, length ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1685,8 +1598,7 @@ public void updateNCharacterStream( int columnIndex, throwIfClosed(); try { super.updateNCharacterStream( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1697,8 +1609,7 @@ public void updateNCharacterStream( String columnLabel, throwIfClosed(); try { super.updateNCharacterStream( columnLabel, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1709,8 +1620,7 @@ public void updateAsciiStream( int columnIndex, throwIfClosed(); try { super.updateAsciiStream( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1721,8 +1631,7 @@ public void updateBinaryStream( int columnIndex, throwIfClosed(); try { super.updateBinaryStream( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1733,8 +1642,7 @@ public void updateCharacterStream( int columnIndex, throwIfClosed(); try { super.updateCharacterStream( columnIndex, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1745,8 +1653,7 @@ public void updateAsciiStream( String columnLabel, throwIfClosed(); try { super.updateAsciiStream( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new 
SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1757,8 +1664,7 @@ public void updateBinaryStream( String columnLabel, throwIfClosed(); try { super.updateBinaryStream( columnLabel, x ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1769,8 +1675,7 @@ public void updateCharacterStream( String columnLabel, throwIfClosed(); try { super.updateCharacterStream( columnLabel, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1781,8 +1686,7 @@ public void updateBlob( int columnIndex, throwIfClosed(); try { super.updateBlob( columnIndex, inputStream ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1793,8 +1697,7 @@ public void updateBlob( String columnLabel, throwIfClosed(); try { super.updateBlob( columnLabel, inputStream ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1804,8 +1707,7 @@ public void updateClob( int columnIndex, Reader reader ) throws SQLException { throwIfClosed(); try { super.updateClob( columnIndex, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1815,8 +1717,7 @@ public void updateClob( String columnLabel, Reader reader ) throws SQLException throwIfClosed(); try { super.updateClob( columnLabel, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1826,8 +1727,7 @@ public void updateNClob( int columnIndex, Reader reader ) throws SQLException { throwIfClosed(); try { super.updateNClob( columnIndex, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1837,8 +1737,7 @@ public void updateNClob( String columnLabel, Reader reader ) throws SQLExceptio throwIfClosed(); try { super.updateNClob( columnLabel, reader ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -1878,8 +1777,7 @@ protected DremioResultSetImpl execute() throws SQLException{ if (signature.cursorFactory != null) { // Avatica accessors have to be wrapped to match Dremio behaviour regarding exception thrown super.execute(); - } - else { + } else { DremioCursor cursor = new DremioCursor(connection, statement, signature); super.execute2(cursor, this.signature.columns); diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java index aaf66f95eb..502f7829a8 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementImpl.java @@ -83,8 +83,7 @@ private SQLException unwrapIfExtra( final SQLException superMethodException ) { final Throwable cause = superMethodException.getCause(); if ( null != cause && cause instanceof SQLException ) { result = (SQLException) cause; - } - else { + } else 
{ result = superMethodException; } return result; @@ -95,8 +94,7 @@ public boolean execute( String sql ) throws SQLException { throwIfClosed(); try { return super.execute( sql ); - } - catch ( final SQLException possiblyExtraWrapperException ) { + } catch ( final SQLException possiblyExtraWrapperException ) { throw unwrapIfExtra( possiblyExtraWrapperException ); } } @@ -106,8 +104,7 @@ public ResultSet executeQuery( String sql ) throws SQLException { try { throwIfClosed(); return super.executeQuery( sql ); - } - catch ( final SQLException possiblyExtraWrapperException ) { + } catch ( final SQLException possiblyExtraWrapperException ) { throw unwrapIfExtra( possiblyExtraWrapperException ); } } @@ -117,8 +114,7 @@ public long executeLargeUpdate( String sql ) throws SQLException { throwIfClosed(); try { return super.executeLargeUpdate( sql ); - } - catch ( final SQLException possiblyExtraWrapperException ) { + } catch ( final SQLException possiblyExtraWrapperException ) { throw unwrapIfExtra( possiblyExtraWrapperException ); } } @@ -128,8 +124,7 @@ public int executeUpdate( String sql, int[] columnIndexes ) throws SQLException throwIfClosed(); try { return super.executeUpdate( sql, columnIndexes ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -139,8 +134,7 @@ public int executeUpdate( String sql, String[] columnNames ) throws SQLException throwIfClosed(); try { return super.executeUpdate( sql, columnNames ); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -181,8 +175,7 @@ public int getMaxFieldSize() throws SQLException { throwIfClosed(); try { return super.getMaxFieldSize(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -192,8 +185,7 @@ public void setMaxFieldSize(int max) throws SQLException { throwIfClosed(); try { super.setMaxFieldSize(max); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -221,8 +213,7 @@ public void setEscapeProcessing(boolean enable) throws SQLException { throwIfClosed(); try { super.setEscapeProcessing(enable); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -250,8 +241,7 @@ public void setCursorName(String name) throws SQLException { throwIfClosed(); try { super.setCursorName(name); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -273,8 +263,7 @@ public boolean getMoreResults() throws SQLException { throwIfClosed(); try { return super.getMoreResults(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -308,8 +297,7 @@ public int getResultSetConcurrency() throws SQLException { throwIfClosed(); try { return super.getResultSetConcurrency(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -319,8 +307,7 @@ public int getResultSetType() throws SQLException { 
throwIfClosed(); try { return super.getResultSetType(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -330,8 +317,7 @@ public void addBatch(String sql) throws SQLException { throwIfClosed(); try { super.addBatch(sql); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -341,8 +327,7 @@ public void clearBatch() throws SQLException { throwIfClosed(); try { super.clearBatch(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -352,8 +337,7 @@ public int[] executeBatch() throws SQLException { throwIfClosed(); try { return super.executeBatch(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -363,8 +347,7 @@ public boolean getMoreResults(int current) throws SQLException { throwIfClosed(); try { return super.getMoreResults(current); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -374,8 +357,7 @@ public ResultSet getGeneratedKeys() throws SQLException { throwIfClosed(); try { return super.getGeneratedKeys(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -385,8 +367,7 @@ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException throwIfClosed(); try { return super.executeUpdate(sql, autoGeneratedKeys); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -396,8 +377,7 @@ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { throwIfClosed(); try { return super.execute(sql, autoGeneratedKeys); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -407,8 +387,7 @@ public boolean execute(String sql, int[] columnIndexes) throws SQLException { throwIfClosed(); try { return super.execute(sql, columnIndexes); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -418,8 +397,7 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { throwIfClosed(); try { return super.execute(sql, columnNames); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -429,8 +407,7 @@ public int getResultSetHoldability() throws SQLException { throwIfClosed(); try { return super.getResultSetHoldability(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } @@ -440,8 +417,7 @@ public void setPoolable(boolean poolable) throws SQLException { throwIfClosed(); try { super.setPoolable(poolable); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } 
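The statement, result-set, and registry hunks in this patch all make the same mechanical style change (pulling "catch" and "else" up onto the closing-brace line) across one recurring delegation pattern. For reference while skimming, a minimal self-contained sketch of that pattern; the class and helper names here are hypothetical stand-ins for the Dremio and Avatica types:

    import java.sql.SQLException;
    import java.sql.SQLFeatureNotSupportedException;

    // Each wrapped JDBC method checks for a closed statement first, delegates to
    // the Avatica implementation, and converts Avatica's unchecked
    // UnsupportedOperationException into the checked JDBC equivalent.
    abstract class WrappedJdbcMethodSketch {
      abstract void throwIfClosed() throws SQLException;  // hypothetical helper
      abstract int delegateGetResultSetType();            // stands in for super.getResultSetType()

      int getResultSetType() throws SQLException {
        throwIfClosed();
        try {
          return delegateGetResultSetType();
        } catch (UnsupportedOperationException e) {
          throw new SQLFeatureNotSupportedException(e.getMessage(), e);
        }
      }
    }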
@@ -451,8 +427,7 @@ public boolean isPoolable() throws SQLException { throwIfClosed(); try { return super.isPoolable(); - } - catch (UnsupportedOperationException e) { + } catch (UnsupportedOperationException e) { throw new SQLFeatureNotSupportedException(e.getMessage(), e); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java index 48170f3f04..9751366716 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/DremioStatementRegistry.java @@ -64,8 +64,7 @@ void close() { try { logger.debug( "Auto-closing (via open-statements registry): " + statement ); statement.close(); - } - catch ( SQLException e ) { + } catch ( SQLException e ) { logger.error( "Error auto-closing statement " + statement + ": " + e, e ); // Otherwise ignore the error, to close which statements we can close. } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java b/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java index 44a0060635..a89083a705 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/impl/SqlAccessorWrapper.java @@ -67,12 +67,10 @@ private int getCurrentRecordNumber() throws SQLException { if ( cursor.isAfterLast() ) { throw new InvalidCursorStateSqlException( "Result set cursor is already positioned past all rows." ); - } - else if ( cursor.isBeforeFirst() ) { + } else if ( cursor.isBeforeFirst() ) { throw new InvalidCursorStateSqlException( "Result set cursor is positioned before all rows. Call next() first." ); - } - else { + } else { return cursor.getCurrentRecordNumber(); } } diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java index f4eab72670..12abdc8b17 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/InvocationReporterImpl.java @@ -141,8 +141,7 @@ private String getObjectId( final Object object ) String id; if ( null == object ) { id = "n/a"; - } - else { + } else { id = objectsToIdsMap.get( object ); if ( null == id ) { ++lastObjNum; @@ -170,21 +169,18 @@ private String formatType( final Class type ) { try { Class.forName( p.getName() + "." + type.getSimpleName() ); sameSimpleNameCount++; - } - catch ( ClassNotFoundException e ) { + } catch ( ClassNotFoundException e ) { // Nothing to do. } } if ( 1 == sameSimpleNameCount ) { result = type.getSimpleName(); - } - else { + } else { // Multiple classes with same simple name, so would be ambiguous to // abbreviate, so use fully qualified name. result = type.getName(); } - } - else { + } else { result = type.getName(); } } @@ -218,13 +214,11 @@ private String formatValue( final Object value ) { final String result; if ( null == value ) { result = "null"; - } - else { + } else { final Class rawActualType = value.getClass(); if ( String.class == rawActualType ) { result = formatString( (String) value ); - } - else if ( rawActualType.isArray() + } else if ( rawActualType.isArray() && ! 
rawActualType.getComponentType().isPrimitive() ) { // Array of non-primitive type @@ -244,11 +238,9 @@ else if ( rawActualType.isArray() } buffer.append( " }" ); result = buffer.toString(); - } - else if ( DriverPropertyInfo.class == rawActualType ) { + } else if ( DriverPropertyInfo.class == rawActualType ) { result = formatDriverPropertyInfo( (DriverPropertyInfo) value ); - } - else if ( + } else if ( // Is type seen and whose toString() renders value well. rawActualType == java.lang.Boolean.class || rawActualType == java.lang.Byte.class @@ -263,16 +255,14 @@ else if ( || rawActualType == java.sql.Timestamp.class ) { result = value.toString(); - } - else if ( + } else if ( // Is type seen and whose toString() has rendered value well--in cases // seen so far. rawActualType == java.util.Properties.class || rawActualType.isEnum() ) { result = value.toString(); - } - else if ( + } else if ( // Is type to warn about (one case). rawActualType == com.dremio.jdbc.DremioResultSet.class ) { @@ -281,8 +271,7 @@ else if ( + " (While it's a class, it can't be proxied, and some methods can't" + " be traced.)" ); result = value.toString(); - } - else if ( + } else if ( // Is type to warn about (second case). // Note: Using strings rather than compiled-in class references to // avoid failing when run using JDBC-all Jar, which excludes @@ -297,8 +286,7 @@ else if ( printWarningLine( "Should " + rawActualType + " be appearing at JDBC interface?" ); result = value.toString(); - } - else { + } else { // Is other type--unknown whether it already formats well. // (Not handled yet: byte[].) printWarningLine( "Unnoted type encountered in formatting (value might" @@ -326,8 +314,7 @@ private String formatTypeAndValue( Class declaredType, Object value ) { // Null--show no actual type or object ID. actualTypePart = ""; actualValuePart = formatValue( value ); - } - else { + } else { // Non-null value--show at least some representation of value. Class rawActualType = value.getClass(); Class origActualType = @@ -336,14 +323,12 @@ private String formatTypeAndValue( Class declaredType, Object value ) { // String--show no actual type or object ID. actualTypePart = ""; actualValuePart = formatValue( value ); - } - else if ( origActualType.isPrimitive() ) { + } else if ( origActualType.isPrimitive() ) { // Primitive type--show no actual type or object ID. actualTypePart = ""; // (Remember--primitive type is wrapped here.) actualValuePart = value.toString(); - } - else { + } else { // Non-primitive, non-String value--include object ID. final String idPrefix = " "; if ( declaredType.isInterface() @@ -352,13 +337,11 @@ else if ( origActualType.isPrimitive() ) { // (because object is proxied and therefore all uses will be traced). actualTypePart = ""; actualValuePart = idPrefix + "..."; - } - else if ( origActualType == declaredType ) { + } else if ( origActualType == declaredType ) { // Actual type is same as declared--don't show redundant actual type. actualTypePart = ""; actualValuePart = idPrefix + formatValue( value ); - } - else { + } else { // Other--show actual type and (try to) show value.
actualTypePart = "(" + formatType( rawActualType) + ") "; actualValuePart = idPrefix + formatValue( value ); @@ -427,8 +410,7 @@ private String formatArgs( Class[] declaredTypes, Object[] argValues ) final String result; if ( null == argValues ) { result = "()"; - } - else { + } else { final StringBuilder s = new StringBuilder(); s.append( "( " ); for ( int ax = 0; ax < argValues.length; ax++ ) { diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java index 05b7d2cbd2..2190a8e58b 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/ProxiesManager.java @@ -81,8 +81,7 @@ public INTF getProxyInstanceForOriginal( final INTF originalInstance, if ( null != existingProxy ) { // Repeated occurrence of original--return same proxy instance as before. proxyInstance = existingProxy; - } - else { + } else { // Original we haven't seen yet--create proxy instance and return that. Class proxyReturnClass = getProxyClassForInterface( declaredType ); @@ -101,8 +100,7 @@ public INTF getProxyInstanceForOriginal( final INTF originalInstance, .newInstance( new Object[] { callHandler } ); proxiedsToProxiesMap.put( originalInstance, newProxyInstance ); proxyInstance = newProxyInstance; - } - catch ( InstantiationException | IllegalAccessException + } catch ( InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e ) { throw new RuntimeException( diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java index 78a0235e04..b191f56a49 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingInvocationHandler.java @@ -71,8 +71,7 @@ public Object invoke( Object proxy, Method method, Object[] args ) if ( null == rawReturnedResult ) { netReturnedResult = null; - } - else { + } else { Class methodReturnType = method.getReturnType(); if ( ! methodReturnType.isInterface() ) { @@ -80,20 +79,17 @@ public Object invoke( Object proxy, Method method, Object[] args ) // instance. (We could proxy and intercept some methods, but we can't // intercept all, so intercepting only some would be misleading.) netReturnedResult = rawReturnedResult; - } - else { + } else { // Get the new or existing proxying instance for the returned instance. 
netReturnedResult = proxiesManager.getProxyInstanceForOriginal( rawReturnedResult, methodReturnType ); } } - } - catch ( IllegalAccessException | IllegalArgumentException e ) { + } catch ( IllegalAccessException | IllegalArgumentException e ) { throw new RuntimeException( "Unexpected/unhandled error calling proxied method: " + e, e ); - } - catch ( InvocationTargetException e ) { + } catch ( InvocationTargetException e ) { Throwable thrownResult = e.getCause(); // Report that method threw exception: callReporter.methodThrew( proxiedObject, proxiedInterface, method, args, diff --git a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java index c8cef554fc..c2d56f5564 100644 --- a/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java +++ b/client/jdbc/src/main/java/com/dremio/jdbc/proxy/TracingProxyDriver.java @@ -106,15 +106,13 @@ public class TracingProxyDriver implements java.sql.Driver { final Driver proxyDriver; try { proxyDriver = new TracingProxyDriver(); - } - catch ( SQLException e ) { + } catch ( SQLException e ) { throw new RuntimeException( "Error in initializing " + TracingProxyDriver.class + ": " + e, e ); } try { DriverManager.registerDriver( proxyDriver ); - } - catch ( SQLException e ) { + } catch ( SQLException e ) { throw new RuntimeException( "Error in registering " + TracingProxyDriver.class + ": " + e, e ); } @@ -154,8 +152,7 @@ private static class UrlHandler { if ( ! "".equals( classSpec ) ) { try { Class.forName( classSpec); - } - catch ( ClassNotFoundException e ) { + } catch ( ClassNotFoundException e ) { throw new ProxySetupSQLException( "Couldn't load class \"" + classSpec + "\"" + " (from proxy driver URL \"" + url + "\" (between second and " @@ -172,8 +169,7 @@ private static class UrlHandler { "DriverManager.getDriver( \"" + proxiedURL + "\" ) returned a(n) " + proxiedDriverForProxiedUrl.getClass().getName() + ": " + proxiedDriverForProxiedUrl + "." ); - } - catch ( SQLException e ) { + } catch ( SQLException e ) { final String message = "Error getting driver from DriverManager for proxied URL \"" + proxiedURL + "\" (from proxy driver URL \"" + url + "\"" @@ -220,8 +216,7 @@ public boolean acceptsURL( String url ) throws SQLException { final boolean accepted; if ( null == url || ! url.startsWith( JDBC_URL_PREFIX ) ) { accepted = false; - } - else { + } else { UrlHandler urlHandler = new UrlHandler( proxiesManager, url ); setProxyDriver( urlHandler.getProxyDriver(), urlHandler.getProxiedDriver() ); @@ -242,16 +237,14 @@ public Connection connect( String url, Properties info ) if ( null == url || ! url.startsWith( JDBC_URL_PREFIX ) ) { result = null; // (Not a URL understood by this driver.) - } - else { + } else { UrlHandler urlHandler = new UrlHandler( proxiesManager, url ); setProxyDriver( urlHandler.getProxyDriver(), urlHandler.getProxiedDriver() ); // (Call connect() through proxy so it gets traced too.) 
try { result = proxyDriver.connect( urlHandler.getProxiedUrl(), info ); - } - catch ( SQLException e ) { + } catch ( SQLException e ) { throw new ProxySetupSQLException( "Exception from proxied driver: " + e, e ); } diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java b/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java index 3fdd9be3df..83aae1dfcc 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/MultiConnectionCachingFactory.java @@ -56,6 +56,7 @@ public Connection getConnection(ConnectionInfo info) throws Exception { /** * Closes all active connections in the cache. */ + @Override public void closeConnections() throws SQLException { for (Connection conn : cache.values()) { conn.close(); diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java b/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java index 2250e2eb7a..4584d21c1d 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/NonClosableConnection.java @@ -52,218 +52,272 @@ public static NonClosableConnection of(final Connection inner) { return new NonClosableConnection(inner); } + @Override public T unwrap(Class iface) throws SQLException { return delegate.unwrap(iface); } + @Override public boolean isWrapperFor(Class iface) throws SQLException { return delegate.isWrapperFor(iface); } + @Override public Statement createStatement() throws SQLException { return delegate.createStatement(); } + @Override public PreparedStatement prepareStatement(String sql) throws SQLException { return delegate.prepareStatement(sql); } + @Override public CallableStatement prepareCall(String sql) throws SQLException { return delegate.prepareCall(sql); } + @Override public String nativeSQL(String sql) throws SQLException { return delegate.nativeSQL(sql); } + @Override public void setAutoCommit(boolean autoCommit) throws SQLException { delegate.setAutoCommit(autoCommit); } + @Override public boolean getAutoCommit() throws SQLException { return delegate.getAutoCommit(); } + @Override public void commit() throws SQLException { delegate.commit(); } + @Override public void rollback() throws SQLException { delegate.rollback(); } + @Override public void close() throws SQLException { // noop here. this instance is non-closable. 
} + @Override public boolean isClosed() throws SQLException { return delegate.isClosed(); } + @Override public DatabaseMetaData getMetaData() throws SQLException { return delegate.getMetaData(); } + @Override public void setReadOnly(boolean readOnly) throws SQLException { delegate.setReadOnly(readOnly); } + @Override public boolean isReadOnly() throws SQLException { return delegate.isReadOnly(); } + @Override public void setCatalog(String catalog) throws SQLException { delegate.setCatalog(catalog); } + @Override public String getCatalog() throws SQLException { return delegate.getCatalog(); } + @Override public void setTransactionIsolation(int level) throws SQLException { delegate.setTransactionIsolation(level); } + @Override public int getTransactionIsolation() throws SQLException { return delegate.getTransactionIsolation(); } + @Override public SQLWarning getWarnings() throws SQLException { return delegate.getWarnings(); } + @Override public void clearWarnings() throws SQLException { delegate.clearWarnings(); } + @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { return delegate.createStatement(resultSetType, resultSetConcurrency); } + @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { return delegate.prepareStatement(sql, resultSetType, resultSetConcurrency); } + @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { return delegate.prepareCall(sql, resultSetType, resultSetConcurrency); } + @Override public Map> getTypeMap() throws SQLException { return delegate.getTypeMap(); } + @Override public void setTypeMap(Map> map) throws SQLException { delegate.setTypeMap(map); } + @Override public void setHoldability(int holdability) throws SQLException { delegate.setHoldability(holdability); } + @Override public int getHoldability() throws SQLException { return delegate.getHoldability(); } + @Override public Savepoint setSavepoint() throws SQLException { return delegate.setSavepoint(); } + @Override public Savepoint setSavepoint(String name) throws SQLException { return delegate.setSavepoint(name); } + @Override public void rollback(Savepoint savepoint) throws SQLException { delegate.rollback(savepoint); } + @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException { delegate.releaseSavepoint(savepoint); } + @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { return delegate.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability); } + @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { return delegate.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability); } + @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { return delegate.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability); } + @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { return delegate.prepareStatement(sql, autoGeneratedKeys); } + @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { return delegate.prepareStatement(sql, columnIndexes); } + 
@Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { return delegate.prepareStatement(sql, columnNames); } + @Override public Clob createClob() throws SQLException { return delegate.createClob(); } + @Override public Blob createBlob() throws SQLException { return delegate.createBlob(); } + @Override public NClob createNClob() throws SQLException { return delegate.createNClob(); } + @Override public SQLXML createSQLXML() throws SQLException { return delegate.createSQLXML(); } + @Override public boolean isValid(int timeout) throws SQLException { return delegate.isValid(timeout); } + @Override public void setClientInfo(String name, String value) throws SQLClientInfoException { delegate.setClientInfo(name, value); } + @Override public void setClientInfo(Properties properties) throws SQLClientInfoException { delegate.setClientInfo(properties); } + @Override public String getClientInfo(String name) throws SQLException { return delegate.getClientInfo(name); } + @Override public Properties getClientInfo() throws SQLException { return delegate.getClientInfo(); } + @Override public Array createArrayOf(String typeName, Object[] elements) throws SQLException { return delegate.createArrayOf(typeName, elements); } + @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { return delegate.createStruct(typeName, attributes); } + @Override public void setSchema(String schema) throws SQLException { delegate.setSchema(schema); } + @Override public String getSchema() throws SQLException { return delegate.getSchema(); } + @Override public void abort(Executor executor) throws SQLException { delegate.abort(executor); } + @Override public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { delegate.setNetworkTimeout(executor, milliseconds); } + @Override public int getNetworkTimeout() throws SQLException { return delegate.getNetworkTimeout(); } diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java index 63df84a82f..ccf800205c 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/impl/TypeConvertingSqlAccessorTest.java @@ -80,6 +80,7 @@ private static class TinyIntStubAccessor extends BaseStubAccessor { super( Types.optional(MinorType.TINYINT), value ); } + @Override public byte getByte( int rowOffset ) { return (Byte) getValue(); } @@ -92,6 +93,7 @@ private static class SmallIntStubAccessor extends BaseStubAccessor { super(Types.optional(MinorType.SMALLINT), value); } + @Override public short getShort( int rowOffset ) { return (Short) getValue(); } @@ -104,6 +106,7 @@ private static class IntegerStubAccessor extends BaseStubAccessor { super(Types.optional(MinorType.INT), value); } + @Override public int getInt( int rowOffset ) { return (Integer) getValue(); } @@ -117,6 +120,7 @@ private static class BigIntStubAccessor extends BaseStubAccessor { super(Types.optional(MinorType.BIGINT), value); } + @Override public long getLong( int rowOffset ) { return (Long) getValue(); } @@ -129,6 +133,7 @@ private static class FloatStubAccessor extends BaseStubAccessor { super( Types.optional(MinorType.FLOAT4), value); } + @Override public float getFloat( int rowOffset ) { return (Float) getValue(); } @@ -140,6 +145,7 @@ private static class DoubleStubAccessor extends BaseStubAccessor { 
super(Types.optional(MinorType.FLOAT8), value); } + @Override public double getDouble( int rowOffset ) { return (double) getValue(); } diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java index be1a552c23..4adfa520b8 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/proxy/TracingProxyDriverTest.java @@ -106,8 +106,7 @@ public void testBasicReturnTrace() throws SQLException { try { nameThis.redirect(); proxyConnection.isClosed(); - } - finally { + } finally { nameThis.unredirect(); } @@ -145,11 +144,9 @@ public void testBasicThrowTrace() throws SQLException { try { stdErrCapturer.redirect(); statement.execute( "" ); - } - catch ( final SQLException e ) { + } catch ( final SQLException e ) { // "already closed" is expected - } - finally { + } finally { stdErrCapturer.unredirect(); } diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java index 086d3578fd..3bb5c5e37d 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2489CallsAfterCloseThrowExceptionsTest.java @@ -232,33 +232,24 @@ private static Object getDummyValueForType(Class type) { final Object result; if (! type.isPrimitive()) { result = null; - } - else { + } else { if (type == boolean.class) { result = false; - } - else if (type == byte.class) { + } else if (type == byte.class) { result = (byte) 0; - } - else if (type == short.class) { + } else if (type == short.class) { result = (short) 0; - } - else if (type == char.class) { + } else if (type == char.class) { result = (char) 0; - } - else if (type == int.class) { + } else if (type == int.class) { result = 0; - } - else if (type == long.class) { + } else if (type == long.class) { result = (long) 0L; - } - else if (type == float.class) { + } else if (type == float.class) { result = 0F; - } - else if (type == double.class) { + } else if (type == double.class) { result = 0.0; - } - else { + } else { fail("Test needs to be updated to handle type " + type); result = null; // Not executed; for "final". } @@ -323,13 +314,11 @@ private void testOneMethod(Method method) { if (isOkayNonthrowingMethod(method)) { successLinesBuf.append(resultLine); - } - else { + } else { logger.trace("Failure: " + resultLine); failureLinesBuf.append(resultLine); } - } - catch (InvocationTargetException e) { + } catch (InvocationTargetException e) { final Throwable cause = e.getCause(); final String resultLine = "- " + methodLabel + " threw <" + cause + ">\n"; @@ -337,22 +326,19 @@ private void testOneMethod(Method method) { && normalClosedExceptionText.equals(cause.getMessage())) { // Common good case--our preferred exception class with our message. successLinesBuf.append(resultLine); - } - else if (NullPointerException.class == cause.getClass() + } else if (NullPointerException.class == cause.getClass() && (method.getName().equals("isWrapperFor") || method.getName().equals("unwrap"))) { // Known good-enough case--these methods don't throw already-closed // exception, but do throw NullPointerException because of the way // we call them (with null) and the way Avatica code implements them. 
successLinesBuf.append(resultLine); - } - else { + } else { // Not a case that base-class code here recognizes, but subclass may // know that it's okay. if (isOkaySpecialCaseException(method, cause)) { successLinesBuf.append(resultLine); - } - else { + } else { final String badResultLine = "- " + methodLabel + " threw <" + cause + "> instead" + " of " + AlreadyClosedSqlException.class.getSimpleName() @@ -363,8 +349,7 @@ else if (NullPointerException.class == cause.getClass() failureLinesBuf.append(badResultLine); } } - } - catch (IllegalAccessException | IllegalArgumentException e) { + } catch (IllegalAccessException | IllegalArgumentException e) { fail("Unexpected exception: " + e + ", cause = " + e.getCause() + " from " + method); } @@ -422,13 +407,11 @@ protected boolean isOkayNonthrowingMethod(Method method) { final boolean result; if (super.isOkayNonthrowingMethod(method)) { result = true; - } - else if ( method.getName().equals("beginRequest") + } else if ( method.getName().equals("beginRequest") || method.getName().equals("endRequest")) { // TODO: New Java 9 methods not implemented in Avatica. result = true; - } - else { + } else { result = false; } return result; @@ -439,29 +422,25 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) { final boolean result; if (super.isOkaySpecialCaseException(method, cause)) { result = true; - } - else if (SQLClientInfoException.class == cause.getClass() + } else if (SQLClientInfoException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && ( method.getName().equals("setClientInfo") || method.getName().equals("getClientInfo") )) { // Special good case--we had to use SQLClientInfoException from those. result = true; - } - else if (RuntimeException.class == cause.getClass() + } else if (RuntimeException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && ( method.getName().equals("getCatalog") || method.getName().equals("getSchema") )) { // Special good-enough case--we had to use RuntimeException for now. result = true; - } - else if ( method.getName().equals("setShardingKeyIfValid") + } else if ( method.getName().equals("setShardingKeyIfValid") || method.getName().equals("setShardingKey")) { // TODO: New Java 9 methods not implemented in Avatica. result = true; - } - else { + } else { result = false; } return result; @@ -501,20 +480,17 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) { final boolean result; if (super.isOkaySpecialCaseException(method, cause)) { result = true; - } - else if ( method.getName().equals("executeLargeBatch") + } else if ( method.getName().equals("executeLargeBatch") || method.getName().equals("executeLargeUpdate")) { // TODO: New Java 8 methods not implemented in Avatica. result = true; - } - else if ( method.getName().equals("enquoteLiteral") + } else if ( method.getName().equals("enquoteLiteral") || method.getName().equals("enquoteIdentifier") || method.getName().equals("enquoteNCharLiteral") || method.getName().equals("isSimpleIdentifier")) { // TODO: New Java 9 methods not implemented in Avatica. 
result = true; - } - else if (RuntimeException.class == cause.getClass() + } else if (RuntimeException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && ( method.getName().equals("getConnection") || method.getName().equals("getFetchDirection") @@ -524,8 +500,7 @@ else if (RuntimeException.class == cause.getClass() )) { // Special good-enough case--we had to use RuntimeException for now. result = true; - } - else { + } else { result = false; } return result; @@ -565,8 +540,7 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) { final boolean result; if (super.isOkaySpecialCaseException(method, cause)) { result = true; - } - else if (RuntimeException.class == cause.getClass() + } else if (RuntimeException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && ( method.getName().equals("clearBatch") || method.getName().equals("getConnection") @@ -577,23 +551,20 @@ else if (RuntimeException.class == cause.getClass() )) { // Special good-enough case--we had to use RuntimeException for now. result = true; - } - else if ( method.getName().equals("setObject") + } else if ( method.getName().equals("setObject") || method.getName().equals("executeLargeUpdate") || method.getName().equals("executeLargeBatch") || method.getName().equals("getLargeMaxRows") ) { // TODO: Java 8 methods not yet supported by Avatica. result = true; - } - else if ( method.getName().equals("enquoteLiteral") + } else if ( method.getName().equals("enquoteLiteral") || method.getName().equals("enquoteIdentifier") || method.getName().equals("enquoteNCharLiteral") || method.getName().equals("isSimpleIdentifier")) { // TODO: New Java 9 methods not implemented in Avatica. result = true; - } - else { + } else { result = false; } return result; @@ -628,19 +599,16 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) { final boolean result; if (super.isOkaySpecialCaseException(method, cause)) { result = true; - } - else if (RuntimeException.class == cause.getClass() + } else if (RuntimeException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && method.getName().equals("getStatement")) { // Special good-enough case--we had to use RuntimeException for now. result = true; - } - else if (SQLFeatureNotSupportedException.class == cause.getClass() + } else if (SQLFeatureNotSupportedException.class == cause.getClass() && (method.getName().equals("updateObject"))) { // TODO: Java 8 methods not yet supported by Avatica. result = true; - } - else { + } else { result = false; } return result; @@ -741,14 +709,12 @@ protected boolean isOkaySpecialCaseException(Method method, Throwable cause) { final boolean result; if (super.isOkaySpecialCaseException(method, cause)) { result = true; - } - else if (RuntimeException.class == cause.getClass() + } else if (RuntimeException.class == cause.getClass() && normalClosedExceptionText.equals(cause.getMessage()) && method.getName().equals("getResultSetHoldability")) { // Special good-enough case--we had to use RuntimeException for now. 
result = true; - } - else { + } else { result = false; } return result; diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java index f5187d8285..2569b9dd60 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Drill2769UnsupportedReportsUseSqlExceptionTest.java @@ -87,15 +87,13 @@ public static void setUpConnection() throws SQLException { try { getConnection().prepareCall("VALUES 'CallableStatement query'"); fail("Test seems to be out of date. Was prepareCall(...) implemented?"); - } - catch (SQLException | UnsupportedOperationException e) { + } catch (SQLException | UnsupportedOperationException e) { // Expected. } try { getConnection().createArrayOf("STRUCT", new Object[0]); fail("Test seems to be out of date. Were arrays implemented?"); - } - catch (SQLException | UnsupportedOperationException e) { + } catch (SQLException | UnsupportedOperationException e) { // Expected. } @@ -153,33 +151,24 @@ private static Object getDummyValueForType(Class type) { final Object result; if (type == String.class) { result = ""; - } - else if (! type.isPrimitive()) { + } else if (! type.isPrimitive()) { result = null; - } - else { + } else { if (type == boolean.class) { result = false; - } - else if (type == byte.class) { + } else if (type == byte.class) { result = (byte) 0; - } - else if (type == short.class) { + } else if (type == short.class) { result = (short) 0; - } - else if (type == int.class) { + } else if (type == int.class) { result = 0; - } - else if (type == long.class) { + } else if (type == long.class) { result = (long) 0L; - } - else if (type == float.class) { + } else if (type == float.class) { result = 0F; - } - else if (type == double.class) { + } else if (type == double.class) { result = 0.0; - } - else { + } else { fail("Test needs to be updated to handle type " + type); result = null; // Not executed; for "final". } @@ -242,8 +231,7 @@ private void testOneMethod(Method method) { final String resultLine = "- " + methodLabel + " didn't throw\n"; successLinesBuf.append(resultLine); - } - catch (InvocationTargetException wrapperEx) { + } catch (InvocationTargetException wrapperEx) { final Throwable cause = wrapperEx.getCause(); final String resultLine = "- " + methodLabel + " threw <" + cause + ">\n"; @@ -254,28 +242,23 @@ private void testOneMethod(Method method) { // Good case--almost any exception should be SQLException or subclass // (but make sure not accidentally closed). successLinesBuf.append(resultLine); - } - else if (NullPointerException.class == cause.getClass() + } else if (NullPointerException.class == cause.getClass() && (method.getName().equals("isWrapperFor") || method.getName().equals("unwrap"))) { // Known good-enough case--these methods throw NullPointerException // because of the way we call them (with null) and the way Avatica // code implements them. 
successLinesBuf.append(resultLine); - } - else if (isOkaySpecialCaseException(method, cause)) { + } else if (isOkaySpecialCaseException(method, cause)) { successLinesBuf.append(resultLine); - } - - else { + } else { final String badResultLine = "- " + methodLabel + " threw <" + cause + "> instead" + " of a " + SQLException.class.getSimpleName() + "\n"; logger.trace("Failure: " + resultLine); failureLinesBuf.append(badResultLine); } - } - catch (IllegalAccessException | IllegalArgumentException e) { + } catch (IllegalAccessException | IllegalArgumentException e) { fail("Unexpected exception: " + e + ", cause = " + e.getCause() + " from " + method); } @@ -286,20 +269,18 @@ public void testMethods() { final String methodLabel = makeLabel(method); if ("close".equals(method.getName())) { logger.debug("Skipping (because closes): " + methodLabel); - } /* Uncomment to suppress calling DatabaseMetaData.getColumns(...), which sometimes takes about 2 minutes, and other DatabaseMetaData methods that query, collectively taking a while too: - else if (DatabaseMetaData.class == jdbcIntf + } else if (DatabaseMetaData.class == jdbcIntf && "getColumns".equals(method.getName())) { logger.debug("Skipping (because really slow): " + methodLabel); - } - else if (DatabaseMetaData.class == jdbcIntf + } else if (DatabaseMetaData.class == jdbcIntf && ResultSet.class == method.getReturnType()) { logger.debug("Skipping (because a bit slow): " + methodLabel); } */ - else { + } else { logger.debug("Testing method " + methodLabel); testOneMethod(method); } diff --git a/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java b/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java index 1f96ed1328..6fcae5690a 100644 --- a/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java +++ b/client/jdbc/src/test/java/com/dremio/jdbc/test/Hook.java @@ -30,6 +30,7 @@ public enum Hook { public Closeable add(final Function handler) { handlers.add(handler); return new Closeable() { + @Override public void close() { remove(handler); } @@ -50,6 +51,7 @@ public void run(Object arg) { /** Removes a Hook after use. 
*/ public interface Closeable extends AutoCloseable { + @Override void close(); // override, removing "throws" } } diff --git a/client/pom.xml b/client/pom.xml index a84ba1e83e..1355845ff8 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.client diff --git a/common/pom.xml b/common/pom.xml index 302c7032e0..b9f19a29a3 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -23,7 +23,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-common @@ -231,6 +231,10 @@ io.grpc grpc-api + + io.grpc + grpc-stub + software.amazon.awssdk auth diff --git a/common/src/main/java/com/dremio/common/VM.java b/common/src/main/java/com/dremio/common/VM.java index 93679384a6..1c9082fedc 100644 --- a/common/src/main/java/com/dremio/common/VM.java +++ b/common/src/main/java/com/dremio/common/VM.java @@ -181,15 +181,19 @@ static long maxDirectMemory(final List inputArguments) { case "t": case "T": multiplier *= 1024; + // fall through case "g": case "G": multiplier *= 1024; + // fall through case "m": case "M": multiplier *= 1024; + // fall through case "k": case "K": multiplier *= 1024; + // fall through default: break; } diff --git a/common/src/main/java/com/dremio/common/WakeupHandler.java b/common/src/main/java/com/dremio/common/WakeupHandler.java index cb2a5bbd8b..aad0dd8a67 100644 --- a/common/src/main/java/com/dremio/common/WakeupHandler.java +++ b/common/src/main/java/com/dremio/common/WakeupHandler.java @@ -20,6 +20,9 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; +import javax.inject.Provider; + +import com.dremio.context.RequestContext; import com.google.common.base.Preconditions; /** @@ -35,10 +38,17 @@ public class WakeupHandler { private final Runnable manager; private final ExecutorService executor; + private final Provider requestContextProvider; public WakeupHandler(ExecutorService executor, Runnable manager) { + this(executor, manager, null); + } + + public WakeupHandler( + ExecutorService executor, Runnable manager, Provider requestContextProvider) { this.executor = Preconditions.checkNotNull(executor, "executor service required"); this.manager = Preconditions.checkNotNull(manager, "runnable manager required"); + this.requestContextProvider = requestContextProvider; } public Future handle(String reason) { @@ -62,7 +72,11 @@ public void run() { try { wakeup.set(false); - manager.run(); + if (requestContextProvider != null) { + requestContextProvider.get().run(() -> manager.run()); + } else { + manager.run(); + } } finally { running.set(false); } diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java index 996d5867e9..b116b74811 100644 --- a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java +++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingExecutorService.java @@ -57,6 +57,20 @@ public ContextMigratingExecutorService(E delegate, Tracer tracer) { this.tracer = tracer; } + public static Runnable makeContextMigratingTask(Runnable runnable, String taskName) { + return new ContextMigratingRunnableTask() { + @Override + public String getSpanName() { + return taskName; + } + + @Override + public void run() { + runnable.run(); + } + }; + } + @Override public void shutdown() { 
delegate.shutdown(); diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java new file mode 100644 index 0000000000..9aabc9e02e --- /dev/null +++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingRunnableTask.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.common.concurrent; + +/** + * Implements a runnable that also migrates context + */ +public interface ContextMigratingRunnableTask extends ContextMigratingTask, Runnable { +} diff --git a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java index 87e5c6572f..2993b360aa 100644 --- a/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java +++ b/common/src/main/java/com/dremio/common/concurrent/ContextMigratingTask.java @@ -24,4 +24,5 @@ public interface ContextMigratingTask { * @return the name of the child span created while running the task */ String getSpanName(); + } diff --git a/common/src/main/java/com/dremio/common/config/SabotConfig.java b/common/src/main/java/com/dremio/common/config/SabotConfig.java index 338e469f42..6252c18e23 100644 --- a/common/src/main/java/com/dremio/common/config/SabotConfig.java +++ b/common/src/main/java/com/dremio/common/config/SabotConfig.java @@ -15,6 +15,7 @@ */ package com.dremio.common.config; +import java.io.File; import java.lang.reflect.Constructor; import java.net.URL; import java.util.Collection; @@ -30,6 +31,7 @@ import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.typesafe.config.Config; +import com.typesafe.config.ConfigException; import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigRenderOptions; import com.typesafe.config.ConfigValue; @@ -247,6 +249,26 @@ private static SabotConfig create(String overrideFileResourcePathname, } } + private static SabotConfig createFromSavedSabotConfig() { + final String savedSabotConfigFile = System.getProperty("com.dremio.savedSabotConfig"); + if (savedSabotConfigFile == null) { + return null; + } + + final File savedSabotConfig = new File(savedSabotConfigFile); + if (savedSabotConfig.exists()) { + try { + Config config = ConfigFactory.parseFile(savedSabotConfig); + return new SabotConfig(config); + } catch (ConfigException e) { + logger.warn("Unable to read saved SabotConfig from '{}' (proceeding to slow path): {}", + savedSabotConfigFile, e.toString()); + } + } + + return null; + } + /** * @param overrideFileResourcePathname * see {@link #create(String)}'s {@code overrideFileResourcePathname} @@ -264,6 +286,11 @@ private static SabotConfig doCreate(String overrideFileResourcePathname, ? 
CommonConstants.CONFIG_OVERRIDE_RESOURCE_PATHNAME : overrideFileResourcePathname; + final SabotConfig preCreated = createFromSavedSabotConfig(); + if (preCreated != null) { + return preCreated; + } + // 1. Load defaults configuration file. Config fallback = null; final ClassLoader[] classLoaders = ClasspathHelper.classLoaders(); diff --git a/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java b/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java index c37a23eb37..e7de35cc84 100644 --- a/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java +++ b/common/src/main/java/com/dremio/common/exceptions/GrpcExceptionUtil.java @@ -16,6 +16,7 @@ package com.dremio.common.exceptions; +import java.security.AccessControlException; import java.util.Optional; import org.slf4j.Logger; @@ -29,8 +30,10 @@ import com.google.rpc.Status; import io.grpc.Status.Code; +import io.grpc.StatusException; import io.grpc.StatusRuntimeException; import io.grpc.protobuf.StatusProto; +import io.grpc.stub.StreamObserver; /** * Utility functions related to grpc errors. @@ -109,6 +112,81 @@ public static StatusRuntimeException toStatusRuntimeException(String message, Co .build()); } + /** + * Handles an unknown {@link Throwable} by passing it to the {@link StreamObserver} as a Status* exception. + * Only use this method after handling the throwable as accurately as possible, and when no other information about the throwable is available. + * @param responseObserver responseObserver + * @param t unknown exception + * @param message High level description of what failed (can be found from the method name) + */ + public static void fallbackHandleException(StreamObserver responseObserver, Throwable t, String message) { + logger.warn("Using fallback to handle unknown exception", t); + if (t instanceof UserException) { + responseObserver.onError(toStatusRuntimeException((UserException) t)); + } else if (t instanceof StatusException) { + responseObserver.onError((StatusException) t); + } else if (t instanceof StatusRuntimeException) { + responseObserver.onError(statusRuntimeExceptionMapper(t)); + } else if (t instanceof IllegalArgumentException) { + responseObserver.onError(io.grpc.Status.INVALID_ARGUMENT + .withCause(t) + .withDescription(message) + .asRuntimeException()); + } else if (t instanceof IllegalStateException) { + responseObserver.onError(io.grpc.Status.INTERNAL + .withCause(t) + .withDescription(message) + .asRuntimeException()); + } else if (t instanceof AccessControlException) { + responseObserver.onError(io.grpc.Status.PERMISSION_DENIED + .withCause(t) + .withDescription(message) + .asRuntimeException()); + } else if (t instanceof RuntimeException) { + responseObserver.onError(io.grpc.Status.UNKNOWN + .withCause(t) + .withDescription(message) + .asRuntimeException() + ); + } else { + responseObserver.onError(io.grpc.Status.UNKNOWN + .withCause(t) + .withDescription(message) + .asException() + ); + } + } + + private static StatusRuntimeException statusRuntimeExceptionMapper(Throwable t) { + if (!(t instanceof StatusRuntimeException)) { + return new StatusRuntimeException( + io.grpc.Status.UNKNOWN.withDescription( + "The server encountered an unexpected error. Please retry your request.") + .withCause(t)); + } + + StatusRuntimeException sre = (StatusRuntimeException) t; + // UNAVAILABLE error is shown as "UNAVAILABLE: no healthy upstream" to the user. + // Provide a readable error message to the user.
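+ // Illustrative effect of the mapping below: a raw StatusRuntimeException with code
+ // UNAVAILABLE is returned with the description "The service is temporarily
+ // unavailable. Please retry your request.", while any non-StatusRuntimeException
+ // is wrapped as UNKNOWN with "The server encountered an unexpected error. Please
+ // retry your request."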
+ if (sre.getStatus().getCode() == io.grpc.Status.Code.UNAVAILABLE) { + return new StatusRuntimeException( + io.grpc.Status.UNAVAILABLE.withDescription( + "The service is temporarily unavailable. Please retry your request.") + .withCause(t)); + } + return sre; + } + + /** + * Handles an unknown {@link Throwable} by passing it to the {@link StreamObserver} as a Status* exception. + * Only use this method after handling the throwable as accurately as possible, and when no other information about the throwable is available. + * @param responseObserver responseObserver + * @param t unknown exception + */ + public static void fallbackHandleException(StreamObserver responseObserver, Throwable t) { + fallbackHandleException(responseObserver, t, t.getMessage()); + } + /** * Converts the given {@link StatusRuntimeException} to a {@link UserException}, if possible. * diff --git a/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java b/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java index 457b1e3652..d43fb373db 100644 --- a/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java +++ b/common/src/main/java/com/dremio/common/exceptions/JsonAdditionalExceptionContext.java @@ -56,6 +56,7 @@ protected static T fromUserException(Clas } } + @Override public ByteString toByteString() { try { return ProtobufByteStringSerDe.writeValue(contextMapper, this, diff --git a/common/src/main/java/com/dremio/common/exceptions/UserException.java b/common/src/main/java/com/dremio/common/exceptions/UserException.java index 02724e0286..a48556ef3e 100644 --- a/common/src/main/java/com/dremio/common/exceptions/UserException.java +++ b/common/src/main/java/com/dremio/common/exceptions/UserException.java @@ -55,6 +55,19 @@ public class UserException extends RuntimeException { public static final String REFRESH_METADATA_FAILED_CONCURRENT_UPDATE_MSG = "Unable to refresh metadata for the dataset (due to concurrent updates). Please retry."; + // reasons for cancelling a query using UserException + public enum AttemptCompletionState { + SUCCESS, // attempt is successful (has UT) + CLIENT_CANCELLED, // cancelled by user (has UT) + PLANNING_TIMEOUT, // query cancelled because it exceeded planning time + ENGINE_TIMEOUT, // timeout waiting for an engine slot (has UT) + RUNTIME_EXCEEDED, // query cancelled because runtime exceeded (has UT) + HEAP_MONITOR_C, // query cancelled by coordinator heap monitor (has UT) + HEAP_MONITOR_E, // query cancelled by executor heap monitor + UNKNOWN, // query cancellation reason is unknown + DREMIO_PB_ERROR, // DremioPB.ErrorType contains the error type + } + /** * Creates a new INVALID_DATASET_METADATA exception builder. * @@ -675,6 +688,7 @@ public static final class Builder { private ByteString rawAdditionalContext; private boolean fixedMessage; // if true, calls to message() are a no op + private AttemptCompletionState attemptCompletionState = AttemptCompletionState.UNKNOWN; /** * Wraps an existing exception inside a user exception. @@ -871,6 +885,11 @@ public Builder setAdditionalExceptionContext(AdditionalExceptionContext addition return this; } + public Builder attemptCompletionState(AttemptCompletionState attemptCompletionState) { + this.attemptCompletionState = attemptCompletionState; + return this; + } + /** * builds a user exception or returns the wrapped one. If the error is a system error, the error message is logged * to the given {@link Logger}.
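* <p>Illustrative sketch of tagging a cancellation reason via the builder (the
* {@code validationError} factory is only an assumed example; any builder works):
* <pre>
* throw UserException.validationError(cause)
*     .message("Query exceeded maximum planning time")
*     .attemptCompletionState(AttemptCompletionState.PLANNING_TIMEOUT)
*     .buildSilently();
* </pre>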
@@ -981,6 +1000,8 @@ public UserException buildSilently() { private final ByteString rawAdditionalContext; + private final AttemptCompletionState attemptCompletionState; + protected UserException(final DremioPBError.ErrorType errorType, final String message, final Throwable cause, final ByteString rawAdditionalContext) { super(message, cause); @@ -988,6 +1009,7 @@ protected UserException(final DremioPBError.ErrorType errorType, final String me this.errorType = errorType; this.context = new UserExceptionContext(); this.rawAdditionalContext = rawAdditionalContext; + this.attemptCompletionState = AttemptCompletionState.UNKNOWN; } private UserException(final Builder builder) { @@ -995,6 +1017,7 @@ private UserException(final Builder builder) { this.errorType = builder.errorType; this.context = builder.context; this.rawAdditionalContext = builder.rawAdditionalContext; + this.attemptCompletionState = builder.attemptCompletionState; } /** @@ -1038,6 +1061,10 @@ public String getVerboseMessage(boolean includeErrorIdAndIdentity) { return generateMessage(includeErrorIdAndIdentity) + "\n\n" + ErrorHelper.buildCausesMessage(getCause()); } + public AttemptCompletionState getAttemptCompletionState() { + return attemptCompletionState; + } + /** * returns or creates a DremioPBError object corresponding to this user exception. * diff --git a/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java b/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java index 7f0279f8e7..e7267e7165 100644 --- a/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java +++ b/common/src/main/java/com/dremio/common/logging/obfuscation/BlockLogLevelTurboFilter.java @@ -97,10 +97,12 @@ public void setDefaultLogLevelThreshold(String defaultLogLevelThreshold) { this.defaultLogLevelThreshold = Level.toLevel(defaultLogLevelThreshold); } + @Override public void stop() { this.start = false; } + @Override public void start() { if (this.defaultLogLevelThreshold != null) { super.start(); diff --git a/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java b/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java index 7e8b90ef26..fa2fd19318 100644 --- a/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java +++ b/common/src/main/java/com/dremio/common/scanner/RunTimeScan.java @@ -15,18 +15,22 @@ */ package com.dremio.common.scanner; +import java.io.File; +import java.io.IOException; import java.net.URL; import java.util.Collection; import java.util.List; import com.dremio.common.config.SabotConfig; import com.dremio.common.scanner.persistence.ScanResult; +import com.fasterxml.jackson.databind.ObjectMapper; /** * Utility to scan classpath at runtime * */ public class RunTimeScan { + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RunTimeScan.class); /** result of prescan */ private static final ScanResult PRESCANNED = BuildTimeScan.load(); @@ -43,13 +47,38 @@ static Collection getNonPrescannedMarkedPaths() { return markedPaths; } + private static ScanResult createFromSavedScanResults() { + final String savedScanResults = System.getProperty("com.dremio.savedScanResults"); + if (savedScanResults == null) { + return null; + } + + final File scanResultsFile = new File(savedScanResults); + if (scanResultsFile.exists()) { + try { + return new ObjectMapper().readValue(scanResultsFile, ScanResult.class); + } catch (IOException e) { + logger.warn("Unable to read scan result from {} (proceeding 
to slow path): {}", + scanResultsFile.getName(), e.toString()); + } + } + return null; + } + /** * loads prescanned classpath info and scans for extra ones based on configuration. * (unless prescan is disabled with {@link ClassPathScanner#IMPLEMENTATIONS_SCAN_CACHE}=false) + * If the ScanResult was generated at build time and is indicated by com.dremio.savedScanResults, + * then short-circuit to just load that and return it * @param config to retrieve the packages to scan * @return the scan result */ public static ScanResult fromPrescan(SabotConfig config) { + final ScanResult preCreated = createFromSavedScanResults(); + if (preCreated != null) { + return preCreated; + } + List packagePrefixes = ClassPathScanner.getPackagePrefixes(config); List scannedBaseClasses = ClassPathScanner.getScannedBaseClasses(config); List scannedAnnotations = ClassPathScanner.getScannedAnnotations(config); diff --git a/common/src/main/java/com/dremio/common/util/Retryer.java b/common/src/main/java/com/dremio/common/util/Retryer.java index 813bf32d7c..06d17b10ef 100644 --- a/common/src/main/java/com/dremio/common/util/Retryer.java +++ b/common/src/main/java/com/dremio/common/util/Retryer.java @@ -27,19 +27,18 @@ import com.dremio.io.ExponentialBackoff; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import com.google.errorprone.annotations.CheckReturnValue; /** * Simple retrying utility - * - * @param */ @SuppressWarnings("checkstyle:FinalClass") -public class Retryer implements ExponentialBackoff { +public class Retryer implements ExponentialBackoff { private static final Logger logger = LoggerFactory.getLogger(Retryer.class); public enum WaitStrategy {EXPONENTIAL, FLAT} //Can be extended - private Set> retryableExceptionClasses = new HashSet<>(); + private final Set> retryableExceptionClasses = new HashSet<>(); private WaitStrategy waitStrategy = WaitStrategy.EXPONENTIAL; private int maxRetries = 4; // default private int baseMillis = 250; @@ -52,7 +51,7 @@ public enum WaitStrategy {EXPONENTIAL, FLAT} //Can be extended private Retryer() { } - public T call(Callable callable) { + public T call(Callable callable) { for (int attemptNo = 1; infiniteRetries || (attemptNo <= maxRetries); attemptNo++) { try { return callable.call(); @@ -139,47 +138,56 @@ public int getMaxRetries() { return maxRetries; } - public static class Builder { - private Retryer retryer = new Retryer<>(); + @CheckReturnValue + public static Builder newBuilder() { + return new Retryer.Builder(); + } + + public static class Builder { + private final Retryer retryer = new Retryer(); + + private Builder() { + // use static factory method newBuilder instead + } - public Builder retryIfExceptionOfType(Class clazz) { + public Builder retryIfExceptionOfType(Class clazz) { Preconditions.checkState(retryer.isRetriable == retryer.isExceptionClassRetriable, "Retryer does not support mix of exception class and exception function"); retryer.retryableExceptionClasses.add(clazz); return this; } - public Builder retryOnExceptionFunc(Function function) { + public Builder retryOnExceptionFunc(Function function) { Preconditions.checkState(retryer.retryableExceptionClasses.isEmpty(), "Retryer does not support mix of exception class and exception function"); retryer.isRetriable = function; return this; } - public Builder setWaitStrategy(WaitStrategy waitStrategy, int baseMillis, int maxMillis) { + public Builder setWaitStrategy(WaitStrategy waitStrategy, int baseMillis, int maxMillis) { retryer.waitStrategy =
waitStrategy; retryer.baseMillis = baseMillis; retryer.maxMillis = maxMillis; return this; } - public Builder setMaxRetries(int maxRetries) { + public Builder setMaxRetries(int maxRetries) { retryer.maxRetries = maxRetries; return this; } - public Builder setInfiniteRetries(boolean infiniteRetries) { + public Builder setInfiniteRetries(boolean infiniteRetries) { retryer.infiniteRetries = infiniteRetries; return this; } - public Retryer build() { + public Retryer build() { return retryer; } } - public Retryer copy() { - Retryer copy = new Retryer<>(); + public Retryer copy() { + Retryer copy = new Retryer(); copy.waitStrategy = waitStrategy; copy.baseMillis = baseMillis; copy.maxMillis = maxMillis; @@ -199,7 +207,8 @@ public static class OperationFailedAfterRetriesException extends RuntimeExceptio } public T getWrappedCause(Class clazz, Function conversionFunc) { - return clazz.isInstance(getCause()) ? (T)getCause() : conversionFunc.apply(getCause()); + Throwable cause = getCause(); + return clazz.isInstance(cause) ? clazz.cast(cause) : conversionFunc.apply(cause); } } } diff --git a/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java b/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java new file mode 100644 index 0000000000..75af4c0d63 --- /dev/null +++ b/common/src/main/java/com/dremio/common/utils/ReservedCharacters.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.common.utils; + +/** + * Set of characters that Dremio reserves to use and considers illegal in identifiers + */ +public enum ReservedCharacters { + INFORMATION_SEPARATOR_ONE('\u001F'); //https://www.fileformat.info/info/unicode/char/1f/index.htm + private char reservedChar; + + ReservedCharacters(char c) { + this.reservedChar = c; + } + + public char getReservedChar() { + return reservedChar; + } + + public static String getInformationSeparatorOne() { + return String.valueOf(INFORMATION_SEPARATOR_ONE.reservedChar); + } +} diff --git a/common/src/main/java/com/dremio/context/ExecutorToken.java b/common/src/main/java/com/dremio/context/ExecutorToken.java index a5e90d9fc2..de412ef24e 100644 --- a/common/src/main/java/com/dremio/context/ExecutorToken.java +++ b/common/src/main/java/com/dremio/context/ExecutorToken.java @@ -15,13 +15,25 @@ */ package com.dremio.context; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; + +import com.google.common.collect.ImmutableMap; + +import io.grpc.Metadata; + /** * Request Context Holder for executor token */ -public class ExecutorToken { +public class ExecutorToken implements SerializableContext { public static final RequestContext.Key CTX_KEY = RequestContext.newKey( "executor_token_key"); + // Note: Public due to usage in some interceptors which do not need deserialization. 
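+ // For instance (hypothetical interceptor snippet), the header key alone is enough to
+ // attach the token to an outgoing call's Metadata without any deserialization:
+ //   headers.put(ExecutorToken.TOKEN_HEADER_KEY, token.getExecutorToken());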
+ public static final Metadata.Key TOKEN_HEADER_KEY = + Metadata.Key.of("x-dremio-token-key", Metadata.ASCII_STRING_MARSHALLER); + private final String executorToken; public ExecutorToken(String executorToken) { @@ -32,4 +44,21 @@ public String getExecutorToken() { return executorToken; } + @Override + public void serialize(ImmutableMap.Builder builder) { + builder.put(TOKEN_HEADER_KEY.name(), executorToken); + } + + public static class Transformer implements SerializableContextTransformer { + @Override + public RequestContext deserialize(final Map headers, RequestContext builder) { + if (headers.containsKey(TOKEN_HEADER_KEY.name()) && StringUtils.isNotEmpty(headers.get(TOKEN_HEADER_KEY.name()))) { + return builder.with( + ExecutorToken.CTX_KEY, + new ExecutorToken(headers.get(TOKEN_HEADER_KEY.name()))); + } + + return builder; + } + } } diff --git a/common/src/main/java/com/dremio/context/SerializableContext.java b/common/src/main/java/com/dremio/context/SerializableContext.java new file mode 100644 index 0000000000..954365fc02 --- /dev/null +++ b/common/src/main/java/com/dremio/context/SerializableContext.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.context; + +import com.google.common.collect.ImmutableMap; + +/** + * Context object that can be serialized and deserialized for transport over the network. + */ +public interface SerializableContext { + /** + * Serializes the contents of this context into the provided map builder. + */ + void serialize(ImmutableMap.Builder builder); +} diff --git a/common/src/main/java/com/dremio/context/SerializableContextTransformer.java b/common/src/main/java/com/dremio/context/SerializableContextTransformer.java new file mode 100644 index 0000000000..f8b22613bc --- /dev/null +++ b/common/src/main/java/com/dremio/context/SerializableContextTransformer.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.context; + +import java.util.Map; + +import com.google.common.collect.ImmutableMap; + +import io.grpc.Metadata; + +/** + * Transformer for deserializing context objects from gRPC headers. + */ +public interface SerializableContextTransformer { + /** + * Helper for converting a set of gRPC headers into a map which this transformer can consume. 
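+ * <p>A minimal sketch of the intended flow (a hypothetical server-side interceptor;
+ * {@code metadata} is the incoming {@link Metadata}):
+ * <pre>
+ * Map&lt;String, String&gt; headers = SerializableContextTransformer.convert(metadata);
+ * RequestContext ctx = new UserContext.Transformer()
+ *     .deserialize(headers, RequestContext.current());
+ * </pre>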
+ */ + static Map convert(final Metadata headers) { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + headers.keys().forEach((key) -> { + final String value = headers.get(Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER)); + if (value != null) { + builder.put(key, value); + } + }); + return builder.build(); + } + + /** + * Constructs a context object from the provided map. + * If the headers are not present, the builder should be returned without modification. + * @param builder A RequestContext object to extend off of. + */ + RequestContext deserialize(final Map headers, RequestContext builder); +} diff --git a/common/src/main/java/com/dremio/context/SupportContext.java b/common/src/main/java/com/dremio/context/SupportContext.java index 16f6c9915d..4aa3f7bbf8 100644 --- a/common/src/main/java/com/dremio/context/SupportContext.java +++ b/common/src/main/java/com/dremio/context/SupportContext.java @@ -16,15 +16,22 @@ package com.dremio.context; import java.util.Arrays; +import java.util.Map; + +import com.google.common.collect.ImmutableMap; /** * Support context. */ -public class SupportContext { +public class SupportContext implements SerializableContext { public static final RequestContext.Key CTX_KEY = RequestContext.newKey("support_ctx_key"); private static final String ROLES_DELIMITER = ","; + private static final String SUPPORT_TICKET_HEADER_KEY = "x-dremio-support-ticket-key"; + private static final String SUPPORT_EMAIL_HEADER_KEY = "x-dremio-support-email-key"; + private static final String SUPPORT_ROLES_HEADER_KEY = "x-dremio-support-roles-key"; + // Note: This refers to the UserID field held within the UserContext, // but this constant only appears if the SupportContext is set. public static final String SUPPORT_USER_ID = "$Dremio-Support-Super-Admin-User$"; @@ -35,7 +42,8 @@ public enum SupportRole { BASIC_SUPPORT_ROLE("basic-support"), BILLING_ROLE("billing"), ORG_DELETE_ROLE("org-delete"), - CONSISTENCY_FIXER_ROLE("consistency-fixer"); + CONSISTENCY_FIXER_ROLE("consistency-fixer"), + DEBUG_ROLE("debug-role"); private final String value; @@ -95,6 +103,10 @@ public static boolean isSupportUserWithConsistencyFixerRole() { return isSupportUser() && isSupportUserHasRole(SupportRole.CONSISTENCY_FIXER_ROLE); } + public static boolean isSupportUserWithDebugRole() { + return isSupportUser() && isSupportUserHasRole(SupportRole.DEBUG_ROLE); + } + public static boolean doesSupportUserHaveRole(SupportContext supportContext, SupportRole role) { return supportContext.roles.length > 0 && Arrays.stream(supportContext.roles).anyMatch(r -> r.equals(role.value)); } @@ -110,12 +122,37 @@ private static boolean isSupportUserHasRole(SupportRole role) { return doesSupportUserHaveRole(RequestContext.current().get(SupportContext.CTX_KEY), role); } - public static String serializeSupportRoles(String[] rolesArr) { + @Override + public void serialize(ImmutableMap.Builder builder) { + builder.put(SUPPORT_TICKET_HEADER_KEY, ticket); + builder.put(SUPPORT_EMAIL_HEADER_KEY, email); + builder.put(SUPPORT_ROLES_HEADER_KEY, serializeSupportRoles(roles)); + } + + private static String serializeSupportRoles(String[] rolesArr) { return rolesArr != null ? String.join(ROLES_DELIMITER, rolesArr) : ""; } - public static String[] deserializeSupportRoles(String rolesStr) { - return rolesStr != null ? 
rolesStr.split(ROLES_DELIMITER) : new String[0]; + public static class Transformer implements SerializableContextTransformer { + @Override + public RequestContext deserialize(final Map headers, RequestContext builder) { + if (headers.containsKey(SUPPORT_TICKET_HEADER_KEY) + && headers.containsKey(SUPPORT_EMAIL_HEADER_KEY) + && headers.containsKey(SUPPORT_ROLES_HEADER_KEY)) + { + return builder.with( + SupportContext.CTX_KEY, + new SupportContext( + headers.get(SUPPORT_TICKET_HEADER_KEY), + headers.get(SUPPORT_EMAIL_HEADER_KEY), + deserializeSupportRoles(headers.get(SUPPORT_ROLES_HEADER_KEY)))); + } + + return builder; + } } + private static String[] deserializeSupportRoles(String rolesStr) { + return rolesStr != null ? rolesStr.split(ROLES_DELIMITER) : new String[0]; + } } diff --git a/common/src/main/java/com/dremio/context/TenantContext.java b/common/src/main/java/com/dremio/context/TenantContext.java index 2211480fce..7d6609132f 100644 --- a/common/src/main/java/com/dremio/context/TenantContext.java +++ b/common/src/main/java/com/dremio/context/TenantContext.java @@ -15,12 +15,17 @@ */ package com.dremio.context; +import java.util.Map; import java.util.UUID; +import com.google.common.collect.ImmutableMap; + +import io.grpc.Metadata; + /** * Tenant context. */ -public class TenantContext { +public class TenantContext implements SerializableContext { public static final RequestContext.Key CTX_KEY = RequestContext.newKey("tenant_ctx_key"); // The default tenant id used in product. public static final String DEFAULT_PRODUCT_PROJECT_ID = "77a89f85-c936-4f42-ab21-2ee90e9609b8"; @@ -31,6 +36,12 @@ public class TenantContext { public static final TenantContext DEFAULT_SERVICE_CONTEXT = new TenantContext(DEFAULT_SERVICE_PROJECT_ID, DEFAULT_SERVICE_ORG_ID); + // Note: These are public for use in annotating traces. + public static final Metadata.Key PROJECT_ID_HEADER_KEY = + Metadata.Key.of("x-dremio-project-id-key", Metadata.ASCII_STRING_MARSHALLER); + public static final Metadata.Key ORG_ID_HEADER_KEY = + Metadata.Key.of("x-dremio-org-id-key", Metadata.ASCII_STRING_MARSHALLER); + private final UUID projectId; private final UUID orgId; @@ -47,4 +58,25 @@ public UUID getOrgId() { return orgId; } + @Override + public void serialize(ImmutableMap.Builder builder) { + builder.put(PROJECT_ID_HEADER_KEY.name(), projectId.toString()); + builder.put(ORG_ID_HEADER_KEY.name(), orgId.toString()); + } + + public static class Transformer implements SerializableContextTransformer { + @Override + public RequestContext deserialize(final Map headers, RequestContext builder) { + if (headers.containsKey(PROJECT_ID_HEADER_KEY.name()) + && headers.containsKey(ORG_ID_HEADER_KEY.name())) { + return builder.with( + TenantContext.CTX_KEY, + new TenantContext( + headers.get(PROJECT_ID_HEADER_KEY.name()), + headers.get(ORG_ID_HEADER_KEY.name()))); + } + + return builder; + } + } } diff --git a/common/src/main/java/com/dremio/context/UserContext.java b/common/src/main/java/com/dremio/context/UserContext.java index 99b5417a91..ba2e8fc598 100644 --- a/common/src/main/java/com/dremio/context/UserContext.java +++ b/common/src/main/java/com/dremio/context/UserContext.java @@ -15,15 +15,22 @@ */ package com.dremio.context; +import java.util.Map; + +import com.google.common.collect.ImmutableMap; + /** * User context. 
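* <p>Typical lookup (illustrative; callers should null-check when no context is bound):
* <pre>
* UserContext user = RequestContext.current().get(UserContext.CTX_KEY);
* String userId = user != null ? user.getUserId() : null;
* </pre>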
*/ -public class UserContext { +public class UserContext implements SerializableContext { public static final RequestContext.Key CTX_KEY = RequestContext.newKey("user_ctx_key"); public static final UserContext DEFAULT_SERVICE_CONTEXT = new UserContext("77a89f85-c936-4f42-ab21-2ee90e9609b8"); // represents the Dremio System User ($dremio$) public static final UserContext SYSTEM_USER_CONTEXT = new UserContext("678cc92c-01ed-4db3-9a28-d1f871042d9f"); + // TODO(DX-63584): Change to private once the use in proxy handlers is removed. + public static final String USER_HEADER_KEY = "x-dremio-user-key"; + private final String userId; public UserContext(String userId) { @@ -34,12 +41,26 @@ public String getUserId() { return userId; } - public String serialize() { - return userId; - } - public static boolean isSystemUser() { return RequestContext.current().get(UserContext.CTX_KEY) != null && SYSTEM_USER_CONTEXT.getUserId().equals(RequestContext.current().get(UserContext.CTX_KEY).getUserId()); } + + @Override + public void serialize(ImmutableMap.Builder builder) { + builder.put(USER_HEADER_KEY, userId); + } + + public static class Transformer implements SerializableContextTransformer { + @Override + public RequestContext deserialize(final Map headers, RequestContext builder) { + if (headers.containsKey(USER_HEADER_KEY)) { + return builder.with( + UserContext.CTX_KEY, + new UserContext(headers.get(USER_HEADER_KEY))); + } + + return builder; + } + } } diff --git a/common/src/main/java/com/dremio/context/UsernameContext.java b/common/src/main/java/com/dremio/context/UsernameContext.java new file mode 100644 index 0000000000..afb3755310 --- /dev/null +++ b/common/src/main/java/com/dremio/context/UsernameContext.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.context; + +/** + * The username of the user. + * + * TODO: + * Note that there is ongoing work to add the user's username to UserContext; + * Refer to DX-51988: Introduce username to UserContext. + * Once the ticket is completed, this class is unnecessary and should be removed in favor of + * simply using the username in the UserContext. + * Refer to DX-59840: Remove UsernameContext once username is included in UserContext.
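+ * <p>Illustrative use while this class remains ("jdoe" and doWork() are placeholders):
+ * <pre>
+ * RequestContext.current()
+ *     .with(UsernameContext.CTX_KEY, new UsernameContext("jdoe"))
+ *     .run(() -> doWork());
+ * </pre>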
+ */ +public class UsernameContext { + public static final RequestContext.Key CTX_KEY = RequestContext.newKey("user_name_ctx_key"); + + private final String userName; + + public UsernameContext(String userName) { + this.userName = userName; + } + + public String getUserName() { + return userName; + } +} diff --git a/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java b/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java deleted file mode 100644 index 829faa0c89..0000000000 --- a/common/src/main/java/com/hubspot/jackson/datatype/protobuf/builtin/serializers/MessageSerializer.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.hubspot.jackson.datatype.protobuf.builtin.serializers; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.PropertyNamingStrategy.PropertyNamingStrategyBase; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.util.NameTransformer; -import com.google.protobuf.Descriptors.Descriptor; -import com.google.protobuf.Descriptors.FieldDescriptor; -import com.google.protobuf.Descriptors.FieldDescriptor.JavaType; -import com.google.protobuf.Descriptors.FileDescriptor.Syntax; -import com.google.protobuf.ExtensionRegistry.ExtensionInfo; -import com.google.protobuf.GeneratedMessageV3.ExtendableMessageOrBuilder; -import com.google.protobuf.MessageOrBuilder; -import com.hubspot.jackson.datatype.protobuf.ExtensionRegistryWrapper; -import com.hubspot.jackson.datatype.protobuf.PropertyNamingStrategyWrapper; -import com.hubspot.jackson.datatype.protobuf.ProtobufJacksonConfig; -import com.hubspot.jackson.datatype.protobuf.ProtobufSerializer; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - -/** - * A Modified MessageSerializer to add support for Unwrapped Serialization. - * - * This file submitted as a PR to the jackson protobuf library. 
- * PR - https://github.com/HubSpot/jackson-datatype-protobuf/pull/79 - */ -public class MessageSerializer extends ProtobufSerializer { - @SuppressFBWarnings(value="SE_BAD_FIELD") - private final ProtobufJacksonConfig config; - private final boolean unwrappingSerializer; - private final NameTransformer nameTransformer; - - /** - * @deprecated use {@link #MessageSerializer(ProtobufJacksonConfig)} instead - */ - @Deprecated - public MessageSerializer(ExtensionRegistryWrapper extensionRegistry) { - this(ProtobufJacksonConfig.builder().extensionRegistry(extensionRegistry).build()); - } - - public MessageSerializer(ProtobufJacksonConfig config) { - this(config, false); - } - - public MessageSerializer(ProtobufJacksonConfig config, boolean unwrappingSerializer) { - this(config, null, unwrappingSerializer); - } - - public MessageSerializer(ProtobufJacksonConfig config, NameTransformer nameTransformer, boolean unwrappingSerializer) { - super(MessageOrBuilder.class); - this.config = config; - this.unwrappingSerializer = unwrappingSerializer; - if (nameTransformer == null) { - this.nameTransformer = NameTransformer.NOP; - } else { - this.nameTransformer = nameTransformer; - } - } - - @Override - public void serialize( - MessageOrBuilder message, - JsonGenerator generator, - SerializerProvider serializerProvider - ) throws IOException { - if (!isUnwrappingSerializer()) { - generator.writeStartObject(); - } - - boolean proto3 = message.getDescriptorForType().getFile().getSyntax() == Syntax.PROTO3; - Include include = serializerProvider.getConfig().getDefaultPropertyInclusion().getValueInclusion(); - boolean writeDefaultValues = proto3 && include != Include.NON_DEFAULT; - boolean writeEmptyCollections = include != Include.NON_DEFAULT && include != Include.NON_EMPTY; - - //If NamingTransformer is provided (in case of UnwrappingSerializer), we chain it on top of - // the namingStrategy. 
- final PropertyNamingStrategyBase namingStrategy = new PropertyNamingStrategyBase() { - @Override - public String translate(String fieldName) { - PropertyNamingStrategyBase configuredNamingStrategy = - new PropertyNamingStrategyWrapper(serializerProvider.getConfig().getPropertyNamingStrategy()); - return nameTransformer.transform(configuredNamingStrategy.translate(fieldName)); - } - }; - - - Descriptor descriptor = message.getDescriptorForType(); - List fields = new ArrayList<>(descriptor.getFields()); - if (message instanceof ExtendableMessageOrBuilder) { - for (ExtensionInfo extensionInfo : config.extensionRegistry().getExtensionsByDescriptor(descriptor)) { - fields.add(extensionInfo.descriptor); - } - } - - for (FieldDescriptor field : fields) { - if (field.isRepeated()) { - List valueList = (List) message.getField(field); - - if (!valueList.isEmpty() || writeEmptyCollections) { - if (field.isMapField()) { - generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName()))); - writeMap(field, valueList, generator, serializerProvider); - } else if (valueList.size() == 1 && writeSingleElementArraysUnwrapped(serializerProvider)) { - generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName()))); - writeValue(field, valueList.get(0), generator, serializerProvider); - } else { - generator.writeArrayFieldStart(nameTransformer.transform(namingStrategy.translate(field.getName()))); - for (Object subValue : valueList) { - writeValue(field, subValue, generator, serializerProvider); - } - generator.writeEndArray(); - } - } - } else if (message.hasField(field) || (writeDefaultValues && !supportsFieldPresence(field) && field.getContainingOneof() == null)) { - generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName()))); - writeValue(field, message.getField(field), generator, serializerProvider); - } else if (include == Include.ALWAYS && field.getContainingOneof() == null) { - generator.writeFieldName(nameTransformer.transform(namingStrategy.translate(field.getName()))); - generator.writeNull(); - } - } - - if (!isUnwrappingSerializer()) { - generator.writeEndObject(); - } - } - - @Override - public boolean isUnwrappingSerializer() { - return unwrappingSerializer; - } - - @Override - public MessageSerializer unwrappingSerializer(NameTransformer nameTransformer) { - return new MessageSerializer(config, nameTransformer, true); - } - - private static boolean supportsFieldPresence(FieldDescriptor field) { - // messages still support field presence in proto3 - return field.getJavaType() == JavaType.MESSAGE; - } - - private static boolean writeEmptyArrays(SerializerProvider config) { - return config.isEnabled(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS); - } - - private static boolean writeSingleElementArraysUnwrapped(SerializerProvider config) { - return config.isEnabled(SerializationFeature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED); - } -} diff --git a/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java b/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java index e2cc07adc5..f1928c2b10 100644 --- a/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java +++ b/common/src/test/java/com/dremio/TestBlockLevel/TestBlockLevelLogging.java @@ -33,18 +33,26 @@ import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.read.ListAppender; +/** + * expectations in this test depend on the log configuration in the logback-test.xml resource + */ public 
class TestBlockLevelLogging { - boolean isLowestLevelPresentInLogList(List logsList, Level level) + + private static void assertLowestLogLevel(List logsList, Level level) { - for (int i=0;i testLogFilteringUtil(ch.qos.logback.classic.Logger logger) { + + public static List testLogFilteringUtil(org.slf4j.Logger slf4jLogger) { + ch.qos.logback.classic.Logger logger = (ch.qos.logback.classic.Logger) slf4jLogger; ListAppender listAppender = new ListAppender<>(); listAppender.start(); logger.addAppender(listAppender); @@ -56,86 +64,60 @@ public static List testLogFilteringUtil(ch.qos.logback.classic.Lo List logsList = listAppender.list; return logsList; } + @Test public void testAFirst() { AFirst aFirst = new AFirst(); List logsList = aFirst.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.ERROR)); - for (int i=0;i logsList = aSecond.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.WARN)); - for (int i=0;i logsList = aThird.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.WARN)); - for (int i=0;i logsList = bFirst.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.INFO)); - for (int i=0;i logsList = bSecond.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.ERROR)); - for (int i=0;i logsList = bThird.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.DEBUG)); - for (int i=0;i logsList = cFirst.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.DEBUG)); - for (int i=0;i logsList = cSecond.testLogFiltering(); - Assert.assertTrue(isLowestLevelPresentInLogList(logsList,Level.TRACE)); - Assert.assertTrue(Level.toLevel("TRACE").isGreaterOrEqual(Level.toLevel("TRACE"))); - - for (int i=0;i callable = () -> RequestContext.current().get(UserContext.CTX_KEY).serialize(); + Callable callable = () -> RequestContext.current().get(UserContext.CTX_KEY).getUserId(); Future future = RequestContext.empty() .with(UserContext.CTX_KEY, new UserContext(testUser)) .call(() -> pool.submit(callable)); @@ -161,7 +161,7 @@ public void testContextWithRunnable() throws Exception { final String testUser = "testUser2"; final Pointer foundUser = new Pointer<>(); - Runnable runnable = () -> foundUser.value = RequestContext.current().get(UserContext.CTX_KEY).serialize(); + Runnable runnable = () -> foundUser.value = RequestContext.current().get(UserContext.CTX_KEY).getUserId(); Future future = RequestContext.empty() .with(UserContext.CTX_KEY, new UserContext(testUser)) .call(() -> pool.submit(runnable)); diff --git a/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java b/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java index 2426908426..baba650e16 100644 --- a/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java +++ b/common/src/test/java/com/dremio/common/logging/TestStructuredLogging.java @@ -49,6 +49,7 @@ public class TestStructuredLogging extends DremioTest { private static final String LOGGER_NAME = "STRUCTURED-LOG-TEST"; private LoggerContext localLoggerContext; + @SuppressWarnings("Slf4jLoggerShouldBeFinal") private Logger logger; @Rule public TemporaryFolder tempLogFolder = new TemporaryFolder(); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java index 
d495cde26c..75e7214c44 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/First/AFirst.java @@ -19,14 +19,13 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** * class for testing custom log filtering */ public class AFirst { - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(AFirst.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AFirst.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java index 80a936b081..4183bde283 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Second/ASecond.java @@ -19,14 +19,13 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** * class for testing custom log filtering */ public class ASecond { - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(ASecond.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ASecond.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java index 0d8ce7b3ac..55a5eacd5d 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/A/Third/AThird.java @@ -19,14 +19,13 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** * class for testing custom log filtering */ public class AThird { - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(AThird.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AThird.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java index e1222a3712..7ad9f1fedf 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/First/BFirst.java @@ -19,18 +19,15 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** * class for testing custom log filtering */ public class BFirst { - - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BFirst.class); + private static final 
org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BFirst.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); - } } diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java index 83074f25de..2c140e994e 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Second/BSecond.java @@ -19,14 +19,13 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** *class for testing custom log filtering */ public class BSecond { - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BSecond.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BSecond.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java index 0e81bb6bd1..d3db7f1702 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/B/Third/BThird.java @@ -19,15 +19,13 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /** * class for testing custom log filtering */ public class BThird { - - private static ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(BThird.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BThird.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java index 27152ce298..2a67572f8d 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/CFirst.java @@ -19,14 +19,14 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; /* *class for testing custom log filtering */ public class CFirst { - private static final ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(CFirst.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CFirst.class); + public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); } diff --git a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java index c78b225bd6..3822b81ba9 100644 --- a/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java +++ b/common/src/test/java/com/dremio/common/logging/obfuscation/TestBlockLevel/C/Second/CSecond.java @@ 
-20,11 +20,10 @@ import com.dremio.TestBlockLevel.TestBlockLevelLogging; -import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; public class CSecond { - private static final ch.qos.logback.classic.Logger logger = (Logger) org.slf4j.LoggerFactory.getLogger(CSecond.class); + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CSecond.class); public List testLogFiltering() { return TestBlockLevelLogging.testLogFilteringUtil(logger); } diff --git a/common/src/test/java/com/dremio/common/util/TestRetryer.java b/common/src/test/java/com/dremio/common/util/TestRetryer.java index 16749b74e3..7d946a73af 100644 --- a/common/src/test/java/com/dremio/common/util/TestRetryer.java +++ b/common/src/test/java/com/dremio/common/util/TestRetryer.java @@ -38,7 +38,7 @@ public class TestRetryer { @Test public void testMaxRetries() { - Retryer retryer = new Retryer.Builder() + Retryer retryer = Retryer.newBuilder() .setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1) .retryIfExceptionOfType(RuntimeException.class) .setMaxRetries(MAX_RETRIES).build(); @@ -57,7 +57,7 @@ public void testMaxRetries() { @Test public void testNoRetryAfterSuccess() { - Retryer retryer = new Retryer.Builder() + Retryer retryer = Retryer.newBuilder() .setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1) .retryIfExceptionOfType(RuntimeException.class) .setMaxRetries(MAX_RETRIES).build(); @@ -81,7 +81,7 @@ public void testNoRetryAfterSuccess() { public void testFlatWaitStrategy() { final int expectedWait = 100; - Retryer retryer = spy(new Retryer.Builder() + Retryer retryer = spy(Retryer.newBuilder() .setWaitStrategy(Retryer.WaitStrategy.FLAT, expectedWait, expectedWait) .retryIfExceptionOfType(RuntimeException.class) .setMaxRetries(MAX_RETRIES).build()); @@ -100,7 +100,7 @@ public void testFlatWaitStrategy() { @Test(expected = RuntimeException.class) public void testRetryIfException() { - Retryer retryer = new Retryer.Builder() + Retryer retryer = Retryer.newBuilder() .setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1) .retryIfExceptionOfType(IOException.class) .retryIfExceptionOfType(SQLException.class) @@ -127,7 +127,7 @@ public void testRetryIfException() { @Test(expected = RuntimeException.class) public void testRetryIfExceptionFunc() { - Retryer retryer = new Retryer.Builder() + Retryer retryer = Retryer.newBuilder() .setWaitStrategy(Retryer.WaitStrategy.FLAT, 1, 1) .retryOnExceptionFunc(ex -> ex instanceof IOException || ex instanceof SQLException) .setMaxRetries(MAX_RETRIES).build(); diff --git a/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java b/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java index 7087553048..c60156b236 100644 --- a/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java +++ b/common/src/test/java/com/dremio/test/GoldenFileMetaTests.java @@ -19,6 +19,7 @@ import java.nio.file.Paths; import org.junit.Assert; +import org.junit.ComparisonFailure; import org.junit.Test; /** @@ -27,7 +28,7 @@ public final class GoldenFileMetaTests { @Test public void testSuccessScenario() { - new GoldenFileTestBuilder(input -> input.left + input.right) + GoldenFileTestBuilder.create(input -> input.left + input.right) .add("3 plus 5", new Input(3, 5)) .add("5 plus 8", new Input(5, 8)) .runTests(); @@ -35,7 +36,7 @@ public void testSuccessScenario() { @Test public void testExpectedExceptionScenario() { - new GoldenFileTestBuilder<>(GoldenFileMetaTests::addWithException) + GoldenFileTestBuilder.create(GoldenFileMetaTests::addWithException) 
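// create(...) wires an identity input serializer; allowExceptions() below installs the
// default exception serializer so the thrown exception becomes the baseline output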
.allowExceptions() .add("3 plus 5", new Input(3, 5)) .add("5 plus 8", new Input(5, 8)) @@ -45,7 +46,7 @@ public void testExpectedExceptionScenario() { @Test public void testUnexpectedExceptionScenario() { try { - new GoldenFileTestBuilder<>(GoldenFileMetaTests::addWithException) + GoldenFileTestBuilder.create(GoldenFileMetaTests::addWithException) .add("3 plus 5", new Input(3, 5)) .runTests(); Assert.fail(); @@ -54,11 +55,28 @@ public void testUnexpectedExceptionScenario() { } } + @Test + public void testIgnoreScenario() { + GoldenFileTestBuilder.create(input -> input.left + input.right) + .add("Correct Output And Ignore = false", new Input(3, 5)) + .addButIgnore("Correct Output And Ignore = true", new Input(3, 5)) + .addButIgnore("Incorrect Output And Ignore = true", new Input(3, 5)) + .runTests(); + } + + @Test(expected = ComparisonFailure.class) + public void testIncorrectOutput() { + GoldenFileTestBuilder.create(input -> input.left + input.right) + .allowExceptions() + .add("Incorrect Output And Ignore = false", new Input(3, 5)) + .runTests(); + } + @SuppressWarnings("AssertionFailureIgnored") @Test public void testFirstRun() { try { - new GoldenFileTestBuilder<>((Integer i) -> i) + GoldenFileTestBuilder.create((Integer i) -> i) .add("Example Test", 1) .runTests(); Assert.fail(); @@ -81,7 +99,7 @@ public void testFirstRun() { @Test public void testNotCasesAdded() { try { - new GoldenFileTestBuilder<>((Integer i) -> i) + GoldenFileTestBuilder.create((Integer i) -> i) .runTests(); Assert.fail(); } catch (IllegalStateException error) { diff --git a/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java b/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java index 2de54f0667..cd06ee0a1e 100644 --- a/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java +++ b/common/src/test/java/com/dremio/test/GoldenFileTestBuilder.java @@ -17,8 +17,6 @@ import java.io.File; import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; @@ -28,8 +26,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.function.Function; import org.apache.commons.lang3.tuple.Pair; @@ -37,6 +37,7 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.LoaderOptions; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonInclude; @@ -67,37 +68,36 @@ /** * Class generating golden files used for baseline / data-driven testing */ -public final class GoldenFileTestBuilder { +public final class GoldenFileTestBuilder { private static final Logger LOGGER = LoggerFactory.getLogger(GoldenFileTestBuilder.class); private static final Path LICENSE_HEADER_PATH = Paths.get(Resources.getResource("goldenfiles/header.txt").getPath()); - private static final ObjectMapper objectMapper = getObjectMapper(); + private static final ObjectMapper objectMapper = createObjectMapper(); + private final List> descriptionAndInputs = new ArrayList<>(); private final ThrowingFunction executeTestFunction; - private final List> descriptionAndInputs; - private boolean allowExceptions; - private boolean showFullStackTrace; + private Function exceptionSerializer; + private Function inputSerializer; + private boolean allowUnorderedMatch; - public 
GoldenFileTestBuilder(ThrowingFunction executeTestFunction) { + public GoldenFileTestBuilder( + ThrowingFunction executeTestFunction, + Function inputSerializer) { this.executeTestFunction = executeTestFunction; - this.descriptionAndInputs = new ArrayList<>(); + this.inputSerializer = inputSerializer; + this.allowUnorderedMatch = false; } - public GoldenFileTestBuilder allowExceptions() { - this.allowExceptions = true; + public GoldenFileTestBuilder add(String description, I input) { + this.descriptionAndInputs.add(new DescriptionAndInput(description, input, false)); return this; } - public GoldenFileTestBuilder showFullStackTrace() { - this.showFullStackTrace = true; + public GoldenFileTestBuilder addButIgnore(String description, I input) { + this.descriptionAndInputs.add(new DescriptionAndInput(description, input, true)); return this; } - public GoldenFileTestBuilder add(String description, I input) { - this.descriptionAndInputs.add(new DescriptionAndInput(description, input)); - return this; - } - - public GoldenFileTestBuilder addListByRule(List list, Function> rule) { + public GoldenFileTestBuilder addListByRule(List list, Function> rule) { for (T item : list) { Pair output = rule.apply(item); String description = output.getLeft(); @@ -108,40 +108,54 @@ public GoldenFileTestBuilder addListByRule(List list, Function setExceptionSerializer(Function exceptionSerializer) { + this.exceptionSerializer = exceptionSerializer; + return this; + } + + public GoldenFileTestBuilder allowExceptions() { + return setExceptionSerializer(GoldenFileTestBuilder::defaultExceptionSerializer); + } + + public GoldenFileTestBuilder allowUnorderedMatch() { + allowUnorderedMatch = true; + return this; + } + public void runTests() { try { Preconditions.checkState(!descriptionAndInputs.isEmpty(), "No test cases found."); // Generate the Input and Output pairs - List> actualInputAndOutputList = new ArrayList<>(); - for (DescriptionAndInput descriptionAndInput : this.descriptionAndInputs) { - InputAndOutput inputAndOutput; + List> actualInputAndOutputList = new ArrayList<>(); + for (DescriptionAndInput descriptionAndInput : descriptionAndInputs) { + InputAndOutput inputAndOutput; + I_W inputForSerialization = inputSerializer.apply(descriptionAndInput.input); + try { inputAndOutput = InputAndOutput.createSuccess( descriptionAndInput.description, - descriptionAndInput.input, - this.executeTestFunction.apply(descriptionAndInput.input)); + inputForSerialization, + executeTestFunction.apply(descriptionAndInput.input)); } catch (Throwable t) { - if (this.allowExceptions) { - inputAndOutput = InputAndOutput.createFailure( - descriptionAndInput.description, - descriptionAndInput.input, - t, - this.showFullStackTrace); - } else { + if (exceptionSerializer == null) { throw new RuntimeException(t); } + + inputAndOutput = InputAndOutput.createFailure( + descriptionAndInput.description, + inputForSerialization, + t, + exceptionSerializer); } actualInputAndOutputList.add(inputAndOutput); } - - // Write the actual values, so user's can diff with the expected and overwrite the golden file if the change is acceptable. 
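+ // Write the actual values so users can diff them against the checked-in golden file
+ // and, when the new output is correct, copy them over (illustrative shell step; the
+ // paths come from getGoldenFileActualPath() and goldenFileResource()):
+ //   cp target/goldenfiles/actual/<Test>.<method>.yaml \
+ //      src/test/resources/goldenfiles/expected/<Test>.<method>.yaml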
Path goldenFileActualPath = getGoldenFileActualPath(); writeActualGoldenFile(goldenFileActualPath, actualInputAndOutputList); - List> expectedInputAndOutputList = readExpectedFile(); + List> expectedInputAndOutputList = readExpectedFile(); // Assert equality assertGoldenFilesAreEqual(expectedInputAndOutputList, actualInputAndOutputList); @@ -150,40 +164,43 @@ public void runTests() { } } - private List> readExpectedFile() { + private List> readExpectedFile() { String path = goldenFileResource(); try { return objectMapper.readValue( Resources.getResource(path), - new TypeReference>>(){}); + new TypeReference>>(){}); } catch(IllegalArgumentException|IOException ex) { LOGGER.error("Exception while read expected file", ex); return ImmutableList.of(); //Return empty list so file is generated for the first run. } } - public static String findFileName() { - Pair callingClassAndMethod = GoldenFileTestBuilder.findCallingTestClassAndMethod(); - + public String findFileName() { + Pair callingClassAndMethod = findCallingTestClassAndMethod(); return callingClassAndMethod.getLeft() + "." + callingClassAndMethod.getRight(); } - private static Pair findCallingTestClassAndMethod() { + private Pair findCallingTestClassAndMethod() { StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); - for (int i=1; i clazz = Class.forName(ste.getClassName()); - for(Method method : clazz.getMethods()) { - if(method.getName().equals(ste.getMethodName()) + for (Method method : clazz.getMethods()) { + if (method.getName().equals(ste.getMethodName()) && method.getDeclaredAnnotation(Test.class) != null) { String[] classNamespaceTokens = ste.getClassName().split("\\."); - return Pair.of(classNamespaceTokens[classNamespaceTokens.length - 1], ste.getMethodName()); + String testClassName = classNamespaceTokens[classNamespaceTokens.length - 1]; + String methodName = ste.getMethodName(); + + return Pair.of(testClassName, methodName); } } } catch (ClassNotFoundException e) { @@ -193,19 +210,19 @@ private static Pair findCallingTestClassAndMethod() { throw new RuntimeException("No @Test method found"); } - private static Path getGoldenFileActualPath() throws IOException { + private Path getGoldenFileActualPath() throws IOException { return Paths.get("target","goldenfiles", "actual", findFileName() + ".yaml"); } - public static String goldenFileResource() { + public String goldenFileResource() { return "goldenfiles/expected/" + findFileName() + ".yaml"; } - public static String inputFileResource() { + public String inputFileResource() { return "goldenfiles/input/" + findFileName() + ".yaml"; } - private static String messageToFix() { + private String messageToFix() { try { String actualPath = getGoldenFileActualPath().toString(); String goldenPath = "src/test/resources/" + goldenFileResource(); @@ -219,6 +236,10 @@ private static String messageToFix() { } } + public static GoldenFileTestBuilder create(ThrowingFunction executeTestFunction) { + return new GoldenFileTestBuilder<>(executeTestFunction, i -> i); + } + private static void writeActualGoldenFile( Path goldenFileActualPath, List> actualInputAndOutputList) throws IOException { @@ -245,41 +266,82 @@ private static void writeActualGoldenFile( Files.write(goldenFileActualPath, fileContentWithLicence.getBytes(StandardCharsets.UTF_8)); } - private static void assertGoldenFilesAreEqual( - List> expectedInputAndOutputList, - List> actualInputAndOutputList) throws JsonProcessingException { + private void assertGoldenFilesAreEqual( + List> expectedInputAndOutputList, + 
List<InputAndOutput<I_W, O>> actualInputAndOutputList) throws JsonProcessingException {
     String messageToFix = messageToFix();
     Assert.assertEquals(messageToFix, expectedInputAndOutputList.size(), actualInputAndOutputList.size());
     for (int i = 0; i < expectedInputAndOutputList.size(); i++) {
       InputAndOutput expectedInputAndOutput = expectedInputAndOutputList.get(i);
       InputAndOutput actualInputAndOutput = actualInputAndOutputList.get(i);
+      DescriptionAndInput<I> descriptionAndInput = descriptionAndInputs.get(i);
+
+      if (!descriptionAndInput.ignore) {
+        Assert.assertEquals(
+          "Descriptions differ,\n" + messageToFix,
+          expectedInputAndOutput.description,
+          actualInputAndOutput.description);
+        String expectedInputString = objectMapper.writeValueAsString(expectedInputAndOutput.input);
+        String actualInputString = objectMapper.writeValueAsString(actualInputAndOutput.input);
+        Assert.assertEquals(
+          "Inputs for baseline differ,\n" + messageToFix,
+          expectedInputString,
+          actualInputString);
+
+        Assert.assertEquals(
+          "Exception Message for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+          expectedInputAndOutput.exceptionMessage,
+          actualInputAndOutput.exceptionMessage);
+
+        String expectedOutputString = objectMapper.writeValueAsString(expectedInputAndOutput.output);
+        String actualOutputString = objectMapper.writeValueAsString(actualInputAndOutput.output);
+        if (!expectedOutputString.equals(actualOutputString)) {
+          // Outputs differ textually; with allowUnorderedMatch they still pass when
+          // one output is a character permutation of the other. (The inputs were
+          // already asserted equal above, so the permutation check must run on the
+          // outputs, not the inputs.)
+          if (allowUnorderedMatch) {
+            if (!isPermutation(expectedOutputString, actualOutputString)) {
+              Assert.assertEquals(
+                "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+                expectedOutputString,
+                actualOutputString);
+            }
+          } else {
+            Assert.assertEquals(
+              "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+              expectedOutputString,
+              actualOutputString);
+          }
+        }
-      Assert.assertEquals(
-        "Descriptions differ,\n" + messageToFix,
-        expectedInputAndOutput.description,
-        actualInputAndOutput.description);
-      String expectedInputString = objectMapper.writeValueAsString(expectedInputAndOutput.input);
-      String actualInputString = objectMapper.writeValueAsString(actualInputAndOutput.input);
-      Assert.assertEquals(
-        "Inputs for baseline differ,\n" + messageToFix,
-        expectedInputString,
-        actualInputString);
-
-      String expectedOutputString = objectMapper.writeValueAsString(expectedInputAndOutput.output);
-      String actualOutputString = objectMapper.writeValueAsString(actualInputAndOutput.output);
-      Assert.assertEquals(
-        "Outputs for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
-        expectedOutputString,
-        actualOutputString);
-
-      Assert.assertEquals(
-        "Exceptions for baselines differ,\n" + messageToFix+ " with input " + expectedInputString,
-        expectedInputAndOutput.exceptionMessage,
-        actualInputAndOutput.exceptionMessage);
+        Assert.assertEquals(
+          "Exceptions for baselines differ,\n" + messageToFix + " with input " + expectedInputString,
+          expectedInputAndOutput.exceptionMessage,
+          actualInputAndOutput.exceptionMessage);
+      }
     }
   }

+  // Returns true exactly when the two strings contain the same characters with
+  // the same multiplicities, e.g. isPermutation("abc", "cab") == true.
+  public static boolean isPermutation(String str1, String str2) {
+    if (str1.length() != str2.length()) {
+      return false;
+    }
+
+    Map<Character, Integer> map1 = new HashMap<>();
+    Map<Character, Integer> map2 = new HashMap<>();
+
+    for (int i = 0; i < str1.length(); i++) {
+      char c1 = str1.charAt(i);
+      char c2 = str2.charAt(i);
+      map1.put(c1, map1.getOrDefault(c1, 0) + 1);
+      map2.put(c2, map2.getOrDefault(c2, 0) + 1);
+    }
+
+    return map1.equals(map2);
+  }
+
+  private static String
defaultExceptionSerializer(Throwable throwable) { + return throwable.getMessage(); + } + @FunctionalInterface public interface ThrowingFunction { R apply(T t) throws Exception; @@ -288,13 +350,15 @@ public interface ThrowingFunction { private static final class DescriptionAndInput { private final String description; private final I input; + private final boolean ignore; - private DescriptionAndInput(String description, I input) { + private DescriptionAndInput(String description, I input, boolean ignore) { assert description != null; assert input != null; this.description = description; this.input = input; + this.ignore = ignore; } } @@ -307,14 +371,14 @@ public static final class InputAndOutput { public final O output; @JsonInclude(JsonInclude.Include.NON_NULL) - public final String exceptionMessage; + public final MultiLineString exceptionMessage; @JsonCreator private InputAndOutput( @JsonProperty("description") String description, @JsonProperty("input") I input, @JsonProperty("output") O output, - @JsonProperty("exceptionMessage") String exceptionMessage) { + @JsonProperty("exceptionMessage") MultiLineString exceptionMessage) { this.description = description; this.input = input; this.output = output; @@ -325,24 +389,16 @@ public static InputAndOutput createSuccess(String description, I input, O return new InputAndOutput(description, input, output, null); } - public static InputAndOutput createFailure(String description, I input, Throwable throwable, boolean showFullStackTrace) { - String exceptionMessage; - if (showFullStackTrace) { - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - throwable.printStackTrace(pw); - - exceptionMessage = sw.toString().replace("\t", ""); - } else { - exceptionMessage = throwable.getMessage(); - - } - - if(exceptionMessage == null) { - exceptionMessage = throwable.toString(); - } - - return new InputAndOutput(description, input, null, exceptionMessage); + public static InputAndOutput createFailure( + String description, I input, + Throwable throwable, + Function exceptionSerializer) { + String exceptionMessage = exceptionSerializer.apply(throwable); + return new InputAndOutput( + description, + input, + null, + MultiLineString.create(exceptionMessage)); } } @@ -471,12 +527,16 @@ public void serialize( } } - private static ObjectMapper getObjectMapper(){ + private static ObjectMapper createObjectMapper(){ + LoaderOptions loaderOptions = new LoaderOptions(); + loaderOptions.setCodePointLimit(10 * 1024 * 1024); // Set loader option to load a file as large as 10 MB return new ObjectMapper( - new YAMLFactory() + YAMLFactory.builder() + .loaderOptions(loaderOptions) .disable(YAMLGenerator.Feature.SPLIT_LINES) .disable(YAMLGenerator.Feature.CANONICAL_OUTPUT) - .enable(YAMLGenerator.Feature.INDENT_ARRAYS)) + .enable(YAMLGenerator.Feature.INDENT_ARRAYS) + .build()) .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS) .enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY) .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) @@ -505,7 +565,7 @@ public MultiLineString deserialize(JsonParser jp, DeserializationContext ctxt) } else if (node.isArray()) { List lines = new ArrayList<>(); Iterator iterator = node.iterator(); - while(iterator.hasNext()) { + while (iterator.hasNext()) { JsonNode element = iterator.next(); lines.add(element.asText()); } diff --git a/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java b/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java index 
e0c92d5d10..ec6b753d83 100644 --- a/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java +++ b/common/src/test/java/org/joda/time/chrono/DayOfWeekFromSundayDateTimeFieldTest.java @@ -39,29 +39,29 @@ public void get() { @Test public void getAsText() { - assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1526173261000L))); - assertTrue("Monday".equalsIgnoreCase(instance.getAsText(1526259661000L))); - assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(1526086861000L))); + assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1526173261000L, Locale.US))); + assertTrue("Monday".equalsIgnoreCase(instance.getAsText(1526259661000L, Locale.US))); + assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(1526086861000L, Locale.US))); } @Test public void getAsShortText() { - assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1526173261000L))); - assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(1526259661000L))); - assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(1526086861000L))); + assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1526173261000L, Locale.US))); + assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(1526259661000L, Locale.US))); + assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(1526086861000L, Locale.US))); } @Test public void getAsTextFieldValue() { - assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1, Locale.getDefault()))); - assertTrue("Monday".equalsIgnoreCase(instance.getAsText(2, Locale.getDefault()))); - assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(7, Locale.getDefault()))); + assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1, Locale.US))); + assertTrue("Monday".equalsIgnoreCase(instance.getAsText(2, Locale.US))); + assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(7, Locale.US))); } @Test public void getAsShortTextFieldValue() { - assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1, Locale.getDefault()))); - assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(2, Locale.getDefault()))); - assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(7, Locale.getDefault()))); + assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1, Locale.US))); + assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(2, Locale.US))); + assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(7, Locale.US))); } } diff --git a/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml new file mode 100644 index 0000000000..61b8e0fdea --- /dev/null +++ b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIgnoreScenario.yaml @@ -0,0 +1,36 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + +--- + - + description: "Correct Output And Ignore = false" + input: + left: 3 + right: 5 + output: 8 + - + description: "Correct Output And Ignore = true" + input: + left: 3 + right: 5 + output: 8 + - + description: "Incorrect Output And Ignore = true" + input: + left: 3 + right: 5 + output: "Intentionally Incorrect Output" diff --git a/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml new file mode 100644 index 0000000000..fc29bf79c2 --- /dev/null +++ b/common/src/test/resources/goldenfiles/expected/GoldenFileMetaTests.testIncorrectOutput.yaml @@ -0,0 +1,24 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +--- + - + description: "Incorrect Output And Ignore = false" + input: + left: 3 + right: 5 + output: "Intentionally Incorrect Output" diff --git a/common/src/test/resources/logback-test.xml b/common/src/test/resources/logback-test.xml index ab31612bd6..4a1f32d9cb 100644 --- a/common/src/test/resources/logback-test.xml +++ b/common/src/test/resources/logback-test.xml @@ -27,7 +27,6 @@ com.dremio.common.logging.obfuscation.TestBlockLevel.B.Second,error com.dremio.common.logging.obfuscation.TestBlockLevel.B.Third,debug com.dremio.common.logging.obfuscation.TestBlockLevel.C.Second,trace - diff --git a/connector/pom.xml b/connector/pom.xml index 3ac0607ecd..f5bee18938 100644 --- a/connector/pom.xml +++ b/connector/pom.xml @@ -23,7 +23,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-connector diff --git a/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java b/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java index 270b2337b3..c7a75628b4 100644 --- a/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java +++ b/connector/src/main/java/com/dremio/connector/metadata/extensions/SupportsIcebergMetadata.java @@ -55,14 +55,27 @@ public interface SupportsIcebergMetadata { /** * Provides statistics for number of position/equality delete records. * - * @return a DatasetStats instance with the count of delete records. + * @return a DatasetStats instance with the count of total delete records. */ DatasetStats getDeleteStats(); + /** + * Provides statistics for number of equality delete records. + * + * @return a DatasetStats instance with the count of equality delete records. + */ + DatasetStats getEqualityDeleteStats(); + /** * Provides statistics for number of delete files. * * @return a DatasetStats instance with the count of delete files. 
*/ DatasetStats getDeleteManifestStats(); + + /** + * modification time for the snapshot + */ + long getMtime(); + } diff --git a/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java b/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java index ac5f832a95..511492ac59 100644 --- a/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java +++ b/connector/src/test/java/com/dremio/connector/sample/SampleHandleImpl.java @@ -118,6 +118,7 @@ List getPartitionChunks() { return partitionChunks; } + @Override public List getPartitionColumns() { return datasetMetadata == null ? partitionColumns : diff --git a/contrib/hive2-exec-shade/pom.xml b/contrib/hive2-exec-shade/pom.xml index 95ba95c5e0..4d4f1b5833 100644 --- a/contrib/hive2-exec-shade/pom.xml +++ b/contrib/hive2-exec-shade/pom.xml @@ -22,7 +22,7 @@ com.dremio.contrib dremio-contrib-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive2-exec-shaded @@ -124,7 +124,6 @@ org.apache.avro avro - compile @@ -135,7 +134,6 @@ org.apache.hive hive-exec ${hive.version} - compile org.apache.hive @@ -201,6 +199,10 @@ org.apache.calcite.avatica * + + org.pentaho + pentaho-aggdesigner-algorithm + @@ -224,6 +226,11 @@ org.apache.hadoop hadoop-common + + + com.google.protobuf + protobuf-java + @@ -256,9 +263,9 @@ org.apache.commons:commons-lang3 commons-codec:commons-codec com.google.guava:guava + com.google.protobuf:protobuf-java - com.google.protobuf:protobuf-java com.fasterxml.jackson.*:* @@ -279,6 +286,9 @@ org/apache/thrift/** org/apache/calcite/** org/slf4j/** + + com/google/protobuf/** + META-INF/maven/com.google.protobuf/** diff --git a/contrib/hive3-exec-shade/pom.xml b/contrib/hive3-exec-shade/pom.xml index d565f2debb..b760a7b62e 100644 --- a/contrib/hive3-exec-shade/pom.xml +++ b/contrib/hive3-exec-shade/pom.xml @@ -22,7 +22,7 @@ com.dremio.contrib dremio-contrib-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive3-exec-shaded @@ -40,7 +40,6 @@ org.apache.hive hive-exec ${hive.version} - compile org.apache.logging.log4j @@ -108,6 +107,11 @@ + + + com.google.protobuf + protobuf-java + @@ -131,6 +135,7 @@ com.fasterxml.jackson.core:jackson-annotations com.fasterxml.jackson.core:jackson-core com.fasterxml.jackson.core:jackson-databind + com.google.protobuf:protobuf-java false @@ -147,6 +152,9 @@ org/apache/hadoop/hive/llap/** org/apache/thrift/** + + com/google/protobuf/** + META-INF/maven/com.google.protobuf/** diff --git a/contrib/maprfs-shade/pom.xml b/contrib/maprfs-shade/pom.xml index ba7c2e1b27..59c4a7b2c4 100644 --- a/contrib/maprfs-shade/pom.xml +++ b/contrib/maprfs-shade/pom.xml @@ -22,7 +22,7 @@ com.dremio.contrib dremio-contrib-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-maprfs-shaded diff --git a/contrib/pom.xml b/contrib/pom.xml index d8f60d550e..e92da6a237 100644 --- a/contrib/pom.xml +++ b/contrib/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.contrib diff --git a/dac/backend/pom.xml b/dac/backend/pom.xml index 3e9a37e1af..9fe3bde8b3 100644 --- a/dac/backend/pom.xml +++ b/dac/backend/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-dac-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-dac-backend @@ -73,6 +73,12 @@ proto ${project.version} + + com.dremio.sabot + dremio-sabot-serializer + ${project.version} + test + com.dremio.services 
dremio-services-datastore @@ -182,18 +188,11 @@ com.dremio.services dremio-services-execselector - + org.apache.curator curator-test - ${curator-test.version} test - - - log4j - log4j - - com.dremio.services @@ -261,16 +260,6 @@ org.apache.zookeeper zookeeper - - - org.slf4j - slf4j-log4j12 - - - log4j - log4j - - org.apache.commons @@ -280,7 +269,6 @@ com.dremio.services dremio-services-configuration ${project.version} - compile com.dremio.services @@ -398,7 +386,6 @@ com.dremio.services dremio-services-jobtelemetry-server ${project.version} - compile com.dremio.services @@ -419,6 +406,11 @@ com.dremio.services dremio-services-nessie-grpc-client + + com.dremio.services + dremio-services-nessie-proxy + ${project.version} + com.dremio.services dremio-services-orphanagecleaner @@ -433,13 +425,11 @@ com.dremio.services dremio-services-userpreferences ${project.version} - compile com.dremio.services dremio-services-autocomplete ${project.version} - compile com.dremio.services @@ -456,6 +446,34 @@ dremio-ce-jdbc-plugin ${project.version} + + io.findify + s3mock_2.12 + test + + + org.projectnessie.nessie + nessie-compatibility-common + test + + + org.projectnessie.nessie + nessie-versioned-tests + test + + + org.projectnessie.nessie + nessie-jaxrs-tests + test + + + com.dremio.services + dremio-services-nessie-proxy + tests + test-jar + ${project.version} + test + @@ -693,7 +711,27 @@ + + maven-failsafe-plugin + + + + integration-test + verify + + + + + + + ${project.basedir}/src/test/resources + + + ${project.basedir}/src/test/resources-nessie + true + + @@ -711,12 +749,65 @@ com/dremio/dac/service/admin/TestKVStoreReportService.java + + com/dremio/dac/explore/TestAccelerationSettingsFromAPI.java + com/dremio/dac/explore/TestDatasetResource.java + com/dremio/dac/resource/TestNessieSourceResource.java + com/dremio/dac/resource/TestNessieSourceApi.java + com/dremio/dac/service/TestCatalogServiceHelperForVersioned.java + com/dremio/dac/service/source/TestSourceService.java + com/dremio/exec/catalog/dataplane/ITDataplanePlugin.java + com/dremio/exec/catalog/dataplane/ITDatasetVersionContextTestCases.java + com/dremio/exec/planner/sql/handlers/TestShowTagsHandler.java + com/dremio/exec/planner/sql/handlers/TestDropTagHandler.java + com/dremio/exec/planner/sql/handlers/TestShowLogsHandler.java + com/dremio/exec/planner/sql/handlers/TestUseVersionHandler.java + com/dremio/exec/planner/sql/handlers/TestCreateBranchHandler.java + com/dremio/exec/planner/sql/handlers/TestAssignBranchHandler.java + com/dremio/exec/planner/sql/handlers/TestCreateTagHandler.java + com/dremio/exec/planner/sql/handlers/TestMergeBranchHandler.java + com/dremio/exec/planner/sql/handlers/TestShowBranchesHandler.java + com/dremio/exec/planner/sql/handlers/TestShowTablesHandler.java + com/dremio/exec/planner/sql/handlers/TestShowViewsHandler.java + com/dremio/exec/planner/sql/handlers/TestDropBranchHandler.java + com/dremio/exec/planner/sql/handlers/TestAssignTagHandler.java + com/dremio/exec/planner/sql/handlers/TestCreateFolderHandler.java + com/dremio/dac/service/datasets/ITTestDatasetMutatorForVersionedViews.java + + + default-testCompile + test-compile + + + com/dremio/exec/catalog/dataplane/*.java + + + + testCompile + + + + + notmapr + + + !mapr + + + + + com.dremio.plugins + dremio-dataplane-plugin + ${project.version} + test + + + - diff --git a/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java b/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java index d7a3fea7b9..70bb6d4573 100644 --- 
a/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java +++ b/dac/backend/src/main/java/com/dremio/dac/admin/LocalAdmin.java @@ -113,9 +113,9 @@ public void backup(String path, String binaryStr, String includeProfilesStr) thr final FileSystem fs = HadoopFileSystem.get(backupDir, new Configuration()); BackupRestoreUtil.checkOrCreateDirectory(fs, backupDir); BackupRestoreUtil.BackupOptions options = new BackupRestoreUtil.BackupOptions(path, - Boolean.parseBoolean(binaryStr), Boolean.parseBoolean(includeProfilesStr)); + Boolean.parseBoolean(binaryStr), Boolean.parseBoolean(includeProfilesStr), ""); BackupRestoreUtil.BackupStats backupStats = BackupRestoreUtil.createBackup(fs, options, - getKVStoreProvider().unwrap(LocalKVStoreProvider.class), LocalAdmin.getInstance().getHomeFileTool().getConf(), + getKVStoreProvider().unwrap(LocalKVStoreProvider.class), LocalAdmin.getInstance().getHomeFileTool().getConfForBackup(), null); System.out.println(format("Backup created at %s, dremio tables %d, uploaded files %d", backupStats.getBackupPath(), backupStats.getTables(), backupStats.getFiles())); diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java index d26150191d..77a4f3aaba 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogEntity.java @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo; /** - * Represents a entity in the Dremio catalog + * Represents an entity in the Dremio catalog */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXTERNAL_PROPERTY, property = "entityType", visible = true) diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java index 4febf527cd..edcf46ff03 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogItem.java @@ -100,6 +100,10 @@ public static CatalogItem fromSourceConfig(SourceConfig sourceConfig) { return fromSourceConfig(sourceConfig, null); } + public static CatalogItem fromSource(Source source) { + return fromSourceConfig(source.getSourceConfig(), null); + } + public static CatalogItem fromHomeConfig(HomeConfig homeConfig) { return new Builder() .setId(homeConfig.getId().getId()) @@ -126,7 +130,7 @@ private static CatalogItem fromSpaceConfig(SpaceConfig spaceConfig, Collaboratio private static CatalogItem fromFunctionConfig(FunctionConfig functionConfig, CollaborationTag tags) { return new Builder() .setId(functionConfig.getId().getId()) - .setPath(Lists.newArrayList(functionConfig.getName())) + .setPath(Lists.newArrayList(functionConfig.getFullPathList())) .setTag(String.valueOf(functionConfig.getTag())) .setType(CatalogItemType.CONTAINER) .setContainerType(ContainerSubType.FUNCTION) diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java b/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java index 2608fe2047..3215281e17 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/CatalogResource.java @@ -141,54 +141,27 @@ public void refreshCatalogItem(@PathParam("id") String id) { } } - @POST - @Path("/{id}/metadata/refresh") - public MetadataRefreshResponse refreshCatalogItemMetadata(@PathParam("id") String id, - 
@QueryParam("deleteWhenMissing") Boolean delete, - @QueryParam("forceUpdate") Boolean force, - @QueryParam("autoPromotion") Boolean promotion) { - try { - boolean changed = false; - boolean deleted = false; - switch(catalogServiceHelper.refreshCatalogItemMetadata(id, delete, force, promotion)) { - case CHANGED: - changed = true; - break; - case UNCHANGED: - break; - case DELETED: - changed = true; - deleted = true; - break; - default: - throw new IllegalStateException(); - } - - return new MetadataRefreshResponse(changed, deleted); - } catch (IllegalArgumentException e) { - throw new NotFoundException(e.getMessage()); - } catch (UnsupportedOperationException e) { - throw new BadRequestException(e.getMessage()); - } - } - @GET @Path("/by-path/{segment:.*}") public CatalogEntity getCatalogItemByPath( - @PathParam("segment") List segments, - @QueryParam("include") final List include, - @QueryParam("exclude") final List exclude - ) throws NamespaceException, BadRequestException { + @PathParam("segment") List segments, + @QueryParam("include") final List include, + @QueryParam("exclude") final List exclude, + @QueryParam("versionType") final String versionType, + @QueryParam("versionValue") final String versionValue) + throws NamespaceException, BadRequestException { List pathList = new ArrayList<>(); for (PathSegment segment : segments) { - // with query parameters we may get a empty final segment + // with query parameters we may get an empty final segment if (!segment.getPath().isEmpty()) { pathList.add(segment.getPath()); } } - Optional entity = catalogServiceHelper.getCatalogEntityByPath(pathList, include, exclude); + final Optional entity = + catalogServiceHelper.getCatalogEntityByPath( + pathList, include, exclude, versionType, versionValue); if (!entity.isPresent()) { throw new NotFoundException(String.format("Could not find entity with path [%s]", pathList)); @@ -200,9 +173,7 @@ public CatalogEntity getCatalogItemByPath( @GET @Path("/search") public ResponseList search(@QueryParam("query") String query) throws NamespaceException { - ResponseList catalogItems = new ResponseList<>(catalogServiceHelper.search(query)); - - return catalogItems; + return new ResponseList<>(catalogServiceHelper.search(query)); } /** diff --git a/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java b/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java index 2715a8a503..d6993d2a62 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/CollaborationResource.java @@ -23,7 +23,6 @@ import javax.inject.Inject; import javax.ws.rs.Consumes; import javax.ws.rs.GET; -import javax.ws.rs.NotFoundException; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -57,12 +56,7 @@ public CollaborationResource(CollaborationHelper collaborationHelper) { @Path("/tag") public Tags getTagsForEntity(@PathParam("id") String id) throws NamespaceException { Optional tags = collaborationHelper.getTags(id); - - if (!tags.isPresent()) { - throw new NotFoundException(String.format("Entity [%s] does not have any tags set.", id)); - } - - return tags.get(); + return tags.orElseGet(() -> new Tags(null, null)); } @POST @@ -77,12 +71,7 @@ public Tags setTagsForEntity(@PathParam("id") String id, Tags tags) throws Names @Path("/wiki") public Wiki getWikiForEntity(@PathParam("id") String id) throws NamespaceException { Optional wiki = collaborationHelper.getWiki(id); - - if (!wiki.isPresent()) { - 
throw new NotFoundException(String.format("Entity [%s] does not have a wiki set.", id)); - } - - return wiki.get(); + return wiki.orElseGet(() -> new Wiki("", null)); } @POST diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java b/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java index c1d91c1ebc..27ec11ae3a 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/Dataset.java @@ -100,6 +100,7 @@ public Dataset( this(id, type, path, null, createdAt, tag, accelerationRefreshPolicy, sql, sqlContext, format, approximateStatisticsAllowed); } + @Override public String getId() { return id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java b/dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java similarity index 88% rename from dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java rename to dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java index c90d26889b..782fb894e3 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/SourceResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/DepreciatedSourceResource.java @@ -16,7 +16,8 @@ package com.dremio.dac.api; import static com.dremio.dac.server.UIOptions.ALLOW_HIVE_SOURCE; -import static com.dremio.exec.store.jdbc.JdbcPluginOptions.JDBC_DB2_ENABLED; +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.exec.store.jdbc.JdbcPluginOptions.JDBC_OPENSEARCH_ENABLED; import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import java.util.List; @@ -35,9 +36,6 @@ import javax.ws.rs.Produces; import javax.ws.rs.core.Response; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.dac.annotations.APIResource; import com.dremio.dac.annotations.Secured; @@ -56,16 +54,15 @@ import com.google.common.annotations.VisibleForTesting; /** - * Resource for information about sources. + * Depreciated resource for information about sources. */ @APIResource @Secured @Path("/source") @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON) -public class SourceResource { - private static final Logger logger = LoggerFactory.getLogger(SourceResource.class); - +@Deprecated +public class DepreciatedSourceResource { /** * 1.5 changed _type to entityType, this class provides backwards compatibility */ @@ -106,7 +103,7 @@ public String getDeprecatedEntityType() { protected final SabotContext sabotContext; @Inject - public SourceResource(SourceService sourceService, SabotContext sabotContext) { + public DepreciatedSourceResource(SourceService sourceService, SabotContext sabotContext) { this.sourceService = sourceService; this.sabotContext = sabotContext; } @@ -188,27 +185,39 @@ public ResponseList getSourceTypes() { final ConnectionReader connectionReader = sabotContext.getConnectionReaderProvider().get(); final ResponseList types = new ResponseList<>(); - final boolean showHive = sabotContext.getOptionManager().getOption(ALLOW_HIVE_SOURCE); - final boolean showDb2 = sabotContext.getOptionManager().getOption(JDBC_DB2_ENABLED); - for(Class> input : connectionReader.getAllConnectionConfs().values()) { // We can't use isInternal as it's not a static method, instead we only show listable sources. 
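In effect, a connection type is now listed only when its gating support option is enabled. The mapping implemented by isSourceTypeVisible(...) just below is:

    HIVE       -> ALLOW_HIVE_SOURCE
    OPENSEARCH -> JDBC_OPENSEARCH_ENABLED
    NESSIE     -> NESSIE_PLUGIN_ENABLED
    (any other source type is always visible)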
if (isListable(input)) { String sourceType = input.getAnnotation(SourceType.class).value(); - if ((!showHive && "HIVE".equals(sourceType)) || - (!showDb2 && "DB2".equals(sourceType))) { - continue; - } - - if (sabotContext.getSourceVerifierProvider().get().isSourceSupported(sourceType, sabotContext.getSystemOptionManager())) { - types.add(SourceTypeTemplate.fromSourceClass(input, false)); - } + if (isSourceTypeVisible(sourceType) && + sabotContext + .getSourceVerifierProvider() + .get() + .isSourceSupported(sourceType, sabotContext.getSystemOptionManager())) { + types.add(SourceTypeTemplate.fromSourceClass(input, false)); + } } } return types; } + private boolean isSourceTypeVisible(String sourceType) { + if ("HIVE".equals(sourceType)) { + return sabotContext.getOptionManager().getOption(ALLOW_HIVE_SOURCE); + } + + if ("OPENSEARCH".equals(sourceType)) { + return sabotContext.getOptionManager().getOption(JDBC_OPENSEARCH_ENABLED); + } + + if ("NESSIE".equals(sourceType)) { + return sabotContext.getOptionManager().getOption(NESSIE_PLUGIN_ENABLED); + } + + return true; + } + // Returns the specified source type with all its properties expanded @GET @RolesAllowed({"admin", "user"}) diff --git a/dac/backend/src/main/java/com/dremio/dac/api/File.java b/dac/backend/src/main/java/com/dremio/dac/api/File.java index e3cd8246f5..2849fcfecb 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/File.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/File.java @@ -35,6 +35,7 @@ public File(@JsonProperty("id") String id, @JsonProperty("path") List pa this.path = path; } + @Override public String getId() { return id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Folder.java b/dac/backend/src/main/java/com/dremio/dac/api/Folder.java index 77ba94ca8e..7a109707cc 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/Folder.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/Folder.java @@ -49,6 +49,7 @@ public String getName() { return Iterables.getLast(getPath()); } + @Override public String getId() { return id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Home.java b/dac/backend/src/main/java/com/dremio/dac/api/Home.java index 5ca148b3f4..047c77b2f1 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/Home.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/Home.java @@ -42,6 +42,7 @@ public Home( this.children = children; } + @Override public String getId() { return id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java b/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java index 16fd9a7550..f3503a50f9 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/ScriptResource.java @@ -19,7 +19,6 @@ import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import java.util.List; -import java.util.concurrent.Callable; import java.util.stream.Collectors; import javax.annotation.security.RolesAllowed; @@ -41,13 +40,12 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; -import com.dremio.context.RequestContext; -import com.dremio.context.UserContext; import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; import com.dremio.dac.model.scripts.PaginatedResponse; import com.dremio.dac.model.scripts.ScriptData; import com.dremio.service.script.DuplicateScriptNameException; +import com.dremio.service.script.MaxScriptsLimitReachedException; import com.dremio.service.script.ScriptNotAccessible; 
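A note on the ScriptResource hunks that follow: each handler previously funneled its ScriptService call through a runWithUserContext(...) wrapper, which looked up the calling user and populated a RequestContext. That wrapper and the now-unused SecurityContext field are removed, and the service is invoked directly; presumably the user context is established earlier in the request pipeline (RequestContext providers are wired into services in the DACDaemonModule changes later in this patch), though that wiring is outside this file.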
import com.dremio.service.script.ScriptNotFoundException; import com.dremio.service.script.ScriptService; @@ -66,7 +64,6 @@ public class ScriptResource { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ScriptResource.class); private final ScriptService scriptService; - private final SecurityContext securityContext; private final UserService userService; @Inject @@ -74,7 +71,6 @@ public ScriptResource(ScriptService scriptService, @Context SecurityContext securityContext, UserService userService) { this.scriptService = scriptService; - this.securityContext = securityContext; this.userService = userService; } @@ -91,16 +87,12 @@ public PaginatedResponse getScripts(@QueryParam("offset") Integer of final String finalOrderBy = (orderBy == null) ? "" : orderBy; try { - - Long totalScripts = runWithUserContext(() -> scriptService.getCountOfMatchingScripts( - finalSearch, "", createdBy)); + Long totalScripts = scriptService.getCountOfMatchingScripts(finalSearch, "", createdBy); List scripts = - runWithUserContext(() -> scriptService.getScripts(finalOffset, - finalMaxResults, finalSearch, - finalOrderBy, "", createdBy) + scriptService.getScripts(finalOffset, finalMaxResults, finalSearch, finalOrderBy, "", createdBy) .parallelStream() .map(this::fromScript) - .collect(Collectors.toList())); + .collect(Collectors.toList()); return new PaginatedResponse<>(totalScripts, scripts); } catch (Exception exception) { logger.error("GET on scripts failed.", exception); @@ -111,9 +103,8 @@ public PaginatedResponse getScripts(@QueryParam("offset") Integer of @POST public ScriptData postScripts(ScriptData scriptData) { try { - return fromScript(runWithUserContext(() -> scriptService.createScript(ScriptData.toScriptRequest( - scriptData)))); - } catch (DuplicateScriptNameException exception) { + return fromScript(scriptService.createScript(ScriptData.toScriptRequest(scriptData))); + } catch (DuplicateScriptNameException | MaxScriptsLimitReachedException exception) { logger.error(exception.getMessage(), exception); throw new BadRequestException(exception.getMessage()); } catch (Exception exception) { @@ -126,7 +117,7 @@ public ScriptData postScripts(ScriptData scriptData) { @Path("/{id}") public ScriptData getScript(@PathParam("id") String scriptId) { try { - return fromScript(runWithUserContext(() -> scriptService.getScriptById(scriptId))); + return fromScript(scriptService.getScriptById(scriptId)); } catch (ScriptNotFoundException exception) { logger.error(exception.getMessage(), exception); throw new NotFoundException(exception.getMessage()); @@ -145,9 +136,7 @@ public ScriptData updateScript(@PathParam("id") String scriptId, ScriptData scri // check if script exists with given scriptId try { // update the script - return fromScript(runWithUserContext(() -> scriptService.updateScript(scriptId, - ScriptData.toScriptRequest( - scriptData)))); + return fromScript(scriptService.updateScript(scriptId, ScriptData.toScriptRequest(scriptData))); } catch (ScriptNotFoundException exception) { logger.error(exception.getMessage(), exception); throw new NotFoundException(exception.getMessage()); @@ -167,10 +156,7 @@ public ScriptData updateScript(@PathParam("id") String scriptId, ScriptData scri @Path(("/{id}")) public Response deleteScript(@PathParam("id") String scriptId) { try { - runWithUserContext(() -> { - scriptService.deleteScriptById(scriptId); - return null; - }); + scriptService.deleteScriptById(scriptId); return Response.noContent().build(); } catch (ScriptNotFoundException 
exception) { logger.error(exception.getMessage(), exception); @@ -199,25 +185,4 @@ private ScriptData fromScript(ScriptProto.Script script) { getUserInfoById(script.getCreatedBy()), getUserInfoById(script.getModifiedBy())); } - - private String getCurrentUserId() { - try { - return userService.getUser(securityContext.getUserPrincipal().getName()) - .getUID() - .getId(); - } catch (UserNotFoundException exception) { - // ideally this case should never be reached. - logger.error("Couldn't find current logged in user : {}. Error {}", - securityContext.getUserPrincipal().getName(), - exception.getMessage()); - throw new InternalServerErrorException(exception.getMessage()); - } - } - - private V runWithUserContext(Callable callable) throws Exception { - return RequestContext.current() - .with(UserContext.CTX_KEY, new UserContext(getCurrentUserId())) - .call(callable); - } - } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Source.java b/dac/backend/src/main/java/com/dremio/dac/api/Source.java index 0cc6af8c6a..ccd4374cd3 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/Source.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/Source.java @@ -130,6 +130,7 @@ void setReader(ConnectionReader reader) { this.reader = reader; } + @Override public String getId() { return this.id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/Space.java b/dac/backend/src/main/java/com/dremio/dac/api/Space.java index 59989a3b78..e9c2302990 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/Space.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/Space.java @@ -54,6 +54,7 @@ public String getName() { return name; } + @Override public String getId() { return id; } diff --git a/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java b/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java index ea4c1260cb..bbb70ae609 100644 --- a/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/api/UserPreferenceResource.java @@ -31,6 +31,7 @@ import javax.ws.rs.DefaultValue; import javax.ws.rs.ForbiddenException; import javax.ws.rs.GET; +import javax.ws.rs.NotAuthorizedException; import javax.ws.rs.NotFoundException; import javax.ws.rs.PUT; import javax.ws.rs.Path; @@ -52,6 +53,7 @@ import com.dremio.service.userpreferences.EntityThresholdReachedException; import com.dremio.service.userpreferences.UserPreferenceService; import com.dremio.service.userpreferences.proto.UserPreferenceProto; +import com.dremio.service.users.UserNotFoundException; import com.google.protobuf.util.Timestamps; @APIResource @@ -60,8 +62,6 @@ @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON) public class UserPreferenceResource { - private static final org.slf4j.Logger logger = - org.slf4j.LoggerFactory.getLogger(UserPreferenceResource.class); private final UserPreferenceService userPreferenceService; private final NamespaceService namespaceService; @@ -79,38 +79,41 @@ public PreferenceData getPreferenceByType( @PathParam("preferenceType") String preferenceType, @QueryParam("showCatalogInfo") @DefaultValue("false") boolean showCatalogInfo) throws NamespaceNotFoundException { + try { + UserPreferenceProto.Preference preference = userPreferenceService.getPreferenceByType(validatePreferenceType(preferenceType)); + + if (showCatalogInfo) { + Map entityIdToEntityMap = preference.getEntitiesList() + .stream() + .collect(Collectors.toMap(UserPreferenceProto.Entity::getEntityId, entity -> entity)); + + List 
entities = + namespaceService.getEntitiesByIds(preference.getEntitiesList() + .stream() + .map(UserPreferenceProto.Entity::getEntityId) + .collect( + Collectors.toList())); + return new PreferenceData(preference.getType(), + entities.parallelStream() + .map(container -> getEntityFromNameSpaceContainer( + container, + entityIdToEntityMap)) + .collect( + Collectors.toList())); + } - UserPreferenceProto.Preference preference = - userPreferenceService.getPreferenceByType(validatePreferenceType(preferenceType)); - if (showCatalogInfo) { - - Map entityIdToEntityMap = preference.getEntitiesList() - .stream() - .collect(Collectors.toMap(UserPreferenceProto.Entity::getEntityId, entity -> entity)); - - List entities = - namespaceService.getEntitiesByIds(preference.getEntitiesList() - .stream() - .map(UserPreferenceProto.Entity::getEntityId) - .collect( - Collectors.toList())); return new PreferenceData(preference.getType(), - entities.parallelStream() - .map(container -> getEntityFromNameSpaceContainer( - container, - entityIdToEntityMap)) - .collect( - Collectors.toList())); - } - return new PreferenceData(preference.getType(), - preference.getEntitiesList().stream().map( - entity -> new Entity(entity.getEntityId(), - null, - null, - null, - Timestamps.toMillis(entity.getTimestamp())) + preference.getEntitiesList().stream().map( + entity -> new Entity(entity.getEntityId(), + null, + null, + null, + Timestamps.toMillis(entity.getTimestamp())) ).collect( Collectors.toList())); + } catch (UserNotFoundException e) { + throw new NotAuthorizedException(e.getMessage()); + } } @PUT @@ -134,6 +137,8 @@ public PreferenceData addEntityToPreference(@PathParam("preferenceType") String throw new BadRequestException(exception.getMessage()); } catch (EntityThresholdReachedException | IllegalAccessException exception) { throw new ForbiddenException(exception.getMessage()); + } catch (UserNotFoundException e) { + throw new NotAuthorizedException(e.getMessage()); } } @@ -156,6 +161,8 @@ public PreferenceData removeEntityFromPreference(@PathParam("preferenceType") St Collectors.toList())); } catch (EntityNotFoundInPreferenceException exception) { throw new NotFoundException(exception.getMessage()); + } catch (UserNotFoundException e) { + throw new NotAuthorizedException(e.getMessage()); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java b/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java index 43eadd56a1..43c777e8a2 100644 --- a/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java +++ b/dac/backend/src/main/java/com/dremio/dac/daemon/ConfigurationModuleImpl.java @@ -19,6 +19,7 @@ * Helper class to manage AWS configuration server */ public class ConfigurationModuleImpl implements ConfigurationModule { + @Override public void run() throws Exception { throw new UnsupportedOperationException("Configuration mode is not supported in this Dremio edition"); } diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java b/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java index 08251d6a27..3b93dfc864 100644 --- a/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java +++ b/dac/backend/src/main/java/com/dremio/dac/daemon/DACDaemonModule.java @@ -56,6 +56,7 @@ import com.dremio.dac.server.DremioServer; import com.dremio.dac.server.DremioServlet; import com.dremio.dac.server.LivenessService; +import com.dremio.dac.server.NessieProxyRestServer; import com.dremio.dac.server.RestServerV2; 
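The added imports preview the three functional changes to DACDaemonModule below: the CredentialsService binding moves ahead of the conduit SSL setup (per the TODO, so secret URIs can be resolved while that SSL config is built), a NessieProxyRestServer is registered alongside RestServerV2 and the APIServer, and a FlightRequestContextDecorator is bound and handed to DremioFlightService.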
import com.dremio.dac.server.WebServer; import com.dremio.dac.service.admin.KVStoreReportService; @@ -227,6 +228,7 @@ import com.dremio.service.flight.DremioFlightAuthProvider; import com.dremio.service.flight.DremioFlightAuthProviderImpl; import com.dremio.service.flight.DremioFlightService; +import com.dremio.service.flight.FlightRequestContextDecorator; import com.dremio.service.grpc.GrpcChannelBuilderFactory; import com.dremio.service.grpc.GrpcServerBuilderFactory; import com.dremio.service.grpc.MultiTenantGrpcServerBuilderFactory; @@ -478,6 +480,12 @@ public NodeEndpoint get() { bootstrap.getExecutor() )); + // Bind Credentials Service, this will select between Simple and Executor Credentials + final com.dremio.services.credentials.CredentialsService credentialsService = + com.dremio.services.credentials.CredentialsService.newInstance(config, scanResult); + registry.bind(com.dremio.services.credentials.CredentialsService.class, + credentialsService); + DremioCredentialProviderFactory.configure( registry.provider(com.dremio.services.credentials.CredentialsService.class)); @@ -491,6 +499,7 @@ public NodeEndpoint get() { "conduit" ); + // TODO DX-66220: Make AzureVaultCredentialsProvider available before resolving secret URIs in SSL config conduitSslEngineFactory = SSLEngineFactory.create( conduitSslConfigurator.getSSLConfig(false, fabricAddress)); } catch (Exception e) { @@ -544,11 +553,7 @@ public NodeEndpoint get() { registry.bindProvider(NessieApiV1.class, () -> createNessieClientProvider(config, registry)); - // Bind base credentials on both coordinator and executor - com.dremio.services.credentials.CredentialsService credentialsService = - com.dremio.services.credentials.CredentialsService.newInstance(config, scanResult); - registry.bind(com.dremio.services.credentials.CredentialsService.class, credentialsService); - + // Bind gRPC service for remote lookups if (isCoordinator) { conduitServiceRegistry.registerService(new CredentialsServiceImpl( registry.provider(com.dremio.services.credentials.CredentialsService.class))); @@ -823,7 +828,7 @@ public void close() throws Exception { registry.bindSelf(new SystemTablePluginConfigProvider()); - registry.bind(SysFlightPluginConfigProvider.class, new SysFlightPluginConfigProvider(registry.provider(NodeEndpoint.class))); + registry.bind(SysFlightPluginConfigProvider.class, new SysFlightPluginConfigProvider()); final MetadataRefreshInfoBroadcaster metadataRefreshInfoBroadcaster = new MetadataRefreshInfoBroadcaster( @@ -852,8 +857,6 @@ public void close() throws Exception { }); } - - registry.bind(CatalogService.class, new CatalogServiceImpl( registry.provider(SabotContext.class), registry.provider(SchedulerService.class), @@ -1023,7 +1026,8 @@ public Collection getNodes() { registry.provider(MaestroForwarder.class), bootstrapRegistry.lookup(Tracer.class), registry.provider(RuleBasedEngineSelector.class), - jobResultsAllocator); + jobResultsAllocator, + registry.provider(RequestContext.class)); if (config.getBoolean(DremioConfig.JOBS_ENABLED_BOOL)) { registerJobsServices(conduitServiceRegistry, registry, bootstrap, jobResultsAllocator, optionManagerProvider); @@ -1115,7 +1119,8 @@ public Collection getNodes() { bootstrap.getExecutor(), registry.provider(ForemenWorkManager.class), isDistributedMaster, - bootstrap.getAllocator()); + bootstrap.getAllocator(), + registry.provider(RequestContext.class)); registry.bind(ReflectionService.class, reflectionService); registry.bind(ReflectionAdministrationService.Factory.class, (context) -> 
reflectionService); @@ -1123,7 +1128,7 @@ public Collection getNodes() { registry.replace(AccelerationManager.class, new AccelerationManagerImpl( registry.provider(ReflectionService.class), registry.provider(ReflectionAdministrationService.Factory.class), - namespaceServiceProvider)); + registry.provider(CatalogService.class))); final StatisticsServiceImpl statisticsService = new StatisticsServiceImpl( registry.provider(LegacyKVStoreProvider.class), @@ -1140,7 +1145,6 @@ public Collection getNodes() { registry.bind(ReflectionStatusService.class, new ReflectionStatusServiceImpl( nodeEndpointsProvider, - namespaceServiceProvider, registry.provider(CatalogService.class), registry.provider(LegacyKVStoreProvider.class), reflectionService.getCacheViewerProvider() @@ -1277,6 +1281,7 @@ public Collection getNodes() { )); registry.bind(RestServerV2.class, new RestServerV2(bootstrap.getClasspathScan())); + registry.bind(NessieProxyRestServer.class, new NessieProxyRestServer()); registry.bind(APIServer.class, new APIServer(bootstrap.getClasspathScan())); registry.bind(DremioServlet.class, new DremioServlet(dacConfig.getConfig(), @@ -1322,6 +1327,7 @@ public Collection getNodes() { registry.provider(UserService.class), registry.provider(TokenManager.class) )); + registry.bind(FlightRequestContextDecorator.class, FlightRequestContextDecorator.DEFAULT); registry.bindSelf(new DremioFlightService( registry.provider(DremioConfig.class), @@ -1332,6 +1338,7 @@ public Collection getNodes() { registry.provider(OptionManager.class), registry.provider(UserSessionService.class), registry.provider(DremioFlightAuthProvider.class), + registry.provider(FlightRequestContextDecorator.class), registry.provider(com.dremio.services.credentials.CredentialsService.class) )); } else { @@ -1395,6 +1402,7 @@ public Collection getNodes() { registry.provider(com.dremio.services.credentials.CredentialsService.class), registry.provider(RestServerV2.class), registry.provider(APIServer.class), + registry.provider(NessieProxyRestServer.class), registry.provider(DremioServer.class), new DremioBinder(registry), "ui", diff --git a/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java b/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java index dc061db2f0..afd41e43b2 100644 --- a/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java +++ b/dac/backend/src/main/java/com/dremio/dac/daemon/SampleDataPopulatorService.java @@ -47,8 +47,8 @@ * Starts the SampleDataPopulator */ public class SampleDataPopulatorService implements Service { - private final Provider contextProvider; - private final Provider userService; + private final Provider sabotContextProvider; + private final Provider userServiceProvider; private final Provider kvStore; private final Provider init; private final Provider jobsService; @@ -63,9 +63,9 @@ public class SampleDataPopulatorService implements Service { private final boolean addDefaultUser; public SampleDataPopulatorService( - Provider contextProvider, + Provider sabotContextProvider, Provider kvStore, - Provider userService, + Provider userServiceProvider, Provider init, Provider jobsService, Provider catalogService, @@ -74,9 +74,9 @@ public SampleDataPopulatorService( Provider optionManager, boolean prepopulate, boolean addDefaultUser) { - this.contextProvider = contextProvider; + this.sabotContextProvider = sabotContextProvider; this.kvStore = kvStore; - this.userService = userService; + this.userServiceProvider = userServiceProvider; 
this.init = init; this.jobsService = jobsService; this.catalogService = catalogService; @@ -94,24 +94,25 @@ public Provider getOptionManager() { @Override public void start() throws Exception { final LegacyKVStoreProvider kv = kvStore.get(); - final NamespaceService ns = contextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME); + final NamespaceService systemUserNamespaceService = sabotContextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME); addDefaultUser(); if (prepopulate) { - final ReflectionServiceHelper reflectionServiceHelper = new SampleReflectionServiceHelper(ns, kvStore); + final ReflectionServiceHelper reflectionServiceHelper = new SampleReflectionServiceHelper(systemUserNamespaceService, kvStore, optionManager); - final DatasetVersionMutator data = new DatasetVersionMutator(init.get(), kv, ns, jobsService.get(), + final DatasetVersionMutator data = new DatasetVersionMutator(init.get(), kv, systemUserNamespaceService, jobsService.get(), catalogService.get(), optionManager.get()); - SecurityContext context = new DACSecurityContext(new UserName(SystemUser.SYSTEM_USERNAME), SystemUser.SYSTEM_USER, null); - final SourceService ss = new SourceService(contextProvider.get(), ns, data, catalogService.get(), reflectionServiceHelper, null, connectionReader.get(), context); + SecurityContext securityContext = new DACSecurityContext(new UserName(SystemUser.SYSTEM_USERNAME), SystemUser.SYSTEM_USER, null); + final SourceService ss = new SourceService(sabotContextProvider.get(), systemUserNamespaceService, data, catalogService.get(), reflectionServiceHelper, null, connectionReader.get(), securityContext); + final UserService userService = sabotContextProvider.get().getUserService(); sample = new SampleDataPopulator( - contextProvider.get(), + sabotContextProvider.get(), ss, data, - userService.get(), - contextProvider.get().getNamespaceService(SampleDataPopulator.DEFAULT_USER_NAME), + userServiceProvider.get(), + sabotContextProvider.get().getNamespaceService(SampleDataPopulator.DEFAULT_USER_NAME), SampleDataPopulator.DEFAULT_USER_NAME, - new CollaborationHelper(kv, contextProvider.get(), ns, context, searchService.get()) + new CollaborationHelper(kv, systemUserNamespaceService, securityContext, searchService.get(), userService) ); sample.populateInitialData(); @@ -119,10 +120,10 @@ public void start() throws Exception { } public void addDefaultUser() throws Exception { - final NamespaceService ns = contextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME); + final NamespaceService ns = sabotContextProvider.get().getNamespaceService(SystemUser.SYSTEM_USERNAME); if (addDefaultUser) { - addDefaultDremioUser(userService.get(), ns); + addDefaultDremioUser(userServiceProvider.get(), ns); } } @@ -138,15 +139,17 @@ class SampleReflectionServiceHelper extends ReflectionServiceHelper { private final NamespaceService namespace; private final Provider storeProvider; - public SampleReflectionServiceHelper(NamespaceService namespace, Provider storeProvider) { - super(null, null); + public SampleReflectionServiceHelper(NamespaceService namespace, + Provider storeProvider, + Provider optionManagerProvider) { + super(null, null, optionManagerProvider.get()); this.namespace = namespace; this.storeProvider = storeProvider; } @Override public ReflectionSettings getReflectionSettings() { - return new ReflectionSettingsImpl(() -> namespace, storeProvider); + return new ReflectionSettingsImpl(() -> namespace, catalogService, storeProvider); } } } diff --git 
a/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java b/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java new file mode 100644 index 0000000000..0f4ec47473 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/explore/AsyncMetadataJobStatusListener.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.explore; + +import java.util.ArrayList; +import java.util.List; + +import com.dremio.service.jobs.JobStatusListener; +import com.dremio.service.jobs.metadata.proto.QueryMetadata; + +class AsyncMetadataJobStatusListener implements JobStatusListener { + private List listeners; + + interface MetaDataListener { + void metadataCollected(QueryMetadata metadata); + } + + AsyncMetadataJobStatusListener(MetaDataListener listener) { + listeners = new ArrayList<>(); + listeners.add(listener); + } + + void addMetadataListener(MetaDataListener listener) { + listeners.add(listeners.size(), listener); + } + + @Override + public void metadataCollected(QueryMetadata metadata) { + Thread t = new Thread(() -> { + for (MetaDataListener l: listeners) { + l.metadataCollected(metadata); + } + }); + t.start(); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java index 3b1f1c9b08..b916541ea9 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResource.java @@ -47,6 +47,7 @@ import com.dremio.dac.explore.model.InitialDataPreviewResponse; import com.dremio.dac.explore.model.VersionContextReq; import com.dremio.dac.explore.model.VersionContextReq.VersionContextType; +import com.dremio.dac.explore.model.VersionContextUtils; import com.dremio.dac.model.job.JobData; import com.dremio.dac.model.job.JobDataWrapper; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; @@ -60,10 +61,14 @@ import com.dremio.dac.service.reflection.ReflectionServiceHelper; import com.dremio.dac.util.JobRequestUtil; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.physical.base.ViewOptions; +import com.dremio.exec.planner.sql.parser.SqlGrant; import com.dremio.exec.record.BatchSchema; import com.dremio.service.job.QueryType; import com.dremio.service.job.SqlQuery; @@ -95,10 +100,8 @@ import io.protostuff.ByteString; - /** * Serves the datasets - * */ @RestResource @Secured @@ -134,13 +137,6 @@ public DatasetResource( this.collaborationService = collaborationService; } - @GET - @Path("descendants/count") - @Produces(APPLICATION_JSON) - public long 
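For reference, the new AsyncMetadataJobStatusListener above fans the metadataCollected callback out to all registered listeners on a freshly started thread, so metadata consumers never block the job path (listeners.add(listeners.size(), l) is simply an append). A self-contained sketch of the same fan-out pattern; QueryMetadata is a local stub here, standing in for the generated proto class:

import java.util.ArrayList;
import java.util.List;

public class FanOutListenerDemo {
  static final class QueryMetadata {} // local stand-in for the proto type

  interface MetaDataListener {
    void metadataCollected(QueryMetadata metadata);
  }

  private final List<MetaDataListener> listeners = new ArrayList<>();

  void addMetadataListener(MetaDataListener listener) {
    listeners.add(listener); // equivalent to add(listeners.size(), listener)
  }

  void metadataCollected(QueryMetadata metadata) {
    // Dispatch asynchronously; listeners run in registration order on one thread.
    new Thread(() -> {
      for (MetaDataListener l : listeners) {
        l.metadataCollected(metadata);
      }
    }).start();
  }

  public static void main(String[] args) {
    FanOutListenerDemo demo = new FanOutListenerDemo();
    demo.addMetadataListener(m -> System.out.println("saved dataset"));
    demo.addMetadataListener(m -> System.out.println("notified UI"));
    demo.metadataCollected(new QueryMetadata());
  }
}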
getDescendantsCount() { - return datasetService.getDescendantsCount(datasetPath.toNamespaceKey()); - } - @GET @Path("descendants") @Produces(APPLICATION_JSON) @@ -155,8 +151,30 @@ public List> getDescendants() throws NamespaceException { @GET @Path("acceleration/settings") @Produces(APPLICATION_JSON) - public AccelerationSettingsDescriptor getAccelerationSettings() throws NamespaceException { - final DatasetConfig config = namespaceService.getDataset(datasetPath.toNamespaceKey()); + public AccelerationSettingsDescriptor getAccelerationSettings( + @QueryParam("versionType") String versionType, + @QueryParam("versionValue") String versionValue) + throws DatasetNotFoundException, NamespaceException { + final CatalogEntityKey.Builder builder = + CatalogEntityKey.newBuilder().keyComponents(datasetPath.toPathList()); + + if (isDatasetVersioned()) { + final VersionContext versionContext = generateVersionContext(versionType, versionValue); + final TableVersionContext tableVersionContext = TableVersionContext.of(versionContext); + + builder.tableVersionContext(tableVersionContext); + } + + final Catalog catalog = datasetService.getCatalog(); + final CatalogEntityKey catalogEntityKey = builder.build(); + final DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog); + + if (table == null) { + throw new DatasetNotFoundException(datasetPath); + } + + final DatasetConfig config = table.getDatasetConfig(); + if (config.getType() == DatasetType.VIRTUAL_DATASET) { final String msg = String.format("acceleration settings apply only to physical dataset. %s is a virtual dataset", datasetPath.toPathString()); @@ -164,37 +182,54 @@ public AccelerationSettingsDescriptor getAccelerationSettings() throws Namespace } final ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings(); - final AccelerationSettings settings = reflectionSettings.getReflectionSettings(datasetPath.toNamespaceKey()); - final AccelerationSettingsDescriptor descriptor = new AccelerationSettingsDescriptor() - .setAccelerationRefreshPeriod(settings.getRefreshPeriod()) - .setAccelerationGracePeriod(settings.getGracePeriod()) - .setMethod(settings.getMethod()) - .setRefreshField(settings.getRefreshField()) - .setAccelerationNeverExpire(settings.getNeverExpire()) - .setAccelerationNeverRefresh(settings.getNeverRefresh()); - + final AccelerationSettings settings = + reflectionSettings.getReflectionSettings(catalogEntityKey); + + final AccelerationSettingsDescriptor descriptor = + new AccelerationSettingsDescriptor() + .setAccelerationRefreshPeriod(settings.getRefreshPeriod()) + .setAccelerationGracePeriod(settings.getGracePeriod()) + .setMethod(settings.getMethod()) + .setRefreshField(settings.getRefreshField()) + .setAccelerationNeverExpire(settings.getNeverExpire()) + .setAccelerationNeverRefresh(settings.getNeverRefresh()); final ByteString schemaBytes = DatasetHelper.getSchemaBytes(config); + if (schemaBytes != null) { final BatchSchema schema = BatchSchema.deserialize(schemaBytes.toByteArray()); - descriptor.setFieldList(FluentIterable - .from(schema) - .transform(new Function() { - @Nullable - @Override - public String apply(@Nullable final Field field) { - return field.getName(); - } - }) - .toList() - ); + descriptor.setFieldList( + FluentIterable.from(schema) + .transform( + new Function() { + @Nullable + @Override + public String apply(@Nullable final Field field) { + return field.getName(); + } + }) + .toList()); } + return descriptor; } + private VersionContext generateVersionContext(String 
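The acceleration-settings endpoint above now resolves the dataset through the catalog with a CatalogEntityKey that optionally carries a table version context, instead of reading the namespace service directly. A rough stand-alone sketch of that key-plus-lookup shape; every type below is a local stand-in for the Dremio classes, not the real API:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class VersionedKeyLookupDemo {
  // Local stand-in for CatalogEntityKey: path components plus an optional version context.
  static final class EntityKey {
    final List<String> path;
    final String versionContext; // e.g. "BRANCH main"; null for unversioned sources

    EntityKey(List<String> path, String versionContext) {
      this.path = path;
      this.versionContext = versionContext;
    }

    @Override public String toString() {
      return String.join(".", path) + (versionContext == null ? "" : " AT " + versionContext);
    }
  }

  private final Map<String, String> tables = new HashMap<>();

  String getTable(EntityKey key) {
    return tables.get(key.toString()); // null when the table does not exist at that version
  }

  public static void main(String[] args) {
    VersionedKeyLookupDemo catalog = new VersionedKeyLookupDemo();
    catalog.tables.put("nessie.sales.orders AT BRANCH main", "orders-table");

    boolean versioned = true; // would come from an isDatasetVersioned()-style check
    EntityKey key = new EntityKey(
        Arrays.asList("nessie", "sales", "orders"),
        versioned ? "BRANCH main" : null);

    String table = catalog.getTable(key);
    if (table == null) {
      // the resource maps this case to DatasetNotFoundException
      throw new IllegalStateException("dataset not found: " + key);
    }
    System.out.println("resolved " + table);
  }
}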
versionType, String versionValue) { + final VersionContext versionContext = VersionContextUtils.parse(versionType, versionValue); + if (versionContext.getType() == VersionContext.Type.UNSPECIFIED) { + throw new ClientErrorException( + "Missing a versionType/versionValue pair for versioned dataset"); + } + return versionContext; + } + @PUT @Path("acceleration/settings") @Produces(APPLICATION_JSON) - public void updateAccelerationSettings(final AccelerationSettingsDescriptor descriptor) throws NamespaceException { + public void updateAccelerationSettings( + final AccelerationSettingsDescriptor descriptor, + @QueryParam("versionType") String versionType, + @QueryParam("versionValue") String versionValue) + throws DatasetNotFoundException, NamespaceException { Preconditions.checkArgument(descriptor != null, "acceleration settings descriptor is required"); Preconditions.checkArgument(descriptor.getAccelerationRefreshPeriod() != null, "refreshPeriod is required"); Preconditions.checkArgument(descriptor.getAccelerationGracePeriod() != null, "gracePeriod is required"); @@ -202,7 +237,26 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc Preconditions.checkArgument(descriptor.getAccelerationNeverExpire() //we are good here || descriptor.getAccelerationNeverRefresh() //user never want to refresh, assume they just want to let it expire anyway || descriptor.getAccelerationRefreshPeriod() <= descriptor.getAccelerationGracePeriod() , "refreshPeriod must be less than gracePeriod"); - final DatasetConfig config = namespaceService.getDataset(datasetPath.toNamespaceKey()); + + final CatalogEntityKey.Builder builder = + CatalogEntityKey.newBuilder().keyComponents(datasetPath.toPathList()); + + if (isDatasetVersioned()) { + final VersionContext versionContext = generateVersionContext(versionType, versionValue); + final TableVersionContext tableVersionContext = TableVersionContext.of(versionContext); + + builder.tableVersionContext(tableVersionContext); + } + + final Catalog catalog = datasetService.getCatalog(); + final CatalogEntityKey catalogEntityKey = builder.build(); + final DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog); + + if (table == null) { + throw new DatasetNotFoundException(datasetPath); + } + + final DatasetConfig config = table.getDatasetConfig(); if (config.getType() == DatasetType.VIRTUAL_DATASET) { final String msg = String.format("acceleration settings apply only to physical dataset. %s is a virtual dataset", @@ -211,7 +265,13 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc } if (descriptor.getMethod() == RefreshMethod.INCREMENTAL) { - if (config.getType() == DatasetType.PHYSICAL_DATASET) { + if (CatalogUtil.requestedPluginSupportsVersionedTables(table.getPath(), catalog)) { + // Validate Iceberg tables in Nessie Catalog + final String msg = "refresh field is required for incremental updates on Iceberg tables"; + Preconditions.checkArgument(descriptor.getRefreshField() != null, msg); + } else if (config.getType() == DatasetType.PHYSICAL_DATASET) { + // Validate Iceberg tables outside of Nessie Catalog + // Validate non-directory datasets such as RDBMS tables, MongoDB, elasticsearch, etc. 
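generateVersionContext above rejects requests where the versionType/versionValue query parameters do not form a usable pair, by treating them as an UNSPECIFIED context. A minimal sketch of that parse-then-validate step, with VersionContext reduced to a local enum (the real parsing lives in VersionContextUtils, which is not shown here):

public class VersionContextValidationDemo {
  enum Type { UNSPECIFIED, BRANCH, TAG, COMMIT }

  // Local stand-in for VersionContextUtils.parse: both parameters are optional
  // query parameters, so null or unknown input degrades to UNSPECIFIED.
  static Type parse(String versionType, String versionValue) {
    if (versionType == null || versionValue == null) {
      return Type.UNSPECIFIED;
    }
    try {
      return Type.valueOf(versionType.toUpperCase());
    } catch (IllegalArgumentException e) {
      return Type.UNSPECIFIED;
    }
  }

  static Type requireVersionContext(String versionType, String versionValue) {
    Type type = parse(versionType, versionValue);
    if (type == Type.UNSPECIFIED) {
      // The resource surfaces this as an HTTP 400 via ClientErrorException.
      throw new IllegalArgumentException(
          "Missing a versionType/versionValue pair for versioned dataset");
    }
    return type;
  }

  public static void main(String[] args) {
    System.out.println(requireVersionContext("BRANCH", "main")); // BRANCH
    try {
      requireVersionContext(null, null);
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}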
final String msg = "refresh field is required for incremental updates on non-filesystem datasets"; Preconditions.checkArgument(descriptor.getRefreshField() != null, msg); } else { @@ -224,26 +284,32 @@ public void updateAccelerationSettings(final AccelerationSettingsDescriptor desc } final ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings(); - final AccelerationSettings settings = reflectionSettings.getReflectionSettings(datasetPath.toNamespaceKey()); - final AccelerationSettings descriptorSettings = new AccelerationSettings() - .setAccelerationTTL(settings.getAccelerationTTL()) // needed to use protobuf equals - .setTag(settings.getTag()) // needed to use protobuf equals - .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod()) - .setGracePeriod(descriptor.getAccelerationGracePeriod()) - .setMethod(descriptor.getMethod()) - .setRefreshField(descriptor.getRefreshField()) - .setNeverExpire(descriptor.getAccelerationNeverExpire()) - .setNeverRefresh(descriptor.getAccelerationNeverRefresh()); - final boolean settingsUpdated = !settings.equals(descriptorSettings); - if (settingsUpdated) { - settings.setRefreshPeriod(descriptor.getAccelerationRefreshPeriod()) + final AccelerationSettings settings = + reflectionSettings.getReflectionSettings(catalogEntityKey); + final AccelerationSettings descriptorSettings = + new AccelerationSettings() + .setAccelerationTTL(settings.getAccelerationTTL()) // needed to use protobuf equals + .setTag(settings.getTag()) // needed to use protobuf equals + .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod()) + .setGracePeriod(descriptor.getAccelerationGracePeriod()) + .setMethod(descriptor.getMethod()) + .setRefreshField(descriptor.getRefreshField()) + .setNeverExpire(descriptor.getAccelerationNeverExpire()) + .setNeverRefresh(descriptor.getAccelerationNeverRefresh()); + + if (settings.equals(descriptorSettings)) { + return; + } + + settings + .setRefreshPeriod(descriptor.getAccelerationRefreshPeriod()) .setGracePeriod(descriptor.getAccelerationGracePeriod()) .setMethod(descriptor.getMethod()) .setRefreshField(descriptor.getRefreshField()) .setNeverExpire(descriptor.getAccelerationNeverExpire()) .setNeverRefresh(descriptor.getAccelerationNeverRefresh()); - reflectionSettings.setReflectionSettings(datasetPath.toNamespaceKey(), settings); - } + + reflectionSettings.setReflectionSettings(catalogEntityKey, settings); } /** @@ -285,6 +351,8 @@ public DatasetUI deleteDataset( DatasetUI datasetUI = null; if (versioned) { final Catalog catalog = datasetService.getCatalog(); + //TODO: Once DX-65418 is fixed, injected catalog will validate the right entity accordingly + catalog.validatePrivilege(new NamespaceKey(datasetPath.toPathList()), SqlGrant.Privilege.ALTER); final ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext( catalog, datasetPath.getRoot().getName(), VersionContext.ofBranch(refValue)); @@ -293,10 +361,13 @@ public DatasetUI deleteDataset( catalog.dropView(new NamespaceKey(datasetPath.toPathList()), viewOptions); } else { - final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath); - - datasetUI = newDataset(virtualDataset); - datasetService.deleteDataset(datasetPath, savedTag); + try { + final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath); + datasetUI = newDataset(virtualDataset); + datasetService.deleteDataset(datasetPath, savedTag); + } catch (DatasetVersionNotFoundException e) { + datasetService.deleteDataset(datasetPath, null); + } } final 
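updateAccelerationSettings above now compares the stored settings with the incoming descriptor (after copying TTL and tag over so protobuf equals can be used) and returns early when nothing changed, so no-op payloads never reach the reflection settings store. A tiny self-contained sketch of that compare-then-persist idiom, with the settings object trimmed to two fields:

import java.util.Objects;

public class SettingsUpdateDemo {
  // Local stand-in for AccelerationSettings with just two of its fields.
  static final class Settings {
    long refreshPeriodMs;
    long gracePeriodMs;

    @Override public boolean equals(Object o) {
      if (!(o instanceof Settings)) return false;
      Settings s = (Settings) o;
      return refreshPeriodMs == s.refreshPeriodMs && gracePeriodMs == s.gracePeriodMs;
    }
    @Override public int hashCode() { return Objects.hash(refreshPeriodMs, gracePeriodMs); }
  }

  private Settings stored = new Settings();
  private int writes = 0;

  void update(long refreshPeriodMs, long gracePeriodMs) {
    Settings incoming = new Settings();
    incoming.refreshPeriodMs = refreshPeriodMs;
    incoming.gracePeriodMs = gracePeriodMs;

    // Early return instead of the old settingsUpdated flag: identical payloads
    // are dropped before any store write happens.
    if (stored.equals(incoming)) {
      return;
    }
    stored = incoming;
    writes++;
  }

  public static void main(String[] args) {
    SettingsUpdateDemo demo = new SettingsUpdateDemo();
    demo.update(3_600_000L, 10_800_000L);
    demo.update(3_600_000L, 10_800_000L); // identical payload, skipped
    System.out.println("writes = " + demo.writes); // 1
  }
}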
ReflectionSettings reflectionSettings = reflectionServiceHelper.getReflectionSettings(); @@ -306,7 +377,7 @@ public DatasetUI deleteDataset( } private boolean isDatasetVersioned() { - final NamespaceKey namespaceKey = new NamespaceKey(datasetPath.toPathList()); + final NamespaceKey namespaceKey = datasetPath.toNamespaceKey(); final Catalog catalog = datasetService.getCatalog(); return CatalogUtil.requestedPluginSupportsVersionedTables(namespaceKey, catalog); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java index da1ca6ec1f..adaa9ba15b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetResourceUtils.java @@ -20,8 +20,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.collections4.MapUtils; - import com.dremio.dac.explore.model.VersionContextReq; import com.dremio.dac.proto.model.dataset.SourceVersionReference; import com.dremio.dac.proto.model.dataset.VersionContextType; @@ -31,12 +29,16 @@ /** * Utility classes for DatasetResource */ -public class DatasetResourceUtils { +public final class DatasetResourceUtils { + + private DatasetResourceUtils() { + // utils class + } - public static Map createSourceVersionMapping(Map references) { + public static Map createSourceVersionMapping(final Map references) { final Map sourceVersionMapping = new HashMap<>(); - if (MapUtils.isNotEmpty(references)) { + if (references != null) { for (Map.Entry entry: references.entrySet()) { VersionContextReq.VersionContextType versionContextType = entry.getValue().getType(); switch (versionContextType) { @@ -60,7 +62,7 @@ public static Map createSourceVersionMapping(Map createSourceVersionReferenceList(Map references) { List sourceVersionReferenceList = new ArrayList<>(); - if (MapUtils.isNotEmpty(references)) { + if (references != null) { for (Map.Entry entry: references.entrySet()) { VersionContextReq versionContextReq = entry.getValue(); VersionContextReq.VersionContextType versionContextType = versionContextReq.getType(); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java index 9517d83218..c83d452d69 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetStateMutator.java @@ -131,7 +131,7 @@ public String getDatasetAlias() { private void shouldNotExist(String colName) { if (findCol(colName) != null) { throw new IllegalArgumentException( - format("Invalid new col name %s. It is already in the current schema", colName)); + format("Invalid new column name %s. 
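The DatasetResourceUtils change above replaces MapUtils.isNotEmpty(references) with a plain null check, which preserves behavior: iterating a non-null empty map drives the loop zero times and still yields an empty result, while the commons-collections4 dependency goes away. A small demonstration (the toUpperCase transform is an arbitrary placeholder for the real per-entry conversion):

import java.util.HashMap;
import java.util.Map;

public class EmptyMapGuardDemo {
  // Old guard: if (MapUtils.isNotEmpty(references)) { ... }  (commons-collections4)
  // New guard: if (references != null) { ... }
  static Map<String, String> createMapping(Map<String, String> references) {
    Map<String, String> mapping = new HashMap<>();
    if (references != null) {
      for (Map.Entry<String, String> entry : references.entrySet()) {
        mapping.put(entry.getKey(), entry.getValue().toUpperCase());
      }
    }
    return mapping;
  }

  public static void main(String[] args) {
    System.out.println(createMapping(null));            // {}
    System.out.println(createMapping(new HashMap<>())); // {} as well, loop body never runs
    Map<String, String> refs = new HashMap<>();
    refs.put("nessie", "branch:main");
    System.out.println(createMapping(refs));            // {nessie=BRANCH:MAIN}
  }
}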
It is already in the current schema", colName)); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java index 089bdf328d..a184dd9aec 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetTool.java @@ -20,11 +20,9 @@ import java.security.AccessControlException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; @@ -82,14 +80,12 @@ import com.dremio.dac.util.InvalidQueryErrorConverter; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.record.RecordBatchHolder; import com.dremio.exec.util.ViewFieldsHelper; import com.dremio.service.job.JobDetails; import com.dremio.service.job.JobDetailsRequest; -import com.dremio.service.job.JobSummary; -import com.dremio.service.job.SearchJobsRequest; -import com.dremio.service.job.VersionedDatasetPath; import com.dremio.service.job.proto.JobId; import com.dremio.service.job.proto.JobInfo; import com.dremio.service.job.proto.JobState; @@ -98,7 +94,6 @@ import com.dremio.service.job.proto.SessionId; import com.dremio.service.jobs.JobDataClientUtils; import com.dremio.service.jobs.JobNotFoundException; -import com.dremio.service.jobs.JobStatusListener; import com.dremio.service.jobs.JobsProtoUtil; import com.dremio.service.jobs.JobsService; import com.dremio.service.jobs.JobsVersionContext; @@ -106,7 +101,6 @@ import com.dremio.service.jobs.metadata.proto.QueryMetadata; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceUtils; import com.dremio.service.namespace.dataset.DatasetVersion; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.dremio.service.namespace.dataset.proto.FieldOrigin; @@ -116,6 +110,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -149,21 +144,28 @@ public DatasetTool( * @throws DatasetVersionNotFoundException */ InitialPreviewResponse createPreviewResponseForExistingDataset ( - BufferAllocator allocator, VirtualDatasetUI newDataset, DatasetVersionResourcePath tipVersion, - Integer limit, String engineName, String sessionId, String triggerJob - ) throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException { + ) throws DatasetVersionNotFoundException, NamespaceException { + JobId jobId = null; if (shouldTriggerJob(triggerJob)) { SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId); - JobData jobData = executor.runQueryWithListener(query, QueryType.UI_PREVIEW, tipVersion.getDataset(), newDataset.getVersion(), JobStatusListener.NO_OP); - return createPreviewResponse(newDataset, jobData, tipVersion, allocator, limit, true); - } else { - return getInitialPreviewResponse(newDataset, null, new SessionId().setId(sessionId), tipVersion, null, null); + MetadataJobStatusListener listener = new MetadataJobStatusListener(this, newDataset, null); + // The saved dataset is incomplete, we 
want to save the dataset again once the metadata is collected. + if (newDataset.getSqlFieldsList() == null) { + listener.waitToApplyMetadataAndSaveDataset(); + } + + JobData jobData = executor.runQueryWithListener(query, QueryType.UI_PREVIEW, tipVersion.getDataset(), newDataset.getVersion(), listener); + jobId = jobData.getJobId(); + if (newDataset.getSqlFieldsList() == null) { + listener.setJobId(jobId); + } } + return getInitialPreviewResponse(newDataset, jobId, new SessionId().setId(sessionId), tipVersion, null, null); } // Convert String to boolean, but with default as true. @@ -187,8 +189,8 @@ private String username(){ * Helper method to create {@link InitialPreviewResponse} from given inputs * @param datasetUI * @param job - * @param tipVersion a combination of dataset verion + path to a dataset. It represent a top history version. Path here - * could differs from path that {@code datasetUI} has, as {@code datasetUI} could be + * @param tipVersion a combination of dataset version + path to a dataset. It represents a top history version. Path here + * could differ from path that {@code datasetUI} has, as {@code datasetUI} could be * a history version that references another dataset with a different path. * @param maxRecords * @param catchExecutionError @@ -231,6 +233,7 @@ InitialPreviewResponse createPreviewResponse(VirtualDatasetUI datasetUI, JobData } if (ex instanceof UserException) { + // TODO - Why is this not thrown? toInvalidQueryException((UserException) ex, datasetUI.getSql(), ImmutableList.of(), job.getJobId(), job.getSessionId()); } error = new ApiErrorModel(ApiErrorModel.ErrorType.INITIAL_PREVIEW_ERROR, ex.getMessage(), GenericErrorMessage.printStackTrace(ex), null); @@ -250,8 +253,7 @@ private InitialPreviewResponse getInitialPreviewResponse(VirtualDatasetUI datase JobDataFragment dataLimited, ApiErrorModel error) throws DatasetVersionNotFoundException, NamespaceException { final History history = getHistory(tipVersion.getDataset(), datasetUI.getVersion(), tipVersion.getVersion()); - // VBesschetnov 2019-01-08 - // this is requires as BE generates apiLinks, that is used by UI to send requests for preview/run. In case, when history + // This is required because the BE generates apiLinks that the UI uses to send preview/run requests. When the history // of a dataset references a version of another dataset and a user navigates to that version and tries to preview it, // we would not be able to resolve a tip version and the preview will fail.
We should always send requests to original dataset // path (tip version path) to be able to get a preview/run data @@ -559,6 +561,7 @@ public InitialPreviewResponse newUntitled( * @throws DatasetVersionNotFoundException * @throws NamespaceException */ + @WithSpan public InitialPreviewResponse newUntitled( BufferAllocator allocator, FromBase from, @@ -573,8 +576,7 @@ public InitialPreviewResponse newUntitled( Map references) throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, - DatasetResourceUtils.createSourceVersionReferenceList(references)); + final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references); final Map sourceVersionMapping = createSourceVersionMapping(references); final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping); @@ -600,6 +602,7 @@ public InitialPreviewResponse newUntitled( List parentDataset = getParentDataset(from); if (ex instanceof UserException) { + // TODO - Why is this not thrown? toInvalidQueryException((UserException) ex, query.getSql(), context, parentSummary, jobId, jobDataSessionId); } @@ -627,12 +630,10 @@ public InitialUntitledRunResponse newTmpUntitled( List context, String engineName, String sessionId, - Map references, - Integer limit) + Map references) throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - final VirtualDatasetUI vds = createNewUntitledMetadataOnly(from, version, context, - DatasetResourceUtils.createSourceVersionReferenceList(references)); + final VirtualDatasetUI vds = createNewUntitledMetadataOnly(from, version, context, references); final Map sourceVersionMapping = createSourceVersionMapping(references); final SqlQuery query = new SqlQuery(vds.getSql(), vds.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping); @@ -675,12 +676,15 @@ public InitialPreviewResponse newUntitled( VirtualDatasetUI createNewUntitledMetadataOnly(FromBase from, DatasetVersion version, List context, - List sourceVersionReferences) { - final DatasetPath datasetPath = TMP_DATASET_PATH; - final VirtualDatasetUI newDataset = newDatasetBeforeQueryMetadata(datasetPath, version, from.wrap(), context, username()); + Map references) { + final VirtualDatasetUI newDataset = newDatasetBeforeQueryMetadata(TMP_DATASET_PATH, version, from.wrap(), context, username(), datasetService.getCatalog(), references); newDataset.setLastTransform(new Transform(TransformType.createFromParent).setTransformCreateFromParent(new TransformCreateFromParent(from.wrap()))); + + final List sourceVersionReferences = + DatasetResourceUtils.createSourceVersionReferenceList(references); newDataset.setReferencesList(sourceVersionReferences); newDataset.getState().setReferenceList(sourceVersionReferences); + return newDataset; } @@ -690,10 +694,9 @@ InitialRunResponse newUntitledAndRun(FromBase from, String engineName, String sessionId, Map references) - throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException, InterruptedException { + throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException { - final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, - DatasetResourceUtils.createSourceVersionReferenceList(references)); + final 
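createPreviewResponseForExistingDataset above wires in a MetadataJobStatusListener so that an incomplete dataset (null sqlFieldsList) is saved again once metadata arrives; the save needs both the collected metadata and the JobId, and the JobId is only known after submission. MetadataJobStatusListener itself is not part of this diff, so the following is only a plausible minimal shape of that rendezvous under those assumptions, not its real implementation:

import java.util.concurrent.atomic.AtomicReference;

public class DeferredSaveDemo {
  static final class QueryMetadata {}                          // local stub
  static final class JobId { final String id; JobId(String id) { this.id = id; } }

  static final class DeferredSaver {
    private final AtomicReference<JobId> jobId = new AtomicReference<>();
    private final AtomicReference<QueryMetadata> metadata = new AtomicReference<>();
    private boolean saved;

    void setJobId(JobId id) {            // called right after job submission
      jobId.set(id);
      maybeSave();
    }

    void metadataCollected(QueryMetadata m) { // called from the job status callback
      metadata.set(m);
      maybeSave();
    }

    // Whichever of the two inputs lands second triggers the save, exactly once.
    private synchronized void maybeSave() {
      if (!saved && jobId.get() != null && metadata.get() != null) {
        saved = true;
        System.out.println("applying metadata and saving dataset for job " + jobId.get().id);
      }
    }
  }

  public static void main(String[] args) {
    DeferredSaver saver = new DeferredSaver();
    saver.metadataCollected(new QueryMetadata()); // inputs can arrive in either order
    saver.setJobId(new JobId("1a2b3c"));
  }
}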
VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references); final Map sourceVersionMapping = createSourceVersionMapping(references); final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping); @@ -732,7 +735,7 @@ InitialRunResponse newUntitledAndRun(FromBase from, * @param version Initial version of the new dataset * @param context Dataset context or current schema * @param engineName Engine to runt the query - * @param sessionId Session Id + * @param sessionId SessionId * @param references References * @return {@link InitialUntitledRunResponse)} * @throws DatasetNotFoundException @@ -746,8 +749,7 @@ InitialUntitledRunResponse newTmpUntitledAndRun(FromBase from, Map references) throws DatasetNotFoundException, DatasetVersionNotFoundException { - final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, - DatasetResourceUtils.createSourceVersionReferenceList(references)); + final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context, references); final Map sourceVersionMapping = createSourceVersionMapping(references); final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username(), engineName, sessionId, sourceVersionMapping); @@ -795,13 +797,6 @@ protected Map createSourceVersionMapping(Map context) - throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException, InterruptedException { - return newUntitledAndRun(from, version, context, null, null, null); - } - public void applyQueryMetaToDatasetAndSave(JobId jobId, QueryMetadata queryMetadata, VirtualDatasetUI newDataset, @@ -817,7 +812,7 @@ public void applyQueryMetaToDatasetAndSave(JobId jobId, QuerySemantics.populateSemanticFields(JobsProtoUtil.toStuff(queryMetadata.getFieldTypeList()), newDataset.getState()); applyQueryMetadata(newDataset, jobInfo, queryMetadata); - if (from.wrap().getType() == FromType.SQL) { + if (from == null || from.wrap().getType() == FromType.SQL) { newDataset.setState(QuerySemantics.extract(queryMetadata)); } @@ -825,29 +820,17 @@ public void applyQueryMetaToDatasetAndSave(JobId jobId, } public static VirtualDatasetUI newDatasetBeforeQueryMetadata( - DatasetPath datasetPath, - DatasetVersion version, - From from, - List sqlContext, - String owner) { + final DatasetPath datasetPath, + final DatasetVersion version, + final From from, + final List sqlContext, + final String owner, + Catalog catalog, + final Map references) { VirtualDatasetState dss = new VirtualDatasetState() .setFrom(from); dss.setContextList(sqlContext); VirtualDatasetUI vds = new VirtualDatasetUI(); - switch(from.getType()){ - case SQL: - vds.setDerivation(Derivation.SQL); - break; - case Table: - vds.setDerivation(Derivation.DERIVED_UNKNOWN); - dss.setReferredTablesList(Arrays.asList(from.getTable().getAlias())); - break; - case SubQuery: - default: - vds.setDerivation(Derivation.UNKNOWN); - dss.setReferredTablesList(Arrays.asList(from.getSubQuery().getAlias())); - break; - } vds.setOwner(owner); vds.setIsNamed(false); @@ -859,6 +842,23 @@ public static VirtualDatasetUI newDatasetBeforeQueryMetadata( vds.setId(UUID.randomUUID().toString()); vds.setContextList(sqlContext); + switch (from.getType()) { + case SQL: + vds.setDerivation(Derivation.SQL); + break; + case Table: + vds.setDerivation(Derivation.DERIVED_UNKNOWN); + 
dss.setReferredTablesList(Collections.singletonList(from.getTable().getAlias())); + + updateVersionedDatasetId(vds, from, catalog, references); + break; + case SubQuery: + default: + vds.setDerivation(Derivation.UNKNOWN); + dss.setReferredTablesList(Collections.singletonList(from.getSubQuery().getAlias())); + break; + } + // if we're doing a select * from table, and the context matches the base path of the table, let's avoid qualifying the table name. if(from.getType() == FromType.Table) { NamespaceKey path = new DatasetPath(from.getTable().getDatasetPath()).toNamespaceKey(); @@ -870,9 +870,33 @@ public static VirtualDatasetUI newDatasetBeforeQueryMetadata( return vds; } + /** + * Update the datasetId in the given dataset UI. This method only applies to versioned tables. + */ + private static void updateVersionedDatasetId( + VirtualDatasetUI vds, + final From from, + Catalog catalog, + final Map references) { + if (references == null || references.isEmpty() || catalog == null) { + return; + } + + final NamespaceKey namespaceKey = + new DatasetPath(from.getTable().getDatasetPath()).toNamespaceKey(); + final Map versionContextMapping = + DatasetResourceUtils.createSourceVersionMapping(references); + + if (!CatalogUtil.requestedPluginSupportsVersionedTables(namespaceKey, catalog)) { + return; + } + + vds.setId(catalog.resolveCatalog(versionContextMapping).getDatasetId(namespaceKey)); + } + /** * Get the history before a given version. This should only be used if this version is known to be - * the last version in the history. Otherwise the other version of this method that takes a tip + * the last version in the history. Otherwise, use the other version of this method that takes a tip * version as well as a current version. * * @param datasetPath @@ -894,7 +918,7 @@ History getHistory(final DatasetPath datasetPath, DatasetVersion currentDataset) * * @param datasetPath the dataset path of the version at the tip of the history * @param versionToMarkCurrent the version currently selected in the client - * @param tipVersion the latest history item known, which may be passed the selected versionToMarkCurrent, + * @param tipVersion the latest history item known which may be past the selected versionToMarkCurrent, * this can be null and the tip will be assumed to be the versionToMarkCurrent the * same behavior as the version of this method that lacks the tipVersion entirely * @return @@ -916,27 +940,8 @@ History getHistory(final DatasetPath datasetPath, final DatasetVersion versionTo DatasetVersionResourcePath versionedResourcePath = new DatasetVersionResourcePath(currentPath, currentVersion); - // grab the most recent job for this dataset version (note the use of limit 1 to avoid - // retrieving all results, the API just returns a list, so this also has to index into the returned list - // that will always contain a single element) - final SearchJobsRequest request = SearchJobsRequest.newBuilder() - .setDataset(VersionedDatasetPath.newBuilder() - .addAllPath(currentDataset.getFullPathList()) - .setVersion(currentDataset.getVersion().getVersion()) - .build()) - .setLimit(1) - .build(); - Iterable jobSummaries = jobsService.searchJobs(request); - final JobState jobState; - // jobs are not persisted forever so we may not have a job for this version of the dataset - Iterator iterator = jobSummaries.iterator(); - if (iterator.hasNext()) { - jobState = JobsProtoUtil.toStuff(iterator.next().getJobState()); - } else { - jobState = JobState.COMPLETED; - } historyItems.add( - new
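updateVersionedDatasetId above swaps the random UUID for an id resolved through a catalog bound to the request's source-to-version mapping, so the same table path yields distinct ids on different branches. An illustrative stand-alone sketch of that idea; resolveDatasetId below is a local stand-in for catalog.resolveCatalog(versionContextMapping).getDatasetId(namespaceKey), and the version strings are invented for the demo:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class VersionedIdResolutionDemo {
  // A versioned id embeds the version the source was resolved at, so the same
  // path produces different ids on different branches.
  static String resolveDatasetId(List<String> path, Map<String, String> sourceVersions) {
    String source = path.get(0);
    String version = sourceVersions.getOrDefault(source, "DEFAULT");
    return String.join(".", path) + "@" + version;
  }

  public static void main(String[] args) {
    List<String> path = Arrays.asList("nessie", "sales", "orders");

    Map<String, String> main = new HashMap<>();
    main.put("nessie", "branch/main");
    Map<String, String> dev = new HashMap<>();
    dev.put("nessie", "branch/dev");

    // The real code only rewrites the id when references are present and the
    // source supports versioned tables; that guard is omitted here.
    System.out.println(resolveDatasetId(path, main)); // nessie.sales.orders@branch/main
    System.out.println(resolveDatasetId(path, dev));  // nessie.sales.orders@branch/dev
  }
}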
HistoryItem(versionedResourcePath, JobState.COMPLETED, TransformBase.unwrap(currentDataset.getLastTransform()).accept(new DescribeTransformation()), username(), currentDataset.getCreatedAt(), 0L, true, null, null)); @@ -999,7 +1004,7 @@ History getHistory(final DatasetPath datasetPath, final DatasetVersion versionTo * @throws NamespaceException */ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newPath) - throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { + throws DatasetVersionNotFoundException, DatasetNotFoundException { DatasetVersion previousDatasetVersion; DatasetPath previousPath; @@ -1019,6 +1024,16 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP previousPath = new DatasetPath(previousVersion.getDatasetPath()); previousDatasetVersion = new DatasetVersion(previousVersion.getDatasetVersion()); previousVersionRequiresRename = !previousPath.equals(newPath); + VirtualDatasetUI previousDataset = datasetService.getVersion(previousPath, previousDatasetVersion); + // If the previous VDS version is incomplete, ignore that version. This can happen when the user clicks on a + // PDS: an incomplete VDS version is created to show the PDS in the UI. If the user then modifies the SQL and saves the + // VDS, the previous VDS version is incomplete since it never ran and doesn't have metadata. + try { + DatasetVersionMutator.validate(previousPath, previousDataset); + } catch (Exception e) { + previousVersionRequiresRename = false; + } + if (previousVersionRequiresRename) { // create a new link to the previous dataset with a changed dataset path NameDatasetRef prev = new NameDatasetRef() @@ -1027,7 +1042,7 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP currentDataset.setPreviousVersion(prev); currentDataset.setName(newPath.getDataset().getName()); datasetService.putVersion(currentDataset); - currentDataset = datasetService.getVersion(previousPath, previousDatasetVersion); + currentDataset = previousDataset; } else { datasetService.putVersion(currentDataset); } @@ -1041,7 +1056,7 @@ void rewriteHistory(final VirtualDatasetUI versionToSave, final DatasetPath newP public static void applyQueryMetadata(VirtualDatasetUI dataset, JobInfo jobInfo, QueryMetadata metadata) { applyQueryMetadata(dataset, Optional.ofNullable(jobInfo.getParentsList()), - Optional.ofNullable(jobInfo.getBatchSchema()).map((b) -> BatchSchema.deserialize(b)), + Optional.ofNullable(jobInfo.getBatchSchema()).map(BatchSchema::deserialize), Optional.ofNullable(jobInfo.getFieldOriginsList()), Optional.ofNullable(jobInfo.getGrandParentsList()), metadata); @@ -1069,12 +1084,8 @@ public static void applyQueryMetadata(VirtualDatasetUI dataset, Optional context, - DatasetSummary parentSummary, Map references) + DatasetType parentType, List parentFullPathList, Map references) throws NamespaceException { - final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, newVersion, context, - DatasetResourceUtils.createSourceVersionReferenceList(references)); + final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, newVersion, context, references); List parents = new ArrayList<>(); - final DatasetType parentType = parentSummary.getDatasetType(); - ParentDataset parent = new ParentDataset().setDatasetPathList(parentSummary.getFullPath()).setType(parentType); + final ParentDataset parent = new
ParentDataset().setDatasetPathList(parentFullPathList).setType(parentType); parents.add(parent); newDataset.setParentsList(parents); - if (NamespaceUtils.isPhysicalDataset(parentType)) { - newDataset.setDerivation(Derivation.DERIVED_PHYSICAL); - } else if (parentType == DatasetType.VIRTUAL_DATASET) { - newDataset.setDerivation(Derivation.DERIVED_VIRTUAL); - } else { - newDataset.setDerivation(Derivation.DERIVED_UNKNOWN); + switch (parentType) { + case PHYSICAL_DATASET: + case PHYSICAL_DATASET_SOURCE_FILE: + case PHYSICAL_DATASET_SOURCE_FOLDER: + case PHYSICAL_DATASET_HOME_FILE: + case PHYSICAL_DATASET_HOME_FOLDER: + newDataset.setDerivation(Derivation.DERIVED_PHYSICAL); + break; + + case VIRTUAL_DATASET: + newDataset.setDerivation(Derivation.DERIVED_VIRTUAL); + break; + + default: + newDataset.setDerivation(Derivation.DERIVED_UNKNOWN); } - return InitialPreviewResponse.of(newDataset(newDataset, null), true, null, null); + // Save the incomplete dataset (without metadata) to allow data graph and catalog working on UI. + // Later run/preview calls will save the complete dataset. + datasetService.putTempVersionWithoutValidation(newDataset); + + final DatasetUI datasetUI = newDataset(newDataset, null); + final History history = getHistory(new DatasetPath(datasetUI.getFullPath()), newDataset.getVersion(), null); + + return InitialPreviewResponse.of(datasetUI, null, new SessionId(), null, true, history, null); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java index ca96a8f88b..e18a1c8042 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetVersionResource.java @@ -33,7 +33,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeSet; -import java.util.concurrent.TimeUnit; import javax.annotation.security.RolesAllowed; import javax.inject.Inject; @@ -71,6 +70,7 @@ import com.dremio.dac.explore.model.DatasetUI; import com.dremio.dac.explore.model.DatasetUIWithHistory; import com.dremio.dac.explore.model.DatasetVersionResourcePath; +import com.dremio.dac.explore.model.FromBase; import com.dremio.dac.explore.model.HistogramValue; import com.dremio.dac.explore.model.History; import com.dremio.dac.explore.model.HistoryItem; @@ -98,8 +98,12 @@ import com.dremio.dac.proto.model.dataset.FromSQL; import com.dremio.dac.proto.model.dataset.NameDatasetRef; import com.dremio.dac.proto.model.dataset.ReplacePatternRule; +import com.dremio.dac.proto.model.dataset.SourceVersionReference; import com.dremio.dac.proto.model.dataset.SplitRule; import com.dremio.dac.proto.model.dataset.Transform; +import com.dremio.dac.proto.model.dataset.TransformCreateFromParent; +import com.dremio.dac.proto.model.dataset.TransformType; +import com.dremio.dac.proto.model.dataset.VirtualDatasetState; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; import com.dremio.dac.resource.BaseResourceWithAllocator; import com.dremio.dac.server.BufferAllocatorFactory; @@ -108,7 +112,6 @@ import com.dremio.dac.service.errors.ConflictException; import com.dremio.dac.service.errors.DatasetNotFoundException; import com.dremio.dac.service.errors.DatasetVersionNotFoundException; -import com.dremio.dac.service.errors.NewDatasetQueryException; import com.dremio.dac.util.DatasetsUtil; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUtil; @@ -124,7 +127,6 @@ import 
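The switch above replaces the old NamespaceUtils.isPhysicalDataset check with an explicit enumeration of the physical dataset flavors. A compact runnable version of the same mapping, with both enums trimmed locally to the values used here:

public class DerivationDemo {
  enum DatasetType {
    PHYSICAL_DATASET, PHYSICAL_DATASET_SOURCE_FILE, PHYSICAL_DATASET_SOURCE_FOLDER,
    PHYSICAL_DATASET_HOME_FILE, PHYSICAL_DATASET_HOME_FOLDER, VIRTUAL_DATASET
  }
  enum Derivation { DERIVED_PHYSICAL, DERIVED_VIRTUAL, DERIVED_UNKNOWN }

  // Every physical flavor maps to DERIVED_PHYSICAL, views map to
  // DERIVED_VIRTUAL, and anything else falls through to unknown.
  static Derivation derivationFor(DatasetType parentType) {
    switch (parentType) {
      case PHYSICAL_DATASET:
      case PHYSICAL_DATASET_SOURCE_FILE:
      case PHYSICAL_DATASET_SOURCE_FOLDER:
      case PHYSICAL_DATASET_HOME_FILE:
      case PHYSICAL_DATASET_HOME_FOLDER:
        return Derivation.DERIVED_PHYSICAL;
      case VIRTUAL_DATASET:
        return Derivation.DERIVED_VIRTUAL;
      default:
        return Derivation.DERIVED_UNKNOWN;
    }
  }

  public static void main(String[] args) {
    System.out.println(derivationFor(DatasetType.PHYSICAL_DATASET_HOME_FILE)); // DERIVED_PHYSICAL
    System.out.println(derivationFor(DatasetType.VIRTUAL_DATASET));            // DERIVED_VIRTUAL
  }
}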
com.dremio.service.jobs.CompletionListener; import com.dremio.service.jobs.JobNotFoundException; import com.dremio.service.jobs.JobStatusListener; -import com.dremio.service.jobs.JobSubmittedListener; import com.dremio.service.jobs.JobsService; import com.dremio.service.jobs.JobsVersionContext; import com.dremio.service.jobs.SqlQuery; @@ -152,23 +154,16 @@ public class DatasetVersionResource extends BaseResourceWithAllocator { private static final Logger logger = LoggerFactory.getLogger(DatasetVersionResource.class); - private static final int WAIT_FOR_RUN_HISTORY_S = 15; - private final DatasetTool tool; private final QueryExecutor executor; private final DatasetVersionMutator datasetService; - private final JobsService jobsService; - private final Transformer transformer; private final Recommenders recommenders; private final JoinRecommender joinRecommender; - private final SecurityContext securityContext; private final DatasetPath datasetPath; private final DatasetVersion version; private final HistogramGenerator histograms; - private final CatalogService catalogService; - @Inject public DatasetVersionResource ( SabotContext context, @@ -217,7 +212,6 @@ public DatasetVersionResource( super(allocator); this.executor = executor; this.datasetService = datasetService; - this.jobsService = jobsService; this.recommenders = recommenders; this.transformer = transformer; this.joinRecommender = joinRecommender; @@ -226,7 +220,6 @@ public DatasetVersionResource( this.securityContext = securityContext; this.datasetPath = datasetPath; this.version = version; - this.catalogService = catalogService; } protected DatasetVersionResource( @@ -247,7 +240,6 @@ protected DatasetVersionResource( super(allocatorFactory); this.executor = executor; this.datasetService = datasetService; - this.jobsService = jobsService; this.recommenders = recommenders; this.transformer = transformer; this.joinRecommender = joinRecommender; @@ -256,7 +248,6 @@ protected DatasetVersionResource( this.securityContext = securityContext; this.datasetPath = datasetPath; this.version = version; - this.catalogService = catalogService; } @@ -338,7 +329,7 @@ public InitialPreviewResponse getDatasetForVersion( @QueryParam("refType") String refType, @QueryParam("refValue") String refValue, @QueryParam("triggerJob") String triggerJob) // "true" or "false". Default - "true". On error - "true" - throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException, NewDatasetQueryException, IOException { + throws DatasetVersionNotFoundException, NamespaceException, JobNotFoundException { Catalog catalog = datasetService.getCatalog(); final boolean versioned = isVersionedPlugin(datasetPath, catalog); @@ -348,16 +339,39 @@ public InitialPreviewResponse getDatasetForVersion( final VirtualDatasetUI dataset = getDatasetConfig(versioned); return tool.createPreviewResponseForExistingDataset( - getOrCreateAllocator("getDatasetForVersion"), dataset, new DatasetVersionResourcePath( datasetPath, (tipVersion != null) ? 
tipVersion : dataset.getVersion()), - limit, engineName, sessionId, triggerJob); } + // Versioned sources + // First check if the version already exists as result of running the query + try { + final VirtualDatasetUI vds = getDatasetConfig(true); + return tool.createPreviewResponseForExistingDataset( + vds, + new DatasetVersionResourcePath(datasetPath, version), + engineName, + sessionId, + triggerJob); + } catch (DatasetVersionNotFoundException e) { + // ignore + } + + // The version doesn't exist, generate initial preview response from source. + return getInitialPreviewResponseForVersionedSource(engineName, sessionId, refType, refValue, triggerJob); + } + + private InitialPreviewResponse getInitialPreviewResponseForVersionedSource( + String engineName, + String sessionId, + String refType, + String refValue, + String triggerJob) + throws NamespaceException, JobNotFoundException { if (refType == null || refValue == null) { throw UserException .validationError() @@ -366,55 +380,53 @@ public InitialPreviewResponse getDatasetForVersion( } final Map versionContextReqMapping = - DatasetResourceUtils.createSourceVersionMapping( - datasetPath.getRoot().getName(), refType, refValue); + DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue); final Map versionContextMapping = - DatasetResourceUtils.createSourceVersionMapping(versionContextReqMapping); + DatasetResourceUtils.createSourceVersionMapping(versionContextReqMapping); - catalog = catalog.resolveCatalog(versionContextMapping); + final Catalog catalog = datasetService.getCatalog().resolveCatalog(versionContextMapping); DremioTable table = catalog.getTable(new NamespaceKey(datasetPath.toPathList())); if (!(table instanceof ViewTable)) { throw UserException.validationError() - .message("Expecting getting a view but returns a entity type of %s", table.getClass()) - .buildSilently(); + .message("Expecting getting a view but returns a entity type of %s", table.getClass()) + .buildSilently(); } - tool.newUntitled( - getOrCreateAllocator("newUntitled"), - new FromSQL(table.getDatasetConfig().getVirtualDataset().getSql()), - version, - table.getDatasetConfig().getVirtualDataset().getContextList(), - null, - false, - limit, - engineName, - sessionId, - versionContextReqMapping); - - String tag = table.getDatasetConfig().getTag(); - - VirtualDatasetUI virtualDatasetUI = - datasetService.getVersion(tool.TMP_DATASET_PATH, version, true); - virtualDatasetUI.setFullPathList(datasetPath.toPathList()); - virtualDatasetUI.setName(datasetPath.getDataset().getName()); - virtualDatasetUI.setIsNamed(true); - virtualDatasetUI.setSavedTag(tag); - logger.debug("Creating temp version {} in datasetVersion for view {} at version {} ", - DatasetsUtil.printVersionViewInfo(virtualDatasetUI), + VirtualDatasetUI vds = new VirtualDatasetUI(); + vds.setOwner(securityContext.getUserPrincipal().getName()); + vds.setVersion(version); + vds.setFullPathList(datasetPath.toPathList()); + vds.setName(datasetPath.getDataset().getName()); + vds.setIsNamed(true); + vds.setId(table.getDatasetConfig().getId().getId()); + vds.setContextList(table.getDatasetConfig().getVirtualDataset().getContextList()); + vds.setSql(table.getDatasetConfig().getVirtualDataset().getSql()); + vds.setSqlFieldsList(table.getDatasetConfig().getVirtualDataset().getSqlFieldsList()); + final FromBase from = new FromSQL(table.getDatasetConfig().getVirtualDataset().getSql()); + vds.setState(new VirtualDatasetState() + 
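The versioned-source path above first tries to load an already-materialized dataset version and only rebuilds one from the view definition when a DatasetVersionNotFoundException signals it was never created. A minimal sketch of that exception-driven fallback, with the service calls stubbed and the exception defined locally:

public class VersionFallbackDemo {
  // Local stand-in for the DAC exception, not the real class.
  static final class DatasetVersionNotFoundException extends RuntimeException {}

  // Stand-in for datasetService.getVersion(...): throws when the version was
  // never materialized by a previous run of the query.
  static String getExistingVersion(boolean exists) {
    if (!exists) {
      throw new DatasetVersionNotFoundException();
    }
    return "existing-version";
  }

  static String buildFromSource() {
    return "fresh-version-built-from-view-definition";
  }

  // Prefer the stored version; fall back to rebuilding only when it is absent.
  static String resolve(boolean exists) {
    try {
      return getExistingVersion(exists);
    } catch (DatasetVersionNotFoundException e) {
      // ignore: the version simply has not been created yet
    }
    return buildFromSource();
  }

  public static void main(String[] args) {
    System.out.println(resolve(true));  // existing-version
    System.out.println(resolve(false)); // fresh-version-built-from-view-definition
  }
}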
.setContextList(table.getDatasetConfig().getVirtualDataset().getContextList()) + .setFrom(from.wrap())); + vds.setLastTransform(new Transform(TransformType.createFromParent) + .setTransformCreateFromParent(new TransformCreateFromParent(from.wrap()))); + final List sourceVersionReferences = + DatasetResourceUtils.createSourceVersionReferenceList(versionContextReqMapping); + vds.setReferencesList(sourceVersionReferences); + vds.setSavedTag(table.getDatasetConfig().getTag()); + + logger.debug("Creating temp version {} in datasetVersion for view {} at version {}.", + DatasetsUtil.printVersionViewInfo(vds), datasetPath.toUnescapedString(), versionContextMapping.get(datasetPath.getRoot().getName())); - datasetService.putVersion(virtualDatasetUI); - virtualDatasetUI = datasetService.getVersion(datasetPath, version, true); + datasetService.putVersion(vds); + vds = datasetService.getVersion(datasetPath, version, true); return tool.createPreviewResponseForExistingDataset( - getOrCreateAllocator("getDatasetForVersion"), - virtualDatasetUI, - new DatasetVersionResourcePath(datasetPath, version), - limit, - engineName, - sessionId, - triggerJob); + vds, + new DatasetVersionResourcePath(datasetPath, version), + engineName, + sessionId, + triggerJob); } @GET @Path("review") @@ -450,10 +462,33 @@ public InitialPreviewResponse transformAndPreview( throw new ClientErrorException("Query parameter 'newVersion' should not be null"); } - final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, QueryType.UI_PREVIEW); + final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, false, QueryType.UI_PREVIEW); return tool.createPreviewResponse(datasetPath, datasetAndData, getOrCreateAllocator("transformAndPreview"), limit, false); } + @POST @Path("transform_and_preview") + @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) + public InitialPreviewResponse transformAndPreviewAsync( + /* Body */ TransformBase transform, + @QueryParam("newVersion") DatasetVersion newVersion, + @QueryParam("limit") @DefaultValue("50") int limit) + throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { + if (newVersion == null) { + throw new ClientErrorException("Query parameter 'newVersion' should not be null"); + } + + final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, datasetPath, getDatasetConfig(), transform, true, QueryType.UI_PREVIEW); + + return InitialPreviewResponse.of( + newDataset(datasetAndData.getDataset(), null), + datasetAndData.getJobId(), + datasetAndData.getSessionId(), + null, + true, + null, + null); // errors will be retrieved from job status + } + /** * Apply the given transform on the dataset version and return results. Also save the * transformed dataset as given new version. 
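transformAndPreviewAsync above returns as soon as the transform job is submitted; rows and errors are retrieved later from job status rather than from the HTTP response. A rough JDK-only sketch of that submit-then-poll contract; the job ids, futures, and status strings below are stand-ins for the real jobs service, not its API:

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

public class AsyncSubmitDemo {
  private final Map<String, CompletableFuture<String>> jobs = new ConcurrentHashMap<>();

  // Submit: start the work and hand back the job id immediately, before any
  // rows or errors exist.
  String submit(String sql) {
    String jobId = UUID.randomUUID().toString();
    jobs.put(jobId, CompletableFuture.supplyAsync(() -> "rows for: " + sql));
    return jobId;
  }

  // Poll: clients fetch completion state (and failures) from job status later.
  String status(String jobId) {
    return jobs.get(jobId).isDone() ? "COMPLETED" : "RUNNING";
  }

  public static void main(String[] args) throws Exception {
    AsyncSubmitDemo demo = new AsyncSubmitDemo();
    String jobId = demo.submit("SELECT 1");
    System.out.println(demo.status(jobId));        // usually RUNNING right after submit
    demo.jobs.get(jobId).get(5, TimeUnit.SECONDS); // wait for the demo job to finish
    System.out.println(demo.status(jobId));        // COMPLETED
  }
}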
@@ -477,7 +512,7 @@ public InitialTransformAndRunResponse transformAndRun( } final DatasetVersionResourcePath resourcePath = resourcePath(); - final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, QueryType.UI_RUN); + final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, false, QueryType.UI_RUN); final History history = tool.getHistory(resourcePath.getDataset(), datasetAndData.getDataset().getVersion()); return InitialTransformAndRunResponse.of( newDataset(datasetAndData.getDataset(), null), @@ -485,6 +520,38 @@ datasetAndData.getSessionId(), history); } + + /** + * Apply the given transform on the dataset version and return initial results after the job is started. Also creates + * a thread to save the transformed dataset as the given new version after the metadata is retrieved. + * + * @param transform + * @param newVersion + * @return + * @throws DatasetVersionNotFoundException + * @throws DatasetNotFoundException + * @throws NamespaceException + */ + @POST @Path("transform_and_run") + @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) + public InitialTransformAndRunResponse transformAndRunAsync( + /* Body */ TransformBase transform, + @QueryParam("newVersion") DatasetVersion newVersion + ) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { + + if (newVersion == null) { + throw new ClientErrorException("Query parameter 'newVersion' should not be null"); + } + + final DatasetVersionResourcePath resourcePath = resourcePath(); + final DatasetAndData datasetAndData = transformer.transformWithExecute(newVersion, resourcePath.getDataset(), getDatasetConfig(), transform, true, QueryType.UI_RUN); + return InitialTransformAndRunResponse.of( + newDataset(datasetAndData.getDataset(), null), + datasetAndData.getJobId(), + datasetAndData.getSessionId(), + null); + } + protected DatasetUI newDataset(VirtualDatasetUI vds, DatasetVersion tipVersion) throws NamespaceException { return DatasetUI.newInstance(vds, null, datasetService.getNamespaceService()); } @@ -507,23 +574,23 @@ public InitialRunResponse run(@QueryParam("tipVersion") DatasetVersion tipVersio final SqlQuery query = new SqlQuery(virtualDatasetUI.getSql(), virtualDatasetUI.getState().getContextList(), securityContext, Strings.isNullOrEmpty(engineName)? null : engineName, sessionId, sourceVersionMapping); - JobSubmittedListener listener = new JobSubmittedListener(); + MetadataJobStatusListener listener = new MetadataJobStatusListener(tool, virtualDatasetUI, null); + // The saved dataset is incomplete, we want to save the dataset again once the metadata is collected. + if (virtualDatasetUI.getSqlFieldsList() == null) { + listener.waitToApplyMetadataAndSaveDataset(); + } final JobData jobData = executor.runQueryWithListener(query, QueryType.UI_RUN, datasetPath, version, listener); final JobId jobId = jobData.getJobId(); final SessionId jobDataSessionId = jobData.getSessionId(); - // wait for job to start (or WAIT_FOR_RUN_HISTORY_S seconds).
- boolean success = listener.await(WAIT_FOR_RUN_HISTORY_S, TimeUnit.SECONDS); - if (!success) { - throw new InterruptedException("Starting a query timed out after " + WAIT_FOR_RUN_HISTORY_S + - " seconds, please try again."); + if (virtualDatasetUI.getSqlFieldsList() == null) { + listener.setJobId(jobData.getJobId()); } // tip version is optional, as it is only needed when we are navigated back in history // otherwise assume the current version is at the tip of the history tipVersion = tipVersion != null ? tipVersion : virtualDatasetUI.getVersion(); final History history = tool.getHistory(datasetPath, virtualDatasetUI.getVersion(), tipVersion); - // VBesschetnov 2019-01-08 - // this is requires as BE generates apiLinks, that is used by UI to send requests for preview/run. In case, when history + // This is required because the BE generates apiLinks that the UI uses to send preview/run requests. When the history // of a dataset references a version of another dataset and a user navigates to that version and tries to preview it, // we would not be able to resolve a tip version and the preview will fail. We should always send requests to the original dataset path (tip version path) to be able to get preview/run data @@ -559,9 +626,9 @@ public InitialPendingTransformResponse transformDataSetPreview( * Saves this version as the current version of a dataset under the asDatasetPath if provided * * @param asDatasetPath - * @param savedTag the last OCC version known the the client. If no one else has saved + * @param savedTag the last OCC version known to the client. If no one else has saved to this name since the client making the request learned of this OCC - version then the request will be successful. Otherwise it will fail + version then the request will be successful. Otherwise, it will fail because saving would clobber the already saved dataset that the client did not know about. * @return @@ -587,13 +654,20 @@ public DatasetUIWithHistory saveAsDataSet( if(versioned && !versionedViewEnabled){ throw UserException.unsupportedError().message("Versioned view is not enabled").buildSilently(); } + // Gets the latest version of the view from the DatasetVersion store final VirtualDatasetUI vds = getDatasetConfig(versioned); - if(savedTag != null && branchName == null && versioned){ + if(vds != null && branchName == null && versioned) { branchName = vds.getReferencesList().get(0).getReference().getValue(); } - if(versioned && branchName != null){ - setReference(vds, branchName); + + if (versioned) { + if (branchName != null) { + setReference(vds, branchName); + } else { + throw UserException.unsupportedError().message("Tried to create a versioned view but branch name is null").buildSilently(); + } } + final DatasetUI savedDataset = save(vds, asDatasetPath, savedTag, branchName, versioned); return new DatasetUIWithHistory(savedDataset, tool.getHistory(asDatasetPath, savedDataset.getDatasetVersion())); } @@ -627,19 +701,28 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa return save(vds, asDatasetPath, savedTag, null, false, attributes); } - public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String savedTag, String branchName, boolean isVersionedSource, NamespaceAttribute... attributes) + public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String savedTag, String branchName, final boolean isVersionedSource, NamespaceAttribute...
attributes) throws DatasetNotFoundException, UserNotFoundException, NamespaceException, DatasetVersionNotFoundException { checkSaveVersionedView(branchName, isVersionedSource); String queryString = vds.getSql(); - boolean isVersionViewEnabled = datasetService.checkIfVersionedViewEnabled(); + final boolean isVersionViewEnabled = datasetService.checkIfVersionedViewEnabled(); if (isVersionViewEnabled) { ParserUtil.validateViewQuery(queryString); } - final String nameConflictErrorMsg = String.format("VDS '%s' already exists. Please enter a different name.", - asDatasetPath.getLeaf()); + + if (isVersionedSource) { + if (ParserUtil.checkTimeTravelOnView(queryString)){ + throw UserException.unsupportedError() + .message("Versioned views not supported for time travel queries. Please use AT TAG or AT COMMIT instead") + .buildSilently(); + } + } + final List fullPathList = asDatasetPath.toPathList(); if(!isVersionedSource){ if (isAncestor(vds, fullPathList)) { + final String nameConflictErrorMsg = String.format("VDS '%s' already exists. Please enter a different name.", + asDatasetPath.getLeaf()); throw new ConflictException(nameConflictErrorMsg); } if (!datasetPath.equals(asDatasetPath)) { @@ -679,7 +762,9 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa } catch(NamespaceNotFoundException nfe) { throw new ClientErrorException("Parent folder doesn't exist", nfe); } catch(ConcurrentModificationException cme) { - throw new ConflictException(nameConflictErrorMsg, cme); + final String cmeMessage = String.format("View '%s' experienced a concurrent modification exception. Please ensure there are no self-references in your view and no other systems are editing this view.", + asDatasetPath.getLeaf()); + throw new ConflictException(cmeMessage, cme); } catch (IOException e) { throw UserException.validationError().message("Error saving to the source: %s", e.getMessage()).buildSilently(); } @@ -688,7 +773,7 @@ public DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String sa } /** - * @return true if pathList is an ancestor (parent or grand parent) of the virtual dataset + * @return true if pathList is an ancestor (parent or grandparent) of the virtual dataset */ private static boolean isAncestor(VirtualDatasetUI vds, List pathList) { List parents = vds.getParentsList(); @@ -791,20 +876,19 @@ public Card getSplitCard( @POST @Path("/editOriginalSql") @Produces(APPLICATION_JSON) public InitialPreviewResponse reapplyDatasetAndPreview() throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException, JobNotFoundException { - Transformer.DatasetAndData datasetAndData = reapplyDataset(QueryType.UI_PREVIEW, JobStatusListener.NO_OP); + Transformer.DatasetAndData datasetAndData = reapplyDataset(JobStatusListener.NO_OP); //max records = 0 means, that we should not wait for job completion return tool.createPreviewResponse(new DatasetPath(datasetAndData.getDataset().getFullPathList()), datasetAndData, getOrCreateAllocator("reapplyDatasetAndPreview"), 0, false); } - private Transformer.DatasetAndData reapplyDataset(QueryType queryType, JobStatusListener listener) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { + private Transformer.DatasetAndData reapplyDataset(JobStatusListener listener) throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { List items = getPreviousDatasetVersions(getDatasetConfig()); List transforms = new ArrayList<>(); for(VirtualDatasetUI dataset : items){ 
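The save path above now distinguishes an optimistic-concurrency failure from a plain name conflict and reports a message pointing at self-referencing views or concurrent editors. A small self-contained sketch of that exception translation, reusing the message string introduced here; ConflictException is a local stand-in for the DAC class:

import java.util.ConcurrentModificationException;

public class ConflictMappingDemo {
  static final class ConflictException extends RuntimeException {
    ConflictException(String message, Throwable cause) { super(message, cause); }
  }

  // A CME from the store (optimistic concurrency) is translated into a
  // user-facing conflict instead of reusing the generic name-conflict text.
  static void save(String viewName, Runnable store) {
    try {
      store.run();
    } catch (ConcurrentModificationException cme) {
      throw new ConflictException(String.format(
          "View '%s' experienced a concurrent modification exception. Please ensure there are "
              + "no self-references in your view and no other systems are editing this view.",
          viewName), cme);
    }
  }

  public static void main(String[] args) {
    try {
      save("sales_view", () -> { throw new ConcurrentModificationException(); });
    } catch (ConflictException e) {
      System.out.println(e.getMessage());
    }
  }
}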
transforms.add(dataset.getLastTransform()); } - return transformer.editOriginalSql(version, transforms, queryType, listener); - + return transformer.editOriginalSql(version, transforms, QueryType.UI_PREVIEW, listener); } @POST @Path("/reapplyAndSave") @@ -813,7 +897,7 @@ public DatasetUIWithHistory reapplySave( @QueryParam("as") DatasetPath asDatasetPath ) throws DatasetVersionNotFoundException, UserNotFoundException, DatasetNotFoundException, NamespaceException { final CompletionListener completionListener = new CompletionListener(); - Transformer.DatasetAndData datasetAndData = reapplyDataset(QueryType.UI_PREVIEW, completionListener); + Transformer.DatasetAndData datasetAndData = reapplyDataset(completionListener); completionListener.awaitUnchecked(); DatasetUI savedDataset = save(datasetAndData.getDataset(), asDatasetPath, null); return new DatasetUIWithHistory(savedDataset, tool.getHistory(asDatasetPath, datasetAndData.getDataset().getVersion())); @@ -849,10 +933,10 @@ private ReplaceValuesCard genReplaceValuesCard(List selected, Selection Set selectedSet = new HashSet<>(selected); SqlQuery query = new SqlQuery(virtualDatasetUI.getSql(), virtualDatasetUI.getState().getContextList(), securityContext); DataType colType = getColType(selection.getColName()); - Histogram histo = histograms.getHistogram(datasetPath, version, selection, colType, query, getOrCreateAllocator("genReplaceValuesCard")); + Histogram histogram = histograms.getHistogram(datasetPath, version, selection, colType, query, getOrCreateAllocator("genReplaceValuesCard")); long selectedCount = histograms.getSelectionCount(datasetPath, version, query, colType, selection.getColName(), selectedSet, getOrCreateAllocator("genReplaceValuesCard")); - return new ReplaceValuesCard(histo.getValues(), selectedCount, histo.getAvailableValues() - selectedCount, histo.getAvailableValues()); + return new ReplaceValuesCard(histogram.getValues(), selectedCount, histogram.getAvailableValues() - selectedCount, histogram.getAvailableValues()); } @POST @Path("replace_preview") @@ -942,16 +1026,6 @@ private ReplaceValuesCard getValuesCard(ReplaceValuesPreviewReq req) throws Data return genReplaceValuesCard(req.getReplacedValues(), selection); } - - @GET @Path("history") - @Produces(APPLICATION_JSON) - public History getHistory(@QueryParam("tipVersion") DatasetVersion tipVersion) throws DatasetVersionNotFoundException { - // tip version is optional, as it is only needed when we are navigated back in history - // otherwise assume the current version is at the tip of the history - tipVersion = tipVersion != null ? 
tipVersion : virtualDatasetUI.getVersion(); - return tool.getHistory(datasetPath, version, tipVersion); - } - public static final List AVAILABLE_TYPES_FOR_CLEANING = unmodifiableList(asList(TEXT, INTEGER, FLOAT)); @POST @Path("clean") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java index 9cd15f0457..747cd8ec92 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/DatasetsResource.java @@ -15,8 +15,7 @@ */ package com.dremio.dac.explore; -import static com.dremio.dac.explore.DatasetTool.TMP_DATASET_PATH; - +import java.security.AccessControlException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -39,7 +38,6 @@ import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; import com.dremio.dac.explore.model.CreateFromSQL; -import com.dremio.dac.explore.model.DatasetDetails; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetSearchUI; import com.dremio.dac.explore.model.DatasetSearchUIs; @@ -50,18 +48,7 @@ import com.dremio.dac.explore.model.InitialUntitledRunResponse; import com.dremio.dac.explore.model.NewUntitledFromParentRequest; import com.dremio.dac.explore.model.VersionContextReq; -import com.dremio.dac.model.common.DACRuntimeException; -import com.dremio.dac.model.folder.SourceFolderPath; -import com.dremio.dac.model.namespace.DatasetContainer; -import com.dremio.dac.model.sources.PhysicalDatasetPath; -import com.dremio.dac.model.sources.SourceName; -import com.dremio.dac.model.sources.SourcePath; -import com.dremio.dac.model.sources.SourceUI; -import com.dremio.dac.model.spaces.Home; import com.dremio.dac.model.spaces.HomeName; -import com.dremio.dac.model.spaces.HomePath; -import com.dremio.dac.model.spaces.Space; -import com.dremio.dac.model.spaces.SpacePath; import com.dremio.dac.proto.model.dataset.FromSQL; import com.dremio.dac.proto.model.dataset.FromTable; import com.dremio.dac.resource.BaseResourceWithAllocator; @@ -73,30 +60,27 @@ import com.dremio.dac.service.errors.DatasetNotFoundException; import com.dremio.dac.service.errors.DatasetVersionNotFoundException; import com.dremio.dac.service.errors.NewDatasetQueryException; +import com.dremio.dac.service.reflection.ReflectionServiceHelper; import com.dremio.dac.service.search.SearchContainer; import com.dremio.datastore.SearchTypes.SortOrder; import com.dremio.exec.catalog.CatalogUtil; -import com.dremio.exec.catalog.ConnectionReader; import com.dremio.exec.catalog.DatasetCatalog; import com.dremio.exec.catalog.DremioTable; import com.dremio.file.FilePath; -import com.dremio.file.SourceFilePath; import com.dremio.service.jobs.JobsService; -import com.dremio.service.namespace.BoundedDatasetCount; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.DatasetVersion; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.proto.NameSpaceContainer; -import com.dremio.service.namespace.source.proto.SourceConfig; -import com.dremio.service.namespace.space.proto.ExtendedConfig; -import com.dremio.service.namespace.space.proto.HomeConfig; -import 
com.dremio.service.namespace.space.proto.SpaceConfig; +import com.dremio.service.users.User; +import com.dremio.service.users.UserService; import com.google.common.base.Preconditions; /** - * List datasets from space/folder/home/source + * Creates datasets from SQL Runner + * Searches datasets from Catalog + * Provides dataset summary from Catalog * */ @RestResource @@ -104,50 +88,75 @@ @RolesAllowed({"admin", "user"}) @Path("/datasets") public class DatasetsResource extends BaseResourceWithAllocator { - private final DatasetVersionMutator datasetService; - private final NamespaceService namespaceService; private final DatasetTool tool; - private final ConnectionReader connectionReader; private final DatasetCatalog datasetCatalog; private final CatalogServiceHelper catalogServiceHelper; - private CollaborationHelper collaborationService; + private final CollaborationHelper collaborationService; + private final ReflectionServiceHelper reflectionServiceHelper; + private final UserService userService; @Inject public DatasetsResource( - NamespaceService namespaceService, DatasetVersionMutator datasetService, JobsService jobsService, QueryExecutor executor, - ConnectionReader connectionReader, @Context SecurityContext securityContext, DatasetCatalog datasetCatalog, CatalogServiceHelper catalogServiceHelper, BufferAllocatorFactory allocatorFactory, - CollaborationHelper collaborationService) { - this(namespaceService, datasetService, + CollaborationHelper collaborationService, + ReflectionServiceHelper reflectionServiceHelper, + UserService userService) { + this(datasetService, new DatasetTool(datasetService, jobsService, executor, securityContext), - connectionReader, datasetCatalog, catalogServiceHelper, allocatorFactory, collaborationService); + datasetCatalog, catalogServiceHelper, allocatorFactory, collaborationService, reflectionServiceHelper, userService); } - protected DatasetsResource(NamespaceService namespaceService, + protected DatasetsResource( DatasetVersionMutator datasetService, DatasetTool tool, - ConnectionReader connectionReader, DatasetCatalog datasetCatalog, CatalogServiceHelper catalogServiceHelper, BufferAllocatorFactory allocatorFactory, - CollaborationHelper collaborationService + CollaborationHelper collaborationService, + ReflectionServiceHelper reflectionServiceHelper, + UserService userService ) { super(allocatorFactory); - this.namespaceService = namespaceService; this.datasetService = datasetService; this.tool = tool; - this.connectionReader = connectionReader; this.datasetCatalog = datasetCatalog; this.catalogServiceHelper = catalogServiceHelper; this.collaborationService = collaborationService; + this.reflectionServiceHelper = reflectionServiceHelper; + this.userService = userService; + } + + private DatasetConfig getDatasetConfig(DatasetPath datasetPath, Map references) { + DatasetCatalog datasetNewCatalog = datasetCatalog.resolveCatalog(DatasetResourceUtils.createSourceVersionMapping(references)); + NamespaceKey namespaceKey = datasetPath.toNamespaceKey(); + final DremioTable table = datasetNewCatalog.getTable(namespaceKey); + if (table == null) { + throw new DatasetNotFoundException(datasetPath); + } + return table.getDatasetConfig(); + } + + private DatasetSummary getDatasetSummary(DatasetPath datasetPath, + Map references) throws NamespaceException, DatasetNotFoundException { + NamespaceKey namespaceKey = datasetPath.toNamespaceKey(); + final DatasetConfig datasetConfig = getDatasetConfig(datasetPath, references); + + return newDatasetSummary(datasetConfig, 
+ datasetService.getJobsCount(namespaceKey), + datasetService.getDescendantsCount(namespaceKey), + references, + Collections.emptyList(), + null, + null, + null); } private InitialPreviewResponse newUntitled(DatasetPath fromDatasetPath, @@ -159,11 +168,13 @@ private InitialPreviewResponse newUntitled(DatasetPath fromDatasetPath, String triggerJob) throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { FromTable from = new FromTable(fromDatasetPath.toPathString()); - DatasetSummary summary = getDatasetSummary(fromDatasetPath, references); if (DatasetTool.shouldTriggerJob(triggerJob)) { + DatasetSummary summary = getDatasetSummary(fromDatasetPath, references); return newUntitled(from, newVersion, fromDatasetPath.toParentPathList(), summary, limit, engineName, sessionId, references); } else { - return tool.createPreviewResponseForPhysicalDataset(from, newVersion, fromDatasetPath.toParentPathList(), summary, references); + DatasetConfig datasetConfig = getDatasetConfig(fromDatasetPath, references); + return tool.createPreviewResponseForPhysicalDataset(from, newVersion, fromDatasetPath.toParentPathList(), + datasetConfig.getType(), datasetConfig.getFullPathList(), references); } } @@ -176,7 +187,7 @@ private InitialPreviewResponse newUntitled(FromBase from, DatasetVersion newVers } /** - * A user clicked "new query" and then wrote a SQL query. This is the first version of the dataset we will be creating (this is a "initial commit") + * A user clicked "new query" and then wrote a SQL query. This is the first version of the dataset we will be creating (this is an "initial commit") * * @param newVersion The version id we should use for the new version of dataset (generated by client) * @param sql The sql information to generate the new dataset @@ -188,6 +199,7 @@ private InitialPreviewResponse newUntitled(FromBase from, DatasetVersion newVers @POST @Path("new_untitled_sql") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) + @Deprecated public InitialPreviewResponse newUntitledSql( @QueryParam("newVersion") DatasetVersion newVersion, @QueryParam("limit") Integer limit, @@ -208,7 +220,7 @@ public InitialPreviewResponse newUntitledSql( } /** - * A user clicked "SQL Runner", then wrote a SQL query and then clicked "Preview". This is the first version of the dataset we will be creating (this is a "initial commit") + * A user clicked "SQL Runner", then wrote a SQL query and then clicked "Preview". 
This is the first version of the dataset we will be creating (this is an "initial commit") * * @param newVersion The version id we should use for the new version of dataset (generated by client) * @param sql The sql information to generate the new dataset @@ -233,19 +245,18 @@ public InitialUntitledRunResponse newTmpUntitledSql( sql.getContext(), sql.getEngineName(), sessionId, - sql.getReferences(), - limit // ignored - ); + sql.getReferences()); } @POST @Path("new_untitled_sql_and_run") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) + @Deprecated public InitialRunResponse newUntitledSqlAndRun( @QueryParam("newVersion") DatasetVersion newVersion, @QueryParam("sessionId") String sessionId, /* body */ CreateFromSQL sql) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, InterruptedException { + throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException { Preconditions.checkNotNull(newVersion, "newVersion should not be null"); return tool.newUntitledAndRun( @@ -264,7 +275,7 @@ public InitialUntitledRunResponse newTmpUntitledSqlAndRun( @QueryParam("newVersion") DatasetVersion newVersion, @QueryParam("sessionId") String sessionId, /* body */ CreateFromSQL sql) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, InterruptedException { + throws DatasetNotFoundException, DatasetVersionNotFoundException { Preconditions.checkNotNull(newVersion, "newVersion should not be null"); return tool.newTmpUntitledAndRun( @@ -315,24 +326,6 @@ public InitialPreviewResponse newUntitledFromParent( } } - public InitialPreviewResponse createUntitledFromSourceFile(SourceName sourceName, String path, Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path); - return tool.newUntitled(getOrCreateAllocator("createUntitledFromSourceFile"), new FromTable(filePath.toPathString()), DatasetVersion.newVersion(), filePath.toParentPathList(), limit); - } - - public InitialPreviewResponse createUntitledFromSourceFolder(SourceName sourceName, String path, Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - SourceFolderPath folderPath = SourceFolderPath.fromURLPath(sourceName, path); - return tool.newUntitled(getOrCreateAllocator("createUntitledFromSourceFolder"), new FromTable(folderPath.toPathString()), DatasetVersion.newVersion(), folderPath.toPathList(), limit); - } - - public InitialPreviewResponse createUntitledFromPhysicalDataset(SourceName sourceName, String path, Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - PhysicalDatasetPath datasetPath = PhysicalDatasetPath.fromURLPath(sourceName, path); - return tool.newUntitled(getOrCreateAllocator("createUntitledFromPhysicalDataset"), new FromTable(datasetPath.toPathString()), DatasetVersion.newVersion(), datasetPath.toParentPathList(), limit); - } - public InitialPreviewResponse createUntitledFromHomeFile(HomeName homeName, String path, Integer limit) throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { FilePath filePath = FilePath.fromURLPath(homeName, path); @@ -368,31 +361,55 @@ public DatasetSummary getDatasetSummary( @QueryParam("refType") String refType, @QueryParam("refValue") 
String refValue) throws NamespaceException, DatasetNotFoundException { final DatasetPath datasetPath = new DatasetPath(PathUtils.toPathComponents(path)); - return getDatasetSummary(datasetPath, DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue)); + return getEnhancedDatasetSummary(datasetPath, DatasetResourceUtils.createSourceVersionMapping(datasetPath.getRoot().getName(), refType, refValue)); } - private DatasetSummary getDatasetSummary(DatasetPath datasetPath, - Map references) throws NamespaceException, DatasetNotFoundException { - DatasetCatalog datasetNewCatalog = datasetCatalog.resolveCatalog(DatasetResourceUtils.createSourceVersionMapping(references)); - final DremioTable table = datasetNewCatalog.getTable(datasetPath.toNamespaceKey()); - if (table == null) { - throw new DatasetNotFoundException(datasetPath); + protected User getUser(String username, String entityId) { + User user = null; + if (username != null) { + try { + user = userService.getUser(username); + } catch (Exception e) { + // ignore + } } - final DatasetConfig datasetConfig = table.getDatasetConfig(); + return user; + } + + private DatasetSummary getEnhancedDatasetSummary(DatasetPath datasetPath, + Map references) + throws NamespaceException, DatasetNotFoundException { + NamespaceKey namespaceKey = datasetPath.toNamespaceKey(); + final DatasetConfig datasetConfig = getDatasetConfig(datasetPath, references); + + String entityId = datasetConfig.getId().getId(); Optional tags = Optional.empty(); - String sourceName = datasetPath.toNamespaceKey().getRoot(); - Boolean isVersioned = CatalogUtil.requestedPluginSupportsVersionedTables(sourceName, datasetService.getCatalog()); + String sourceName = namespaceKey.getRoot(); + boolean isVersioned = CatalogUtil.requestedPluginSupportsVersionedTables(sourceName, datasetService.getCatalog()); if (!isVersioned) { // only use CollaborationHelper for a non-versioned dataset from a non-Arctic source // the Arctic source doesn't rely on NamespaceService, while CollaborationHelper uses NamespaceService underneath - tags = collaborationService.getTags(datasetConfig.getId().getId()); + tags = collaborationService.getTags(entityId); + } + + // TODO: DX-61580 Add last modified user to DatasetConfig + // For now, use the owner as the last modified user; this will be improved in a follow-up story. + User owner = getUser(datasetConfig.getOwner(), entityId); + User lastModifyingUser = getUser(datasetConfig.getOwner(), entityId); // datasetConfig.getLastUser(); + Boolean hasReflection; + try { + hasReflection = reflectionServiceHelper.doesDatasetHaveReflection(entityId); + } catch (AccessControlException e) { + // If the user doesn't have the proper privilege, set it to null specifically so that it's not even sent back + hasReflection = null; } return newDatasetSummary(datasetConfig, - datasetService.getJobsCount(datasetPath.toNamespaceKey()), - datasetService.getDescendantsCount(datasetPath.toNamespaceKey()), + datasetService.getJobsCount(namespaceKey), + datasetService.getDescendantsCount(namespaceKey), references, - tags.isPresent() ? 
tags.get().getTags() : Collections.emptyList(), + hasReflection, owner, lastModifyingUser); } protected DatasetSummary newDatasetSummary( @@ -400,63 +417,10 @@ protected DatasetSummary newDatasetSummary( int jobCount, int descendants, Map references, - List tags) throws NamespaceException { - return DatasetSummary.newInstance(datasetConfig, jobCount, descendants, references, tags); - } - - @GET - @Path("/context/{type}/{datasetContainer}/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - public DatasetDetails getDatasetContext(@PathParam("type") String type, - @PathParam("datasetContainer") String datasetContainer, - @PathParam("path") String path) - throws Exception { - // TODO - DX-4072 - this is a bit hacky, but not sure of a better way to do this right now, handling - // of dataset paths inside of URL paths could use overall review and standardization - final DatasetPath datasetPath = new DatasetPath(datasetContainer + "." + path); - if (datasetPath.equals(TMP_DATASET_PATH)) { - // TODO - this can be removed if the UI prevents sending tmp.UNTITLED, for now handle it gracefully and hand - // back a response that will not cause a rendering failure - return new DatasetDetails( - TMP_DATASET_PATH.toPathList(), - "", 0, 0, System.currentTimeMillis(), - new Space(null, "None", null, null, null, 0, null)); - } - - final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey()); - String containerName = datasetConfig.getFullPathList().get(0); - DatasetContainer spaceInfo; - if ("home".equals(type)) { - HomePath homePath = new HomePath(containerName); - HomeConfig home = namespaceService.getHome(homePath.toNamespaceKey()); - long dsCount = namespaceService.getAllDatasetsCount(homePath.toNamespaceKey()); - home.setExtendedConfig(new ExtendedConfig().setDatasetCount(dsCount)); - spaceInfo = newHome(homePath, home); - } else if ("space".equals(type)) { - final NamespaceKey spaceKey = new SpacePath(containerName).toNamespaceKey(); - SpaceConfig space = namespaceService.getSpace(spaceKey); - spaceInfo = newSpace(space, namespaceService.getAllDatasetsCount(spaceKey)); - } else if ("source".equals(type)) { - final NamespaceKey sourceKey = new SourcePath(containerName).toNamespaceKey(); - SourceConfig source = namespaceService.getSource(sourceKey); - BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(sourceKey, BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH); - spaceInfo = SourceUI.get(source, connectionReader) - .setNumberOfDatasets(datasetCount.getCount()); - } else { - throw new DACRuntimeException("Incorrect dataset container type provided:" + type); - } - return new DatasetDetails(datasetConfig, - datasetService.getJobsCount(datasetPath.toNamespaceKey()), - datasetService.getDescendantsCount(datasetPath.toNamespaceKey()), - spaceInfo - ); - } - - protected Home newHome(HomePath homePath, HomeConfig home) { - return new Home(homePath, home); - } - - protected Space newSpace(SpaceConfig spaceConfig, int datasetCount) throws Exception { - return Space.newInstance(spaceConfig, null, datasetCount); + List tags, + Boolean hasReflection, + User owner, + User lastModifyingUser) throws NamespaceException { + return DatasetSummary.newInstance(datasetConfig, jobCount, descendants, references, tags, hasReflection, owner, lastModifyingUser); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java index 
32601eb702..895f8ba44d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/ExtractRecommender.java @@ -70,6 +70,7 @@ public List getRules(Selection selection, DataType selColType) { return rules; } + @Override public TransformRuleWrapper wrapRule(ExtractRule extractRule) { switch (extractRule.getType()) { case pattern: diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java b/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java index 129e8ab60d..89017da0f8 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/HistogramGenerator.java @@ -383,6 +383,7 @@ static void produceRanges(List ranges, LocalDateTime min, LocalDateTime break; case MILLENNIUM: timeValue = toMillis(tmpValue.plusYears(1000)); + break; default: break; } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java b/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java index 40fec58ae2..8f4d1c16f6 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/JSONElementLocator.java @@ -379,10 +379,12 @@ public JsonPathElement last() { return path.peek(); } + @Override public Iterator iterator() { return path.descendingIterator(); } + @Override public String toString() { return Joiner.on("").join(path.descendingIterator()); } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java b/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java index 433eee5fd5..86ea5fb66a 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/MetadataJobStatusListener.java @@ -45,7 +45,6 @@ class MetadataJobStatusListener implements JobStatusListener { MetadataJobStatusListener(DatasetTool datasetTool, VirtualDatasetUI newDataset, FromBase from) { Preconditions.checkArgument(datasetTool != null, "datasetTool can't be null."); Preconditions.checkArgument(newDataset != null, "newDataset can't be null."); - Preconditions.checkArgument(from != null, "from can't be null."); this.datasetTool = datasetTool; this.newDataset = newDataset; this.from = from; @@ -62,7 +61,9 @@ public void waitToApplyMetadataAndSaveDataset() { private void applyMetadataAndSaveDataset() { try { - latch.await(); + synchronized (latch) { + latch.await(); + } } catch (final InterruptedException ex) { Throwables.propagate(ex); } @@ -98,17 +99,23 @@ public void metadataCollected(QueryMetadata metadata) { @Override public void jobFailed(Exception e) { error = e; - latch.notifyAll(); + synchronized (latch) { + latch.notifyAll(); + } } @Override public void jobCompleted() { - latch.notifyAll(); + synchronized (latch) { + latch.notifyAll(); + } } @Override public void jobCancelled(String reason) { cancelled = true; - latch.notifyAll(); + synchronized (latch) { + latch.notifyAll(); + } } }
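A thread must hold an object's monitor to call wait()/notify()/notifyAll() on it; invoking notifyAll() without the lock throws IllegalMonitorStateException. That is why the MetadataJobStatusListener hunk above wraps the await and notifyAll calls in synchronized (latch) blocks. A self-contained sketch of the guarded-wait idiom (names are illustrative; the listener's latch-based variant differs in detail):

// Guarded wait/notify: both sides must hold the monitor of the shared object.
public class GuardedWaitSketch {
  private final Object latch = new Object();
  private boolean done; // the condition guarded by the monitor

  void awaitDone() throws InterruptedException {
    synchronized (latch) {
      while (!done) { // the loop guards against spurious wakeups
        latch.wait();
      }
    }
  }

  void markDone() {
    synchronized (latch) { // without this, notifyAll() throws IllegalMonitorStateException
      done = true;
      latch.notifyAll();
    }
  }

  public static void main(String[] args) throws InterruptedException {
    GuardedWaitSketch sketch = new GuardedWaitSketch();
    Thread waiter = new Thread(() -> {
      try {
        sketch.awaitDone();
        System.out.println("released");
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    });
    waiter.start();
    sketch.markDone();
    waiter.join();
  }
}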
diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java b/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java index 72baecdec8..5fb0c3ff53 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/QueryExecutor.java @@ -68,6 +68,8 @@ import com.dremio.service.namespace.file.FileFormat; import com.dremio.service.users.SystemUser; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * A per RequestScoped class used to execute queries. */ @@ -136,6 +138,7 @@ JobData runQueryWithListener(SqlQuery query, QueryType queryType, DatasetPath da * @param runInSameThread runs attemptManager in a single thread * @param ignoreColumnLimits ignores the max number of columns allowed for a scan */ + @WithSpan JobData runQueryWithListener(SqlQuery query, QueryType queryType, DatasetPath datasetPath, DatasetVersion version, JobStatusListener statusListener, boolean runInSameThread, boolean ignoreColumnLimits) { String messagePath = datasetPath + (version == null ? "" : "/" + version); @@ -219,7 +222,7 @@ public JobData runQueryAndWaitForCompletion(SqlQuery query, QueryType queryType, return data; } - public List getColumnList(final String username, DatasetPath path, List referenceList) { + public List getColumnList(DatasetPath path, List referenceList) { Map sourceVersionMapping = QueryExecutorUtils.createSourceVersionMapping(referenceList); EntityExplorer entityExplorer = catalogService.getCatalog(MetadataRequestOptions.of( SchemaConfig.newBuilder(CatalogUser.from(context.getUserPrincipal().getName())).build(), sourceVersionMapping)); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java b/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java index 1dabcd91d1..ce597934eb 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/QueryParser.java @@ -19,6 +19,7 @@ import java.security.AccessControlException; import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.apache.calcite.plan.RelOptCost; @@ -195,7 +196,8 @@ public void planCompleted(ExecutionPlan plan) { } @Override - public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) { + public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, + long millisTaken, Map timeBreakdownPerRule) { switch(phase){ case JOIN_PLANNING_MULTI_JOIN: // Join optimization starts with multijoin analysis phase @@ -210,8 +212,10 @@ public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode break; case REDUCE_EXPRESSIONS: builder.addExpandedPlan(before); + break; default: // noop. 
+ break; } } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java index f8481c0723..5628c67cfc 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/ReplaceRecommender.java @@ -126,6 +126,7 @@ private List recommendReplacePattern(Selection selection) { return rules; } + @Override public TransformRuleWrapper wrapRule(ReplacePatternRule rule) { return new ReplaceTransformRuleWrapper(rule); } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java b/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java index 940392c166..77480b485e 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/SplitRecommender.java @@ -54,6 +54,7 @@ public List getRules(Selection selection, DataType selColType) { return rules; } + @Override public TransformRuleWrapper wrapRule(SplitRule rule) { return new SplitTransformRuleWrapper(rule); } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java b/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java index 46209299f5..0b4edfcc8d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/TransformActor.java @@ -183,7 +183,7 @@ public TransformResult visit(TransformJoin join) throws Exception { List columns = new ArrayList<>(); List joinedColumns = new ArrayList<>(); List allJoinedColumns = new ArrayList<>(); - columns.addAll(executor.getColumnList(username, rightPath, join.getReferencesList())); + columns.addAll(executor.getColumnList(rightPath, join.getReferencesList())); final int edge = m.columnCount(); for (JoinCondition jc : join.getJoinConditionsList()) { diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java b/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java index 358b8823f0..bec2684d9d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/Transformer.java @@ -33,6 +33,8 @@ import com.dremio.dac.explore.model.TransformBase; import com.dremio.dac.model.job.JobData; import com.dremio.dac.proto.model.dataset.FilterType; +import com.dremio.dac.proto.model.dataset.From; +import com.dremio.dac.proto.model.dataset.FromSQL; import com.dremio.dac.proto.model.dataset.FromType; import com.dremio.dac.proto.model.dataset.NameDatasetRef; import com.dremio.dac.proto.model.dataset.SourceVersionReference; @@ -243,8 +245,10 @@ private VirtualDatasetUI asDataset( SQLGenerator.generateSQL(protectAgainstNull(result, transform), isSupportedTransform(transform), catalogService); baseDataset.setSql(sql); baseDataset.setLastTransform(transform.wrap()); - DatasetTool.applyQueryMetadata(baseDataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(), - actor.getGrandParents(), actor.getMetadata()); + if (actor != null && actor.hasMetadata()) { + DatasetTool.applyQueryMetadata(baseDataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(), + actor.getGrandParents(), actor.getMetadata()); + } return baseDataset; } @@ -267,9 +271,10 @@ public DatasetAndData transformWithExecute( DatasetPath path, VirtualDatasetUI original, TransformBase transform, + boolean isAsync, QueryType queryType) - throws 
DatasetNotFoundException, NamespaceException { - return this.transformWithExecute(newVersion, path, original, transform, queryType, false); + throws DatasetNotFoundException { + return this.transformWithExecute(newVersion, path, original, transform, isAsync, queryType, false); } /** @@ -292,7 +297,7 @@ public InitialPendingTransformResponse transformPreviewWithExecute( BufferAllocator allocator, int limit) throws DatasetNotFoundException, NamespaceException { - final TransformResultDatsetAndData result = this.transformWithExecute(newVersion, path, original, transform, QueryType.UI_PREVIEW, true); + final TransformResultDatasetAndData result = this.transformWithExecute(newVersion, path, original, transform, false, QueryType.UI_PREVIEW, true); final TransformResult transformResult = result.getTransformResult(); final List highlightedColumnNames = Lists.newArrayList(transformResult.getModifiedColumns()); highlightedColumnNames.addAll(transformResult.getAddedColumns()); @@ -338,14 +343,19 @@ public VirtualDatasetUI getDataset() { } - private TransformResultDatsetAndData transformWithExecute( + private TransformResultDatasetAndData transformWithExecute( DatasetVersion newVersion, DatasetPath path, VirtualDatasetUI original, TransformBase transform, + boolean isAsync, QueryType queryType, boolean isPreview) - throws DatasetNotFoundException, NamespaceException { + throws DatasetNotFoundException { + + if (isAsync && transform.wrap().getType() == updateSQL) { + return updateSQLTransformWithExecuteAsync(newVersion, path, original, transform, queryType, isPreview); + } final ExecuteTransformActor actor = new ExecuteTransformActor(queryType, newVersion, original.getState(), isPreview, username(), path, executor); final TransformResult transformResult = transform.accept(actor); @@ -357,7 +367,7 @@ private TransformResultDatsetAndData transformWithExecute( final SqlQuery query = new SqlQuery(sql, vss.getContextList(), securityContext, sourceVersionMapping); actor.getMetadata(query); } - final TransformResultDatsetAndData resultToReturn = new TransformResultDatsetAndData(actor.getJobData(), + final TransformResultDatasetAndData resultToReturn = new TransformResultDatasetAndData(actor.getJobData(), asDataset(newVersion, path, original, transform, transformResult, actor, catalogService), transformResult); // save this dataset version. datasetService.putVersion(resultToReturn.getDataset()); @@ -365,6 +375,52 @@ private TransformResultDatsetAndData transformWithExecute( return resultToReturn; } + // If isAsync is true and the transform type is updateSQL, we need to skip the actor visit and submit the query later + // asynchronously. The reason is that the actor executes the query in visit() when the transform type is updateSQL. 
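Put differently, the method below never blocks for metadata; it registers callbacks that run once the job reports its metadata. A self-contained sketch of that listener fan-out, using stand-in types (only the AsyncMetadataJobStatusListener-style addMetadataListener and metadataCollected names mirror this diff; everything else is illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class AsyncMetadataSketch {
  interface MetaDataListener { void metadataCollected(String metadata); }

  // Fans one "metadata collected" event out to every registered listener,
  // mirroring addMetadataListener(...) in the method that follows.
  static final class AsyncListener {
    private final List<MetaDataListener> listeners = new ArrayList<>();
    AsyncListener(MetaDataListener first) { listeners.add(first); }
    void addMetadataListener(MetaDataListener l) { listeners.add(l); }
    void onMetadataCollected(String metadata) {
      for (MetaDataListener l : listeners) { l.metadataCollected(metadata); }
    }
  }

  public static void main(String[] args) {
    AsyncListener async = new AsyncListener(md -> System.out.println("apply metadata: " + md));
    async.addMetadataListener(md -> System.out.println("save dataset version for: " + md));
    // Submit the "job": the caller returns immediately; listeners fire later.
    CompletableFuture<Void> job = CompletableFuture.runAsync(() -> async.onMetadataCollected("schema+parents"));
    job.join(); // demo only: keep the JVM alive until the event fires
  }
}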
+ private TransformResultDatasetAndData updateSQLTransformWithExecuteAsync( + DatasetVersion newVersion, + DatasetPath path, + VirtualDatasetUI original, + TransformBase transform, + QueryType queryType, + boolean isPreview) + throws DatasetNotFoundException { + + Preconditions.checkArgument(transform.wrap().getType() == updateSQL); + final ExecuteTransformActor actor = new ExecuteTransformActor(queryType, newVersion, original.getState(), isPreview, username(), path, executor); + final TransformResult transformResult = new TransformResult( + new VirtualDatasetState() + .setFrom(new From(FromType.SQL).setSql(new FromSQL(transform.wrap().getUpdateSQL().getSql()))) + .setContextList(transform.wrap().getUpdateSQL().getSqlContextList())); + setReferencesInVirtualDatasetUI(original, transform); + VirtualDatasetUI dataset = asDataset(newVersion, path, original, transform, transformResult, actor, catalogService); + + VirtualDatasetState vss = protectAgainstNull(transformResult, transform); + Map sourceVersionMapping = TransformerUtils.createSourceVersionMapping(transform.getReferencesList()); + String sql = SQLGenerator.generateSQL(vss, isSupportedTransform(transform), catalogService); + + SqlQuery query = new SqlQuery(sql, vss.getContextList(), securityContext, transform.wrap().getUpdateSQL().getEngineName(), + transform.wrap().getUpdateSQL().getSessionId(), sourceVersionMapping); + AsyncMetadataJobStatusListener.MetaDataListener listener = new AsyncMetadataJobStatusListener.MetaDataListener() { + @Override + public void metadataCollected(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata) { + // save this dataset version. + if (actor.hasMetadata()) { + DatasetTool.applyQueryMetadata(dataset, actor.getParents(), actor.getBatchSchema(), actor.getFieldOrigins(), + actor.getGrandParents(), actor.getMetadata()); + dataset.setState(QuerySemantics.extract(actor.getMetadata())); + } + datasetService.putVersion(dataset); + } + }; + actor.getMetadataAsync(query, listener); + + final TransformResultDatasetAndData resultToReturn = new TransformResultDatasetAndData(actor.getJobData(), + dataset, transformResult); + + return resultToReturn; + } + public boolean isSupportedTransform(TransformBase transform) { return transform.wrap().getType() != updateSQL; } @@ -387,10 +443,10 @@ private void setReferencesInVirtualDatasetUI( } } - private static class TransformResultDatsetAndData extends DatasetAndData { + private static class TransformResultDatasetAndData extends DatasetAndData { private final TransformResult transformResult; - public TransformResultDatsetAndData(JobData jobData, VirtualDatasetUI dataset, TransformResult transformResult) { + public TransformResultDatasetAndData(JobData jobData, VirtualDatasetUI dataset, TransformResult transformResult) { super(jobData, dataset); this.transformResult = transformResult; } @@ -478,19 +534,17 @@ public ExecuteTransformActor( this.queryType = queryType; } - @Override - protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQuery query) { - this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, collector); + private void applyMetadata(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata, SqlQuery query) { JobId jobId = null; SessionId sessionId = null; try { jobId = jobData.getJobId(); sessionId = jobData.getSessionId(); - this.metadata = collector.getMetadata(); + this.metadata = metadata; final JobDetails jobDetails = jobsService.getJobDetails( JobDetailsRequest.newBuilder() 
.setJobId(JobsProtoUtil.toBuf(jobId)) - .setUserName(query.getUsername()) + .setUserName(username()) .build()); final JobInfo jobInfo = JobsProtoUtil.getLastAttempt(jobDetails).getInfo(); this.batchSchema = Optional.ofNullable(jobInfo.getBatchSchema()).map((b) -> BatchSchema.deserialize(b)); @@ -513,10 +567,29 @@ protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQu this.batchSchema = queryMetadata.getBatchSchema(); this.parents = queryMetadata.getParents(); } + } + + @Override + protected com.dremio.service.jobs.metadata.proto.QueryMetadata getMetadata(SqlQuery query) { + this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, collector); + applyMetadata(collector.getMetadata(), query); return metadata; } + protected void getMetadataAsync(SqlQuery query, AsyncMetadataJobStatusListener.MetaDataListener listener) { + AsyncMetadataJobStatusListener.MetaDataListener metadataListener = new AsyncMetadataJobStatusListener.MetaDataListener() { + @Override + public void metadataCollected(com.dremio.service.jobs.metadata.proto.QueryMetadata metadata) { + ExecuteTransformActor.this.applyMetadata(metadata, query); + } + }; + AsyncMetadataJobStatusListener asyncListener = new AsyncMetadataJobStatusListener(metadataListener); + asyncListener.addMetadataListener(listener); + + this.jobData = executor.runQueryWithListener(query, queryType, path, newVersion, asyncListener); + } + @Override protected boolean hasMetadata() { return (metadata != null); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java index 2674029627..9edd3110a6 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/Dataset.java @@ -20,8 +20,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; import com.dremio.dac.model.common.AddressableResource; +import com.dremio.dac.model.common.RootEntity; +import com.dremio.dac.model.folder.FolderName; import com.dremio.dac.model.job.JobFilters; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; import com.dremio.dac.util.JSONUtil; @@ -89,6 +93,33 @@ public static Dataset newInstance( return new Dataset(datasetConfig.getId(), resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, null, jobCount, tags); } + public static Dataset newInstance( + RootEntity rootEntity, + List folderNamespace, + String folderName, + String id) { + final List folderPath = + folderNamespace.stream() + .map(name -> new FolderName(name)) + .collect(Collectors.toList()); + final DatasetName datasetName = new DatasetName(folderName); + final DatasetPath datasetPath = new DatasetPath(rootEntity, folderPath, datasetName); + + final DatasetVersion datasetVersion = DatasetVersion.newVersion(); + final VirtualDatasetUI vds = new VirtualDatasetUI(); + vds.setFullPathList(datasetPath.toPathList()); + vds.setName(datasetName.getName()); + vds.setId((id == null) ? UUID.randomUUID().toString() : id); + vds.setVersion(datasetVersion); + + // For the iceberg view in nessie, we generate a datasetVersion for it. 
+ final DatasetResourcePath datasetResourcePath = new DatasetResourcePath(datasetPath); + final DatasetVersionResourcePath datasetVersionResourcePath = + new DatasetVersionResourcePath(datasetPath, datasetVersion); + + return new Dataset(vds.getId(), datasetResourcePath, datasetVersionResourcePath, datasetName, null, vds, null,0, null); + } + public int getJobCount() { return jobCount; } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java index 1fec9faa52..1bd21d416d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetPath.java @@ -42,6 +42,8 @@ public class DatasetPath extends NamespacePath { private static final String URL_PATH_TYPE = "dataset"; + private String version = " "; + public static DatasetPath fromURLPath(RootEntity root, String path) { List components = PathUtils.toPathComponents(path); @@ -56,6 +58,11 @@ public DatasetPath(RootEntity root, List folderPath, DatasetName dat super(root, folderPath, dataset); } + public DatasetPath(String path, String version) { + super(path); + this.version = version; + } + @JsonCreator public DatasetPath(String path) { super(path); @@ -104,6 +111,10 @@ public DatasetName getDataset() { return (DatasetName)getLeaf(); } + public String getVersion() { + return version; + } + @Override public LeafEntity getLeaf(String name) throws IllegalArgumentException { return new DatasetName(name); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java index 36fb0c80ef..3cc550a9f3 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSearchUI.java @@ -17,14 +17,11 @@ import static com.dremio.common.utils.PathUtils.encodeURIComponent; -import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.List; import java.util.Map; -import com.dremio.common.utils.PathUtils; import com.dremio.dac.proto.model.collaboration.CollaborationTag; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.DatasetVersion; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.DatasetType; @@ -171,38 +168,4 @@ public Map getLinks() { return links; } - public Map getApiLinks() throws UnsupportedEncodingException { - final NamespaceKey datasetPath = new NamespaceKey(fullPath); - final String dottedFullPath = datasetPath.toUrlEncodedString(); - final String fullPathString = PathUtils.toFSPath(fullPath).toString(); - - Map links = new HashMap(); - switch (datasetType) { - case VIRTUAL_DATASET: - links.put("edit", "/dataset/" + dottedFullPath + "/version/" + datasetVersion + "?view=explore"); //edit dataset - final DatasetVersion datasetVersion = DatasetVersion.newVersion(); - links.put("run", "/datasets/new_untitled?parentDataset=" + dottedFullPath + "&newVersion=" - + (datasetVersion == null ? 
datasetVersion : encodeURIComponent(datasetVersion.toString()))); //create new dataset - break; - case PHYSICAL_DATASET_HOME_FILE: - links.put("run", "/home/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString); - break; - case PHYSICAL_DATASET_HOME_FOLDER: - // Folder not supported yet - break; - case PHYSICAL_DATASET_SOURCE_FILE: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString); - break; - case PHYSICAL_DATASET_SOURCE_FOLDER: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_folder" + fullPathString); - break; - case PHYSICAL_DATASET: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_physical_dataset" + fullPathString); - break; - default: - break; - } - return links; - } - } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java index 2bd419b777..2757847f47 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetSummary.java @@ -22,7 +22,7 @@ import java.util.Map; import java.util.Set; -import com.dremio.common.utils.PathUtils; +import com.dremio.dac.api.JsonISODateTime; import com.dremio.dac.model.job.JobFilters; import com.dremio.dac.util.DatasetsUtil; import com.dremio.service.jobs.JobIndexKeys; @@ -31,11 +31,11 @@ import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.dremio.service.namespace.dataset.proto.VirtualDataset; +import com.dremio.service.users.User; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; /** * Dataset summary for overlay @@ -50,6 +50,16 @@ public class DatasetSummary { private final DatasetVersion datasetVersion; private final Map references; private final List tags; + private final String entityId; + private Boolean hasReflection; + private final String ownerName; + private final String ownerEmail; + private final String lastModifyingUserName; + private final String lastModifyingUserEmail; + @JsonISODateTime + private final Long createdAt; + @JsonISODateTime + private final Long lastModified; public DatasetSummary(@JsonProperty("fullPath") List fullPath, @JsonProperty("jobCount") int jobCount, @@ -58,7 +68,15 @@ public DatasetSummary(@JsonProperty("fullPath") List fullPath, @JsonProperty("datasetType") DatasetType datasetType, @JsonProperty("datasetVersion") DatasetVersion datasetVersion, @JsonProperty("tags") List tags, - @JsonProperty("references") Map references) { + @JsonProperty("references") Map references, + @JsonProperty("entityId") String entityId, + @JsonProperty("hasReflection") Boolean hasReflection, + @JsonProperty("ownerName") String ownerName, + @JsonProperty("ownerEmail") String ownerEmail, + @JsonProperty("lastModifyingUserName") String lastModifyingUserName, + @JsonProperty("lastModifyingUserEmail") String lastModifyingUserEmail, + @JsonProperty("createdAt") Long createdAt, + @JsonProperty("lastModified") Long lastModified) { this.fullPath = fullPath; this.jobCount = jobCount; this.descendants = descendants; @@ -67,9 +85,18 @@ public DatasetSummary(@JsonProperty("fullPath") List fullPath, this.datasetVersion = datasetVersion; this.tags = tags; this.references = references; + 
this.entityId = entityId; + this.hasReflection = hasReflection; + this.ownerName = ownerName; + this.ownerEmail = ownerEmail; + this.lastModifyingUserName = lastModifyingUserName; + this.lastModifyingUserEmail = lastModifyingUserEmail; + this.createdAt = createdAt; + this.lastModified = lastModified; } - public static DatasetSummary newInstance(DatasetConfig datasetConfig, int jobCount, int descendants, Map references, List tags) { + public static DatasetSummary newInstance(DatasetConfig datasetConfig, int jobCount, int descendants, Map references, List tags, + Boolean hasReflection, User owner, User lastModifyingUser) { List fullPath = datasetConfig.getFullPathList(); DatasetType datasetType = datasetConfig.getType(); @@ -99,7 +126,16 @@ public Field apply(com.dremio.dac.model.common.Field input) { datasetVersion = null; } - return new DatasetSummary(fullPath, jobCount, descendants, fields, datasetType, datasetVersion, tags, references); + final String entityId = datasetConfig.getId() == null ? null : datasetConfig.getId().getId(); + final String ownerName = owner != null ? owner.getUserName() : null; + final String ownerEmail = owner != null ? owner.getEmail() : null; + final String lastModifyingUserName = lastModifyingUser != null ? lastModifyingUser.getUserName() : null; + final String lastModifyingUserEmail = lastModifyingUser != null ? lastModifyingUser.getEmail() : null; + final Long createdAt = datasetConfig.getCreatedAt(); + final Long lastModified = datasetConfig.getLastModified(); + + return new DatasetSummary(fullPath, jobCount, descendants, fields, datasetType, datasetVersion, tags, references, + entityId, hasReflection, ownerName, ownerEmail, lastModifyingUserName, lastModifyingUserEmail, createdAt, lastModified); } public DatasetVersion getDatasetVersion() { @@ -134,6 +170,36 @@ public Map getReferences() { return references; } + public String getEntityId() { + return entityId; + } + + public Boolean getHasReflection() { return hasReflection; } + + public String getOwnerName() { + return ownerName; + } + + public String getOwnerEmail() { + return ownerEmail; + } + + public String getLastModifyingUserName() { + return lastModifyingUserName; + } + + public String getLastModifyingUserEmail() { + return lastModifyingUserEmail; + } + + public Long getCreatedAt() { + return createdAt; + } + + public Long getLastModified() { + return lastModified; + } + // links // TODO make this consistent with DatasetUI.createLinks. In ideal case, both methods should use the same util method public Map getLinks() { @@ -150,42 +216,6 @@ public Map getLinks() { return links; } - // api links - public Map getApiLinks() { - final Map links = Maps.newHashMap(); - final NamespaceKey datasetPath = new NamespaceKey(fullPath); - final String dottedFullPath = datasetPath.toUrlEncodedString(); - final String fullPathString = PathUtils.toFSPath(fullPath).toString(); - - links.put("jobs", this.getJobsUrl()); - switch (datasetType) { - case VIRTUAL_DATASET: - links.put("edit", "/dataset/" + dottedFullPath + "/version/" + datasetVersion + "/preview"); // edit dataset - final DatasetVersion datasetVersion = DatasetVersion.newVersion(); - links.put("run", "/datasets/new_untitled?parentDataset=" + dottedFullPath + "&newVersion=" - + (datasetVersion == null ? 
datasetVersion : encodeURIComponent(datasetVersion.toString()))); //create new dataset - break; - case PHYSICAL_DATASET_HOME_FILE: - links.put("run", "/home/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString); - break; - case PHYSICAL_DATASET_HOME_FOLDER: - // Folder not supported yet - break; - case PHYSICAL_DATASET_SOURCE_FILE: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString); - break; - case PHYSICAL_DATASET_SOURCE_FOLDER: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_folder" + fullPathString); - break; - case PHYSICAL_DATASET: - links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_physical_dataset" + fullPathString); - break; - default: - break; - } - return links; - } - private String getJobsUrl() { final NamespaceKey datasetPath = new NamespaceKey(fullPath); final JobFilters jobFilters = new JobFilters() diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java index 1aedb88911..0ca17d7151 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/DatasetUI.java @@ -27,11 +27,13 @@ import com.dremio.dac.model.folder.SourceFolderPath; import com.dremio.dac.model.job.JobFilters; import com.dremio.dac.model.sources.PhysicalDatasetPath; +import com.dremio.dac.model.sources.VirtualDatasetPath; import com.dremio.dac.model.spaces.HomeName; import com.dremio.dac.proto.model.dataset.Derivation; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; import com.dremio.dac.util.DatasetUIUtils; import com.dremio.dac.util.DatasetsUtil; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.file.FilePath; import com.dremio.file.SourceFilePath; import com.dremio.service.jobs.JobIndexKeys; @@ -43,6 +45,7 @@ import com.dremio.service.namespace.dataset.proto.ParentDataset; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; @@ -52,7 +55,7 @@ public class DatasetUI { private final String id; - private String entityId; + private final String entityId; private final String sql; private final List context; // full path to use when making transforms on this dataset like transforms @@ -97,7 +100,7 @@ public static DatasetUI newInstance( datasetType = DatasetType.VIRTUAL_DATASET; } else { // if it's tmp.UNTITLED we want to get the parent dataset path to display. The UI uses displayFullPath for history - // requests and therefore we need to be precise here. We manually check the path as this code would previously get + // requests, and therefore we need to be precise here. We manually check the path as this code would previously get // triggered for history dataset entries that derive from another dataset. 
if (isUnsaved && vds.getDerivation() == Derivation.DERIVED_VIRTUAL && parentsList.size() > 0 && Arrays.asList("tmp", "UNTITLED").equals(fullPath)) { @@ -118,17 +121,37 @@ public static DatasetUI newInstance( entityId = namespaceService.getEntityIdByPath(new NamespaceKey(displayFullPath)); } + final String datasetId = vds.getId(); Map versionContextReqMap = DatasetUIUtils.createVersionContextMap(vds.getReferencesList()); - // if it's versioned, vds'id will be the same as entityId - if(entityId == null && context!=null && context.size() >= 1 && versionContextReqMap.containsKey(context.get(0))) { - entityId = vds.getId(); + if (VersionedDatasetId.tryParse(datasetId) != null) { + Preconditions.checkArgument(entityId == null); + entityId = datasetId; } - return new DatasetUI(vds.getId(), sql, context, fullPath, displayFullPath, vds.getSavedTag(), vds.getVersion(), - null, null, canReapply, datasetType, - createLinks(fullPath, displayFullPath, vds.getVersion(), isUnsavedDirectPhysicalDataset), - createApiLinks(fullPath, displayFullPath, datasetType, vds.getVersion(), isUnsaved, isDerivedDirectly), - /* entityId */ entityId, versionContextReqMap); + + return new DatasetUI( + datasetId, + sql, + context, + fullPath, + displayFullPath, + vds.getSavedTag(), + vds.getVersion(), + null, + null, + canReapply, + datasetType, + createLinks( + fullPath, + displayFullPath, + vds.getVersion(), + isUnsavedDirectPhysicalDataset, + entityId, + datasetType), + createApiLinks( + fullPath, displayFullPath, datasetType, vds.getVersion(), isUnsaved, isDerivedDirectly), + entityId, + versionContextReqMap); } @JsonCreator @@ -258,15 +281,29 @@ public Map getApiLinks() { public String getEntityId() { return entityId; } // TODO make this consistent with DatasetSummary.getLinks. In ideal case, both methods should use the same util method - public static Map createLinks(List fullPath, List displayFullPath, DatasetVersion datasetVersion, boolean isUnsavedDirectPhysicalDataset) { + public static Map createLinks( + List fullPath, + List displayFullPath, + DatasetVersion datasetVersion, + boolean isUnsavedDirectPhysicalDataset, + String entityId, + DatasetType datasetType) { String dottedFullPath = PathUtils.constructFullPath(fullPath); String queryUrlPath; + + final boolean isVersionedDataset = VersionedDatasetId.tryParse(entityId) != null; if (isUnsavedDirectPhysicalDataset) { if (displayFullPath.get(0).startsWith(HomeName.HOME_PREFIX)) { queryUrlPath = new DatasetPath(displayFullPath).getQueryUrlPath(); } else { queryUrlPath = new PhysicalDatasetPath(displayFullPath).getQueryUrlPath(); } + } else if (isVersionedDataset) { + queryUrlPath = + ((datasetType == DatasetType.VIRTUAL_DATASET) + ? new VirtualDatasetPath(displayFullPath) + : new PhysicalDatasetPath(displayFullPath)) + .getQueryUrlPath(); } else { queryUrlPath = new DatasetPath(displayFullPath).getQueryUrlPath(); } @@ -279,7 +316,6 @@ public static Map createLinks(List fullPath, List suggestions; - - @JsonCreator - public SuggestionResponse(@JsonProperty("suggestions") List suggestions) { - this.suggestions = ImmutableList.copyOf(suggestions); - } - - /** - * Get list of suggestions. - */ - public ImmutableList getSuggestions() { - return suggestions; - } - - @Override - public String toString() { - return "SuggestionResponse{" + - "suggestions=" + suggestions + - '}'; - } - - /** - * Query Suggestion object to return in SQL Analyze response. 
- */ - public static class Suggestion { - - private final String name; - private final String type; - - @JsonCreator - public Suggestion( - @JsonProperty("name") String name, - @JsonProperty("type") String type) { - this.name = name; - this.type = type; - } - - /** - * Get Name - * - * @return the suggestion value. - */ - public String getName() { - return name; - } - - /** - * Get Type - * - * @return the suggestion type. - */ - public String getType() { - return type; - } - - @Override - public String toString() { - return "Suggestion{" + - "name='" + name + '\'' + - ", type='" + type + '\'' + - '}'; - } - } -} diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java deleted file mode 100644 index e40a072d3a..0000000000 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/ValidationResponse.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.dac.explore.model; - -import java.util.List; - -import com.dremio.dac.model.job.QueryError; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.ImmutableList; - - -/** - * SQL Analyze API Response object to return for SQL query validation. - * Wrapper for a list of QueryError objects. - */ -public class ValidationResponse { - - private ImmutableList sqlErrors; - - @JsonCreator - public ValidationResponse(@JsonProperty("errors") List sqlErrors) { - this.sqlErrors = ImmutableList.copyOf(sqlErrors); - } - - /** - * Get list of errors. - */ - public ImmutableList getErrors() { - return sqlErrors; - } - - @Override - public String toString() { - return "ValidationResponse{" + - "sqlErrors=" + sqlErrors + - '}'; - } -} diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java index ed92a5800f..54926ae977 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextReq.java @@ -17,6 +17,8 @@ import java.util.Locale; +import javax.annotation.Nullable; + import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Strings; @@ -48,11 +50,10 @@ public VersionContextReq( * Case-insensitive enum conversion. * Returns null on most failures. 
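 * For example, tryParse("branch", "main") resolves the type case-insensitively (assuming the enum defines a BRANCH constant), while a null or empty type or value yields null.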
*/ - public static VersionContextReq tryParse(String type, String value) { + public static @Nullable VersionContextReq tryParse(String type, String value) { if (Strings.isNullOrEmpty(type) || Strings.isNullOrEmpty(value)) { return null; } - return new VersionContextReq( VersionContextType.valueOf(type.toUpperCase(Locale.ROOT)), value); diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java index a05395462c..23ddbc97cb 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/model/VersionContextUtils.java @@ -21,7 +21,8 @@ public final class VersionContextUtils { private VersionContextUtils() { } - public static VersionContext map(VersionContextReq from) { + public static VersionContext parse(String refType, String refValue) { + VersionContextReq from = VersionContextReq.tryParse(refType, refValue); if (from == null) { return VersionContext.NOT_SPECIFIED; } diff --git a/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java b/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java index 7cdecd8c51..41a6b66ca0 100644 --- a/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java +++ b/dac/backend/src/main/java/com/dremio/dac/explore/udfs/ConvertCase.java @@ -48,8 +48,8 @@ public void setup() { @Override public void eval() { - out.buffer = buffer.reallocIfNeeded(input.end - input.start); - buffer = out.buffer; + buffer = buffer.reallocIfNeeded(input.end - input.start); + out.buffer = buffer; out.start = 0; out.end = input.end - input.start; diff --git a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java index 242943d191..c283e76f82 100644 --- a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java +++ b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileConf.java @@ -197,38 +197,47 @@ public SchemaMutability getSchemaMutability() { return SchemaMutability.USER_VIEW; } + @Override public String getAccessKey() { return accessKey; } + @Override public String getSecretKey() { return secretKey; } + @Override public String getIamRole() { return iamRole; } + @Override public String getExternalId() { return externalId; } + @Override public String getTokenEndpoint() { return tokenEndpoint; } + @Override public String getClientId() { return clientId; } + @Override public String getClientSecret() { return clientSecret; } + @Override public String getAccountName() { return accountName; } + @Override public String getAccountKind() { return accountKind; } diff --git a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java index 283a860207..12520b9329 100644 --- a/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java +++ b/dac/backend/src/main/java/com/dremio/dac/homefiles/HomeFileTool.java @@ -34,7 +34,6 @@ import com.dremio.dac.model.spaces.HomeName; import com.dremio.dac.service.errors.SourceBadStateException; import com.dremio.exec.proto.UserBitShared; -import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.StoragePlugin; import com.dremio.file.FilePath; @@ -46,6 +45,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; +import 
io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Injectable tool for doing home file manipulation. */ @@ -57,10 +58,10 @@ public class HomeFileTool { private final HostNameProvider hostNameProvider; private final SecurityContext securityContext; - public interface HostNameProvider extends Supplier {}; + public interface HostNameProvider extends Supplier {} @Inject - public HomeFileTool(SabotContext context, CatalogService catalog, HostNameProvider hostnameProvider, @Context SecurityContext securityContext) throws ExecutionSetupException { + public HomeFileTool(CatalogService catalog, HostNameProvider hostnameProvider, @Context SecurityContext securityContext) throws ExecutionSetupException { StoragePlugin plugin; try { plugin = catalog.getSource(HomeFileSystemStoragePlugin.HOME_PLUGIN_NAME); @@ -101,11 +102,11 @@ public HomeFileTool(HomeFileConf config, FileSystem fs, String hostname, Securit */ @VisibleForTesting public Path getStagingLocation(FilePath filePath, String extension) { - FilePath uniquePath = filePath.rename(format("%s_%s-%s", filePath.getFileName().toString(), extension, UUID.randomUUID().toString())); + FilePath uniquePath = filePath.rename(format("%s_%s-%s", filePath.getFileName().toString(), extension, UUID.randomUUID())); return Path.mergePaths(config.getStagingPath(hostNameProvider.get()), PathUtils.toFSPath(uniquePath.toPathList())); } - public HomeFileConf getConf() { + public HomeFileConf getConfForBackup() { return config; } @@ -114,7 +115,7 @@ public HomeFileConf getConf() { * * @param parent parent directory * @param fileName file name - * @return + * @return Returns the file path. */ private Path filePath(Path parent, String fileName) throws IOException { return fs.canonicalizePath(parent.resolve(fileName)); @@ -130,8 +131,9 @@ private Path getUploadLocation(FilePath filePath, String extension) { * @param filePath file path in under home space * @param input input stream containing file's data * @return location where file is staged - * @throws IOException + * @throws IOException - An exception that might occur if the file system cannot be written to. */ + @WithSpan public Path stageFile(FilePath filePath, String extension, InputStream input) throws IOException { final Path stagingLocation = getStagingLocation(filePath, extension); fs.mkdirs(stagingLocation, HomeFileSystemStoragePlugin.DEFAULT_PERMISSIONS); @@ -142,6 +144,7 @@ public Path stageFile(FilePath filePath, String extension, InputStream input) th return fs.makeQualified(stagingLocation); } + @WithSpan public Path saveFile(String stagingLocation, FilePath filePath, FileType fileType) throws IOException { return saveFile(Path.of(stagingLocation), filePath, FileFormat.getExtension(fileType)); } @@ -151,8 +154,9 @@ public Path saveFile(String stagingLocation, FilePath filePath, FileType fileTyp * @param stagingLocation staging directory where file is uploaded * @param filePath file path in under home space * @return final location of file - * @throws IOException + * @throws IOException - An exception if the file system cannot be written to. 
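 * A typical flow stages the upload with stageFile(...) first, then passes the returned staging path here to move it to its final upload location.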
*/ + @VisibleForTesting public Path saveFile(Path stagingLocation, FilePath filePath, String extension) throws IOException { if (!validStagingLocation(stagingLocation)) { throw new IllegalArgumentException("Invalid staging location provided"); @@ -160,7 +164,6 @@ public Path saveFile(Path stagingLocation, FilePath filePath, String extension) final Path uploadLocation = getUploadLocation(filePath, extension); fs.mkdirs(uploadLocation.getParent()); - // rename staging dir to uploadPath fs.rename(stagingLocation, uploadLocation); return uploadLocation; } @@ -171,6 +174,7 @@ public Path saveFile(Path stagingLocation, FilePath filePath, String extension) * @param stagingLocation staging directory where file is uploaded * @return if the location is valid or not */ + @WithSpan public boolean validStagingLocation(Path stagingLocation) { final Path stagingPath = fs.makeQualified(stagingLocation); @@ -184,8 +188,9 @@ public boolean validStagingLocation(Path stagingLocation) { /** * Delete file uploaded by user - * @throws IOException + * @throws IOException - An exception if the file system cannot be written to. */ + @WithSpan public void deleteFile(String fileLocation) throws IOException { if (fileLocation != null) { fs.delete(Path.of(fileLocation), true); @@ -198,10 +203,11 @@ public boolean fileExists(String fileLocation) throws IOException { /** * Delete the contents in given user home. - * @param userHome + * @param userHome - The location of a user's home space. * @return Whether successful or not. - * @throws IOException + * @throws IOException - An exception if the file system cannot be written to. */ + @WithSpan public boolean deleteHomeAndContents(String userHome) throws IOException { final Path homePath = config.getInnerUploads().resolve(userHome); if (fs.exists(homePath)) { diff --git a/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java b/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java index 62b9980ad1..2f6ff8e065 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/folder/Folder.java @@ -22,8 +22,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import java.util.stream.Stream; import com.dremio.dac.model.common.NamespacePath; @@ -167,8 +169,6 @@ public Map getLinks() { links.put("file_format", folderPath.toUrlPathWithAction("folder_format")); links.put("file_prefix", folderPath.toUrlPathWithAction("file")); } - // renames not allowed on source folders - links.put("rename", folderPath.toUrlPathWithAction("rename_folder")); } // add jobs if not already added. if (!links.containsKey("jobs")) { @@ -214,6 +214,28 @@ static NamespacePath parseUrlPath(String urlPath) { throw new IllegalArgumentException("Not a valid filePath: " + urlPath); } + public static Folder newInstance(RootEntity rootEntity, FolderConfig folderConfig, String id) { + return new Folder( + (id == null) ? 
UUID.randomUUID().toString() : id, + folderConfig.getName(), + new FolderPath( + rootEntity, + folderConfig.getFullPathList().subList(1, folderConfig.getFullPathList().size() - 1) + .stream() + .map(FolderName::new) + .collect(Collectors.toList()), + new FolderName(folderConfig.getName())).toUrlPath(), + false, + false, + false, + null, + null, + null, + null, + null, + 0); + } + public static Folder newInstance(FolderPath folderPath, FolderConfig folderConfig, NamespaceTree contents, boolean isQueryable, boolean isFileSystemFolder) { return newInstance(folderPath, folderConfig, null, contents, isQueryable, isFileSystemFolder, null, 0); } diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java index 98d0bc2c5c..010e11686b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobDetailsUI.java @@ -47,6 +47,7 @@ import com.dremio.service.job.proto.TableDatasetProfile; import com.dremio.service.job.proto.TopOperation; import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsServiceUtil; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; @@ -318,7 +319,7 @@ public JobDetailsUI( failureInfo, cancellationInfo, attempts.get(0).getInfo().getSql(), - attempts.get(0).getInfo().getDescription(), + JobsServiceUtil.getJobDescription(attempts.get(0).getInfo().getRequestType(), attempts.get(0).getInfo().getSql(), attempts.get(0).getInfo().getDescription()), Util.last(attempts).getStats(), DatasetType.VIRTUAL_DATASET, // TODO: return correct result. This is closest since only the ui submits queries and they are using virtual datasets... 
datasetVersion, diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java index 68ae229f4e..7a28c8e872 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobInfoDetailsUI.java @@ -43,7 +43,7 @@ import java.util.stream.Collectors; import org.apache.calcite.util.Util; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import com.dremio.dac.api.CatalogEntity; @@ -72,6 +72,7 @@ import com.dremio.service.job.proto.ReflectionType; import com.dremio.service.job.proto.ScannedDataset; import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsServiceUtil; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; @@ -110,6 +111,8 @@ public class JobInfoDetailsUI { private Long inputRecords; private Long outputBytes; private Long outputRecords; + private Long addedFiles; + private Long removedFiles; private Long duration; private List durationDetails; private int nrReflectionsConsidered; @@ -169,6 +172,8 @@ public JobInfoDetailsUI( @JsonProperty("inputRecords") Long inputRecords, @JsonProperty("outputBytes") Long outputBytes, @JsonProperty("outputRecords") Long outputRecords, + @JsonProperty("addedFiles") Long addedFiles, + @JsonProperty("removedFiles") Long removedFiles, @JsonProperty("duration") Long duration, @JsonProperty("durationDetails") List durationDetails, @JsonProperty("nrReflectionsConsidered") int nrReflectionsConsidered, @@ -220,6 +225,8 @@ public JobInfoDetailsUI( this.inputRecords = inputRecords; this.outputBytes = outputBytes; this.outputRecords = outputRecords; + this.addedFiles = addedFiles; + this.removedFiles = removedFiles; this.duration = duration; this.durationDetails = durationDetails; this.nrReflectionsConsidered = nrReflectionsConsidered; @@ -280,10 +287,12 @@ public JobInfoDetailsUI of(JobDetails jobDetails, UserBitShared.QueryProfile pro inputRecords = jobAttempt.getStats().getInputRecords(); outputBytes = jobAttempt.getStats().getOutputBytes(); outputRecords = jobAttempt.getStats().getOutputRecords(); + addedFiles = jobAttempt.getStats().getAddedFiles(); + removedFiles = jobAttempt.getStats().getRemovedFiles(); duration = JobUtil.getTotalDuration(jobDetails, attemptIndex); durationDetails = JobUtil.buildDurationDetails(jobAttempt.getStateListList()); requestType = RequestType.valueOf(jobInfo.getRequestType().toString()); - description = jobInfo.getDescription(); + description = JobsServiceUtil.getJobDescription(RequestType.valueOf(jobInfo.getRequestType().toString()), jobInfo.getSql(), jobInfo.getDescription()); attemptDetails = AttemptsUIHelper.fromAttempts(jobId, attempts); attemptsSummary = AttemptsUIHelper.constructSummary(attempts); datasetPaths = jobInfo.getDatasetPathList(); @@ -341,6 +350,8 @@ public JobInfoDetailsUI of(JobDetails jobDetails, UserBitShared.QueryProfile pro inputRecords, outputBytes, outputRecords, + addedFiles, + removedFiles, duration, durationDetails, nrReflectionsConsidered, @@ -530,6 +541,14 @@ public Long getOutputRecords() { return outputRecords; } + public Long getAddedFiles() { + return addedFiles; + } + + public Long getRemovedFiles() { + return removedFiles; + } + public boolean isStarFlakeAccelerated() { return 
isStarFlakeAccelerated; } diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java index 02398100f3..62e4540df1 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobProfileVisualizerUI.java @@ -320,12 +320,14 @@ private void buildPhaseData(UserBitShared.MajorFragmentProfile major) { } Comparator minorIdComparator = new Comparator() { + @Override public int compare(final UserBitShared.MinorFragmentProfile o1, final UserBitShared.MinorFragmentProfile o2) { return Long.compare(o1.getMinorFragmentId(), o2.getMinorFragmentId()); } }; Comparator operatorIdComparator = new Comparator() { + @Override public int compare(final UserBitShared.OperatorProfile o1, final UserBitShared.OperatorProfile o2) { return Long.compare(o1.getOperatorId(), o2.getOperatorId()); } diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java index de716f5a27..5fc234ced9 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobSummaryUI.java @@ -25,6 +25,7 @@ import com.dremio.service.job.proto.JobState; import com.dremio.service.job.proto.ParentDatasetInfo; import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsServiceUtil; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.fasterxml.jackson.annotation.JsonCreator; @@ -101,6 +102,7 @@ public JobSummaryUI( } public static JobSummaryUI of(com.dremio.service.job.JobSummary input, NamespaceService service) { + String desc = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription()); final ParentDatasetInfo datasetInfo = JobsUI.getDatasetToDisplay(input, service); return new JobSummaryUI( input.getJobId().getId(), @@ -113,7 +115,7 @@ public static JobSummaryUI of(com.dremio.service.job.JobSummary input, Namespace input.getUser(), input.getStartTime() == 0 ? null : input.getStartTime(), input.getEndTime() == 0 ? null : input.getEndTime(), - Strings.isNullOrEmpty(input.getDescription()) ? null : obfuscateSql(input.getDescription()), + Strings.isNullOrEmpty(desc) ? 
null : obfuscateSql(desc), JobsProtoUtil.toStuff(input.getRequestType()), input.getAccelerated(), input.getDatasetVersion(), diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java b/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java index 256518cb70..9cf949e866 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/JobUI.java @@ -35,6 +35,7 @@ import com.dremio.service.jobs.JobNotFoundException; import com.dremio.service.jobs.JobsProtoUtil; import com.dremio.service.jobs.JobsService; +import com.dremio.service.jobs.JobsServiceUtil; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -149,7 +150,7 @@ private static JobInfoUI convertJobInfo(JobInfo info) { .setAcceleration(info.getAcceleration()) .setGrandParentsList(info.getGrandParentsList()) .setDownloadInfo(info.getDownloadInfo()) - .setDescription(info.getDescription()) + .setDescription(JobsServiceUtil.getJobDescription(info.getRequestType(), info.getSql(), info.getDescription())) .setMaterializationFor(info.getMaterializationFor()) .setOriginalCost(info.getOriginalCost()) .setPartitionsList(info.getPartitionsList()) diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java index f4e4dfd83a..866dc78f6d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListItem.java @@ -29,6 +29,7 @@ import com.dremio.service.job.proto.JobStats; import com.dremio.service.jobs.Job; import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsServiceUtil; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; @@ -105,7 +106,7 @@ public PartialJobListItem(Job input) { this.user = firstAttempt.getInfo().getUser(); this.startTime = firstAttempt.getInfo().getStartTime(); this.endTime = lastAttempt.getInfo().getFinishTime(); - this.description = firstAttempt.getInfo().getDescription(); + this.description = JobsServiceUtil.getJobDescription(lastAttempt.getInfo().getRequestType(), lastAttempt.getInfo().getSql(), lastAttempt.getInfo().getDescription()); this.accelerated = lastAttempt.getInfo().getAcceleration() != null; this.requestType = firstAttempt.getInfo().getRequestType(); this.datasetVersion = firstAttempt.getInfo().getDatasetVersion(); @@ -129,7 +130,7 @@ public PartialJobListItem(JobSummary input) { this.user = input.getUser(); this.startTime = input.getStartTime() == 0 ? null : input.getStartTime(); this.endTime = input.getEndTime() == 0 ? null : input.getEndTime(); - this.description = Strings.isNullOrEmpty(input.getDescription()) ? 
null : input.getDescription(); + this.description = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription()); this.accelerated = input.getAccelerated(); this.requestType = JobsProtoUtil.toStuff(input.getRequestType()); this.datasetVersion = input.getDatasetVersion(); diff --git a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java index 5a36472054..04bc6c95ab 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/job/PartialJobListingItem.java @@ -26,6 +26,7 @@ import com.dremio.service.job.proto.JobState; import com.dremio.service.job.proto.QueryType; import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsServiceUtil; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; @@ -158,7 +159,7 @@ public PartialJobListingItem(JobSummary input) { input.getOutputRecords() + " Records"; this.spilled = input.getSpilled(); this.isStarFlakeAccelerated = input.getSnowflakeAccelerated(); - this.description = input.getDescription(); + this.description = JobsServiceUtil.getJobDescription(input.getRequestType(), input.getSql(), input.getDescription()); this.requestType = input.getRequestType(); this.datasetVersion = input.getDatasetVersion(); this.outputLimited = input.getOutputLimited(); diff --git a/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java b/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java new file mode 100644 index 0000000000..0bbcef32b1 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/model/namespace/ExternalNamespaceTreeUtils.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.model.namespace; + +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.dremio.dac.explore.model.Dataset; +import com.dremio.dac.model.folder.Folder; +import com.dremio.dac.model.sources.PhysicalDataset; +import com.dremio.dac.model.sources.SourceName; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.plugins.ExternalNamespaceEntry; +import com.dremio.service.namespace.space.proto.FolderConfig; + +/** + * Helpers for making NamespaceTrees from external catalogs (e.g. 
Nessie) + */ +public final class ExternalNamespaceTreeUtils { + private ExternalNamespaceTreeUtils() {} + + public static NamespaceTree namespaceTreeOf( + SourceName sourceName, List entries) { + Objects.requireNonNull(sourceName); + + final NamespaceTree namespaceTree = new NamespaceTree(); + entries.forEach( + entry -> { + final String id = entry.getId(); + final String name = entry.getName(); + final List namespace = entry.getNamespace(); + final List fullPathList = + Stream.of(Stream.of(sourceName.getName()), entry.getNameElements().stream()) + .flatMap(Function.identity()) + .collect(Collectors.toList()); + final TableVersionContext tableVersionContext = entry.getTableVersionContext(); + final String versionedDatasetId = + (id == null || tableVersionContext == null) + ? null + : VersionedDatasetId.newBuilder() + .setTableKey(fullPathList) + .setContentId(id) + .setTableVersionContext(tableVersionContext) + .build() + .asString(); + + switch (entry.getType()) { + case UNKNOWN: + break; // Unknown types are ignored + case FOLDER: + namespaceTree.addFolder( + Folder.newInstance( + sourceName, + new FolderConfig().setFullPathList(fullPathList).setName(name), + versionedDatasetId)); + break; + case ICEBERG_TABLE: + namespaceTree.addPhysicalDataset( + PhysicalDataset.newInstance(sourceName, namespace, name, versionedDatasetId)); + break; + case ICEBERG_VIEW: + namespaceTree.addDataset( + Dataset.newInstance(sourceName, namespace, name, versionedDatasetId)); + break; + default: + throw new IllegalStateException("Unexpected value: " + entry.getType()); + } + }); + + return namespaceTree; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java b/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java index 7743972034..ad3861c4b4 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/namespace/NamespaceTree.java @@ -15,7 +15,6 @@ */ package com.dremio.dac.model.namespace; -import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.FUNCTION; import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SOURCE; import java.util.ArrayList; @@ -49,6 +48,7 @@ import com.dremio.file.File; import com.dremio.file.FilePath; import com.dremio.file.SourceFilePath; +import com.dremio.options.OptionManager; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceNotFoundException; import com.dremio.service.namespace.NamespaceUtils; @@ -96,21 +96,22 @@ public static NamespaceTree newInstance( Type rootEntityType, CollaborationHelper collaborationService) throws NamespaceException, DatasetNotFoundException { - return newInstance(datasetService, children, rootEntityType, collaborationService, null, null); + return newInstance(datasetService, children, rootEntityType, collaborationService, null, null, null); } public static NamespaceTree newInstance( - final DatasetVersionMutator datasetService, - List children, - Type rootEntityType, - CollaborationHelper collaborationService, - Boolean fileSystemSource, - Boolean isImpersonationEnabled) throws NamespaceException, DatasetNotFoundException { + final DatasetVersionMutator datasetService, + List children, + Type rootEntityType, + CollaborationHelper collaborationService, + Boolean fileSystemSource, + Boolean isImpersonationEnabled, + OptionManager optionManager) throws NamespaceException, DatasetNotFoundException { NamespaceTree result = new 
NamespaceTree(); result.setIsFileSystemSource(fileSystemSource); result.setIsImpersonationEnabled(isImpersonationEnabled); - populateInstance(result, datasetService, children, rootEntityType, collaborationService); + populateInstance(result, datasetService, children, rootEntityType, collaborationService, optionManager); return result; } @@ -120,14 +121,14 @@ protected static void populateInstance( DatasetVersionMutator datasetService, List children, Type rootEntityType, - CollaborationHelper collaborationService) + CollaborationHelper collaborationService, OptionManager optionManager) throws NamespaceException, DatasetNotFoundException { // get a list of all ids so we can fetch all collaboration tags in one search final Map tags = new HashMap<>(); if (collaborationService != null) { TagsSearchResult tagsInfo = collaborationService.getTagsForIds(children.stream(). - map(NamespaceUtils::getId).collect(Collectors.toSet())); + map(NamespaceUtils::getIdOrNull).collect(Collectors.toSet())); tags.putAll(tagsInfo.getTags()); tree.setCanTagsBeSkipped(tagsInfo.getCanTagsBeSkipped()); @@ -157,7 +158,7 @@ protected static void populateInstance( datasetPath.getDataset(), vds.getSql(), vds, - datasetService.getJobsCount(datasetPath.toNamespaceKey()), + datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), rootEntityType, tags.get(datasetConfig.getId().getId()) ); @@ -170,7 +171,7 @@ protected static void populateInstance( fileDSId, new FilePath(container.getFullPathList()), fileFormat, - datasetService.getJobsCount(datasetPath.toNamespaceKey()), false, true, + datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), false, true, fileFormat.getFileType() != FileType.UNKNOWN, datasetConfig.getType(), tags.get(fileDSId) ); @@ -183,7 +184,7 @@ protected static void populateInstance( sourceFileDSId, new SourceFilePath(container.getFullPathList()), sourceFileFormat, - datasetService.getJobsCount(datasetPath.toNamespaceKey()), false, false, + datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), false, false, sourceFileFormat.getFileType() != FileType.UNKNOWN, datasetConfig.getType(), tags.get(sourceFileDSId) ); @@ -207,7 +208,7 @@ protected static void populateInstance( new PhysicalDatasetResourcePath(new SourceName(container.getFullPathList().get(0)), path), new PhysicalDatasetName(path.getFileName().getName()), DatasetsUtil.toPhysicalDatasetConfig(container.getDataset()), - datasetService.getJobsCount(datasetPath.toNamespaceKey()), + datasetService.getJobsCount(datasetPath.toNamespaceKey(), optionManager), tags.get(container.getDataset().getId().getId()) ); break; @@ -333,4 +334,7 @@ public void setIsImpersonationEnabled(final Boolean isImpersonationEnabled) { this.isImpersonationEnabled = isImpersonationEnabled; } + public long totalCount() { + return getFolders().size() + getDatasets().size() + getFiles().size() + getPhysicalDatasets().size(); + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java index 51f99da858..abec3895a9 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/FormatTools.java @@ -79,6 +79,9 @@ import com.google.common.base.Throwables; import com.google.common.collect.Iterators; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * A resource focused on guessing, previewing 
and applying formats to files and folders. */ @@ -137,7 +140,10 @@ public FileFormat getOrDetectFormat(NamespacePath folderPath, DatasetType expect // determine whether folder or file. final boolean isFolder; - switch(physicalDatasetConfig.getType()) { + final DatasetType datasetType = physicalDatasetConfig.getType(); + Span.current().setAttribute("formattools.getOrDetectFormat.datasetType", datasetType.name()); + + switch(datasetType) { case PHYSICAL_DATASET_HOME_FILE: case PHYSICAL_DATASET_SOURCE_FILE: isFolder = false; @@ -157,7 +163,7 @@ public FileFormat getOrDetectFormat(NamespacePath folderPath, DatasetType expect fileFormat.setVersion(physicalDatasetConfig.getTag()); return fileFormat; } catch (PhysicalDatasetNotFoundException nfe) { - // ignore and fall through to detect the format so we don't have extra nested blocks. + // ignore and fall through to detect the format, so we don't have extra nested blocks. } final NamespaceKey key = folderPath.toNamespaceKey(); @@ -187,7 +193,7 @@ private FileFormat detectFileFormat(NamespaceKey key) { } } } catch(IOException ex) { - // we could return unknown but if there no files, what's the point. + // we could return unknown but if there are no files, what's the point. throw UserException.ioExceptionError(ex) .message("No files detected or unable to read file format with selected option.") .build(logger); @@ -205,7 +211,7 @@ private FileFormat detectFileFormat(NamespaceKey key) { return asFormat(key, path,false, nullableFileFormat.get()); } } catch(IOException ex) { - // we could return unknown but if there no files, what's the point. + // we could return unknown but if there are no files, what's the point. throw UserException.ioExceptionError(ex) .message("No files detected or unable to read file format with selected option.") .build(logger); @@ -278,6 +284,7 @@ private static FileFormat asLayerFormat(NamespaceKey key, FileFormat fileFormat) return FileFormat.getForFolder(config); } + @WithSpan public JobDataFragment previewData(FileFormat format, NamespacePath namespacePath, boolean useFormatLocation) { final NamespaceKey key = namespacePath.toNamespaceKey(); final FileSystemPlugin plugin = getPlugin(key); @@ -294,7 +301,7 @@ public JobDataFragment previewData(FileFormat format, NamespacePath namespacePat try { attributes = fs.getFileAttributes(path); } catch(IOException ex) { - // we could return unknown but if there no files, what's the point. + // we could return unknown but if there are no files, what's the point. 
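// Unlike the format-detection paths above, preview has no reasonable fallback, so fail fast and attach the root cause.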
throw new IllegalStateException("No files detected or unable to read data.", ex); } @@ -426,7 +433,7 @@ private JobDataFragment getData(FormatPlugin formatPlugin, FileSystem filesystem } - private final FileSystemPlugin getPlugin(NamespaceKey key) { + private FileSystemPlugin getPlugin(NamespaceKey key) { StoragePlugin plugin = catalogService.getSource(key.getRoot()); if(plugin instanceof FileSystemPlugin) { return (FileSystemPlugin) plugin; diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java index 297853f3c3..e637ef00d8 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/PhysicalDataset.java @@ -18,9 +18,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import com.dremio.dac.model.common.AddressableResource; import com.dremio.dac.model.common.ResourcePath; +import com.dremio.dac.model.common.RootEntity; import com.dremio.dac.model.job.JobFilters; import com.dremio.service.jobs.JobIndexKeys; import com.dremio.service.namespace.physicaldataset.proto.PhysicalDatasetConfig; @@ -85,6 +89,30 @@ public Map getLinks() { return links; } + public static PhysicalDataset newInstance(RootEntity rootEntity, + List folderNamespace, + String folderName, + String id) { + List fullPathList = Stream.of( + Stream.of(rootEntity.getName()), + folderNamespace.stream(), + Stream.of(folderName)) + .reduce(Stream::concat) + .orElseThrow(IllegalStateException::new) + .collect(Collectors.toList()); + + final PhysicalDatasetPath path = new PhysicalDatasetPath(fullPathList); + + return new PhysicalDataset( + new PhysicalDatasetResourcePath(new SourceName(rootEntity.getName()), path), + new PhysicalDatasetName(path.getFileName().getName()), + new PhysicalDatasetConfig() + .setId((id == null) ? UUID.randomUUID().toString() : id) + .setFullPathList(fullPathList), + null, + null); + } + public List getTags() { return tags; } diff --git a/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java b/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java new file mode 100644 index 0000000000..8942e86186 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/model/sources/VirtualDatasetPath.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.model.sources; + +import java.util.List; + +import com.dremio.dac.model.common.LeafEntity; +import com.dremio.dac.model.common.NamespacePath; +import com.dremio.dac.model.common.RootEntity; +import com.dremio.dac.model.folder.FolderName; +import com.dremio.file.FileName; + +public class VirtualDatasetPath extends NamespacePath { + public VirtualDatasetPath(SourceName source, List folderPath, FileName fileName) { + super(source, folderPath, fileName); + } + + public VirtualDatasetPath(List path) { + super(path); + } + + @Override + public RootEntity getRoot(String name) throws IllegalArgumentException { + return new SourceName(name); + } + + @Override + public LeafEntity getLeaf(String name) throws IllegalArgumentException { + return new FileName(name); + } + + @Override + public int getMinimumComponents() { + return 2; + } + + @Override + public SourceName getRoot() { + return (SourceName) super.getRoot(); + } + + public FileName getFileName() { + return new FileName(getLeaf().getName()); + } + + public SourceName getSourceName() { + return new SourceName(getRoot().getName()); + } + + @Override + protected String getDefaultUrlPathType() { + return "dataset"; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java b/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java index 0a86b3bd12..1e91ab413b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java +++ b/dac/backend/src/main/java/com/dremio/dac/model/userpreferences/PreferenceData.java @@ -23,11 +23,11 @@ /** * Class PreferenceData */ -public class PreferenceData { +public final class PreferenceData { private final UserPreferenceProto.PreferenceType preferenceType; private final List entities; - public PreferenceData(UserPreferenceProto.PreferenceType preferenceType, List entities) { + public PreferenceData(final UserPreferenceProto.PreferenceType preferenceType, final List entities) { this.preferenceType = preferenceType; this.entities = entities; } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java index 3798318045..5c5a3ff414 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/BackupResource.java @@ -73,7 +73,7 @@ public BackupStats createBackup(BackupOptions options) throws IOException, Names final FileSystem fs = HadoopFileSystem.get(backupDirPath, new Configuration()); // Checking if directory already exists and that the daemon can access it BackupRestoreUtil.checkOrCreateDirectory(fs, backupDirPath); - return BackupRestoreUtil.createBackup(fs, options, kvStoreProvider, fileStore.get().getConf(), null); + return BackupRestoreUtil.createBackup(fs, options, kvStoreProvider, fileStore.get().getConfForBackup(), null); } @@ -99,7 +99,7 @@ public BackupStats backupUploads( final com.dremio.io.file.Path backupRootDirPath = backupDestinationDir.getParent(); final FileSystem fs = HadoopFileSystem.get(backupRootDirPath, new Configuration()); final BackupStats backupStats = new BackupStats(options.getBackupDestinationDirectory(), 0, 0); - BackupRestoreUtil.backupUploadedFiles(fs, backupDestinationDir, fileStore.get().getConf(), backupStats); + BackupRestoreUtil.backupUploadedFiles(fs, backupDestinationDir, fileStore.get().getConfForBackup(), backupStats); return backupStats; } diff --git 
a/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java index 1ac8d5a3ee..8524dd06eb 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/FolderResource.java @@ -16,7 +16,6 @@ package com.dremio.dac.resource; import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SPACE; -import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import java.util.Arrays; import java.util.ConcurrentModificationException; @@ -35,10 +34,10 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; -import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.PathUtils; import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; +import com.dremio.dac.model.common.ResourcePath; import com.dremio.dac.model.folder.Folder; import com.dremio.dac.model.folder.FolderName; import com.dremio.dac.model.folder.FolderPath; @@ -50,7 +49,10 @@ import com.dremio.dac.service.errors.ClientErrorException; import com.dremio.dac.service.errors.DatasetNotFoundException; import com.dremio.dac.service.errors.FolderNotFoundException; +import com.dremio.dac.service.errors.NotSupportedException; import com.dremio.dac.util.ResourceUtil; +import com.dremio.exec.catalog.CatalogFeatures; +import com.dremio.options.OptionManager; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceNotFoundException; import com.dremio.service.namespace.NamespaceService; @@ -65,29 +67,32 @@ @RolesAllowed({"admin", "user"}) @Path("/space/{space}") public class FolderResource { - static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FolderResource.class); - private final DatasetVersionMutator datasetService; private final NamespaceService namespaceService; private final CollaborationHelper collaborationHelper; private final SpaceName spaceName; + private final OptionManager optionManager; @Inject public FolderResource( DatasetVersionMutator datasetService, NamespaceService namespaceService, CollaborationHelper collaborationHelper, - @PathParam("space") SpaceName spaceName) { + @PathParam("space") SpaceName spaceName, + OptionManager optionManager) { this.datasetService = datasetService; this.namespaceService = namespaceService; this.collaborationHelper = collaborationHelper; this.spaceName = spaceName; + this.optionManager = optionManager; } @GET @Path("/folder/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public Folder getFolder(@PathParam("path") String path, @QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws NamespaceException, FolderNotFoundException, DatasetNotFoundException { + throwIfNotSupported(); + FolderPath folderPath = FolderPath.fromURLPath(spaceName, path); try { final FolderConfig folderConfig = namespaceService.getFolder(folderPath.toNamespaceKey()); @@ -104,6 +109,8 @@ public Folder getFolder(@PathParam("path") String path, @QueryParam("includeCont @Path("/folder/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public void deleteFolder(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, FolderNotFoundException { + throwIfNotSupported(); + FolderPath folderPath = FolderPath.fromURLPath(spaceName, path); if (version == null) { throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG); @@ -123,6 +130,8 @@ public void 
deleteFolder(@PathParam("path") String path, @QueryParam("version") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public Folder createFolder(FolderName name, @PathParam("path") String path) throws NamespaceException { + throwIfNotSupported(); + String fullPath = PathUtils.toFSPathString(Arrays.asList(path, name.toString())); FolderPath folderPath = FolderPath.fromURLPath(spaceName, fullPath); @@ -146,14 +155,14 @@ protected NamespaceTree newNamespaceTree(List children) thro return NamespaceTree.newInstance(datasetService, children, SPACE, collaborationHelper); } - @POST - @Produces(APPLICATION_JSON) - @Path("/rename_folder/{path: .*}") - public Folder renameFolder(@PathParam("path") String path, @QueryParam("renameTo") String renameTo) - throws NamespaceException, FolderNotFoundException { - throw UserException.unsupportedError() - .message("Renaming a folder is not supported") - .build(logger); + protected OptionManager getOptionManager() { + return optionManager; } + private void throwIfNotSupported() throws NotSupportedException { + CatalogFeatures features = CatalogFeatures.get(optionManager); + if (!features.isFeatureEnabled(CatalogFeatures.Feature.SPACE)) { + throw new NotSupportedException(ResourcePath.defaultImpl("/space")); + } + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java index b4acca126b..5a390a6eb4 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/HomeResource.java @@ -53,14 +53,12 @@ import com.dremio.common.utils.SqlUtils; import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; -import com.dremio.dac.explore.DatasetsResource; import com.dremio.dac.explore.model.Dataset; import com.dremio.dac.explore.model.DatasetName; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetResourcePath; import com.dremio.dac.explore.model.DatasetVersionResourcePath; import com.dremio.dac.explore.model.FileFormatUI; -import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.homefiles.HomeFileSystemStoragePlugin; import com.dremio.dac.homefiles.HomeFileTool; import com.dremio.dac.model.common.DACException; @@ -71,10 +69,10 @@ import com.dremio.dac.model.job.JobDataFragment; import com.dremio.dac.model.job.JobDataWrapper; import com.dremio.dac.model.namespace.NamespaceTree; -import com.dremio.dac.model.sources.FormatTools; import com.dremio.dac.model.spaces.Home; import com.dremio.dac.model.spaces.HomeName; import com.dremio.dac.model.spaces.HomePath; +import com.dremio.dac.model.spaces.HomeResourcePath; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; import com.dremio.dac.server.BufferAllocatorFactory; import com.dremio.dac.server.GenericErrorMessage; @@ -85,14 +83,14 @@ import com.dremio.dac.service.datasets.DatasetVersionMutator; import com.dremio.dac.service.errors.ClientErrorException; import com.dremio.dac.service.errors.DatasetNotFoundException; -import com.dremio.dac.service.errors.DatasetVersionNotFoundException; import com.dremio.dac.service.errors.FileNotFoundException; import com.dremio.dac.service.errors.FolderNotFoundException; import com.dremio.dac.service.errors.HomeNotFoundException; -import com.dremio.dac.service.errors.NewDatasetQueryException; +import com.dremio.dac.service.errors.NotSupportedException; import 
com.dremio.dac.service.errors.SourceNotFoundException; import com.dremio.dac.util.JobRequestUtil; import com.dremio.dac.util.ResourceUtil; +import com.dremio.exec.catalog.CatalogFeatures; import com.dremio.exec.catalog.DatasetCatalog; import com.dremio.exec.server.options.ProjectOptionManager; import com.dremio.file.File; @@ -121,7 +119,7 @@ import com.dremio.service.namespace.space.proto.HomeConfig; /** - * Resource for user home. + * Resource for user home's space. */ @RestResource @Secured @@ -137,12 +135,10 @@ public class HomeResource extends BaseResourceWithAllocator { private final CollaborationHelper collaborationService; private final HomeName homeName; private final HomePath homePath; - private final DatasetsResource datasetsResource; - private final HomeFileTool fileStore; + private final HomeFileTool homeFileTool; private final CatalogServiceHelper catalogServiceHelper; private final DatasetCatalog datasetCatalog; private final ProjectOptionManager projectOptionManager; - private final FormatTools formatTools; @Inject public HomeResource( @@ -150,13 +146,11 @@ public HomeResource( DatasetVersionMutator datasetService, @Context SecurityContext securityContext, JobsService jobsService, - DatasetsResource datasetsResource, - HomeFileTool fileStore, + HomeFileTool homeFileTool, CatalogServiceHelper catalogServiceHelper, DatasetCatalog datasetCatalog, ProjectOptionManager projectOptionManager, CollaborationHelper collaborationService, - FormatTools formatTools, @PathParam("homeName") HomeName homeName, BufferAllocatorFactory allocatorFactory) { @@ -165,29 +159,18 @@ public HomeResource( this.datasetService = datasetService; this.securityContext = securityContext; this.jobsService = jobsService; - this.datasetsResource = datasetsResource; this.collaborationService = collaborationService; this.homeName = homeName; this.homePath = new HomePath(homeName); - this.fileStore = fileStore; + this.homeFileTool = homeFileTool; this.catalogServiceHelper = catalogServiceHelper; this.datasetCatalog = datasetCatalog; this.projectOptionManager = projectOptionManager; - this.formatTools = formatTools; - } - - protected Dataset newDataset(DatasetResourcePath resourcePath, - DatasetVersionResourcePath versionedResourcePath, - DatasetName datasetName, - String sql, - VirtualDatasetUI datasetConfig, - int jobCount) { - return Dataset.newInstance(resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, jobCount, null); } protected File newFile(String id, NamespacePath filePath, FileFormat fileFormat, Integer jobCount, - boolean isStaged, boolean isHomeFile, boolean isQueryable, DatasetType datasetType) throws Exception { - return File.newInstance(id, filePath, fileFormat, jobCount, isStaged, isHomeFile, isQueryable, null); + boolean isStaged, boolean isQueryable, DatasetType datasetType) throws Exception { + return File.newInstance(id, filePath, fileFormat, jobCount, isStaged, true, isQueryable, null); } protected Folder newFolder(FolderPath folderPath, FolderConfig folderConfig, NamespaceTree contents) throws NamespaceNotFoundException { @@ -205,6 +188,8 @@ protected NamespaceTree newNamespaceTree(List children) thro @GET @Produces(MediaType.APPLICATION_JSON) public Home getHome(@QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws NamespaceException, HomeNotFoundException, DatasetNotFoundException { + throwIfNotSupported(); + try { checkHomeSpaceExists(homePath); long dsCount = namespaceService.getDatasetCount(homePath.toNamespaceKey(), 
BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount(); @@ -219,25 +204,6 @@ public Home getHome(@QueryParam("includeContents") @DefaultValue("true") boolean } } - @GET - @Path("dataset/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - public Dataset getDataset(@PathParam("path") String path) - throws NamespaceException, FileNotFoundException, DatasetNotFoundException { - DatasetPath datasetPath = DatasetPath.fromURLPath(homeName, path); - final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey()); - final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion()); - return newDataset( - new DatasetResourcePath(datasetPath), - new DatasetVersionResourcePath(datasetPath, vds.getVersion()), - datasetPath.getDataset(), - vds.getSql(), - vds, - datasetService.getJobsCount(datasetPath.toNamespaceKey()) - ); - } - - @POST @Path("upload_start/{path: .*}") @Consumes(MediaType.MULTIPART_FORM_DATA) @@ -247,6 +213,8 @@ public File uploadFile(@PathParam("path") String path, @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, @FormDataParam("fileName") FileName fileName, @QueryParam("extension") String extension) throws Exception { + throwIfNotSupported(); + checkFileUploadPermissions(); // add some validation @@ -264,7 +232,7 @@ public File uploadFile(@PathParam("path") String path, final FileConfig config = new FileConfig(); try { // upload file to staging area - final com.dremio.io.file.Path stagingLocation = fileStore.stageFile(filePath, extension, fileInputStream); + final com.dremio.io.file.Path stagingLocation = homeFileTool.stageFile(filePath, extension, fileInputStream); config.setLocation(stagingLocation.toString()); config.setName(filePath.getLeaf().getName()); config.setCtime(System.currentTimeMillis()); @@ -274,19 +242,20 @@ public File uploadFile(@PathParam("path") String path, } catch (IOException ioe) { throw new DACException("Error writing to file at " + filePath, ioe); } - final File file = newFile(filePath.toUrlPath(), - filePath, FileFormat.getForFile(config), 0, true, true, true, + return newFile(filePath.toUrlPath(), + filePath, FileFormat.getForFile(config), 0, true, true, DatasetType.PHYSICAL_DATASET_HOME_FILE ); - return file; } @POST @Path("upload_cancel/{path: .*}") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) - public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws IOException, DACException { - fileStore.deleteFile(fileFormat.getLocation()); + public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws IOException { + throwIfNotSupported(); + + homeFileTool.deleteFile(fileFormat.getLocation()); } @POST @@ -294,6 +263,8 @@ public void cancelUploadFile(FileFormat fileFormat, @PathParam("path") String pa @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String path) throws Exception { + throwIfNotSupported(); + checkFileUploadPermissions(); final FilePath filePath = FilePath.fromURLPath(homeName, path); @@ -303,8 +274,7 @@ public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String pa .build(logger); } final String fileName = filePath.getFileName().getName(); - final com.dremio.io.file.Path finalLocation = fileStore.saveFile(fileFormat.getLocation(), filePath, fileFormat.getFileType()); - // save new name and 
location, full path + final com.dremio.io.file.Path finalLocation = homeFileTool.saveFile(fileFormat.getLocation(), filePath, fileFormat.getFileType()); fileFormat.setLocation(finalLocation.toString()); fileFormat.setName(fileName); fileFormat.setFullPath(filePath.toPathList()); @@ -318,7 +288,7 @@ public File finishUploadFile(FileFormat fileFormat, @PathParam("path") String pa filePath, fileFormat, datasetService.getJobsCount(filePath.toNamespaceKey()), - false, true, false, + false, false, DatasetType.PHYSICAL_DATASET_HOME_FILE ); } @@ -338,8 +308,9 @@ protected void checkFileUploadPermissions() { @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { + throwIfNotSupported(); - if (!fileStore.validStagingLocation(com.dremio.io.file.Path.of(fileFormat.getLocation()))) { + if (!homeFileTool.validStagingLocation(com.dremio.io.file.Path.of(fileFormat.getLocation()))) { throw new IllegalArgumentException("Invalid staging location provided"); } @@ -366,6 +337,7 @@ public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @Path @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { + throwIfNotSupported(); FilePath filePath = FilePath.fromURLPath(homeName, path); logger.debug("filePath: " + filePath.toPathString()); @@ -387,20 +359,21 @@ public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam(" @Produces(MediaType.APPLICATION_JSON) public File getFile(@PathParam("path") String path) throws Exception { + throwIfNotSupported(); + FilePath filePath = FilePath.fromURLPath(homeName, path); try { final DatasetConfig datasetConfig = namespaceService.getDataset(filePath.toNamespaceKey()); final FileConfig fileConfig = toFileConfig(datasetConfig); - final File file = newFile( + return newFile( datasetConfig.getId().getId(), filePath, FileFormat.getForFile(fileConfig), datasetService.getJobsCount(filePath.toNamespaceKey()), - false, true, + false, fileConfig.getType() != FileType.UNKNOWN, DatasetType.PHYSICAL_DATASET_HOME_FILE ); - return file; } catch (NamespaceNotFoundException nfe) { throw new FileNotFoundException(filePath, nfe); } @@ -410,11 +383,13 @@ public File getFile(@PathParam("path") String path) @Path("file/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public void deleteFile(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, DACException { - FilePath filePath = FilePath.fromURLPath(homeName, path); + throwIfNotSupported(); + if (version == null) { throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG); } + FilePath filePath = FilePath.fromURLPath(homeName, path); try { catalogServiceHelper.deleteHomeDataset(namespaceService.getDataset(filePath.toNamespaceKey()), version, filePath.toNamespaceKey().getPathComponents()); } catch (IOException ioe) { @@ -424,29 +399,13 @@ public void deleteFile(@PathParam("path") String path, @QueryParam("version") St } } - @POST - @Path("file_rename/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - public File renameFile(@PathParam("path") String path, @QueryParam("renameTo") FileName renameTo) throws Exception { - FilePath filePath = FilePath.fromURLPath(homeName, path); - final FilePath newFilePath = filePath.rename(renameTo.getName()); - final DatasetConfig 
datasetConfig = namespaceService.renameDataset(filePath.toNamespaceKey(), newFilePath.toNamespaceKey()); - final FileConfig fileConfig = toFileConfig(datasetConfig); - return newFile( - datasetConfig.getId().getId(), - newFilePath, - FileFormat.getForFile(fileConfig), - datasetService.getJobsCount(filePath.toNamespaceKey()), - false, true, false, - DatasetType.PHYSICAL_DATASET_HOME_FILE - ); - } - @GET @Path("file_format/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public FileFormatUI getFormatSettings(@PathParam("path") String path) throws FileNotFoundException, HomeNotFoundException, NamespaceException { + throwIfNotSupported(); + FilePath filePath = FilePath.fromURLPath(homeName, path); final FileConfig fileConfig = toFileConfig(namespaceService.getDataset(filePath.toNamespaceKey())); return new FileFormatUI(FileFormat.getForFile(fileConfig), filePath); @@ -458,6 +417,8 @@ public FileFormatUI getFormatSettings(@PathParam("path") String path) @Consumes(MediaType.APPLICATION_JSON) public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, HomeNotFoundException, NamespaceException { + throwIfNotSupported(); + FilePath filePath = FilePath.fromURLPath(homeName, path); // merge file configs final DatasetConfig existingDSConfig = namespaceService.getDataset(filePath.toNamespaceKey()); @@ -478,6 +439,8 @@ public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path") @Path("/folder/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public Folder getFolder(@PathParam("path") String path, @QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws Exception { + throwIfNotSupported(); + FolderPath folderPath = FolderPath.fromURLPath(homeName, path); try { final FolderConfig folderConfig = namespaceService.getFolder(folderPath.toNamespaceKey()); @@ -494,6 +457,8 @@ public Folder getFolder(@PathParam("path") String path, @QueryParam("includeCont @Path("/folder/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public void deleteFolder(@PathParam("path") String path, @QueryParam("version") String version) throws NamespaceException, FolderNotFoundException { + throwIfNotSupported(); + FolderPath folderPath = FolderPath.fromURLPath(homeName, path); if (version == null) { throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG); @@ -513,6 +478,8 @@ public void deleteFolder(@PathParam("path") String path, @QueryParam("version") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public Folder createFolder(FolderName name, @PathParam("path") String path) throws Exception { + throwIfNotSupported(); + String fullPath = PathUtils.toFSPathString(Arrays.asList(path, name.toString())); FolderPath folderPath = FolderPath.fromURLPath(homeName, fullPath); @@ -528,17 +495,41 @@ public Folder createFolder(FolderName name, @PathParam("path") String path) thro return newFolder(folderPath, folderConfig, null); } - @POST - @Path("/new_untitled_from_file/{path: .*}") + @GET + @Path("dataset/{path: .*}") @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.APPLICATION_JSON) - public InitialPreviewResponse createUntitledFromHomeFile( - @PathParam("path") String path, - @QueryParam("limit") Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - return datasetsResource.createUntitledFromHomeFile(homeName, path, limit); + public Dataset getDataset(@PathParam("path") String path) + throws 
NamespaceException, FileNotFoundException, DatasetNotFoundException { + DatasetPath datasetPath = DatasetPath.fromURLPath(homeName, path); + final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey()); + final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion()); + return newDataset( + new DatasetResourcePath(datasetPath), + new DatasetVersionResourcePath(datasetPath, vds.getVersion()), + datasetPath.getDataset(), + vds.getSql(), + vds, + datasetService.getJobsCount(datasetPath.toNamespaceKey()) + ); } + protected Dataset newDataset(DatasetResourcePath resourcePath, + DatasetVersionResourcePath versionedResourcePath, + DatasetName datasetName, + String sql, + VirtualDatasetUI datasetConfig, + int jobCount) { + return Dataset.newInstance(resourcePath, versionedResourcePath, datasetName, sql, datasetConfig, jobCount, null); + } + + protected void checkHomeSpaceExists(HomePath homePath) { } + + private void throwIfNotSupported() throws NotSupportedException { + CatalogFeatures features = CatalogFeatures.get(projectOptionManager); + if (!features.isFeatureEnabled(CatalogFeatures.Feature.HOME)) { + throw new NotSupportedException(new HomeResourcePath(homePath.getHomeName())); + } + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java index 17ea891446..2ed48255d5 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/JobResource.java @@ -138,7 +138,7 @@ public NotificationResponse cancel() throws JobResourceNotFoundException { .build()); return new NotificationResponse(ResponseType.OK, "Job cancellation requested"); } catch (JobNotFoundException e) { - if (e.getErrorType() == JobNotFoundException.causeOfFailure.CANCEL_FAILED) { + if (e.getErrorType() == JobNotFoundException.CauseOfFailure.CANCEL_FAILED) { throw new ConflictException(String.format("Job %s may have completed and cannot be canceled.", jobId.getId())); } else { throw JobResourceNotFoundException.fromJobNotFoundException(e); diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java new file mode 100644 index 0000000000..e81e31c713 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/resource/NessieSourceResource.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.resource; + +import static com.dremio.exec.ExecConstants.NESSIE_SOURCE_API; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; + +import javax.annotation.security.RolesAllowed; +import javax.inject.Inject; +import javax.ws.rs.NotFoundException; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; + +import org.projectnessie.client.api.NessieApi; +import org.projectnessie.client.api.NessieApiV2; + +import com.dremio.common.exceptions.UserException; +import com.dremio.dac.annotations.Secured; +import com.dremio.dac.service.errors.NessieSourceNotValidException; +import com.dremio.dac.service.errors.NessieSourceResourceException; +import com.dremio.dac.service.errors.SourceNotFoundException; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.NessieApiProvider; +import com.dremio.options.OptionManager; +import com.dremio.options.Options; +import com.dremio.services.nessie.proxy.ProxyV2TreeResource; +import com.google.common.base.Preconditions; + +/** + * Resource for providing APIs for Nessie as a source. + */ +@Secured +@RolesAllowed({"admin", "user"}) +@Path("/v2/source/{sourceName}/trees") +@Options +public class NessieSourceResource { + + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NessieSourceResource.class); + private CatalogService catalogService; + private OptionManager optionManager; + + @Inject + public NessieSourceResource( + CatalogService catalogService, + OptionManager optionManager + ) { + this.catalogService = catalogService; + this.optionManager = optionManager; + } + + @Path("/") + public ProxyV2TreeResource handle(@PathParam("sourceName") String sourceName) { + if (optionManager.getOption(NESSIE_SOURCE_API)) { + NessieApiProvider provider; + try { + provider = catalogService.getSource(sourceName); + } catch (UserException namespaceNotFoundException) { + logger.error(String.format("Cannot find source: %s", sourceName)); + throw new SourceNotFoundException(sourceName, namespaceNotFoundException); + } catch (ClassCastException classCastException) { + logger.error(String.format("%s is not a versioned source", sourceName)); + throw new NessieSourceNotValidException(classCastException, String.format("%s is not a versioned source", sourceName)); + } catch (Exception exception) { + logger.error(String.format("Unexpected error while resolving source: %s", sourceName), exception); + throw new NessieSourceResourceException(exception, "Unexpected Error", BAD_REQUEST); + } + NessieApi nessieApi = provider.getNessieApi(); + Preconditions.checkArgument(nessieApi instanceof NessieApiV2, "nessieApi provided by NessieApiProvider is not V2. V2 is required."); + return getTreeResource(nessieApi); + } else { + logger.error(String.format("Using nessie-as-a-source is disabled. The support key '%s' must be enabled.", NESSIE_SOURCE_API.getOptionName())); + throw new NotFoundException(String.format("Using nessie-as-a-source is disabled.
The support key '%s' must be enabled.", NESSIE_SOURCE_API.getOptionName())); + } + } + + protected ProxyV2TreeResource getTreeResource(NessieApi nessieApi) { + return new V2TreeResource((NessieApiV2) nessieApi); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java new file mode 100644 index 0000000000..bab5fd3f2f --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/resource/NessieTestSourceResource.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.resource; + +import javax.inject.Inject; + +import org.projectnessie.client.api.NessieApi; +import org.projectnessie.client.api.NessieApiV2; + +import com.dremio.exec.store.CatalogService; +import com.dremio.options.OptionManager; +import com.dremio.services.nessie.proxy.ProxyV2TreeResource; + +/** + * Test-only variant of NessieSourceResource that serves the unrestricted proxy tree resource. + */ + +public class NessieTestSourceResource extends NessieSourceResource { + + @Inject + public NessieTestSourceResource(CatalogService catalogService, OptionManager optionManager) { + super(catalogService, optionManager); + } + + @Override + protected ProxyV2TreeResource getTreeResource(NessieApi nessieApi) { + return new ProxyV2TreeResource((NessieApiV2) nessieApi); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java deleted file mode 100644 index 5dc8580682..0000000000 --- a/dac/backend/src/main/java/com/dremio/dac/resource/PutSpaceResource.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package com.dremio.dac.resource; - -import javax.annotation.security.RolesAllowed; -import javax.inject.Inject; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - -import com.dremio.dac.annotations.RestResource; -import com.dremio.dac.annotations.Secured; -import com.dremio.dac.model.spaces.Space; -import com.dremio.dac.model.spaces.SpaceName; -import com.dremio.dac.model.spaces.SpacePath; -import com.dremio.service.namespace.BoundedDatasetCount; -import com.dremio.service.namespace.NamespaceException; -import com.dremio.service.namespace.NamespaceService; -import com.dremio.service.namespace.proto.EntityId; -import com.dremio.service.namespace.space.proto.SpaceConfig; -import com.dremio.service.users.UserNotFoundException; - -/** - * Rest resource for spaces. - */ -@RestResource -@Secured -@RolesAllowed({"admin", "user"}) -@Path("/space/{spaceName}") -public class PutSpaceResource { - private final NamespaceService namespaceService; - private final SpacePath spacePath; - - @Inject - public PutSpaceResource( - NamespaceService namespaceService, - @PathParam("spaceName") SpaceName spaceName) { - this.namespaceService = namespaceService; - this.spacePath = new SpacePath(spaceName); - } - - public static SpaceConfig addOrUpdateSpace(NamespaceService service, - SpacePath spacePath, Space space) - throws NamespaceException, UserNotFoundException { - - SpaceConfig spaceConfig = new SpaceConfig() - .setId(space.getId() != null ? new EntityId(space.getId()) : null) - .setName(space.getName()) - .setDescription(space.getDescription()) - .setTag(space.getVersion()); - - service.addOrUpdateSpace(spacePath.toNamespaceKey(), spaceConfig); - return service.getSpace(spacePath.toNamespaceKey()); - } - - @PUT - @Produces(MediaType.APPLICATION_JSON) - public Space putSpace(Space space) throws NamespaceException, UserNotFoundException { - SpaceConfig spaceConfig = PutSpaceResource.addOrUpdateSpace(namespaceService, spacePath, space); - return Space.newInstance(spaceConfig, null, namespaceService.getDatasetCount(spacePath.toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount()); - } -} diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java index 786eac8d89..49cfd85c03 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/ResourceTreeResource.java @@ -272,5 +272,4 @@ public List getSources() throws NamespaceException, Unsuppor } return resources; } - } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java index 6751fe9d5a..8e3cdd093d 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/SQLResource.java @@ -15,12 +15,6 @@ */ package com.dremio.dac.resource; -import static com.dremio.common.utils.SqlUtils.quoteIdentifier; - -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.Consumes; @@ -31,27 +25,21 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.SecurityContext; -import org.apache.calcite.sql.advise.SqlAdvisor; -import 
org.apache.calcite.sql.validate.SqlMoniker; -import org.apache.calcite.sql.validate.SqlMonikerType; - import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; -import com.dremio.dac.explore.model.AnalyzeRequest; import com.dremio.dac.explore.model.CreateFromSQL; -import com.dremio.dac.explore.model.SuggestionResponse; -import com.dremio.dac.explore.model.ValidationResponse; import com.dremio.dac.model.job.JobDataFragment; import com.dremio.dac.model.job.JobDataWrapper; -import com.dremio.dac.model.job.QueryError; import com.dremio.dac.server.BufferAllocatorFactory; import com.dremio.dac.service.autocomplete.AutocompleteEngineProxy; +import com.dremio.dac.service.autocomplete.AutocompleteV2Proxy; +import com.dremio.dac.service.catalog.CatalogServiceHelper; import com.dremio.dac.util.JobRequestUtil; -import com.dremio.exec.planner.sql.SQLAnalyzer; -import com.dremio.exec.planner.sql.SQLAnalyzerFactory; import com.dremio.exec.server.SabotContext; import com.dremio.exec.server.options.ProjectOptionManager; import com.dremio.service.autocomplete.AutocompleteRequestImplementation; +import com.dremio.service.autocomplete.AutocompleteV2Request; +import com.dremio.service.autocomplete.AutocompleteV2Response; import com.dremio.service.autocomplete.completions.Completions; import com.dremio.service.job.QueryType; import com.dremio.service.job.SqlQuery; @@ -60,11 +48,10 @@ import com.dremio.service.jobs.CompletionListener; import com.dremio.service.jobs.JobsService; import com.dremio.service.namespace.NamespaceException; -import com.google.common.base.Joiner; import com.google.common.base.Preconditions; /** - * run external sql + * The REST resource that serves a query API, SQL autocomplete, & a functions list. */ @RestResource @Secured @@ -76,6 +63,7 @@ public class SQLResource extends BaseResourceWithAllocator { private final SabotContext sabotContext; private final ProjectOptionManager projectOptionManager; private final FunctionsListService functionsListService; + private final CatalogServiceHelper catalogServiceHelper; @Inject public SQLResource( @@ -83,7 +71,8 @@ public SQLResource( JobsService jobs, SecurityContext securityContext, BufferAllocatorFactory allocatorFactory, - ProjectOptionManager projectOptionManager) { + ProjectOptionManager projectOptionManager, + CatalogServiceHelper catalogServiceHelper) { super(allocatorFactory); this.jobs = jobs; this.securityContext = securityContext; @@ -93,11 +82,13 @@ public SQLResource( sabotContext, securityContext, projectOptionManager); + this.catalogServiceHelper = catalogServiceHelper; } @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) + @Deprecated public JobDataFragment query(CreateFromSQL sql) { final SqlQuery query = JobRequestUtil.createSqlQuery(sql.getSql(), sql.getContext(), securityContext.getUserPrincipal().getName()); // Pagination is not supported in this API, so we need to truncate the results to 500 records @@ -108,46 +99,6 @@ public JobDataFragment query(CreateFromSQL sql) { .truncate(getOrCreateAllocator("query"), 500); } - @POST - @Path("/analyze/suggest") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public SuggestionResponse suggestSQL(AnalyzeRequest analyzeRequest) { - final String sql = analyzeRequest.getSql(); - final List context = analyzeRequest.getContext(); - final int cursorPosition = analyzeRequest.getCursorPosition(); - - // Setup dependencies and execute suggestion acquisition - SQLAnalyzer SQLAnalyzer = - 
SQLAnalyzerFactory.createSQLAnalyzer( - securityContext.getUserPrincipal().getName(), sabotContext, context, true, projectOptionManager); - - List sqlEditorHints = SQLAnalyzer.suggest(sql, cursorPosition); - - // Build response object and return - return buildSuggestionResponse(sqlEditorHints); - } - - @POST - @Path("/analyze/validate") - @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) - public ValidationResponse validateSQL(AnalyzeRequest analyzeRequest) { - - final String sql = analyzeRequest.getSql(); - final List context = analyzeRequest.getContext(); - - // Setup dependencies and execute validation - SQLAnalyzer SQLAnalyzer = - SQLAnalyzerFactory.createSQLAnalyzer( - securityContext.getUserPrincipal().getName(), sabotContext, context, false, projectOptionManager); - - List validationErrors = SQLAnalyzer.validate(sql); - - // Build response object and return - return buildValidationResponse(validationErrors); - } - @POST @Path("/autocomplete") @Consumes(MediaType.APPLICATION_JSON) @@ -164,64 +115,17 @@ public Completions getCompletions(AutocompleteRequestImplementation request) thr request.getCursor()); } - /** - * Builds the response object for query suggestions. - * - * @param suggestionList The suggestion list returned from the SqlAdvisor. - * @return The built SuggestionResponse object or null if there are no suggestions. - */ - public SuggestionResponse buildSuggestionResponse(List suggestionList) { - - // Return empty response in REST request - if (suggestionList == null || suggestionList.isEmpty()) { - return null; - } - - // Create and populate suggestion response list - List suggestions = new ArrayList<>(); - for (SqlMoniker hint : suggestionList) { - - // Quote the identifiers if they are not keywords or functions, - // and are required to be quoted. - List qualifiedNames = hint.getFullyQualifiedNames(); - if ((hint.getType() != SqlMonikerType.KEYWORD) && (hint.getType() != SqlMonikerType.FUNCTION)) { - qualifiedNames = qualifiedNames.stream().map(name -> quoteIdentifier(name)).collect(Collectors.toList()); - } - - suggestions.add( - new SuggestionResponse.Suggestion(Joiner.on(".").join(qualifiedNames), hint.getType().name())); - } - - SuggestionResponse response = new SuggestionResponse(suggestions); - return response; - } - - /** - * Builds the response object for query validation. - * - * @param errorList The list of query errors returned from the SqlAdvisor. - * @return The built ValidationResponse object or null if there are no available validation errors. 
- */ - protected ValidationResponse buildValidationResponse(List errorList) { - - // Return empty response in REST request - if (errorList == null || errorList.isEmpty()) { - return null; - } - - // Create and populate error response list - List sqlErrors = new ArrayList<>(); - for (SqlAdvisor.ValidateErrorInfo error : errorList) { - sqlErrors.add( - new QueryError(error.getMessage(), - new QueryError.Range(error.getStartLineNum(), - error.getStartColumnNum(), - error.getEndLineNum() + 1, - error.getEndColumnNum() + 1))); - } + @POST + @Path("/autocomplete/v2") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public AutocompleteV2Response getSuggestions(AutocompleteV2Request request) { + Preconditions.checkNotNull(request); - ValidationResponse response = new ValidationResponse(sqlErrors); - return response; + return AutocompleteV2Proxy.getSuggestions( + catalogServiceHelper, + request + ); } @GET diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java index 1b8a118ced..30cf1e4776 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/SourceResource.java @@ -14,6 +14,7 @@ * limitations under the License. */ package com.dremio.dac.resource; + import java.io.IOException; import java.security.AccessControlException; import java.util.Arrays; @@ -39,17 +40,14 @@ import com.dremio.common.utils.PathUtils; import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; -import com.dremio.dac.explore.DatasetsResource; import com.dremio.dac.explore.QueryExecutor; import com.dremio.dac.explore.model.FileFormatUI; -import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.model.common.NamespacePath; import com.dremio.dac.model.folder.Folder; import com.dremio.dac.model.folder.FolderName; import com.dremio.dac.model.folder.SourceFolderPath; import com.dremio.dac.model.job.JobDataFragment; import com.dremio.dac.model.sources.FormatTools; -import com.dremio.dac.model.sources.PhysicalDataset; import com.dremio.dac.model.sources.PhysicalDatasetPath; import com.dremio.dac.model.sources.SourceName; import com.dremio.dac.model.sources.SourcePath; @@ -57,9 +55,6 @@ import com.dremio.dac.server.BufferAllocatorFactory; import com.dremio.dac.server.GenericErrorMessage; import com.dremio.dac.service.errors.ClientErrorException; -import com.dremio.dac.service.errors.DatasetNotFoundException; -import com.dremio.dac.service.errors.DatasetVersionNotFoundException; -import com.dremio.dac.service.errors.NewDatasetQueryException; import com.dremio.dac.service.errors.PhysicalDatasetNotFoundException; import com.dremio.dac.service.errors.SourceFileNotFoundException; import com.dremio.dac.service.errors.SourceFolderNotFoundException; @@ -82,6 +77,7 @@ import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.dremio.service.namespace.file.FileFormat; +import com.dremio.service.namespace.file.proto.UnknownFileConfig; import com.dremio.service.namespace.physicaldataset.proto.PhysicalDatasetConfig; import com.dremio.service.namespace.source.proto.SourceConfig; import com.dremio.service.reflection.ReflectionAdministrationService; @@ -102,8 +98,7 @@ public class SourceResource extends BaseResourceWithAllocator { private final SourceName sourceName; private final SecurityContext 
securityContext; private final SourcePath sourcePath; - private final DatasetsResource datasetsResource; - private final ConnectionReader cReader; + private final ConnectionReader connectionReader; private final SourceCatalog sourceCatalog; private final FormatTools formatTools; private final ContextService context; @@ -116,8 +111,7 @@ public SourceResource( @PathParam("sourceName") SourceName sourceName, QueryExecutor executor, SecurityContext securityContext, - DatasetsResource datasetsResource, - ConnectionReader cReader, + ConnectionReader connectionReader, SourceCatalog sourceCatalog, FormatTools formatTools, ContextService context, @@ -129,17 +123,16 @@ public SourceResource( this.sourceService = sourceService; this.sourceName = sourceName; this.securityContext = securityContext; - this.datasetsResource = datasetsResource; this.sourcePath = new SourcePath(sourceName); this.executor = executor; - this.cReader = cReader; + this.connectionReader = connectionReader; this.sourceCatalog = sourceCatalog; this.formatTools = formatTools; this.context = context; } protected SourceUI newSource(SourceConfig config) throws Exception { - return SourceUI.get(config, cReader); + return SourceUI.get(config, connectionReader); } @GET @@ -150,15 +143,15 @@ public SourceUI getSource( @QueryParam("refValue") String refValue) throws Exception { try { - final SourceConfig config = namespaceService.getSource(sourcePath.toNamespaceKey()); + final SourceConfig sourceConfig = namespaceService.getSource(sourcePath.toNamespaceKey()); final SourceState sourceState = sourceService.getSourceState(sourcePath.getSourceName().getName()); if (sourceState == null) { throw new SourceNotFoundException(sourcePath.getSourceName().getName()); } - final BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(new NamespaceKey(config.getName()), + final BoundedDatasetCount datasetCount = namespaceService.getDatasetCount(new NamespaceKey(sourceConfig.getName()), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH); - final SourceUI source = newSource(config) + final SourceUI source = newSource(sourceConfig) .setNumberOfDatasets(datasetCount.getCount()); source.setDatasetCountBounded(datasetCount.isCountBound() || datasetCount.isTimeBound()); @@ -173,7 +166,7 @@ public SourceUI getSource( source.setContents( sourceService.listSource( sourcePath.getSourceName(), - namespaceService.getSource(sourcePath.toNamespaceKey()), + sourceConfig, securityContext.getUserPrincipal().getName(), refType, refValue)); @@ -256,16 +249,6 @@ public Folder createFolder( refValue); } - @GET - @Path("/dataset/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - public PhysicalDataset getPhysicalDataset(@PathParam("path") String path) - throws SourceNotFoundException, NamespaceException { - sourceService.checkSourceExists(sourceName); - PhysicalDatasetPath datasetPath = PhysicalDatasetPath.fromURLPath(sourceName, path); - return sourceService.getPhysicalDataset(sourceName, datasetPath); - } - private boolean useFastPreview() { return context.get().getOptionManager().getOption(FormatTools.FAST_PREVIEW); } @@ -276,13 +259,13 @@ private boolean useFastPreview() { public File getFile(@PathParam("path") String path) throws SourceNotFoundException, NamespaceException, PhysicalDatasetNotFoundException { if (useFastPreview()) { - return sourceService.getFileDataset(sourceName, asFilePath(path), null); + return sourceService.getFileDataset(asFilePath(path), null); } sourceService.checkSourceExists(sourceName); 
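+ // Resolve the URL path to a file within this source, then load its dataset representation.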
final SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path); - return sourceService.getFileDataset(sourceName, filePath, null); + return sourceService.getFileDataset(filePath, null); } /** @@ -318,7 +301,7 @@ public FileFormatUI getFileFormatSettings(@PathParam("path") String path) SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path); FileFormat fileFormat; try { - final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(sourceName, filePath); + final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(filePath); fileFormat = FileFormat.getForFile(physicalDatasetConfig.getFormatSettings()); fileFormat.setVersion(physicalDatasetConfig.getTag()); } catch (PhysicalDatasetNotFoundException nfe) { @@ -334,6 +317,7 @@ public FileFormatUI getFileFormatSettings(@PathParam("path") String path) @Consumes(MediaType.APPLICATION_JSON) public FileFormatUI saveFormatSettings(FileFormat fileFormat, @PathParam("path") String path) throws NamespaceException, SourceNotFoundException { + checkUnknownFileConfig(fileFormat); SourceFilePath filePath = SourceFilePath.fromURLPath(sourceName, path); sourceService.checkSourceExists(filePath.getSourceName()); fileFormat.setFullPath(filePath.toPathList()); @@ -391,7 +375,7 @@ public void deleteFileFormat(@PathParam("path") String path, } try { - sourceService.deletePhysicalDataset(sourceName, new PhysicalDatasetPath(filePath), version, CatalogUtil.getDeleteCallback(context.get().getOrphanageFactory().get())); + sourceService.deletePhysicalDataset(sourceName, new PhysicalDatasetPath(filePath), version, CatalogUtil.getDeleteCallback(context.get().getOrphanageFactory().get())); } catch (ConcurrentModificationException e) { throw ResourceUtil.correctBadVersionErrorMessage(e, "file format", path); } @@ -413,7 +397,7 @@ public FileFormatUI getFolderFormat(@PathParam("path") String path) FileFormat fileFormat; try { - final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(sourceName, folderPath); + final PhysicalDatasetConfig physicalDatasetConfig = sourceService.getFilesystemPhysicalDataset(folderPath); fileFormat = FileFormat.getForFolder(physicalDatasetConfig.getFormatSettings()); fileFormat.setVersion(physicalDatasetConfig.getTag()); } catch (PhysicalDatasetNotFoundException nfe) { @@ -428,6 +412,7 @@ public FileFormatUI getFolderFormat(@PathParam("path") String path) @Consumes(MediaType.APPLICATION_JSON) public FileFormatUI saveFolderFormat(FileFormat fileFormat, @PathParam("path") String path) throws NamespaceException, SourceNotFoundException { + checkUnknownFileConfig(fileFormat); SourceFolderPath folderPath = SourceFolderPath.fromURLPath(sourceName, path); sourceService.checkSourceExists(folderPath.getSourceName()); fileFormat.setFullPath(folderPath.toPathList()); @@ -460,37 +445,15 @@ public void deleteFolderFormat(@PathParam("path") String path, } } - @POST - @Path("new_untitled_from_file/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.APPLICATION_JSON) - public InitialPreviewResponse createUntitledFromSourceFile( - @PathParam("path") String path, - @QueryParam("limit") Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - return datasetsResource.createUntitledFromSourceFile(sourceName, path, limit); - } - - @POST - @Path("new_untitled_from_folder/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - 
@Consumes(MediaType.APPLICATION_JSON) - public InitialPreviewResponse createUntitledFromSourceFolder( - @PathParam("path") String path, - @QueryParam("limit") Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - return datasetsResource.createUntitledFromSourceFolder(sourceName, path, limit); - } - - @POST - @Path("new_untitled_from_physical_dataset/{path: .*}") - @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.APPLICATION_JSON) - public InitialPreviewResponse createUntitledFromPhysicalDataset( - @PathParam("path") String path, - @QueryParam("limit") Integer limit) - throws DatasetNotFoundException, DatasetVersionNotFoundException, NamespaceException, NewDatasetQueryException { - return datasetsResource.createUntitledFromPhysicalDataset(sourceName, path, limit); + /** + * Checks whether the format was set to UNKNOWN; if so, the save is rejected with an error message. + * @param fileFormat the format configuration selected in the format dropdown when "save" was pressed + * @throws ClientErrorException if the format is UNKNOWN + */ + private void checkUnknownFileConfig(FileFormat fileFormat) throws ClientErrorException { + if (fileFormat instanceof UnknownFileConfig) { + throw new ClientErrorException(GenericErrorMessage.UNKNOWN_FORMAT_MSG); + } } } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java index 5239af1987..d8952c67a2 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/SourcesResource.java @@ -28,9 +28,6 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.dremio.dac.annotations.RestResource; import com.dremio.dac.annotations.Secured; import com.dremio.dac.model.sources.SourceUI; @@ -51,8 +48,6 @@ @Path("/sources") @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON) public class SourcesResource { - private static final Logger logger = LoggerFactory.getLogger(SourcesResource.class); - private final NamespaceService namespaceService; private final SourceService sourceService; diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java index 35eeda4090..2dc68a4378 100644 --- a/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/resource/SpaceResource.java @@ -18,15 +18,12 @@ import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SPACE; import java.security.AccessControlException; -import java.util.ConcurrentModificationException; import java.util.List; import javax.annotation.security.RolesAllowed; import javax.inject.Inject; -import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; -import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; @@ -43,19 +40,20 @@ import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetResourcePath; import com.dremio.dac.explore.model.DatasetVersionResourcePath; +import com.dremio.dac.model.common.ResourcePath; import com.dremio.dac.model.namespace.NamespaceTree; import com.dremio.dac.model.spaces.Space; import com.dremio.dac.model.spaces.SpaceName; import com.dremio.dac.model.spaces.SpacePath; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI;
-import com.dremio.dac.server.GenericErrorMessage; import com.dremio.dac.service.collaboration.CollaborationHelper; import com.dremio.dac.service.datasets.DatasetVersionMutator; -import com.dremio.dac.service.errors.ClientErrorException; import com.dremio.dac.service.errors.DatasetNotFoundException; import com.dremio.dac.service.errors.FileNotFoundException; +import com.dremio.dac.service.errors.NotSupportedException; import com.dremio.dac.service.errors.SpaceNotFoundException; -import com.dremio.dac.util.ResourceUtil; +import com.dremio.exec.catalog.CatalogFeatures; +import com.dremio.options.OptionManager; import com.dremio.service.namespace.BoundedDatasetCount; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceNotFoundException; @@ -79,18 +77,21 @@ public class SpaceResource { private final CollaborationHelper collaborationService; private final SpaceName spaceName; private final SpacePath spacePath; + private final OptionManager optionManager; @Inject public SpaceResource( NamespaceService namespaceService, DatasetVersionMutator datasetService, CollaborationHelper collaborationService, - @PathParam("spaceName") SpaceName spaceName) { + @PathParam("spaceName") SpaceName spaceName, + OptionManager optionManager) { this.namespaceService = namespaceService; this.datasetService = datasetService; this.collaborationService = collaborationService; this.spaceName = spaceName; this.spacePath = new SpacePath(spaceName); + this.optionManager = optionManager; } protected Space newSpace(SpaceConfig spaceConfig, NamespaceTree contents, int datasetCount) throws Exception { @@ -101,6 +102,8 @@ protected Space newSpace(SpaceConfig spaceConfig, NamespaceTree contents, int da @Produces(MediaType.APPLICATION_JSON) public Space getSpace(@QueryParam("includeContents") @DefaultValue("true") boolean includeContents) throws Exception { + throwIfNotSupported(); + try { final SpaceConfig config = namespaceService.getSpace(spacePath.toNamespaceKey()); final int datasetCount = namespaceService.getDatasetCount(spacePath.toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount(); @@ -117,39 +120,13 @@ public Space getSpace(@QueryParam("includeContents") @DefaultValue("true") boole } } - @DELETE - @Produces(MediaType.APPLICATION_JSON) - @Deprecated - public void deleteSpace(@QueryParam("version") String version) throws NamespaceException, SpaceNotFoundException, UserException { - if (version == null) { - throw new ClientErrorException(GenericErrorMessage.MISSING_VERSION_PARAM_MSG); - } - - try { - namespaceService.deleteSpace(spacePath.toNamespaceKey(), version); - } catch (NamespaceNotFoundException nfe) { - throw new SpaceNotFoundException(spacePath.getSpaceName().getName(), nfe); - } catch (ConcurrentModificationException e) { - throw ResourceUtil.correctBadVersionErrorMessage(e, "space", spaceName.getName()); - } - } - - @POST - @Path("/rename") - @Produces(MediaType.APPLICATION_JSON) - @Deprecated // UI does not allow to rename a space - public Space renameSpace(@QueryParam("renameTo") String renameTo) - throws NamespaceException, SpaceNotFoundException { - throw UserException.unsupportedError() - .message("Renaming a space is not supported") - .build(logger); - } - @GET @Path("dataset/{path: .*}") @Produces(MediaType.APPLICATION_JSON) public Dataset getDataset(@PathParam("path") String path) - throws NamespaceException, FileNotFoundException, DatasetNotFoundException { + throws NamespaceException, 
FileNotFoundException, DatasetNotFoundException, NotSupportedException { + throwIfNotSupported(); + DatasetPath datasetPath = DatasetPath.fromURLPath(spaceName, path); final DatasetConfig datasetConfig = namespaceService.getDataset(datasetPath.toNamespaceKey()); final VirtualDatasetUI vds = datasetService.get(datasetPath, datasetConfig.getVirtualDataset().getVersion()); @@ -167,4 +144,15 @@ public Dataset getDataset(@PathParam("path") String path) protected NamespaceTree newNamespaceTree(List children) throws DatasetNotFoundException, NamespaceException { return NamespaceTree.newInstance(datasetService, children, SPACE, collaborationService); } + + protected OptionManager getOptionManager() { + return optionManager; + } + + protected void throwIfNotSupported() { + CatalogFeatures features = CatalogFeatures.get(optionManager); + if (!features.isFeatureEnabled(CatalogFeatures.Feature.SPACE)) { + throw new NotSupportedException(ResourcePath.defaultImpl("/space")); + } + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java deleted file mode 100644 index fd9132b5cd..0000000000 --- a/dac/backend/src/main/java/com/dremio/dac/resource/SpacesResource.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.dac.resource; - - -import javax.annotation.security.RolesAllowed; -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - -import com.dremio.dac.annotations.RestResource; -import com.dremio.dac.annotations.Secured; -import com.dremio.dac.model.spaces.Space; -import com.dremio.dac.model.spaces.SpacePath; -import com.dremio.dac.model.spaces.Spaces; -import com.dremio.service.namespace.BoundedDatasetCount; -import com.dremio.service.namespace.NamespaceException; -import com.dremio.service.namespace.NamespaceNotFoundException; -import com.dremio.service.namespace.NamespaceService; -import com.dremio.service.namespace.space.proto.SpaceConfig; - -/** - * Resource for information about spaces. 
- */ -@RestResource -@Secured -@RolesAllowed({"admin", "user"}) -@Path("/spaces") -@Deprecated -public class SpacesResource { - static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SpacesResource.class); - - private final NamespaceService namespaceService; - - @Inject - public SpacesResource(NamespaceService namespaceService) { - this.namespaceService = namespaceService; - } - - @GET - @Produces(MediaType.APPLICATION_JSON) - public Spaces getSpaces() throws Exception { - final Spaces spaces = new Spaces(); - for (SpaceConfig spaceConfig : namespaceService.getSpaces()) { - int datasetCount = 0; - - try { - datasetCount = namespaceService.getDatasetCount(new SpacePath(spaceConfig.getName()).toNamespaceKey(), BoundedDatasetCount.SEARCH_TIME_LIMIT_MS, BoundedDatasetCount.COUNT_LIMIT_TO_STOP_SEARCH).getCount(); - } catch (IllegalArgumentException e) { - logger.warn("Could not load dataset count for {} because it has a invalid name: {}", spaceConfig.getName(), e.getMessage()); - } catch (NamespaceException e) { - logger.warn("Could not load dataset count for {}: {}", spaceConfig.getName(), e.getMessage()); - } - - try { - // we catch exceptions here so the user can still see their other Spaces - spaces.add(newSpace(spaceConfig, datasetCount)); - } catch (NamespaceNotFoundException e) { - logger.warn("Skipping Space {} because namespace not found: {}", spaceConfig.getName(), e.getMessage()); - } - } - return spaces; - } - - protected Space newSpace(SpaceConfig spaceConfig, int datasetCount) throws Exception { - return Space.newInstance(spaceConfig, null, datasetCount); - } -} diff --git a/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java b/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java new file mode 100644 index 0000000000..f24d7de1d5 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/resource/V2TreeResource.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.resource; + +import javax.inject.Inject; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; + +import org.projectnessie.api.v2.params.Transplant; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.CommitResponse; +import org.projectnessie.model.MergeResponse; +import org.projectnessie.model.Operations; +import org.projectnessie.model.Reference; +import org.projectnessie.model.SingleReferenceResponse; +import org.projectnessie.model.ser.Views; + +import com.dremio.services.nessie.proxy.ProxyV2TreeResource; +import com.fasterxml.jackson.annotation.JsonView; + +/** + * Nessie-specific extension of {@link ProxyV2TreeResource}. + * Disables certain API calls that are not needed in the NaaS proxy. 
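+ * + * <p>For example (illustrative; the route shape follows the Nessie REST API v2), a commit attempted through the proxy such as {@code POST /v2/source/{sourceName}/trees/{branch}/history/commit} is rejected with {@code 403 Forbidden} instead of being forwarded to Nessie.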
+ */ + +public class V2TreeResource extends ProxyV2TreeResource { + + @Inject + public V2TreeResource(NessieApiV2 api) { + super(api); + } + + @Override + @JsonView(Views.V2.class) + public SingleReferenceResponse assignReference(Reference.ReferenceType type, String ref, Reference assignTo) throws NessieConflictException, NessieNotFoundException { + throw new WebApplicationException("assignReference is not supported", Response.Status.FORBIDDEN); + } + + @Override + @JsonView(Views.V2.class) + public MergeResponse transplantCommitsIntoBranch(String branch, Transplant transplant) throws NessieConflictException, NessieNotFoundException { + throw new WebApplicationException("transplantCommitsIntoBranch is not supported", Response.Status.FORBIDDEN); + } + + @Override + @JsonView(Views.V2.class) + public CommitResponse commitMultipleOperations(String branch, Operations operations) throws NessieConflictException, NessieNotFoundException { + throw new WebApplicationException("commitMultipleOperations is not supported", Response.Status.FORBIDDEN); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java b/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java index 4acd3de67b..8a1758e958 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/APIServer.java @@ -23,7 +23,6 @@ import com.dremio.common.perf.Timer; import com.dremio.common.scanner.persistence.ScanResult; import com.dremio.dac.annotations.APIResource; -import com.fasterxml.jackson.jaxrs.base.JsonMappingExceptionMapper; import com.fasterxml.jackson.jaxrs.base.JsonParseExceptionMapper; /** @@ -46,6 +45,9 @@ protected void init(ScanResult result) { register(resource); } + // Enable request contextualization. + register(new AuthenticationBinder()); + // FEATURES register(DACAuthFilterFeature.class); register(DACJacksonJaxbJsonFeature.class); @@ -53,7 +55,7 @@ protected void init(ScanResult result) { // EXCEPTION MAPPERS register(JsonParseExceptionMapper.class); - register(JsonMappingExceptionMapper.class); + register(RestApiJsonMappingExceptionMapper.class); // PROPERTIES property(ServerProperties.RESPONSE_SET_STATUS_OVER_SEND_ERROR, "true"); diff --git a/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java b/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java new file mode 100644 index 0000000000..013301ba3b --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/server/AuthenticationBinder.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.dac.server; + +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.server.spi.internal.ResourceMethodInvocationHandlerProvider; + +/** + * HK2 binder that installs the ContextualizedResourceMethodInvocationHandlerProvider so that + * secured resource methods run within the authenticated request context. + */ +public class AuthenticationBinder extends AbstractBinder { + @Override + protected void configure() { + bind(ContextualizedResourceMethodInvocationHandlerProvider.class).to(ResourceMethodInvocationHandlerProvider.class); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java b/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java new file mode 100644 index 0000000000..c46e844b79 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/server/ContextualizedResourceMethodInvocationHandlerProvider.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.server; + +import java.lang.reflect.InvocationHandler; +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.Context; + +import org.glassfish.jersey.server.model.Invocable; +import org.glassfish.jersey.server.spi.internal.ResourceMethodInvocationHandlerProvider; + +import com.dremio.context.RequestContext; +import com.dremio.context.UserContext; + +/** + * The ContextualizedResourceMethodInvocationHandlerProvider extracts the UserContext from an + * HttpServletRequest attribute that is set by the DACAuthFilter.
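+ * + * <p>A sketch of the effect (names mirror the create() method below): each resource method invocation runs as {@code RequestContext.current().with(contextMap).call(() -> method.invoke(proxy, args))}, where {@code contextMap} maps {@code UserContext.CTX_KEY} to the user recorded by the DACAuthFilter.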
+ */ +public class ContextualizedResourceMethodInvocationHandlerProvider implements ResourceMethodInvocationHandlerProvider { + public static final String USER_CONTEXT_ATTRIBUTE = + ContextualizedResourceMethodInvocationHandlerProvider.class.getCanonicalName() + ".UserContext"; + + @Context + private HttpServletRequest httpServletRequest; + + @Override + public InvocationHandler create(Invocable invocable) { + return (proxy, method, args) -> RequestContext.current() + .with(getRequestContext()) + .call(() -> method.invoke(proxy, args)); + } + + private Map, Object> getRequestContext() { + final Map, Object> contextMap = new HashMap<>(); + + contextMap.put(UserContext.CTX_KEY, httpServletRequest.getAttribute(USER_CONTEXT_ATTRIBUTE)); + + return contextMap; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java b/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java index 572746c5eb..65abec61e1 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/DACAuthFilter.java @@ -15,6 +15,8 @@ */ package com.dremio.dac.server; +import static com.dremio.dac.server.ContextualizedResourceMethodInvocationHandlerProvider.USER_CONTEXT_ATTRIBUTE; + import java.util.List; import java.util.Map; @@ -29,6 +31,7 @@ import javax.ws.rs.ext.Provider; import com.dremio.common.collections.Tuple; +import com.dremio.context.UserContext; import com.dremio.dac.annotations.Secured; import com.dremio.dac.annotations.TemporaryAccess; import com.dremio.dac.model.usergroup.UserName; @@ -62,6 +65,7 @@ public void filter(ContainerRequestContext requestContext) { final UserName userName = getUserNameFromToken(requestContext); final User userConfig = userService.get().getUser(userName.getName()); requestContext.setSecurityContext(new DACSecurityContext(userName, userConfig, requestContext)); + requestContext.setProperty(USER_CONTEXT_ATTRIBUTE, new UserContext(userConfig.getUID().getId())); } catch (UserNotFoundException | NotAuthorizedException e) { requestContext.abortWith(Response.status(Response.Status.UNAUTHORIZED).build()); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java b/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java index 6eec2c61ba..965303f830 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/DremioServlet.java @@ -146,7 +146,9 @@ protected ServerData.Builder getDataBuilder() { .setShowNewJobsPage(options.getOption(UIOptions.JOBS_UI_CHECK)) .setShowOldReflectionsListing(options.getOption(UIOptions.REFLECTIONSLISTING_UI_CHECK)) .setAllowAutoComplete(options.getOption(UIOptions.ALLOW_AUTOCOMPLETE)) - .setAllowFormatting(options.getOption(UIOptions.ALLOW_FORMATTING)); + .setAllowDownload(options.getOption(UIOptions.ALLOW_DOWNLOAD)) + .setAllowFormatting(options.getOption(UIOptions.ALLOW_FORMATTING)) + .setUseNewDatasetNavigation(options.getOption(UIOptions.DATASET_NAVIGATION_CHECK)); } protected Provider getSupportService() { diff --git a/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java b/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java index a896ab3418..09be47fec6 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/GenericErrorMessage.java @@ -31,6 +31,7 @@ public class GenericErrorMessage { public static final String NO_USER_MSG = 
"No User Available"; public static final String MISSING_VERSION_PARAM_MSG = "Missing Version Parameter"; + public static final String UNKNOWN_FORMAT_MSG = "The table cannot be saved when the format is set to UNKNOWN. Please select the correct format for the table"; public static final String GENERIC_ERROR_MSG = "Something went wrong."; private final String errorMessage; diff --git a/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java b/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java new file mode 100644 index 0000000000..dc858b562d --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/server/NessieProxyRestServer.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.server; + +import org.glassfish.jersey.message.GZipEncoder; +import org.glassfish.jersey.server.ResourceConfig; +import org.glassfish.jersey.server.filter.EncodingFilter; +import org.projectnessie.services.restjavax.ContentKeyParamConverterProvider; +import org.projectnessie.services.restjavax.NamespaceParamConverterProvider; +import org.projectnessie.services.restjavax.NessieExceptionMapper; +import org.projectnessie.services.restjavax.ReferenceTypeParamConverterProvider; + +import com.dremio.common.perf.Timer; +import com.dremio.dac.resource.NessieSourceResource; +import com.dremio.dac.resource.NessieTestSourceResource; +import com.dremio.dac.service.errors.NotFoundExceptionMapper; +import com.dremio.services.nessie.proxy.ProxyExceptionMapper; +import com.dremio.services.nessie.proxy.ProxyNessieConfig; +import com.dremio.services.nessie.proxy.ProxyRuntimeExceptionMapper; + +public class NessieProxyRestServer extends ResourceConfig { + + public NessieProxyRestServer() { + try (Timer.TimedBlock b = Timer.time("new ProxyRestServer")) { + init(); + } + } + + protected void init() { + // FILTERS // + register(JSONPrettyPrintFilter.class); + + // Enable request contextualization. 
+ register(new AuthenticationBinder()); + + // FEATURES + register(DACAuthFilterFeature.class); + register(DACJacksonJaxbJsonFeature.class); + + // LISTENERS // + register(TimingApplicationEventListener.class); + + // Nessie + if (Boolean.getBoolean("nessie.source.resource.testing.enabled")) { + register(NessieTestSourceResource.class); + } else { + register(NessieSourceResource.class); + } + register(ContentKeyParamConverterProvider.class); + register(NamespaceParamConverterProvider.class); + register(ReferenceTypeParamConverterProvider.class); + register(new NessieExceptionMapper(new ProxyNessieConfig()), 10); + register(NotFoundExceptionMapper.class); + register(ProxyExceptionMapper.class, 10); + register(ProxyRuntimeExceptionMapper.class, 10); + register(EncodingFilter.class); + register(GZipEncoder.class); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java b/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java new file mode 100644 index 0000000000..54075302fe --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/server/RestApiJsonMappingExceptionMapper.java @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.server; + +import java.util.Iterator; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; + +import com.fasterxml.jackson.databind.JsonMappingException; + +/** + * The default {@code com.fasterxml.jackson.jaxrs.base.JsonMappingExceptionMapper} + * exposes internal class and package names when an incoming request contains input + * that does not correctly map to the internal request object for the requested path. + * Example: the user supplied a value with the wrong type for a property, so a + * JsonMappingException is thrown when we attempt to deserialize the request body. + * This custom exception mapper generates responses that tell the user the path to + * the invalid input in their JSON request without exposing internal details about + * the system.
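+ *
+ * For illustration (hypothetical payload): if a client posts {"name": 123} where
+ * {@code name} must be a string, the mapper yields a 400 response with a body
+ * like {"errorMessage": "Invalid value found at: name"} instead of a Jackson
+ * stack trace naming internal classes.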
+ */ +public class RestApiJsonMappingExceptionMapper implements ExceptionMapper<JsonMappingException> { + @Override + public Response toResponse(JsonMappingException exception) { + if (exception.getPath().isEmpty()) { + return errorResponse("Invalid value"); + } + + StringBuilder errorMessage = new StringBuilder("Invalid value found at: "); + Iterator<JsonMappingException.Reference> iter = exception.getPath().iterator(); + + if (iter.hasNext()) { + errorMessage.append(referenceToString(iter.next())); + } + + while (iter.hasNext()) { + JsonMappingException.Reference ref = iter.next(); + errorMessage.append(referenceToString(ref, ".")); + } + + return errorResponse(errorMessage.toString()); + } + + private Response errorResponse(String errorMessage) { + return Response.status(Response.Status.BAD_REQUEST) + .entity(new GenericErrorMessage(errorMessage)) + .type(MediaType.APPLICATION_JSON_TYPE) + .build(); + } + + private String referenceToString(JsonMappingException.Reference ref) { + return referenceToString(ref, ""); + } + + private String referenceToString(JsonMappingException.Reference ref, String fieldNamePrefix) { + if (ref.getFieldName() != null) { + return fieldNamePrefix + ref.getFieldName(); + } else if (ref.getIndex() != -1) { + return String.format("[%s]", ref.getIndex()); + } + + return fieldNamePrefix + "UNKNOWN"; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java b/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java index a2ecc217a1..00f84a2fb9 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/RestServerV2.java @@ -21,6 +21,9 @@ import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.ServerProperties; import org.glassfish.jersey.server.mvc.freemarker.FreemarkerMvcFeature; +import org.projectnessie.services.restjavax.ContentKeyParamConverterProvider; +import org.projectnessie.services.restjavax.NamespaceParamConverterProvider; +import org.projectnessie.services.restjavax.ReferenceTypeParamConverterProvider; import com.dremio.common.perf.Timer; import com.dremio.common.perf.Timer.TimedBlock; @@ -53,6 +56,12 @@ public RestServerV2(ScanResult result) { } protected void init(ScanResult result) { + // PROVIDERS // + // We manually register the providers needed for nessie-as-a-source + register(ContentKeyParamConverterProvider.class); + register(NamespaceParamConverterProvider.class); + register(ReferenceTypeParamConverterProvider.class); + // FILTERS // register(JSONPrettyPrintFilter.class); register(MediaTypeFilter.class); @@ -62,6 +71,9 @@ protected void init(ScanResult result) { register(resource); } + // Enable request contextualization.
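+ /*
+ * A hedged usage sketch for the RestApiJsonMappingExceptionMapper introduced
+ * above (the exception construction is illustrative; Jackson's prependPath
+ * builds the reference path front-to-back):
+ *
+ *   JsonMappingException ex = new JsonMappingException(null, "wrong type");
+ *   ex.prependPath(new JsonMappingException.Reference(null, "type"));   // .type
+ *   ex.prependPath(new JsonMappingException.Reference(null, 0));        // [0]
+ *   ex.prependPath(new JsonMappingException.Reference(null, "fields")); // fields
+ *
+ *   Response r = new RestApiJsonMappingExceptionMapper().toResponse(ex);
+ *   // r.getStatus() == 400, message "Invalid value found at: fields[0].type"
+ */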
+ register(new AuthenticationBinder()); + // FEATURES property(FreemarkerMvcFeature.TEMPLATE_OBJECT_FACTORY, getFreemarkerConfiguration()); register(FreemarkerMvcFeature.class); diff --git a/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java b/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java index 97717a32e1..d75da8b809 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/SentinelSecureFilter.java @@ -108,6 +108,7 @@ public void serializeAsField(Object bean, } } + @Override public void serializeAsElement(Object bean, JsonGenerator gen, SerializerProvider prov) throws Exception { diff --git a/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java b/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java index 63454afc59..3084a644aa 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/UIOptions.java @@ -32,6 +32,10 @@ public final class UIOptions { public static final BooleanValidator ALLOW_AUTOCOMPLETE = new BooleanValidator("ui.autocomplete.allow", true); + public static final BooleanValidator AUTOCOMPLETE_V2 = new BooleanValidator("ui.autocomplete.v2.enabled", false); + + public static final BooleanValidator ALLOW_DOWNLOAD = new BooleanValidator("ui.download.allow", true); + public static final StringValidator WHITE_LABEL_URL = new StringValidator("ui.whitelabel.url", "dremio"); public static final BooleanValidator ALLOW_FORMATTING = new BooleanValidator("ui.formatter.allow", true); @@ -69,4 +73,9 @@ public final class UIOptions { /* * Specifies whether new jobs profile UI should be shown */ public static final BooleanValidator JOBS_PROFILE_UI_CHECK = new BooleanValidator("dremio.query.visualiser.enabled", false); + + /* + * Specifies whether the UI will use the new dataset navigation behavior + */ + public static final BooleanValidator DATASET_NAVIGATION_CHECK = new BooleanValidator("ui.dataset.navigation.new", true); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java b/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java index d2ff8e3e58..edc2870a02 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/WebServer.java @@ -110,6 +110,7 @@ public MediaType(String type, String subType) { private final SingletonRegistry registry; private final Provider<RestServerV2> restServerProvider; private final Provider apiServerProvider; + private final Provider<NessieProxyRestServer> nessieProxyRestServerV2Provider; private final DremioServer server; private final DACConfig config; private final Provider credentialsServiceProvider; @@ -123,6 +124,7 @@ public WebServer( Provider credentialsServiceProvider, Provider<RestServerV2> restServer, Provider apiServer, + Provider<NessieProxyRestServer> nessieProxyRestServerV2Provider, Provider server, DremioBinder dremioBinder, String uiType, @@ -132,6 +134,7 @@ this.credentialsServiceProvider = credentialsServiceProvider; this.restServerProvider = restServer; this.apiServerProvider = apiServer; + this.nessieProxyRestServerV2Provider = nessieProxyRestServerV2Provider; this.dremioBinder = dremioBinder; this.uiType = uiType; this.isInternalUS = isInternalUS; @@ -196,6 +199,16 @@ protected void registerEndpoints(ServletContextHandler servletContextHandler) { final ServletHolder apiHolder = new ServletHolder(new ServletContainer(apiServer)); apiHolder.setInitOrder(3); servletContextHandler.addServlet(apiHolder,
"/api/v3/*"); + + // Nessie Source REST API + ResourceConfig nessieProxyRestServerV2 = nessieProxyResetServerV2.get(); + + nessieProxyRestServerV2.register(dremioBinder); + nessieProxyRestServerV2.register((DynamicFeature) (resourceInfo, context) -> context.register(DremioServer.TracingFilter.class)); + + final ServletHolder proxyNessieRestHolder = new ServletHolder(new ServletContainer(nessieProxyRestServerV2)); + proxyNessieRestHolder.setInitOrder(4); + servletContextHandler.addServlet(proxyNessieRestHolder, "/nessie-proxy/*"); } public int getPort() { diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java index 996ce8dc02..05ada2eb3f 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/AccelerationWrapper.java @@ -26,6 +26,8 @@ import com.dremio.dac.model.job.acceleration.UiMapper; import com.dremio.service.accelerator.proto.AccelerationDetails; import com.dremio.service.accelerator.proto.ReflectionRelationship; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableMap; @@ -52,6 +54,18 @@ public String getReflectionDatasetPath(String layoutId) { } } + public String getReflectionDatasetVersion(String layoutId) { + try { + String unparsedJson = relationshipMap.get(layoutId).getDataset().getId(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode parsedJson = mapper.readTree(unparsedJson); + JsonNode versionContext = parsedJson.get("versionContext"); + return String.format(" [%s %s] ", versionContext.get("type").textValue(), versionContext.get("value").textValue()); + } catch (Exception e) { + return " "; + } + } + public Long getRefreshChainStartTime(String layoutId) { return relationshipMap.get(layoutId).getMaterialization().getRefreshChainStartTime(); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java index 13047e1571..9f607d44bd 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Comparators.java @@ -33,48 +33,56 @@ interface Comparators { Comparator majorId = new Comparator() { + @Override public int compare(final MajorFragmentProfile o1, final MajorFragmentProfile o2) { return Long.compare(o1.getMajorFragmentId(), o2.getMajorFragmentId()); } }; Comparator minorId = new Comparator() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getMinorFragmentId(), o2.getMinorFragmentId()); } }; Comparator startTime = new Comparator() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getStartTime(), o2.getStartTime()); } }; Comparator lastUpdate = new Comparator() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getLastUpdate(), o2.getLastUpdate()); } }; Comparator lastProgress = new Comparator() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getLastProgress(), o2.getLastProgress()); } }; Comparator 
endTime = new Comparator<MinorFragmentProfile>() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getEndTime(), o2.getEndTime()); } }; Comparator<MinorFragmentProfile> fragmentPeakMemory = new Comparator<MinorFragmentProfile>() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getMaxMemoryUsed(), o2.getMaxMemoryUsed()); } }; Comparator<MinorFragmentProfile> runTime = new Comparator<MinorFragmentProfile>() { + @Override public int compare(final MinorFragmentProfile o1, final MinorFragmentProfile o2) { return Long.compare(o1.getEndTime() - o1.getStartTime(), o2.getEndTime() - o2.getStartTime()); } @@ -88,30 +96,35 @@ public int compare(NodeQueryProfile o1, NodeQueryProfile o2) { }; Comparator<OperatorProfile> operatorId = new Comparator<OperatorProfile>() { + @Override public int compare(final OperatorProfile o1, final OperatorProfile o2) { return Long.compare(o1.getOperatorId(), o2.getOperatorId()); } }; Comparator<Pair<OperatorProfile, Integer>> setupTime = new Comparator<Pair<OperatorProfile, Integer>>() { + @Override public int compare(final Pair<OperatorProfile, Integer> o1, final Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getSetupNanos(), o2.getLeft().getSetupNanos()); } }; Comparator<Pair<OperatorProfile, Integer>> processTime = new Comparator<Pair<OperatorProfile, Integer>>() { + @Override public int compare(final Pair<OperatorProfile, Integer> o1, final Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getProcessNanos(), o2.getLeft().getProcessNanos()); } }; Comparator<Pair<OperatorProfile, Integer>> waitTime = new Comparator<Pair<OperatorProfile, Integer>>() { + @Override public int compare(final Pair<OperatorProfile, Integer> o1, final Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getWaitNanos(), o2.getLeft().getWaitNanos()); } }; Comparator<Pair<OperatorProfile, Integer>> operatorPeakMemory = new Comparator<Pair<OperatorProfile, Integer>>() { + @Override public int compare(final Pair<OperatorProfile, Integer> o1, final Pair<OperatorProfile, Integer> o2) { return Long.compare(o1.getLeft().getPeakLocalMemoryAllocated(), o2.getLeft().getPeakLocalMemoryAllocated()); } @@ -125,6 +138,7 @@ public int compare(NodePhaseProfile o1, NodePhaseProfile o2) { }; Comparator<AttemptEvent> stateStartTime = new Comparator<AttemptEvent>() { + @Override public int compare(final AttemptEvent a1, final AttemptEvent a2) { return Long.compare(a1.getStartTime(), a2.getStartTime()); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Filters.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Filters.java index 361e064e1b..550d75c551 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Filters.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/Filters.java @@ -21,12 +21,14 @@ interface Filters { Predicate<MinorFragmentProfile> hasOperators = new Predicate<MinorFragmentProfile>() { + @Override public boolean apply(MinorFragmentProfile arg0) { return arg0.getOperatorProfileCount() != 0; } }; Predicate<MinorFragmentProfile> hasTimes = new Predicate<MinorFragmentProfile>() { + @Override public boolean apply(MinorFragmentProfile arg0) { return arg0.hasStartTime() && arg0.hasEndTime(); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/FragmentWrapper.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/FragmentWrapper.java index 9c5bf133b9..1ff0b76611 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/FragmentWrapper.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/FragmentWrapper.java @@ -102,7 +102,7 @@ public String getId() { "First End", "Last End", "Min First-run", "Avg First-run", "Max First-run", "Min Wall-clock", "Avg Wall-clock", "Max Wall-clock", "Min Sleep", "Avg Sleep", "Max Sleep", "Min Blocked", "Avg Blocked", "Max Blocked", "Last Update", "Last Progress", "Max Peak Memory"}; - // Not including Major Fragment ID and Minor Fragments Reporting + // Not including Major Fragment ID, Phase Weight and Minor Fragments
Reporting public static final int NUM_NULLABLE_OVERVIEW_COLUMNS = FRAGMENT_OVERVIEW_COLUMNS.length - 3; public void addSummary(TableBuilder tb) { diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/OperatorWrapper.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/OperatorWrapper.java index 67993c9d07..e623940009 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/OperatorWrapper.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/OperatorWrapper.java @@ -28,7 +28,7 @@ import java.util.Set; import java.util.stream.Collectors; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/ProfileWrapper.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/ProfileWrapper.java index 93e0d56fb4..093976ccac 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/ProfileWrapper.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/ProfileWrapper.java @@ -470,7 +470,8 @@ public Map> getDa if ("".equals(reflectionDatasetPath)) { path = new DatasetPath(Arrays.asList("unknown", "missing dataset")); } else { - path = new DatasetPath(reflectionDatasetPath); + String datasetVersion = accelerationDetails.getReflectionDatasetVersion(viewProfile.getLayoutId()); + path = new DatasetPath(reflectionDatasetPath, datasetVersion); } if (!map.containsKey(path)) { @@ -523,4 +524,34 @@ public String getPerdiodFromStart(Long datetime) { } return DurationFormatUtils.formatDurationWords( this.profile.getStart() - datetime, true, true); } + + public int getConsideredReflectionsCount() { + return profile.hasAccelerationProfile() ? 
profile.getAccelerationProfile().getLayoutProfilesCount() : 0; + } + + public int getMatchedReflectionsCount() { + int ret = 0; + if (profile.hasAccelerationProfile() && profile.getAccelerationProfile().getLayoutProfilesCount() > 0) { + UserBitShared.AccelerationProfile accelerationProfile = profile.getAccelerationProfile(); + for (UserBitShared.LayoutMaterializedViewProfile layoutProfile : accelerationProfile.getLayoutProfilesList()) { + if (layoutProfile.hasNumSubstitutions() && layoutProfile.getNumSubstitutions() > 0) { + ret++; + } + } + } + return ret; + } + + public int getChosenReflectionsCount() { + int ret = 0; + if (profile.hasAccelerationProfile() && profile.getAccelerationProfile().getLayoutProfilesCount() > 0) { + UserBitShared.AccelerationProfile accelerationProfile = profile.getAccelerationProfile(); + for (UserBitShared.LayoutMaterializedViewProfile layoutProfile : accelerationProfile.getLayoutProfilesList()) { + if (layoutProfile.hasNumUsed() && layoutProfile.getNumUsed() > 0) { + ret++; + } + } + } + return ret; + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/TableBuilder.java b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/TableBuilder.java index d87332172c..82d434fc29 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/TableBuilder.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/admin/profile/TableBuilder.java @@ -33,6 +33,7 @@ public TableBuilder(final String[] columns) { sb.append("\n\n"); } + @Override public void appendCell(final String s) { if (w == 0) { sb.append("<tr>"); diff --git a/dac/backend/src/main/java/com/dremio/dac/server/models/ServerData.java b/dac/backend/src/main/java/com/dremio/dac/server/models/ServerData.java index 0996bc2dc9..cd55e3f20c 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/models/ServerData.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/models/ServerData.java @@ -46,7 +46,9 @@ public class ServerData { private final boolean showNewJobsPage; private final boolean showOldReflectionsListing; private final boolean allowAutoComplete; + private final boolean allowDownload; private final boolean allowFormatting; + private final boolean useNewDatasetNavigation; protected ServerData(Builder builder) { this.serverEnvironment = builder.serverEnvironment; @@ -73,7 +75,9 @@ protected ServerData(Builder builder) { this.showNewJobsPage = builder.showNewJobsPage; this.showOldReflectionsListing = builder.showOldReflectionsListing; this.allowAutoComplete = builder.allowAutoComplete; + this.allowDownload = builder.allowDownload; this.allowFormatting = builder.allowFormatting; + this.useNewDatasetNavigation = builder.useNewDatasetNavigation; } public String getServerEnvironment() { @@ -181,8 +185,16 @@ public boolean isAllowAutoComplete() { return allowAutoComplete; } + public boolean isAllowDownload() { + return allowDownload; + } + public boolean isAllowFormatting() { return allowFormatting; } + public boolean isUseNewDatasetNavigation() { + return useNewDatasetNavigation; + } + /** * A builder for server data */ @@ -211,7 +223,9 @@ public static class Builder { private boolean showNewJobsPage; private boolean showOldReflectionsListing; private boolean allowAutoComplete; + private boolean allowDownload; private boolean allowFormatting; + private boolean useNewDatasetNavigation; protected Builder() { } @@ -241,7 +255,9 @@ protected Builder(Builder builder) { this.showNewJobsPage = builder.showNewJobsPage; this.showOldReflectionsListing = builder.showOldReflectionsListing;
this.allowAutoComplete = builder.allowAutoComplete; + this.allowDownload = builder.allowDownload; this.allowFormatting = builder.allowFormatting; + this.useNewDatasetNavigation = builder.useNewDatasetNavigation; } public Builder setServerEnvironment(String serverEnvironment) { @@ -364,11 +380,21 @@ public Builder setAllowAutoComplete(boolean allowAutoComplete) { return this; } + public Builder setAllowDownload(boolean allowDownload) { + this.allowDownload = allowDownload; + return this; + } + public Builder setAllowFormatting(boolean allowFormatting) { this.allowFormatting = allowFormatting; return this; } + public Builder setUseNewDatasetNavigation(boolean useNewDatasetNavigation) { + this.useNewDatasetNavigation = useNewDatasetNavigation; + return this; + } + public ServerData build() { return new ServerData(this); } diff --git a/dac/backend/src/main/java/com/dremio/dac/server/test/SampleDataPopulator.java b/dac/backend/src/main/java/com/dremio/dac/server/test/SampleDataPopulator.java index 75473e584c..aa2848370a 100644 --- a/dac/backend/src/main/java/com/dremio/dac/server/test/SampleDataPopulator.java +++ b/dac/backend/src/main/java/com/dremio/dac/server/test/SampleDataPopulator.java @@ -328,18 +328,20 @@ private void populateInitialData0() throws NamespaceException, IOException, Data // add physical datasets { PhysicalDatasetPath dacSampleAllTypes = new PhysicalDatasetPath(asList("LocalFS1", allTypesJson)); - DatasetConfig dacSample1 = new DatasetConfig(); - dacSample1.setCreatedAt(System.currentTimeMillis()); - dacSample1.setFullPathList(dacSampleAllTypes.toPathList()); - dacSample1.setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE); - dacSample1.setName(allTypesJson); - dacSample1.setPhysicalDataset(new PhysicalDataset().setFormatSettings(new JsonFileConfig().asFileConfig())); - namespaceService.addOrUpdateDataset(dacSampleAllTypes.toNamespaceKey(), dacSample1); + DatasetConfig dacAllTypes = new DatasetConfig(); + dacAllTypes.setOwner(username); + dacAllTypes.setCreatedAt(System.currentTimeMillis()); + dacAllTypes.setFullPathList(dacSampleAllTypes.toPathList()); + dacAllTypes.setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE); + dacAllTypes.setName(allTypesJson); + dacAllTypes.setPhysicalDataset(new PhysicalDataset().setFormatSettings(new JsonFileConfig().asFileConfig())); + namespaceService.addOrUpdateDataset(dacSampleAllTypes.toNamespaceKey(), dacAllTypes); } { PhysicalDatasetPath dacSample1Path = new PhysicalDatasetPath(asList("LocalFS1", "dac-sample1.json")); DatasetConfig dacSample1 = new DatasetConfig(); + dacSample1.setOwner(username); dacSample1.setCreatedAt(System.currentTimeMillis()); dacSample1.setFullPathList(dacSample1Path.toPathList()); dacSample1.setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE); @@ -355,6 +357,7 @@ private void populateInitialData0() throws NamespaceException, IOException, Data { PhysicalDatasetPath dacSample2Path = new PhysicalDatasetPath(asList("LocalFS2", "dac-sample2.json")); DatasetConfig dacSample2 = new DatasetConfig(); + dacSample2.setOwner(username); dacSample2.setCreatedAt(System.currentTimeMillis()); dacSample2.setFullPathList(dacSample2Path.toPathList()); dacSample2.setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE); @@ -429,7 +432,9 @@ private static String getFileContentsFromClassPath(String resource) throws IOExc private VirtualDatasetUI newDataset(DatasetPath datasetPath, DatasetVersion version, From from, List<String> sqlContext) { - final VirtualDatasetUI ds = com.dremio.dac.explore.DatasetTool.newDatasetBeforeQueryMetadata(datasetPath,
version, from, sqlContext, username); + final VirtualDatasetUI ds = + com.dremio.dac.explore.DatasetTool.newDatasetBeforeQueryMetadata( + datasetPath, version, from, sqlContext, username, null, null); final SqlQuery query = new SqlQuery(ds.getSql(), ds.getState().getContextList(), username); ds.setLastTransform(new Transform(TransformType.createFromParent).setTransformCreateFromParent(new TransformCreateFromParent(from))); final QueryMetadata metadata; diff --git a/dac/backend/src/main/java/com/dremio/dac/service/admin/Setting.java b/dac/backend/src/main/java/com/dremio/dac/service/admin/Setting.java index a0c81b5e8a..9b040e848b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/admin/Setting.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/admin/Setting.java @@ -65,6 +65,7 @@ public IntegerSetting( this.value = value; } + @Override public Long getValue(){ return value; } @@ -87,6 +88,7 @@ public FloatSetting( this.value = value; } + @Override public Double getValue(){ return value; } @@ -109,6 +111,7 @@ public BooleanSetting( this.value = value; } + @Override public Boolean getValue(){ return value; } @@ -131,6 +134,7 @@ public TextSetting( this.value = value; } + @Override public String getValue(){ return value; } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Helper.java b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Helper.java new file mode 100644 index 0000000000..6c0ff39a03 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Helper.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.service.autocomplete; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.arrow.vector.types.pojo.Field; + +import com.dremio.common.util.MajorTypeHelper; +import com.dremio.dac.api.CatalogItem; +import com.dremio.dac.explore.DataTypeUtil; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.service.autocomplete.ColumnSuggestions; +import com.dremio.service.autocomplete.ContainerSuggestions; +import com.dremio.service.autocomplete.ReferenceSuggestions; +import com.dremio.service.autocomplete.SuggestionEntity; +import com.dremio.service.autocomplete.SuggestionEntityType; +import com.dremio.service.autocomplete.SuggestionsType; + +/** + * Builds the autocomplete suggestion payloads returned for the various suggestion types + */ +public final class AutocompleteV2Helper { + public static ContainerSuggestions buildContainerSuggestions(List<CatalogItem> matchingContainers) { + List<SuggestionEntity> suggestedContainers = new ArrayList<>(); + for (CatalogItem item : matchingContainers) { + switch (item.getType()) { + case CONTAINER: { + suggestedContainers.add(new SuggestionEntity(item.getPath().toString(), convertContainerTypeToSuggestionEntityType(item.getContainerType()).getType())); + break; + } + + case DATASET: { + suggestedContainers.add(new SuggestionEntity(item.getPath().toString(), convertDatasetTypeToSuggestionEntityType(item.getDatasetType()).getType())); + break; + } + + case FILE: { + suggestedContainers.add(new SuggestionEntity(item.getPath().toString(), SuggestionEntityType.FILE.getType())); + break; + } + + default: { + throw new RuntimeException("Unknown catalog item type: " + item.getType()); + } + } + } + return new ContainerSuggestions(SuggestionsType.CONTAINER.getType(), suggestedContainers.size(), suggestedContainers.size(), suggestedContainers); + } + + private static SuggestionEntityType convertContainerTypeToSuggestionEntityType(CatalogItem.ContainerSubType type) { + switch (type) { + case SPACE: + return SuggestionEntityType.SPACE; + case HOME: + return SuggestionEntityType.HOME; + case FOLDER: + return SuggestionEntityType.FOLDER; + case SOURCE: + return SuggestionEntityType.SOURCE; + case FUNCTION: + return SuggestionEntityType.FUNCTION; + default: + throw new RuntimeException("Unknown Container subtype: " + type); + } + } + + private static SuggestionEntityType convertDatasetTypeToSuggestionEntityType(CatalogItem.DatasetSubType type) { + switch (type) { + case VIRTUAL: + return SuggestionEntityType.VIRTUAL; + case PROMOTED: + // DX-63812: Can't tell if it's promoted from file or folder.
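+ // Both cases therefore collapse to the single PROMOTED suggestion type.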
+ return SuggestionEntityType.PROMOTED; + case DIRECT: + return SuggestionEntityType.DIRECT; + default: + throw new RuntimeException("Unknown Dataset subtype: " + type); + } + } + + public static ColumnSuggestions buildColumnSuggestions(Map<List<String>, List<Field>> matchingColumnsMap) { + List<SuggestionEntity> suggestedColumns = new ArrayList<>(); + for (List<String> catalogEntityKey : matchingColumnsMap.keySet()) { + for (Field column : matchingColumnsMap.get(catalogEntityKey)) { + List<String> columnFullName = Stream.concat(catalogEntityKey.stream(), Stream.of(column.getName())).collect(Collectors.toList()); + suggestedColumns.add(new SuggestionEntity(columnFullName.toString(), DataTypeUtil.getDataType(MajorTypeHelper.getMajorTypeForField(column)).toString())); + } + } + return new ColumnSuggestions(SuggestionsType.COLUMN.getType(), suggestedColumns.size(), suggestedColumns.size(), suggestedColumns); + } + + public static ReferenceSuggestions buildReferenceSuggestions(List<ReferenceInfo> matchingReferences) { + List<SuggestionEntity> suggestedReferences = matchingReferences.stream() + .map(entity -> new SuggestionEntity(entity.refName, entity.type)) + .collect(Collectors.toList()); + return new ReferenceSuggestions(SuggestionsType.REFERENCE.getType(), suggestedReferences.size(), suggestedReferences.size(), suggestedReferences); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Proxy.java b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Proxy.java new file mode 100644 index 0000000000..9e18529354 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/AutocompleteV2Proxy.java @@ -0,0 +1,247 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.dac.service.autocomplete; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.commons.lang3.StringUtils; + +import com.dremio.dac.api.CatalogEntity; +import com.dremio.dac.api.CatalogItem; +import com.dremio.dac.api.Dataset; +import com.dremio.dac.api.Source; +import com.dremio.dac.service.catalog.CatalogServiceHelper; +import com.dremio.dac.service.errors.SourceNotFoundException; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.exec.store.ischema.InfoSchemaStoragePlugin; +import com.dremio.exec.store.sys.SystemStoragePlugin; +import com.dremio.service.autocomplete.AutocompleteV2Request; +import com.dremio.service.autocomplete.AutocompleteV2Response; +import com.dremio.service.namespace.NamespaceException; +import com.google.common.base.Preconditions; +import com.google.common.collect.Iterables; + +/** + * Resolves the catalog entities matching the given namespace key(s) and prefix for autocomplete + */ +public final class AutocompleteV2Proxy { + private static final List<String> SYSTEM_SOURCES = Arrays.asList(InfoSchemaStoragePlugin.NAME, SystemStoragePlugin.NAME); + + public static AutocompleteV2Response getSuggestions( + CatalogServiceHelper catalogServiceHelper, + AutocompleteV2Request request) { + Preconditions.checkArgument(request.getCatalogEntityKeys() != null, "CatalogEntity keys can be empty but must be provided"); + + switch (request.getType()) { + case CONTAINER: { + return getContainerSuggestions(catalogServiceHelper, request.getCatalogEntityKeys(), request.getQueryContext(), request.getPrefix(), request.getRefType(), request.getRefValue()); + } + + case COLUMN: { + return getColumnSuggestions(catalogServiceHelper, request.getCatalogEntityKeys(), request.getPrefix(), request.getRefType(), request.getRefValue()); + } + + case REFERENCE: { + return getReferenceSuggestions(catalogServiceHelper, request.getCatalogEntityKeys(), request.getPrefix()); + } + + default: + throw new RuntimeException("Unknown autocomplete type " + request.getType()); + } + } + + private static AutocompleteV2Response getContainerSuggestions( + CatalogServiceHelper catalogServiceHelper, + List<List<String>> catalogEntityKeys, + List<String> queryContext, + String prefix, + String refType, + String refValue) { + Preconditions.checkArgument(catalogEntityKeys.size() == 1, "Must have one and only one path to follow from the top level."); + + final List<CatalogItem> matchingContainers = getMatchingContainers(catalogServiceHelper, catalogEntityKeys.get(0), queryContext, prefix, refType, refValue); + return AutocompleteV2Helper.buildContainerSuggestions(matchingContainers); + } + + private static AutocompleteV2Response getColumnSuggestions( + CatalogServiceHelper catalogServiceHelper, + List<List<String>> catalogEntityKeys, + String prefix, + String refType, + String refValue) { + // Assuming each CatalogEntityKey is the full path to a table/view + Preconditions.checkArgument(catalogEntityKeys.size() >= 1); + catalogEntityKeys.forEach(path -> Preconditions.checkArgument(!path.isEmpty(), "Table context can't be empty.")); + + final Map<List<String>, List<Field>> matchingColumnsMap = getMatchingColumnsMap(catalogServiceHelper, catalogEntityKeys, prefix, refType, refValue); + return AutocompleteV2Helper.buildColumnSuggestions(matchingColumnsMap); + } + + private static AutocompleteV2Response getReferenceSuggestions(
CatalogServiceHelper catalogServiceHelper, + List<List<String>> catalogEntityKeys, + String prefix) { + Preconditions.checkArgument(catalogEntityKeys.size() == 1 && catalogEntityKeys.get(0).size() >= 1 && !catalogEntityKeys.get(0).get(0).isEmpty(), "Must have a source name to start with."); + + final List<ReferenceInfo> matchingReferences = getMatchingReferences(catalogServiceHelper, catalogEntityKeys.get(0).get(0), prefix); + return AutocompleteV2Helper.buildReferenceSuggestions(matchingReferences); + } + + private static List<CatalogItem> getMatchingContainers( + CatalogServiceHelper catalogServiceHelper, + List<String> catalogEntityKey, + List<String> queryContext, + String prefix, + String refType, + String refValue) { + // At each level, there can't be any two entities with the same name. + // For instance, at the top level, if you have a source named s3 you can't create a space named s3, and vice versa. + // The query context can be considered a special path to follow, so at most we have two paths to follow. + List<CatalogItem> matchingContainers = getMatchingContainersFromPath(catalogServiceHelper, catalogEntityKey, prefix, refType, refValue); + + // Use whether queryContext is null or empty as a hint for whether we also have to explore containers in that path + if (queryContext != null && !queryContext.isEmpty() && !queryContext.equals(catalogEntityKey)) { + matchingContainers.addAll(getMatchingContainersFromPath(catalogServiceHelper, queryContext, prefix, refType, refValue)); + } + + return matchingContainers; + } + + private static List<CatalogItem> getMatchingContainersFromPath( + CatalogServiceHelper catalogServiceHelper, + List<String> catalogEntityKey, + String prefix, + String refType, + String refValue) { + List<CatalogItem> matchingContainers; + if (catalogEntityKey.isEmpty()) { + matchingContainers = catalogServiceHelper.getTopLevelCatalogItems(Collections.EMPTY_LIST); + addSystemSources(matchingContainers, catalogServiceHelper); + } else { + try { + matchingContainers = catalogServiceHelper.getCatalogChildrenForPath(catalogEntityKey, refType, refValue); + } catch (NamespaceException e) { + matchingContainers = Collections.EMPTY_LIST; + } + } + + return matchingContainers.stream() + .filter((item) -> { + String itemName = Iterables.getLast(item.getPath(), null); + if (itemName == null) { + return false; + } else if (prefix == null) { + return true; + } else { + return StringUtils.startsWithIgnoreCase(itemName, prefix); + } + }) + .collect(Collectors.toList()); + } + + private static void addSystemSources(List<CatalogItem> matchingContainers, CatalogServiceHelper catalogServiceHelper) { + for (String sourceName : SYSTEM_SOURCES) { + try { + // Yields an empty Optional if the user has no permission + final Optional<CatalogEntity> source = catalogServiceHelper.getCatalogEntityByPath(Collections.singletonList(sourceName), Collections.EMPTY_LIST, Collections.singletonList("children")); + if (source.isPresent() && source.get() instanceof Source) { + CatalogItem item = CatalogItem.fromSource((Source) source.get()); + if (item != null) { + matchingContainers.add(item); + } + } + } catch (NamespaceException e) { + // ignore + } + } + } + + public static Map<List<String>, List<Field>> getMatchingColumnsMap( + CatalogServiceHelper catalogServiceHelper, + List<List<String>> catalogEntityKeys, + String prefix, + String refType, + String refValue) { + // Dedupe the incoming catalogEntityKeys first to avoid duplicated work + List<List<String>> dedupedCatalogEntityKeys = catalogEntityKeys.stream().distinct().collect(Collectors.toList()); + Map<List<String>, List<Field>> matchingColumnsMap = new HashMap<>(); + for (List<String> catalogEntityKey : dedupedCatalogEntityKeys) { + matchingColumnsMap.put(catalogEntityKey,
getMatchingColumnsFromPath(catalogServiceHelper, catalogEntityKey, prefix, refType, refValue)); + } + return matchingColumnsMap; + } + + private static List<Field> getMatchingColumnsFromPath( + CatalogServiceHelper catalogServiceHelper, + List<String> catalogEntityKey, + String prefix, + String refType, + String refValue) { + try { + Optional<CatalogEntity> entity = catalogServiceHelper.getCatalogEntityByPath(catalogEntityKey, Collections.EMPTY_LIST, Collections.EMPTY_LIST, refType, refValue); + if (entity.isPresent() && entity.get() instanceof Dataset) { + List<Field> matchingColumns = ((Dataset) entity.get()).getFields(); + return matchingColumns.stream() + .filter((column) -> { + String columnName = column.getName(); + if (columnName == null) { + return false; + } else if (prefix == null) { + return true; + } else { + return StringUtils.startsWithIgnoreCase(columnName, prefix); + } + }) + .collect(Collectors.toList()); + } + } catch (NamespaceException e) { + // ignore + } + return Collections.EMPTY_LIST; + } + + private static List<ReferenceInfo> getMatchingReferences( + CatalogServiceHelper catalogServiceHelper, + String sourceName, + String prefix) { + try { + Stream<ReferenceInfo> matchingReferences = catalogServiceHelper.getReferencesForVersionedSource(sourceName); + return matchingReferences + .filter((reference) -> { + String refName = reference.refName; + if (refName == null) { + return false; + } else if (prefix == null) { + return true; + } else { + return StringUtils.startsWithIgnoreCase(refName, prefix); + } + }) + .collect(Collectors.toList()); + } catch (SourceNotFoundException e) { + // ignore + } + return Collections.EMPTY_LIST; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/NessieElementReaderImpl.java b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/NessieElementReaderImpl.java index 0eca45ad82..e295c57c0b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/NessieElementReaderImpl.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/autocomplete/NessieElementReaderImpl.java @@ -21,6 +21,8 @@ import org.projectnessie.client.api.NessieApiV1; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Reference; +import org.projectnessie.model.Reference.ReferenceType; import com.dremio.service.autocomplete.nessie.Branch; import com.dremio.service.autocomplete.nessie.Commit; @@ -41,15 +43,29 @@ public NessieElementReaderImpl(NessieApiV1 nessieApi) { this.nessieApi = nessieApi; } + private static Branch toBranch(Reference reference) { + if (reference.getType() != ReferenceType.BRANCH) { + throw new IllegalStateException("Nessie did not respond with branch: " + reference); + } + return new Branch(reference.getName(), new Hash(reference.getHash())); + } + + private static Tag toTag(Reference reference) { + if (reference.getType() != ReferenceType.TAG) { + throw new IllegalStateException("Nessie did not respond with tag: " + reference); + } + return new Tag(reference.getName(), new Hash(reference.getHash())); + } + @Override public List<Branch> getBranches() { return nessieApi .getAllReferences() + .filter("refType == 'BRANCH'") .get() .getReferences() .stream() - .filter(reference -> reference instanceof org.projectnessie.model.Branch) - .map(reference -> new Branch(reference.getName(), new Hash(reference.getHash()))) + .map(NessieElementReaderImpl::toBranch) .collect(Collectors.toList()); } @@ -76,11 +92,11 @@ public List<Commit> getCommits() { public List<Tag> getTags() { return nessieApi .getAllReferences() + .filter("refType == 'TAG'") .get()
.getReferences() .stream() - .filter(reference -> reference instanceof org.projectnessie.model.Tag) - .map(reference -> new Tag(reference.getName(), new Hash(reference.getHash()))) + .map(NessieElementReaderImpl::toTag) .collect(Collectors.toList()); } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/catalog/CatalogServiceHelper.java b/dac/backend/src/main/java/com/dremio/dac/service/catalog/CatalogServiceHelper.java index c9aa6a0ee2..f52f1c9d7f 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/catalog/CatalogServiceHelper.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/catalog/CatalogServiceHelper.java @@ -23,7 +23,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.ConcurrentModificationException; -import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; @@ -35,7 +34,6 @@ import javax.inject.Inject; import javax.ws.rs.core.SecurityContext; -import org.apache.commons.collections.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,6 +49,7 @@ import com.dremio.dac.api.Source; import com.dremio.dac.api.Space; import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.VersionContextUtils; import com.dremio.dac.homefiles.HomeFileSystemStoragePlugin; import com.dremio.dac.homefiles.HomeFileTool; import com.dremio.dac.model.sources.PhysicalDatasetPath; @@ -58,6 +57,7 @@ import com.dremio.dac.model.spaces.HomePath; import com.dremio.dac.model.spaces.SpaceName; import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.dac.service.errors.ClientErrorException; import com.dremio.dac.service.errors.SourceNotFoundException; import com.dremio.dac.service.reflection.ReflectionServiceHelper; import com.dremio.dac.service.search.SearchContainer; @@ -65,15 +65,23 @@ import com.dremio.dac.service.source.SourceService; import com.dremio.dac.util.DatasetsUtil; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.exec.catalog.CatalogUtil; -import com.dremio.exec.catalog.DatasetCatalog.UpdateStatus; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.exec.catalog.VersionedPlugin; import com.dremio.exec.dotfile.View; import com.dremio.exec.server.SabotContext; -import com.dremio.exec.store.DatasetRetrievalOptions; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; import com.dremio.exec.store.SchemaEntity; import com.dremio.exec.store.StoragePlugin; import com.dremio.exec.store.dfs.FileSystemPlugin; +import com.dremio.plugins.ExternalNamespaceEntry; import com.dremio.service.namespace.BoundedDatasetCount; import com.dremio.service.namespace.NamespaceAttribute; import com.dremio.service.namespace.NamespaceException; @@ -95,12 +103,14 @@ import com.dremio.service.namespace.space.proto.HomeConfig; import com.dremio.service.namespace.space.proto.SpaceConfig; import com.dremio.service.users.SystemUser; -import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import 
io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Catalog Service Helper * @@ -142,16 +152,15 @@ Stream addInfo(Stream items, final Cat children; - private static final Set<String> availableValues; + private static final Set<String> AVAILABLE_VALUES; static { - availableValues = new HashSet<>(Arrays.stream(DetailType.values()) .map(Enum::name) .collect(Collectors.toList())); + AVAILABLE_VALUES = Arrays.stream(DetailType.values()) .map(Enum::name).collect(Collectors.toSet()); } public static boolean hasValue(final String key) { - return availableValues.contains(key); + return AVAILABLE_VALUES.contains(key); } Stream<CatalogItem> addInfo(final Stream<CatalogItem> items, @@ -161,7 +170,7 @@ Stream addInfo(final Stream items, } private final Catalog catalog; - private final SecurityContext context; + private final SecurityContext securityContext; private final SourceService sourceService; private final NamespaceService namespaceService; private final SabotContext sabotContext; @@ -173,7 +182,7 @@ Stream addInfo(final Stream items, @Inject public CatalogServiceHelper( Catalog catalog, - SecurityContext context, + SecurityContext securityContext, SourceService sourceService, NamespaceService namespaceService, SabotContext sabotContext, @@ -183,7 +192,7 @@ public CatalogServiceHelper( SearchService searchService ) { this.catalog = catalog; - this.context = context; + this.securityContext = securityContext; this.sourceService = sourceService; this.namespaceService = namespaceService; this.sabotContext = sabotContext; @@ -193,6 +202,7 @@ public CatalogServiceHelper( this.searchService = searchService; } + @WithSpan public Optional<DatasetConfig> getDatasetById(String datasetId) { DremioTable table = catalog.getTable(datasetId); @@ -203,23 +213,13 @@ public Optional getDatasetById(String datasetId) { return Optional.ofNullable(table.getDatasetConfig()); } - public Optional<NameSpaceContainer> getContainerById(String id) { - try { - NameSpaceContainer container = namespaceService.getEntityById(id); - - return Optional.ofNullable(container); - } catch (NamespaceNotFoundException e) { - return Optional.empty(); - } - } - - private HomeConfig getHomeForCurrentUser() throws NamespaceException { - HomePath homePath = new HomePath(HomeName.getUserHomePath(context.getUserPrincipal().getName())); + HomePath homePath = new HomePath(HomeName.getUserHomePath(securityContext.getUserPrincipal().getName())); return namespaceService.getHome(homePath.toNamespaceKey()); } + @WithSpan public List<CatalogItem> getTopLevelCatalogItems(final List<String> include) { Preconditions.checkNotNull(include); @@ -229,8 +229,8 @@ public List getTopLevelCatalogItems(final List in HomeConfig homeForCurrentUser = getHomeForCurrentUser(); topLevelItems.add(CatalogItem.fromHomeConfig(homeForCurrentUser)); } catch (NamespaceException e) { - // if for some reason we can't find a home space, log it but keep going - logger.warn("Failed to find home space for user [{}]", context.getUserPrincipal().getName()); + // If for some reason we can't find a home space, log it but keep going.
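+ /*
+ * Aside on the tracing annotations introduced in this file: @WithSpan (from
+ * io.opentelemetry.instrumentation.annotations) wraps the annotated method in
+ * an OpenTelemetry span named "ClassName.methodName" by default, e.g.
+ *
+ *   @WithSpan   // emits a span named "CatalogServiceHelper.getDatasetById"
+ *   public Optional<DatasetConfig> getDatasetById(String datasetId) { ... }
+ *
+ * and Span.current() (imported above) can attach attributes to the active span.
+ */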
+ logger.warn("Failed to find home space for user [{}]", securityContext.getUserPrincipal().getName()); } for (SpaceConfig spaceConfig : namespaceService.getSpaces()) { @@ -255,12 +255,52 @@ protected NameSpaceContainer getRootContainer(List path) throws Namespac return namespaceService.getEntities(Collections.singletonList(parentKey)).get(0); } - public Optional<CatalogEntity> getCatalogEntityByPath(List<String> path, final List<String> include, - final List<String> exclude) throws NamespaceException { - NameSpaceContainer entity = getNamespaceEntity(new NamespaceKey(path)); + @WithSpan + public Optional<CatalogEntity> getCatalogEntityByPath( + final List<String> path, final List<String> include, final List<String> exclude) + throws NamespaceException { + return getCatalogEntityByPath(path, include, exclude, null, null); + } + + @WithSpan + public Optional<CatalogEntity> getCatalogEntityByPath( + final List<String> path, + final List<String> include, + final List<String> exclude, + final String versionType, + final String versionValue) + throws NamespaceException { + final boolean isSource = path.size() == 1; + final NamespaceKey namespaceKey = new NamespaceKey(path); + + if (!isSource && CatalogUtil.requestedPluginSupportsVersionedTables(namespaceKey, catalog)) { + final TableVersionContext tableVersionContext = generateTableVersionContext(versionType, versionValue); + final CatalogEntityKey catalogEntityKey = + CatalogEntityKey.newBuilder() + .keyComponents(path) + .tableVersionContext(tableVersionContext) + .build(); + final DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog); + + if (table == null) { + return Optional.empty(); + } + + final DatasetConfig datasetConfig = table.getDatasetConfig(); + final Optional settings = + getStoredReflectionSettingsForDataset(datasetConfig); + final Dataset dataset = + toDatasetAPI( + datasetConfig, + settings.map(Dataset.RefreshSettings::new).orElse(null)); + + return Optional.of(dataset); + } + + final NameSpaceContainer entity = getNamespaceEntity(namespaceKey); if (entity == null) { - // if we can't find it in the namespace, check if its a non-promoted file/folder in a filesystem source + // if we can't find it in the namespace, check if it is a non-promoted file/folder in a filesystem source Optional<CatalogItem> internalItem = getInternalItemByPath(path); if (!internalItem.isPresent()) { return Optional.empty(); @@ -272,6 +312,18 @@ public Optional getCatalogEntityByPath(List path, final L } } + private TableVersionContext generateTableVersionContext(String versionType, String versionValue) { + final Optional<TableVersionContext> tableVersionContext = + TableVersionContext.tryParse(versionType, versionValue); + + if (tableVersionContext.isPresent()) { + return tableVersionContext.get(); + } + + throw new ClientErrorException("Missing a versionType/versionValue pair for versioned dataset"); + } + + @WithSpan public Optional<CatalogEntity> getCatalogEntityById(String id, final List<String> include, final List<String> exclude) throws NamespaceException { Optional<Object> entity = getById(id); @@ -283,6 +335,7 @@ public Optional getCatalogEntityById(String id, final List getCatalogEntity(Object object, boolean includeChildren) throws NamespaceException { if (object instanceof NameSpaceContainer) { return getCatalogEntityFromNamespaceContainer((NameSpaceContainer) object, includeChildren); @@ -354,6 +407,7 @@ private Optional getCatalogEntityFromNamespaceContainer(NameSpace * Note: this returns the namespace container found in the namespace. For non-namespace * items it returns the appropriate CatalogEntity item (Folder/File only).
*/ + @WithSpan private Optional<Object> getById(String id) { try { if (isInternalId(id)) { @@ -371,6 +425,12 @@ private Optional getById(String id) { } return getCatalogEntityFromCatalogItem(item); + } else if (VersionedDatasetId.tryParse(id) != null) { + final DatasetConfig datasetConfig = catalog.getTable(id).getDatasetConfig(); + final Optional settings = getStoredReflectionSettingsForDataset(datasetConfig); + final Dataset dataset = toDatasetAPI(datasetConfig, settings.map(Dataset.RefreshSettings::new).orElse(null)); + + return Optional.of(dataset); + } else { NameSpaceContainer container = namespaceService.getEntityById(id); if (container == null) { @@ -403,51 +463,6 @@ private Optional getCatalogEntityFromCatalogItem(CatalogItem cata throw new RuntimeException(String.format("Could not retrieve internal item [%s]", catalogItem.toString())); } - // TODO: "?" is losing ACLs info, which requires another lookup against ACS - private Optional<Object> extractFromNamespaceContainer(NameSpaceContainer entity) { - if (entity == null) { - // if we can't find it by id, maybe its not in the namespace - return Optional.empty(); - } - - Optional<Object> result = Optional.empty(); - - switch (entity.getType()) { - case SOURCE: { - result = Optional.of(entity.getSource()); - break; - } - - case SPACE: { - result = Optional.of(entity.getSpace()); - break; - } - - case DATASET: { - // for datasets go to the catalog to ensure we have schema. - DatasetConfig dataset = entity.getDataset(); - result = Optional.of(catalog.getTable(dataset.getId().getId()).getDatasetConfig()); - break; - } - - case HOME: { - result = Optional.of(entity.getHome()); - break; - } - - case FOLDER: { - result = Optional.of(entity.getFolder()); - break; - } - - default: { - throw new RuntimeException(String.format("Unsupported namespace entity type [%s]", entity.getType())); - } - } - - return result; - } - private List<CatalogItem> getListingForInternalItem(List<String> path) throws NamespaceException { NameSpaceContainer rootEntity = getNamespaceEntity(new NamespaceKey(path.get(0))); @@ -476,7 +491,7 @@ private CatalogItem getInternalItemFromSource(SourceConfig sourceConfig, List getChildrenForPath(NamespaceKey path) throws NamespaceException { final List<CatalogItem> catalogItems = new ArrayList<>(); @@ -569,15 +583,12 @@ public List getChildrenForPath(NamespaceKey path) throws NamespaceE private List<CatalogItem> getNamespaceChildrenForPath(NamespaceKey path) { final List<CatalogItem> catalogItems = new ArrayList<>(); - try { final List<NameSpaceContainer> list = namespaceService.list(path); for (NameSpaceContainer container : list) { final Optional<CatalogItem> item = CatalogItem.fromNamespaceContainer(container); - if (item.isPresent()) { - catalogItems.add(item.get()); - } + item.ifPresent(catalogItems::add); } } catch (NamespaceException e) { logger.warn(e.getMessage()); @@ -597,7 +608,7 @@ protected List getChildrenForSourcePath(NameSpaceContainer source, if (plugin instanceof FileSystemPlugin) { // For file based plugins, use the list method to get the listing. That code will merge in any promoted datasets // that are in the namespace for us. This is in line with what the UI does.
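/*
 * A sketch of the versioned-lookup path added in getCatalogEntityByPath above
 * (source, path and branch name are illustrative):
 *
 *   TableVersionContext ctx = TableVersionContext.tryParse("BRANCH", "main").get();
 *   CatalogEntityKey key = CatalogEntityKey.newBuilder()
 *       .keyComponents(Arrays.asList("nessie", "folder", "table"))
 *       .tableVersionContext(ctx)
 *       .build();
 *   DremioTable table = CatalogUtil.getTable(key, catalog);   // null when absent
 */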
- final List list = ((FileSystemPlugin) plugin).list(listingPath, context.getUserPrincipal().getName()); + final List list = ((FileSystemPlugin) plugin).list(listingPath, securityContext.getUserPrincipal().getName()); for (SchemaEntity entity : list) { final CatalogItem catalogItem = convertSchemaEntityToCatalogItem(entity, listingPath); @@ -614,6 +625,7 @@ protected List getChildrenForSourcePath(NameSpaceContainer source, return catalogItems; } + @WithSpan public CatalogEntity createCatalogItem(CatalogEntity entity) throws NamespaceException, UnsupportedOperationException, ExecutionSetupException { if (entity instanceof Space) { Space space = (Space) entity; @@ -653,7 +665,7 @@ protected CatalogEntity createDataset(Dataset dataset, NamespaceAttribute... att List types = Arrays.asList(NameSpaceContainer.Type.SPACE, NameSpaceContainer.Type.HOME); Preconditions.checkArgument(types.contains(entity.getType()), "Virtual datasets can only be saved into spaces or home space."); - sabotContext.getViewCreator(context.getUserPrincipal().getName()).createView(dataset.getPath(), dataset.getSql(), dataset.getSqlContext(), attributes); + sabotContext.getViewCreator(securityContext.getUserPrincipal().getName()).createView(dataset.getPath(), dataset.getSql(), dataset.getSqlContext(), attributes); NameSpaceContainer created = namespaceService.getEntityByPath(namespaceKey); @@ -663,10 +675,11 @@ protected CatalogEntity createDataset(Dataset dataset, NamespaceAttribute... att /** * Promotes the target to a PDS using the formatting options submitted via dataset. */ + @WithSpan public Dataset promoteToDataset(String targetId, Dataset dataset) throws NamespaceException, UnsupportedOperationException { Preconditions.checkArgument(dataset.getType() == Dataset.DatasetType.PHYSICAL_DATASET, "Promoting can only create physical datasets."); - // The id can either be a internal id or a namespace id. It will be a namespace id if the entity had been promoted + // The id can either be an internal id or a namespace id. It will be a namespace id if the entity had been promoted // before and then unpromoted. final List path; if (isInternalId(targetId)) { @@ -682,7 +695,7 @@ public Dataset promoteToDataset(String targetId, Dataset dataset) throws Namespa // getPathFromInternalId will return a path without quotes so make sure we do the same for the dataset path List normalizedPath = dataset.getPath().stream().map(PathUtils::removeQuotes).collect(Collectors.toList()); - Preconditions.checkArgument(CollectionUtils.isEqualCollection(path, normalizedPath), "Entity id does not match the path specified in the dataset."); + Preconditions.checkArgument(normalizedPath.equals(path), "Entity id does not match the path specified in the dataset."); // validation validateDataset(dataset); @@ -709,7 +722,7 @@ public Dataset promoteToDataset(String targetId, Dataset dataset) throws Namespa physicalDatasetConfig.setFullPathList(path); catalog.createOrUpdateDataset(namespaceService, new NamespaceKey(namespaceKey.getRoot()), - new PhysicalDatasetPath(path).toNamespaceKey(), toDatasetConfig(physicalDatasetConfig, context.getUserPrincipal().getName()), getNamespaceAttributes(dataset)); + new PhysicalDatasetPath(path).toNamespaceKey(), toDatasetConfig(physicalDatasetConfig, securityContext.getUserPrincipal().getName()), getNamespaceAttributes(dataset)); if (dataset.getAccelerationRefreshPolicy() != null) { reflectionServiceHelper.getReflectionSettings() @@ -743,7 +756,7 @@ private void updateDataset(Dataset dataset, NamespaceAttribute... 
attributes) th if (dataset.getType() == Dataset.DatasetType.PHYSICAL_DATASET) { // cannot change the path of a physical dataset - Preconditions.checkArgument(CollectionUtils.isEqualCollection(dataset.getPath(), currentDatasetConfig.getFullPathList()), "Dataset path can not be modified."); + Preconditions.checkArgument(dataset.getPath().equals(currentDatasetConfig.getFullPathList()), "Dataset path can not be modified."); Preconditions.checkArgument( type != VIRTUAL_DATASET, "Dataset type can not be modified"); // PDS specific config @@ -751,14 +764,13 @@ private void updateDataset(Dataset dataset, NamespaceAttribute... attributes) th if (type == com.dremio.service.namespace.dataset.proto.DatasetType.PHYSICAL_DATASET_HOME_FILE) { DatasetConfig datasetConfig = toDatasetConfig(dataset.getFormat().asFileConfig(), type, - context.getUserPrincipal().getName(), currentDatasetConfig.getId()); + securityContext.getUserPrincipal().getName(), currentDatasetConfig.getId()); catalog.createOrUpdateDataset(namespaceService, new NamespaceKey(HomeFileSystemStoragePlugin.HOME_PLUGIN_NAME), namespaceKey, datasetConfig, attributes); } else if (type == com.dremio.service.namespace.dataset.proto.DatasetType.PHYSICAL_DATASET_SOURCE_FILE || type == com.dremio.service.namespace.dataset.proto.DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER) { Preconditions.checkArgument(dataset.getFormat() != null, "Promoted dataset needs to have a format set."); - //DatasetConfig datasetConfig = toDatasetConfig(dataset.getFormat().asFileConfig(), currentDatasetConfig.getType(), context.getUserPrincipal().getName(), currentDatasetConfig.getId()); // only thing that can change is the formatting currentDatasetConfig.getPhysicalDataset().setFormatSettings(dataset.getFormat().asFileConfig()); @@ -778,7 +790,6 @@ private void updateDataset(Dataset dataset, NamespaceAttribute... attributes) th } else if (dataset.getType() == Dataset.DatasetType.VIRTUAL_DATASET) { Preconditions.checkArgument(type == VIRTUAL_DATASET, "Dataset type can not be modified"); VirtualDataset virtualDataset = currentDatasetConfig.getVirtualDataset(); - Dataset currentDataset = toDatasetAPI(container, null); // Check if the dataset is being renamed if (!Objects.equals(currentDatasetConfig.getFullPathList(), dataset.getPath())) { @@ -837,6 +848,8 @@ private void deleteDataset(DatasetConfig config, String tag) throws NamespaceExc public void deleteHomeDataset(DatasetConfig config, String version, List pathComponents) throws IOException, NamespaceException { FileConfig formatSettings = config.getPhysicalDataset().getFormatSettings(); Preconditions.checkArgument(pathComponents != null && !pathComponents.isEmpty(), "Cannot find path to dataset"); + + // TODO: Probably should be combined into one call for safe home file deletion. if (homeFileTool.fileExists(formatSettings.getLocation())) { homeFileTool.deleteFile(formatSettings.getLocation()); } @@ -905,21 +918,25 @@ protected CatalogEntity createSource(Source source, NamespaceAttribute... 
attrib getChildrenForPath(new NamespaceKey(sourceConfig.getName()))); } + @WithSpan public CatalogEntity updateCatalogItem(CatalogEntity entity, String id) throws NamespaceException, UnsupportedOperationException, ExecutionSetupException, IOException { Preconditions.checkArgument(entity.getId().equals(id), "Ids must match."); String finalId = id; if (entity instanceof Dataset) { + Span.current().setAttribute("dremio.catalog.entityType", "Dataset"); Dataset dataset = (Dataset) entity; updateDataset(dataset, getNamespaceAttributes(entity)); } else if (entity instanceof Source) { + Span.current().setAttribute("dremio.catalog.entityType", "Source"); Source source = (Source) entity; sourceService.updateSource(id, source.toSourceConfig(), getNamespaceAttributes(entity)); } else if (entity instanceof Space) { + Span.current().setAttribute("dremio.catalog.entityType", "Space"); Space space = (Space) entity; updateSpace(space, getNamespaceAttributes(space)); } else if (entity instanceof Folder) { + Span.current().setAttribute("dremio.catalog.entityType", "Folder"); Folder folder = (Folder) entity; FolderConfig folderConfig = updateFolder(folder, getNamespaceAttributes(entity)); finalId = folderConfig.getId().getId(); @@ -937,6 +954,7 @@ public CatalogEntity updateCatalogItem(CatalogEntity entity, String id) throws N } } + @WithSpan public void deleteCatalogItem(String id, String tag) throws NamespaceException, UnsupportedOperationException { Optional entity = getById(id); @@ -950,6 +968,7 @@ public void deleteCatalogItem(String id, String tag) throws NamespaceException, NameSpaceContainer container = (NameSpaceContainer) object; switch (container.getType()) { case SOURCE: + Span.current().setAttribute("dremio.catalog.entityType", "Source"); SourceConfig config = container.getSource(); if (tag != null) { @@ -959,6 +978,7 @@ public void deleteCatalogItem(String id, String tag) throws NamespaceException, sourceService.deleteSource(config); break; case SPACE: + Span.current().setAttribute("dremio.catalog.entityType", "Space"); SpaceConfig spaceConfig = container.getSpace(); String version = spaceConfig.getTag(); @@ -969,6 +989,8 @@ public void deleteCatalogItem(String id, String tag) throws NamespaceException, deleteSpace(spaceConfig, version); break; case DATASET: + Span.current().setAttribute("dremio.catalog.entityType", "Dataset"); + DatasetConfig datasetConfig = container.getDataset(); try { @@ -978,6 +1000,8 @@ public void deleteCatalogItem(String id, String tag) throws NamespaceException, } break; case FOLDER: + Span.current().setAttribute("dremio.catalog.entityType", "Folder"); + FolderConfig folderConfig = container.getFolder(); String folderVersion = folderConfig.getTag(); @@ -1003,7 +1027,7 @@ protected Folder createFolder(Folder folder, NamespaceAttribute... attributes) t NameSpaceContainer container = entities.get(0); if (container == null) { - // if we can't find it by id, maybe its not in the namespace + // If we can't find it by id, maybe it is not in the namespace. throw new IllegalArgumentException(String.format("Could not find entity with path [%s].", folder.getPath())); } @@ -1038,7 +1062,7 @@ protected FolderConfig updateFolder(Folder folder, NamespaceAttribute...
attribu } else { FolderConfig folderConfig = namespaceService.getFolder(namespaceKey); - Preconditions.checkArgument(CollectionUtils.isEqualCollection(folder.getPath(), folderConfig.getFullPathList()), "Folder path is immutable."); + Preconditions.checkArgument(folder.getPath().equals(folderConfig.getFullPathList()), "Folder path is immutable."); namespaceService.addOrUpdateFolder(namespaceKey, getFolderConfig(folder), attributes); } @@ -1054,67 +1078,39 @@ public Source toSourceAPI(NameSpaceContainer container, List childr /** * Refresh a catalog item. Only supports datasets currently. */ + @WithSpan public void refreshCatalogItem(String id) throws UnsupportedOperationException { - Optional entity = getById(id); - - if (!entity.isPresent()) { - throw new IllegalArgumentException(String.format("Could not find entity with id [%s].", id)); - } - - Object object = entity.get(); - - if (object instanceof NameSpaceContainer && ((NameSpaceContainer) object).getType() == NameSpaceContainer.Type.DATASET) { - reflectionServiceHelper.refreshReflectionsForDataset(id); - } else { - throw new UnsupportedOperationException(String.format("Can only refresh datasets but found [%s].", object.getClass().getName())); + DatasetConfig config = CatalogUtil.getDatasetConfig(catalog, id); + if (config == null) { + throw new IllegalArgumentException(String.format("Could not find dataset with id [%s].", id)); } + reflectionServiceHelper.refreshReflectionsForDataset(config.getId().getId()); } - /** - * Refresh a catalog item's metadata. Only supports datasets currently. - */ - public UpdateStatus refreshCatalogItemMetadata(String id, - Boolean delete, - Boolean force, - Boolean promotion) - throws UnsupportedOperationException { - Optional entity = getById(id); - - if (!entity.isPresent()) { - throw new IllegalArgumentException(String.format("Could not find entity with id [%s].", id)); - } - - Object object = entity.get(); - - if (object instanceof NameSpaceContainer && ((NameSpaceContainer) object).getType() == NameSpaceContainer.Type.DATASET) { - final NamespaceKey namespaceKey = catalog.resolveSingle(new NamespaceKey(((NameSpaceContainer) object).getFullPathList())); - final DatasetRetrievalOptions.Builder retrievalOptionsBuilder = DatasetRetrievalOptions.newBuilder(); - - if (delete != null) { - retrievalOptionsBuilder.setDeleteUnavailableDatasets(delete.booleanValue()); - } - if (force != null) { - retrievalOptionsBuilder.setForceUpdate(force.booleanValue()); - } - if (promotion != null) { - retrievalOptionsBuilder.setAutoPromote(promotion.booleanValue()); - } - - return catalog.refreshDataset(namespaceKey, retrievalOptionsBuilder.build()); + private Optional getStoredReflectionSettingsForDataset( + DatasetConfig datasetConfig) { + final String id = datasetConfig.getId().getId(); + final VersionedDatasetId versionedDatasetId = VersionedDatasetId.tryParse(id); + final CatalogEntityKey.Builder builder = CatalogEntityKey.newBuilder(); + if (versionedDatasetId == null) { + builder.keyComponents(datasetConfig.getFullPathList()); } else { - throw new UnsupportedOperationException( - String.format("Cannot refresh metadata on %s type. 
Metadata refresh can only operate on physical datasets.", - object.getClass().getName())); + builder + .keyComponents(versionedDatasetId.getTableKey()) + .tableVersionContext(versionedDatasetId.getVersionContext()); } - } - private Optional getStoredReflectionSettingsForDataset(DatasetConfig datasetConfig) { - return reflectionServiceHelper.getReflectionSettings().getStoredReflectionSettings(new NamespaceKey(datasetConfig.getFullPathList())); + return reflectionServiceHelper + .getReflectionSettings() + .getStoredReflectionSettings(builder.build()); } public Dataset toDatasetAPI(NameSpaceContainer container, Dataset.RefreshSettings refreshSettings) { - final DatasetConfig config = container.getDataset(); + return toDatasetAPI(container.getDataset(), refreshSettings); + } + + public Dataset toDatasetAPI(DatasetConfig config, Dataset.RefreshSettings refreshSettings) { if (config.getType() == VIRTUAL_DATASET) { String sql = null; List sqlContext = null; @@ -1154,19 +1150,19 @@ public Dataset toDatasetAPI(NameSpaceContainer container, Dataset.RefreshSetting } } } + return new Dataset( - config.getId().getId(), - Dataset.DatasetType.PHYSICAL_DATASET, - config.getFullPathList(), - DatasetsUtil.getArrowFieldsFromDatasetConfig(config), - config.getCreatedAt(), - String.valueOf(config.getTag()), - refreshSettings, - null, - null, - format, - physicalDataset.getAllowApproxStats() - ); + config.getId().getId(), + Dataset.DatasetType.PHYSICAL_DATASET, + config.getFullPathList(), + DatasetsUtil.getArrowFieldsFromDatasetConfig(config), + config.getCreatedAt(), + String.valueOf(config.getTag()), + refreshSettings, + null, + null, + format, + (physicalDataset != null) ? physicalDataset.getAllowApproxStats() : Boolean.FALSE); } } @@ -1223,9 +1219,9 @@ protected NamespaceAttribute[] getNamespaceAttributes(CatalogEntity entity) { return DEFAULT_NS_ATTRIBUTES; } - // Catalog items that are not in the namespace (files/folders in file based sources are given a fake id that + // Catalog items that are not in the namespace (files/folders) in file-based sources are given a fake id that // is dremio:/path/to/entity - the prefix helps us distinguish between fake and real ids. 
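 // For example (hypothetical path): generateInternalId(Arrays.asList("mysource", "folder", "file.csv"))
 // would yield "dremio:/mysource/folder/file.csv", assuming PathUtils.toFSPathString joins the components with '/'.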
- private static String INTERNAL_ID_PREFIX = "dremio:"; + private static final String INTERNAL_ID_PREFIX = "dremio:"; public static String generateInternalId(List path) { return INTERNAL_ID_PREFIX + com.dremio.common.utils.PathUtils.toFSPathString(path); @@ -1253,6 +1249,7 @@ public List searchByQuery(String query) throws NamespaceExcepti return searchService.search(query, null); } + @WithSpan public List search(String query) throws NamespaceException { List searchResults = searchByQuery(query); @@ -1264,7 +1261,7 @@ public List search(String query) throws NamespaceException { .collect(Collectors.toList()); } - public List applyAdditionalInfoToContainers( + private List applyAdditionalInfoToContainers( final List items, final List include) { Stream resultList = items.stream().map(CatalogItem.Builder::new); @@ -1277,10 +1274,11 @@ public List applyAdditionalInfoToContainers( .collect(Collectors.toList()); } - public void createHomeSpace(String userName) { + public void ensureUserHasHomespace(String userName) { try { CatalogServiceHelper.ensureUserHasHomespace(sabotContext.getNamespaceService(SystemUser.SYSTEM_USERNAME), userName); } catch (NamespaceException ignored) { + logger.warn("Could not ensure user [{}] has homespace.", userName); } } @@ -1289,10 +1287,88 @@ public static void ensureUserHasHomespace(NamespaceService namespaceService, Str try { namespaceService.getHome(homeKey); } catch (NamespaceNotFoundException ignored) { - // create home namespaceService.addOrUpdateHome(homeKey, new HomeConfig().setCtime(System.currentTimeMillis()).setOwner(userName) ); } } + + /** + * Retrieve the children for a catalog entity based on the entity path. + * If the path represents a versioned source, use VersionedPlugin to look for the children; + * otherwise, look for them in Namespace. + * So far this is specifically used by Autocomplete V2. + */ + public List getCatalogChildrenForPath(List path, String refType, String refValue) throws NamespaceException { + NamespaceKey sourceKey = new NamespaceKey(path); + // TODO: we should remove the call to NamespaceService since we should already know whether it's a versioned source or not. Need to coordinate with the frontend. + // We can use SourceConfig.type to tell a VersionedSource if it's NESSIE or ARCTIC. + // Add SourceSubType {Versioned, Non-Versioned} in CatalogItem, then send it properly to FE.
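+ // Illustrative call (hypothetical values): getCatalogChildrenForPath(ImmutableList.of("nessie_src"), "BRANCH", "main")
+ // lists the top-level entries of branch "main" when "nessie_src" resolves to a VersionedPlugin;
+ // for any other source type it falls through to the namespace listing below.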
+ try { + final StoragePlugin plugin = getStoragePlugin(path.get(0)); + if (plugin instanceof VersionedPlugin) { + return getChildrenForVersionedSourcePath((VersionedPlugin) plugin, sourceKey, refType, refValue); + } + } catch (UserException | SourceNotFoundException e) { + // ignore + } + return getNamespaceChildrenForPath(sourceKey); + } + + private List getChildrenForVersionedSourcePath( + VersionedPlugin plugin, + NamespaceKey sourceKey, + String refType, + String refValue + ) { + VersionContext version = VersionContextUtils.parse(refType, refValue); + try { + List entities = plugin.listEntries(sourceKey.getPathWithoutRoot(), version).collect(Collectors.toList()); + return generateCatalogItemList(sourceKey.getRoot(), entities); + } catch (ReferenceNotFoundException | NoDefaultBranchException | ReferenceTypeConflictException e) { + // ignore + return Collections.emptyList(); + } + } + + private List generateCatalogItemList(String sourceName, List entities) { + return entities.stream() + .map((entity) -> { + CatalogItem.Builder builder = new CatalogItem.Builder(); + switch (entity.getType()) { + case FOLDER: + builder.setType(CatalogItem.CatalogItemType.CONTAINER) + .setContainerType(CatalogItem.ContainerSubType.FOLDER); + break; + + case ICEBERG_VIEW: + builder.setType(CatalogItem.CatalogItemType.DATASET) + .setDatasetType(CatalogItem.DatasetSubType.VIRTUAL); + break; + + case ICEBERG_TABLE: + builder.setType(CatalogItem.CatalogItemType.DATASET) + .setDatasetType(CatalogItem.DatasetSubType.PROMOTED); + break; + + case UNKNOWN: + default: + // ignore UNKNOWN entities + return null; + } + return builder.setId(entity.getId()) + .setPath(Stream.concat(Stream.of(sourceName), entity.getNameElements().stream()).collect(Collectors.toList())) + .build(); + }) + .filter(entity -> entity != null) + .collect(Collectors.toList()); + } + + public Stream getReferencesForVersionedSource(String sourceName) throws SourceNotFoundException { + final StoragePlugin plugin = getStoragePlugin(sourceName); + if (!(plugin instanceof VersionedPlugin)) { + throw new SourceNotFoundException(sourceName + " is not a versioned source."); + } + return ((VersionedPlugin) plugin).listReferences(); + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationHelper.java b/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationHelper.java index ff6dbf341e..db1be7760f 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationHelper.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationHelper.java @@ -37,13 +37,13 @@ import com.dremio.datastore.SearchTypes.SearchQuery; import com.dremio.datastore.api.LegacyIndexedStore.LegacyFindByCondition; import com.dremio.datastore.api.LegacyKVStoreProvider; -import com.dremio.exec.server.SabotContext; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.NamespaceServiceImpl; import com.dremio.service.namespace.proto.NameSpaceContainer; import com.dremio.service.users.User; import com.dremio.service.users.UserNotFoundException; +import com.dremio.service.users.UserService; import com.google.common.base.Preconditions; /** @@ -52,22 +52,38 @@ public class CollaborationHelper { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CollaborationHelper.class); + private static final String DEFAULT_HOME_WIKI_TEXT = "# Wikis & Tags\n" + + "\n" + + "![Gnarly 
Catalog](https://d33wubrfki0l68.cloudfront.net/c1a54376c45a9276c080f3d10ed25ce61c17bcd2/2b946/img/home/open-source-for-everyone.svg)\n" + + "\n" + + "You are reading the wiki for your home space! You can create and edit this information for any source, space, or folder." + + "\n" + + "\n" + + "This sidebar always shows the wiki for the current source, space or folder you are browsing.\n" + + "\n" + + "When previewing datasets, click on the `Catalog` tab to create a wiki or add tags to that dataset.\n" + + "\n" + + "**Tip:** You can hide the wiki by clicking on the sidebar icon on upper right hand side."; + private final CollaborationTagStore tagsStore; private final CollaborationWikiStore wikiStore; private final NamespaceService namespaceService; - private final SecurityContext context; - private final SabotContext sabotContext; + private final SecurityContext securityContext; private final SearchService searchService; + private final UserService userService; @Inject - public CollaborationHelper(LegacyKVStoreProvider kvStoreProvider, SabotContext sabotContext, - NamespaceService namespaceService, SecurityContext context, SearchService searchService) { + public CollaborationHelper(final LegacyKVStoreProvider kvStoreProvider, + final NamespaceService namespaceService, + final SecurityContext securityContext, + final SearchService searchService, + final UserService userService) { this.tagsStore = new CollaborationTagStore(kvStoreProvider); this.wikiStore = new CollaborationWikiStore(kvStoreProvider); this.namespaceService = namespaceService; - this.context = context; - this.sabotContext = sabotContext; + this.securityContext = securityContext; this.searchService = searchService; + this.userService = userService; } public Optional getTags(String entityId) throws NamespaceException { @@ -89,7 +105,7 @@ public void setTags(String entityId, Tags tags) throws NamespaceException { final Optional existingTag = tagsStore.getTagsForEntityId(entityId); - // if it is a update, copy over the id so we overwrite the existing entry + // If it is an update, copy over the id, so we overwrite the existing entry. collaborationTag.setId(existingTag.map(CollaborationTag::getId).orElse(UUID.randomUUID().toString())); tagsStore.save(collaborationTag); @@ -114,7 +130,7 @@ public Optional getWiki(String entityId) throws NamespaceException { return wiki.map(Wiki::fromCollaborationWiki); } - private final String getDescription(NameSpaceContainer container) { + private String getDescription(NameSpaceContainer container) { String description = null; switch (container.getType()) { case SOURCE: @@ -123,20 +139,9 @@ private final String getDescription(NameSpaceContainer container) { case SPACE: description = container.getSpace().getDescription(); break; - case HOME: // for home space we should pre populate wiki with default text - description = "# Wikis & Tags\n" + - "\n" + - "![Gnarly Catalog](https://d33wubrfki0l68.cloudfront.net/c1a54376c45a9276c080f3d10ed25ce61c17bcd2/2b946/img/home/open-source-for-everyone.svg)\n" + - "\n" + - "You are reading the wiki for your home space! You can create and edit this information for any source, space, or folder." 
+ - "\n" + - "\n" + - "This sidebar always shows the wiki for the current source, space or folder you are browsing.\n" + - "\n" + - "When previewing datasets, click on the `Catalog` tab to create a wiki or add tags to that dataset.\n" + - "\n" + - "**Tip:** You can hide the wiki by clicking on the sidebar icon on upper right hand side."; - break; + case HOME: + description = DEFAULT_HOME_WIKI_TEXT; + break; default: break; } @@ -153,11 +158,11 @@ public void setWiki(String entityId, Wiki wiki) throws NamespaceException { collaborationWiki.setCreatedAt(System.currentTimeMillis()); // store the user - User user = null; + User user; try { - user = sabotContext.getUserService().getUser(context.getUserPrincipal().getName()); + user = userService.getUser(securityContext.getUserPrincipal().getName()); } catch (UserNotFoundException e) { - throw new RuntimeException(String.format("Could not load user [%s].", context.getUserPrincipal().getName())); + throw new RuntimeException(String.format("Could not load user [%s].", securityContext.getUserPrincipal().getName())); } collaborationWiki.setId(UUID.randomUUID().toString()); @@ -268,15 +273,11 @@ public TagsSearchResult getTagsForIds(Set ids) { Map tags = new HashMap<>(); List queries = new ArrayList<>(); - ids.stream().limit(maxTagRequestCount).forEach(input -> { - queries.add(SearchQueryUtils.newTermQuery(CollaborationTagStore.ENTITY_ID, input)); - }); + ids.stream().limit(maxTagRequestCount).forEach(input -> queries.add(SearchQueryUtils.newTermQuery(CollaborationTagStore.ENTITY_ID, input))); findByCondition.setCondition(SearchQueryUtils.or(queries)); - tagsStore.find(findByCondition).forEach(pair -> { - tags.put(pair.getKey(), pair.getValue()); - }); + tagsStore.find(findByCondition).forEach(pair -> tags.put(pair.getKey(), pair.getValue())); return new TagsSearchResult(tags, ids.size() > maxTagRequestCount); } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationTagStore.java b/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationTagStore.java index d00f6a2355..a68f838b15 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationTagStore.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/collaboration/CollaborationTagStore.java @@ -141,11 +141,9 @@ public void setTag(CollaborationTag value, String tag) { } private static final class CollaborationTagConverter implements DocumentConverter { - private Integer version = 0; - @Override public Integer getVersion() { - return version; + return 0; } @Override diff --git a/dac/backend/src/main/java/com/dremio/dac/service/datasets/DACViewCreatorFactory.java b/dac/backend/src/main/java/com/dremio/dac/service/datasets/DACViewCreatorFactory.java index 6050f30b7b..16566b4e4e 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/datasets/DACViewCreatorFactory.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/datasets/DACViewCreatorFactory.java @@ -32,6 +32,7 @@ import com.dremio.dac.proto.model.dataset.FromSQL; import com.dremio.dac.proto.model.dataset.TransformUpdateSQL; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; +import com.dremio.dac.service.errors.DatasetVersionNotFoundException; import com.dremio.dac.util.DatasetsUtil; import com.dremio.datastore.api.LegacyKVStoreProvider; import com.dremio.exec.catalog.ViewCreatorFactory; @@ -186,12 +187,16 @@ protected DatasetVersionResource newDatasetVersionResource(SecurityContext secur @Override public void dropView(List path) { + 
DatasetPath datasetPath = new DatasetPath(path); try { - DatasetPath datasetPath = new DatasetPath(path); - final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath); - String savedTag = virtualDataset.getSavedTag(); - datasetService.deleteDataset(datasetPath, savedTag); - } catch (Exception e) { + try { + final VirtualDatasetUI virtualDataset = datasetService.get(datasetPath); + String savedTag = virtualDataset.getSavedTag(); + datasetService.deleteDataset(datasetPath, savedTag); + } catch (DatasetVersionNotFoundException e) { + datasetService.deleteDataset(datasetPath, null); + } + } catch (Exception e) { Throwables.propagate(e); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/datasets/DatasetVersionMutator.java b/dac/backend/src/main/java/com/dremio/dac/service/datasets/DatasetVersionMutator.java index 03678fab5c..fec7fd714b 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/datasets/DatasetVersionMutator.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/datasets/DatasetVersionMutator.java @@ -71,6 +71,7 @@ import com.dremio.exec.physical.base.ViewOptions; import com.dremio.exec.planner.logical.ViewTable; import com.dremio.exec.planner.sql.CalciteArrowHelper; +import com.dremio.exec.planner.sql.parser.SqlGrant; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.SchemaConfig; @@ -139,7 +140,8 @@ public DatasetDownloadManager downloadManager() { return new DatasetDownloadManager(jobsService, namespaceService, downloadPlugin.getConfig().getPath(), downloadPlugin.getSystemUserFS(), jobResultsPlugin.getConfig().isPdfsBased(), optionManager); } - private void validate(DatasetPath path, VirtualDatasetUI ds) { + + public static void validate(DatasetPath path, VirtualDatasetUI ds) { if (ds.getSqlFieldsList() == null || ds.getSqlFieldsList().isEmpty()) { throw new IllegalArgumentException("SqlFields can't be null for " + path); } @@ -148,15 +150,26 @@ private void validate(DatasetPath path, VirtualDatasetUI ds) { } } - - public void putVersion(VirtualDatasetUI ds) throws DatasetNotFoundException, NamespaceException { + private void putVersion(VirtualDatasetUI ds, boolean doValidation) throws DatasetNotFoundException { DatasetPath path = new DatasetPath(ds.getFullPathList()); - validate(path, ds); + if (doValidation) { + validate(path, ds); + } ds.setCreatedAt(System.currentTimeMillis()); final VersionDatasetKey datasetKey = new VersionDatasetKey(path, ds.getVersion()); datasetVersions.put(datasetKey, toVirtualDatasetVersion(ds)); } + public void putVersion(VirtualDatasetUI ds) throws DatasetNotFoundException { + putVersion(ds, true); + } + + public void putTempVersionWithoutValidation(VirtualDatasetUI ds) throws DatasetNotFoundException { + Preconditions.checkArgument(isTemporaryPath(ds.getFullPathList()), + "Only temp untitled datasets can bypass validation."); + putVersion(ds, false); + } + public void put(VirtualDatasetUI ds, NamespaceAttribute... attributes) throws DatasetNotFoundException, NamespaceException { DatasetPath path = new DatasetPath(ds.getFullPathList()); validatePaths(path, null); @@ -170,13 +183,16 @@ public void put(VirtualDatasetUI ds, NamespaceAttribute... attributes) throws Da } public void putWithVersionedSource(VirtualDatasetUI ds, DatasetPath path, String branchName, - String savedTag, NamespaceAttribute...
attributes) + String savedTag) throws DatasetNotFoundException, IOException, NamespaceException { DatasetConfig datasetConfig = toVirtualDatasetVersion(ds).getDataset(); Preconditions.checkNotNull(path); VersionContext versionContext = VersionContext.ofBranch(branchName); Map contextMap = ImmutableMap.of(path.getRoot().toString(), versionContext); Catalog catalog = getCatalog().resolveCatalog(contextMap); + + // TODO: Once DX-65418 is fixed, the injected catalog will validate the right entity accordingly + catalog.validatePrivilege(new NamespaceKey(path.getRoot().getName()), SqlGrant.Privilege.ALTER); BatchSchema schema = DatasetHelper.getSchemaBytes(datasetConfig) != null ? CalciteArrowHelper.fromDataset(datasetConfig) : null; View view = Views.fieldTypesToView( Iterables.getLast(datasetConfig.getFullPathList()), @@ -187,20 +203,25 @@ ); DremioTable exist = catalog.getTable(new NamespaceKey(path.toPathList())); if (exist != null && !(exist instanceof ViewTable)) { - throw UserException.validationError().message("Expecting getting a view but returns a entity type of %s", exist.getClass()).buildSilently(); - } else if (exist != null && savedTag == null){ - throw UserException.validationError().message("SavedTag cannot be null when updating a view").buildSilently(); - } else if (exist != null && !savedTag.equals(exist.getDatasetConfig().getTag())){ - throw UserException.resourceError().message("Your dataset may not be the most updated, please refresh").buildSilently(); + throw UserException.validationError().message("Expected a view but found an entity of type %s", exist.getDatasetConfig().getType()).buildSilently(); + } else if (exist != null && (savedTag == null || !savedTag.equals(exist.getDatasetConfig().getTag()))) { + throw UserException.concurrentModificationError() + .message("The specified location already contains a view named \"%s\". Please provide a unique view name or open the existing view, edit and then save.", + path.toPathList().get(path.toPathList().size() - 1)) + .build(logger); } final boolean viewExists = (exist != null); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, path.getRoot().getName(), versionContext); - ViewOptions viewOptions = new ViewOptions.ViewOptionsBuilder() - .version(resolvedVersionContext) - .batchSchema(schema) - .viewUpdate(viewExists) - .build(); + ViewOptions viewOptions = + new ViewOptions.ViewOptionsBuilder() + .version(resolvedVersionContext) + .batchSchema(schema) + .actionType( + viewExists + ? ViewOptions.ActionType.UPDATE_VIEW + : ViewOptions.ActionType.CREATE_VIEW) + .build(); if (viewExists) { catalog.updateView(new NamespaceKey(path.toPathList()), view, viewOptions); } else { @@ -220,6 +241,7 @@ public void putWithVersionedSource(VirtualDatasetUI ds, DatasetPath path, String } public Catalog getCatalog() { + // TODO - Why are we using the System User when interacting with Catalog when most of the DatasetTool should be in the context of a user?
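+ // As written, every lookup made through this catalog runs as the system user rather than the calling user.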
return catalogService.getCatalog(MetadataRequestOptions.of(SchemaConfig.newBuilder(CatalogUser.from(SYSTEM_USERNAME)) .build())); } @@ -305,12 +327,13 @@ public VirtualDatasetUI getVersion(DatasetPath path, DatasetVersion version) */ public VirtualDatasetUI getVersion(DatasetPath path, DatasetVersion version, boolean isVersionedSource) - throws DatasetVersionNotFoundException, DatasetNotFoundException { - VirtualDatasetVersion datasetVersion = datasetVersions.get(new VersionDatasetKey(path, version)); - VirtualDatasetUI virtualDatasetUI = toVirtualDatasetUI(datasetVersions.get(new VersionDatasetKey(path, version))); + throws DatasetVersionNotFoundException { + VirtualDatasetUI virtualDatasetUI = null; try { + VirtualDatasetVersion datasetVersion = getVirtualDatasetVersion(path, version); + virtualDatasetUI = toVirtualDatasetUI(datasetVersion); DatasetConfig datasetConfig; - if(isVersionedSource){ + if (isVersionedSource && virtualDatasetUI != null) { datasetConfig = datasetVersion.getDataset(); logger.debug("For versioned view {} got datasetConfig {} from datasetVersion store", path.toUnescapedString(), @@ -337,13 +360,34 @@ public VirtualDatasetUI getVersion(DatasetPath path, DatasetVersion version, boo return virtualDatasetUI; } + private DatasetPath getDatasetPathInOriginalCase(DatasetPath path) { + // namespaceService key is by default case-insensitive, but datasetVersions is case-sensitive. + // Here we use the dataset path in its original case, as preserved in the namespaceService value. + DatasetPath datasetPath = path; + + // Temporary VDS are not saved in namespace. + if (!isTemporaryPath(path.toPathList())) { + try { + final DatasetConfig datasetConfig = namespaceService.getDataset(path.toNamespaceKey()); + datasetPath = new DatasetPath(datasetConfig.getFullPathList()); + } catch (NamespaceException e) { + // Some tests save dataset versions without saving to namespace. If the dataset is not found in namespace, + // fall back to the path passed in.
+ } + } + + return datasetPath; + } + public VirtualDatasetVersion getVirtualDatasetVersion(DatasetPath path, DatasetVersion version) { - return datasetVersions.get(new VersionDatasetKey(path, version)); + DatasetPath datasetPath = getDatasetPathInOriginalCase(path); + return datasetVersions.get(new VersionDatasetKey(datasetPath, version)); } - public Iterable getAllVersions(DatasetPath path) throws DatasetVersionNotFoundException { + public Iterable getAllVersions(DatasetPath path) throws DatasetVersionNotFoundException, NamespaceException { + DatasetPath datasetPath = getDatasetPathInOriginalCase(path); return Iterables.transform(datasetVersions.find( - new LegacyFindByRange<>(new VersionDatasetKey(path, MIN_VERSION), false, new VersionDatasetKey(path, MAX_VERSION), false)), + new LegacyFindByRange<>(new VersionDatasetKey(datasetPath, MIN_VERSION), false, new VersionDatasetKey(datasetPath, MAX_VERSION), false)), new Function, VirtualDatasetUI> () { @Override public VirtualDatasetUI apply(Entry input) { @@ -362,9 +406,9 @@ public VirtualDatasetUI apply(Entry in public VirtualDatasetUI get(DatasetPath path) throws DatasetNotFoundException, NamespaceException { try { final DatasetConfig datasetConfig = namespaceService.getDataset(path.toNamespaceKey()); - final VirtualDatasetVersion virtualDatasetVersion = datasetVersions.get(new VersionDatasetKey(path, datasetConfig.getVirtualDataset().getVersion())); + final VirtualDatasetVersion virtualDatasetVersion = getVirtualDatasetVersion(path, datasetConfig.getVirtualDataset().getVersion()); if (virtualDatasetVersion == null) { - throw new DatasetNotFoundException(path, format("Missing version %s.", datasetConfig.getVirtualDataset().getVersion().toString())); + throw new DatasetVersionNotFoundException(path, datasetConfig.getVirtualDataset().getVersion()); } final VirtualDatasetUI virtualDatasetUI = toVirtualDatasetUI(virtualDatasetVersion) .setId(datasetConfig.getId().getId()) @@ -378,9 +422,9 @@ public VirtualDatasetUI get(DatasetPath path) throws DatasetNotFoundException, N public VirtualDatasetUI get(DatasetPath path, DatasetVersion version) throws DatasetNotFoundException, NamespaceException { try { final DatasetConfig datasetConfig = namespaceService.getDataset(path.toNamespaceKey()); - final VirtualDatasetVersion virtualDatasetVersion = datasetVersions.get(new VersionDatasetKey(path, version)); + final VirtualDatasetVersion virtualDatasetVersion = getVirtualDatasetVersion(path, version); if (virtualDatasetVersion == null) { - throw new DatasetNotFoundException(path, format("Missing version %s.", version.toString())); + throw new DatasetVersionNotFoundException(path, version); } final VirtualDatasetUI virtualDatasetUI = toVirtualDatasetUI(virtualDatasetVersion) .setId(datasetConfig.getId().getId()) @@ -436,6 +480,14 @@ public DatasetPath apply(Entry input) { }); } + public int getJobsCount(NamespaceKey path, OptionManager optionManager) { + int jobCount = 0; + if ((optionManager == null) || optionManager.getOption(ExecConstants.CATALOG_JOB_COUNT_ENABLED)) { + jobCount = getJobsCount(path); + } + return jobCount; + } + public int getJobsCount(NamespaceKey path) { JobCountsRequest.Builder builder = JobCountsRequest.newBuilder(); builder.addDatasets(VersionedDatasetPath.newBuilder() diff --git a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsSupplierProviderDremioClassLoader.java b/dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceNotValidException.java similarity index 59%
rename from sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsSupplierProviderDremioClassLoader.java rename to dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceNotValidException.java index 4c763f13df..62f4f9a0b2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsSupplierProviderDremioClassLoader.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceNotValidException.java @@ -13,17 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.exec.hadoop; +package com.dremio.dac.service.errors; -import java.util.Map; -import java.util.function.Supplier; - -import org.apache.hadoop.fs.FileSystem; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; /** - * - * Interface for getting FileSystem objects created using Dremio class loader + * Thrown when a source is not versioned in {@link com.dremio.dac.resource.NessieSourceResource}. */ -public interface HadoopFsSupplierProviderDremioClassLoader extends AutoCloseable { - Supplier getHadoopFsSupplierDremioClassLoader(String path, Iterable> conf); +public class NessieSourceNotValidException extends WebApplicationException { + + public NessieSourceNotValidException(Exception error, String msg) { + super(msg, error, Response.Status.NOT_ACCEPTABLE); + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceResourceException.java b/dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceResourceException.java new file mode 100644 index 0000000000..99df94e6b7 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/service/errors/NessieSourceResourceException.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.service.errors; + +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; + +/** + * Thrown when an unhandled error occurs in {@link com.dremio.dac.resource.NessieSourceResource}.
+ */ + +public class NessieSourceResourceException extends WebApplicationException { + public NessieSourceResourceException(Exception error, String msg, Response.Status status) { + super(msg, error, status); + } +} diff --git a/tools/redis-test-runner/src/main/java/com/dremio/test/redis/AbstractRedisResource.java b/dac/backend/src/main/java/com/dremio/dac/service/errors/NotFoundExceptionMapper.java similarity index 54% rename from tools/redis-test-runner/src/main/java/com/dremio/test/redis/AbstractRedisResource.java rename to dac/backend/src/main/java/com/dremio/dac/service/errors/NotFoundExceptionMapper.java index 5e41b63d64..0e956daa75 100644 --- a/tools/redis-test-runner/src/main/java/com/dremio/test/redis/AbstractRedisResource.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/errors/NotFoundExceptionMapper.java @@ -13,28 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.test.redis; +package com.dremio.dac.service.errors; -import org.junit.rules.ExternalResource; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; - -class AbstractRedisResource extends ExternalResource implements RedisResource { - private int port; - - public void setPort(int port) { - this.port = port; - } - - @Override - public int getPort() { - return port == 0 ? RedisResource.REDIS_PORT : port; - } +public class NotFoundExceptionMapper implements ExceptionMapper { @Override - public Jedis newClient() { - JedisPool pool = new JedisPool(RedisResource.REDIS_HOST, getPort()); - return pool.getResource(); + public Response toResponse(SourceNotFoundException exception) { + Response.ResponseBuilder responseBuilder = + Response.status(exception.getResponse().getStatus(), exception.getMessage()).entity(exception) + .type(MediaType.APPLICATION_JSON_TYPE); + return responseBuilder.build(); } } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/errors/NotSupportedException.java b/dac/backend/src/main/java/com/dremio/dac/service/errors/NotSupportedException.java new file mode 100644 index 0000000000..45a82ed61d --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/service/errors/NotSupportedException.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.service.errors; + +import com.dremio.dac.model.common.ResourcePath; + +/** + * Thrown when home space is disabled. 
+ */ +public class NotSupportedException extends NotFoundException { + + public NotSupportedException(ResourcePath path) { + super(path, "Not supported."); + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/service/reflection/ReflectionServiceHelper.java b/dac/backend/src/main/java/com/dremio/dac/service/reflection/ReflectionServiceHelper.java index a91db3b2c4..a8aac01204 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/reflection/ReflectionServiceHelper.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/reflection/ReflectionServiceHelper.java @@ -15,6 +15,8 @@ */ package com.dremio.dac.service.reflection; +import static com.dremio.exec.catalog.CatalogOptions.REFLECTION_ARCTIC_ENABLED; + import java.util.List; import java.util.Optional; @@ -24,7 +26,9 @@ import com.dremio.dac.api.Reflection; import com.dremio.dac.service.errors.ReflectionNotFound; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.ops.ReflectionContext; +import com.dremio.options.OptionManager; import com.dremio.service.reflection.ReflectionAdministrationService; import com.dremio.service.reflection.ReflectionSettings; import com.dremio.service.reflection.ReflectionStatus; @@ -33,6 +37,9 @@ import com.dremio.service.reflection.proto.MaterializationMetrics; import com.dremio.service.reflection.proto.ReflectionGoal; import com.dremio.service.reflection.proto.ReflectionId; +import com.google.common.collect.Iterables; + +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Reflection service helper @@ -40,14 +47,16 @@ public class ReflectionServiceHelper { private final ReflectionAdministrationService.Factory reflectionAdministrationServiceFactory; private final ReflectionStatusService reflectionStatusService; + private final OptionManager optionManager; @Inject public ReflectionServiceHelper( ReflectionAdministrationService.Factory reflectionAdministrationServiceFactory, - ReflectionStatusService reflectionStatusService - ) { + ReflectionStatusService reflectionStatusService, + OptionManager optionManager) { this.reflectionAdministrationServiceFactory = reflectionAdministrationServiceFactory; this.reflectionStatusService = reflectionStatusService; + this.optionManager = optionManager; } public ReflectionAdministrationService getReflectionAdministrationService() { return reflectionAdministrationServiceFactory.get(ReflectionContext.SYSTEM_USER_CONTEXT); @@ -66,6 +75,7 @@ public Iterable getAllReflections() { } public Iterable getReflectionsForDataset(String datasetid) { + isVersionedSourceEnabled(datasetid); return getReflectionAdministrationService().getReflectionsByDatasetId(datasetid); } @@ -76,12 +86,14 @@ public ReflectionStatusUI getStatusForReflection(String reflectionId) { } public ReflectionGoal createReflection(ReflectionGoal goal) { + isVersionedSourceEnabled(goal.getDatasetId()); ReflectionId id = getReflectionAdministrationService().create(goal); return getReflectionAdministrationService().getGoal(id).get(); } public ReflectionGoal updateReflection(ReflectionGoal goal) { + isVersionedSourceEnabled(goal.getDatasetId()); Optional existingGoal = getReflectionAdministrationService().getGoal(goal.getId()); if (!existingGoal.isPresent()) { @@ -156,6 +168,7 @@ public boolean doesDatasetHaveActiveReflection(String datasetId) { return getReflectionAdministrationService().getEnabledReflectionCountForDataset(datasetId) > 0; } + @WithSpan public void refreshReflectionsForDataset(String datasetId) { 
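+ // Delegates to the reflection administration service, which requests a refresh of every reflection defined on this dataset.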
getReflectionAdministrationService().requestRefresh(datasetId); } @@ -169,4 +182,14 @@ public Reflection newReflection(ReflectionGoal goal) { Pair currentSize = getCurrentSize(goalId); return new Reflection(goal, getStatusForReflection(goalId), currentSize.left, getTotalSize(goalId)); } + + public boolean doesDatasetHaveReflection(String datasetId) { + return Iterables.size(getReflectionAdministrationService().getReflectionsByDatasetId(datasetId)) > 0; + } + + public void isVersionedSourceEnabled(String datasetId) { + if (!optionManager.getOption(REFLECTION_ARCTIC_ENABLED) && VersionedDatasetId.isVersionedDatasetId(datasetId)) { + throw new UnsupportedOperationException("Versioned source does not support reflection."); + } + } } diff --git a/dac/backend/src/main/java/com/dremio/dac/service/search/SearchServiceImpl.java b/dac/backend/src/main/java/com/dremio/dac/service/search/SearchServiceImpl.java index e14cf0bcf6..c34212b68e 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/search/SearchServiceImpl.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/search/SearchServiceImpl.java @@ -155,7 +155,7 @@ private void fillCollaborationTags(List results) { final LegacyIndexedStore.LegacyFindByCondition findByCondition = new LegacyIndexedStore.LegacyFindByCondition(); final List searchQueries = StreamSupport.stream(results.spliterator(), false) .map(input -> { - return newTermQuery(CollaborationTagStore.ENTITY_ID, NamespaceUtils.getId(input.getNamespaceContainer())); + return newTermQuery(CollaborationTagStore.ENTITY_ID, NamespaceUtils.getIdOrNull(input.getNamespaceContainer())); }).collect(Collectors.toList()); findByCondition.setCondition(or(searchQueries)); @@ -168,7 +168,7 @@ private void fillCollaborationTags(List results) { // fill in results.forEach(input -> { - String id = NamespaceUtils.getId(input.getNamespaceContainer()); + String id = NamespaceUtils.getIdOrNull(input.getNamespaceContainer()); if (hash.containsKey(id)) { input.setCollaborationTag(hash.get(id)); } @@ -251,6 +251,7 @@ private long getNextReleaseLeadership() { public void close() throws Exception { } + @Override public void wakeupManager(String reason) { if (wakeupHandler != null) { wakeupHandler.handle(reason); diff --git a/dac/backend/src/main/java/com/dremio/dac/service/source/ExternalResourceTreeUtils.java b/dac/backend/src/main/java/com/dremio/dac/service/source/ExternalResourceTreeUtils.java new file mode 100644 index 0000000000..7290d94e86 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/dac/service/source/ExternalResourceTreeUtils.java @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.service.source; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.dremio.dac.model.resourcetree.ResourceTreeEntity; +import com.dremio.dac.model.resourcetree.ResourceTreeEntity.ResourceType; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.plugins.ExternalNamespaceEntry; +import com.dremio.service.namespace.NamespaceKey; + +/** + * Helpers for making resource tree entity list from external catalogs (e.g. Nessie) + */ +public final class ExternalResourceTreeUtils { + private ExternalResourceTreeUtils() {} + + public static List generateResourceTreeEntityList( + NamespaceKey path, List entries) { + Objects.requireNonNull(path); + + final String sourceName = path.getRoot(); + final List resources = new ArrayList<>(); + + entries.forEach( + entry -> { + final String id = entry.getId(); + final String name = entry.getName(); + final List namespace = entry.getNamespace(); + final List fullPathList = + Stream.of(Stream.of(sourceName), entry.getNameElements().stream()) + .flatMap(Function.identity()) + .collect(Collectors.toList()); + final TableVersionContext tableVersionContext = entry.getTableVersionContext(); + final String versionedDatasetId = + (id == null || tableVersionContext == null) + ? null + : VersionedDatasetId.newBuilder() + .setTableKey(fullPathList) + .setContentId(id) + .setTableVersionContext(tableVersionContext) + .build() + .asString(); + + switch (entry.getType()) { + case UNKNOWN: + break; // Unknown sources are ignored + case FOLDER: + final String url = "/resourcetree/" + path.toUrlEncodedString(); + resources.add( + new ResourceTreeEntity( + ResourceType.FOLDER, name, fullPathList, url, null, versionedDatasetId)); + break; + case ICEBERG_TABLE: + resources.add( + new ResourceTreeEntity( + ResourceType.PHYSICAL_DATASET, + name, + fullPathList, + null, + null, + versionedDatasetId)); + break; + case ICEBERG_VIEW: + resources.add( + new ResourceTreeEntity( + ResourceType.VIRTUAL_DATASET, + name, + fullPathList, + null, + null, + versionedDatasetId)); + break; + default: + throw new IllegalStateException("Unexpected value: " + entry.getType()); + } + }); + + return resources; + } +} diff --git a/dac/backend/src/main/java/com/dremio/dac/service/source/SourceService.java b/dac/backend/src/main/java/com/dremio/dac/service/source/SourceService.java index e021163c3d..693c8559bc 100644 --- a/dac/backend/src/main/java/com/dremio/dac/service/source/SourceService.java +++ b/dac/backend/src/main/java/com/dremio/dac/service/source/SourceService.java @@ -16,6 +16,8 @@ package com.dremio.dac.service.source; import static com.dremio.dac.api.MetadataPolicy.ONE_MINUTE_IN_MS; +import static com.dremio.dac.model.namespace.ExternalNamespaceTreeUtils.namespaceTreeOf; +import static com.dremio.dac.service.source.ExternalResourceTreeUtils.generateResourceTreeEntityList; import static com.dremio.dac.util.DatasetsUtil.toDatasetConfig; import static com.dremio.dac.util.DatasetsUtil.toPhysicalDatasetConfig; import static com.dremio.service.namespace.proto.NameSpaceContainer.Type.SOURCE; @@ -40,6 +42,7 @@ import com.dremio.common.exceptions.UserException; import com.dremio.dac.api.CatalogItem; import com.dremio.dac.api.Source; +import com.dremio.dac.explore.model.VersionContextUtils; import com.dremio.dac.model.common.NamespacePath; import 
com.dremio.dac.model.folder.Folder; import com.dremio.dac.model.folder.SourceFolderPath; @@ -69,15 +72,22 @@ import com.dremio.exec.catalog.ConnectionReader; import com.dremio.exec.catalog.MetadataRequestOptions; import com.dremio.exec.catalog.SourceCatalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; import com.dremio.exec.catalog.conf.ConnectionConf; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.NessieNamespaceAlreadyExistsException; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; import com.dremio.exec.store.SchemaConfig; import com.dremio.exec.store.SchemaEntity; import com.dremio.exec.store.StoragePlugin; import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.file.File; import com.dremio.file.SourceFilePath; +import com.dremio.plugins.ExternalNamespaceEntry; import com.dremio.service.namespace.DatasetHelper; import com.dremio.service.namespace.NamespaceAttribute; import com.dremio.service.namespace.NamespaceException; @@ -104,12 +114,18 @@ import com.google.common.base.Throwables; import com.google.common.collect.Lists; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Source service. */ public class SourceService { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SourceService.class); + public static final String LIST_SOURCE_TOTAL_COUNT_SPAN_ATTRIBUTE_NAME = "dremio.source_service.list_source_total_count"; + public static final String IS_VERSIONED_PLUGIN_SPAN_ATTRIBUTE_NAME = "dremio.source_service.isVersionedPlugin"; + public static final String IS_FILE_SYSTEM_PLUGIN_SPAN_ATTRIBUTE_NAME = "dremio.source_service.isFileSystemPlugin"; private final SabotContext sabotContext; private final NamespaceService namespaceService; @@ -207,6 +223,7 @@ public void unregisterSourceWithRuntime(SourceName sourceName) { } } + @WithSpan public SourceConfig createSource(SourceConfig sourceConfig, NamespaceAttribute... attributes) throws ExecutionSetupException, NamespaceException, ResourceExistsException { validateSourceConfig(sourceConfig); validateConnectionConf(getConnectionConf(sourceConfig)); @@ -224,6 +241,7 @@ public SourceConfig createSource(SourceConfig sourceConfig, NamespaceAttribute.. return registerSourceWithRuntimeInternal(sourceConfig, createCatalog(), attributes); } + @WithSpan public SourceConfig updateSource(String id, SourceConfig sourceConfig, NamespaceAttribute... 
attributes) throws NamespaceException, SourceNotFoundException { validateSourceConfig(sourceConfig); validateConnectionConf(getConnectionConf(sourceConfig)); @@ -283,11 +301,11 @@ public void checkSourceExists(SourceName sourceName) throws SourceNotFoundExcept } } - protected void addFileToNamespaceTree(NamespaceTree ns, SourceName source, SourceFilePath path, String owner) throws NamespaceNotFoundException { + protected void addFileToNamespaceTree(NamespaceTree ns, SourceFilePath path, String owner) throws NamespaceNotFoundException { final File file = File.newInstance( path.toUrlPath(), path, - getUnknownFileFormat(source, path), + getUnknownFileFormat(path), 0, // files should not have any jobs, no need to check false, false, @@ -298,7 +316,7 @@ protected void addFileToNamespaceTree(NamespaceTree ns, SourceName source, Sourc ns.addFile(file); } - protected FileFormat getUnknownFileFormat(SourceName sourceName, SourceFilePath sourceFilePath) { + protected FileFormat getUnknownFileFormat(SourceFilePath sourceFilePath) { final FileConfig config = new FileConfig(); config.setCtime(System.currentTimeMillis()); config.setFullPathList(sourceFilePath.toPathList()); @@ -323,7 +341,7 @@ protected void addTableToNamespaceTree(NamespaceTree ns, PhysicalDatasetResource ns.addPhysicalDataset(new PhysicalDataset(path, name, datasetConfig, jobsCount, null)); } - private void addToNamespaceTree(NamespaceTree ns, List entities, SourceName source, String prefix) + private void addToNamespaceTree(NamespaceTree ns, List entities, SourceName sourceName, String prefix) throws IOException, PhysicalDatasetNotFoundException, NamespaceException { for (SchemaEntity entity: entities) { switch (entity.getType()) { @@ -348,10 +366,10 @@ private void addToNamespaceTree(NamespaceTree ns, List entities, S datasetConfig.setTag("0"); datasetConfig.setFullPathList(path.toPathList()); addTableToNamespaceTree(ns, - new PhysicalDatasetResourcePath(source, path), + new PhysicalDatasetResourcePath(sourceName, path), new PhysicalDatasetName(path.getFileName().getName()), datasetConfig, - datasetService.getJobsCount(path.toNamespaceKey())); + datasetService.getJobsCount(path.toNamespaceKey(), sabotContext.getOptionManager())); } break; @@ -360,7 +378,7 @@ private void addToNamespaceTree(NamespaceTree ns, List entities, S // TODO(Amit H): Should we ignore exceptions from getFilesystemPhysicalDataset? // Dataset could be marked as deleted by the time we come here. final SourceFilePath filePath = new SourceFilePath(prefix + '.' + entity.getPath()); - final File file = getFileDataset(source, filePath, entity.getOwner()); + final File file = getFileDataset(filePath, entity.getOwner()); ns.addFile(file); } break; @@ -370,7 +388,7 @@ private void addToNamespaceTree(NamespaceTree ns, List entities, S // TODO(Amit H): Should we ignore exceptions from getFilesystemPhysicalDataset? // Dataset could be marked as deleted by the time we come here. 
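// Note: the jobs-count lookups in this class now also take the OptionManager, e.g.
//   datasetService.getJobsCount(folderPath.toNamespaceKey(), sabotContext.getOptionManager())
// (same call shape as the changed call sites below).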
- final PhysicalDatasetConfig physicalDatasetConfig = getFilesystemPhysicalDataset(source, folderPath); + final PhysicalDatasetConfig physicalDatasetConfig = getFilesystemPhysicalDataset(folderPath); final FileConfig fileConfig = physicalDatasetConfig.getFormatSettings(); fileConfig.setOwner(entity.getOwner()); @@ -383,14 +401,14 @@ private void addToNamespaceTree(NamespaceTree ns, List entities, S folderConfig.setTag(physicalDatasetConfig.getTag()); fileConfig.setTag(physicalDatasetConfig.getTag()); - addFolderTableToNamespaceTree(ns, folderPath, folderConfig, FileFormat.getForFolder(fileConfig), fileConfig.getType() != FileType.UNKNOWN, datasetService.getJobsCount(folderPath.toNamespaceKey())); + addFolderTableToNamespaceTree(ns, folderPath, folderConfig, FileFormat.getForFolder(fileConfig), fileConfig.getType() != FileType.UNKNOWN, datasetService.getJobsCount(folderPath.toNamespaceKey(), sabotContext.getOptionManager())); } break; case FILE: { final SourceFilePath path = new SourceFilePath(prefix + '.' + entity.getPath()); - addFileToNamespaceTree(ns, source, path, entity.getOwner()); + addFileToNamespaceTree(ns, path, entity.getOwner()); } break; @@ -400,20 +418,20 @@ private void addToNamespaceTree(NamespaceTree ns, List entities, S } } - public File getFileDataset(SourceName source, final SourceFilePath filePath, String owner) + public File getFileDataset(final SourceFilePath filePath, String owner) throws PhysicalDatasetNotFoundException, NamespaceException { final PhysicalDatasetConfig physicalDatasetConfig = getFilesystemPhysicalDataset(filePath, DatasetType.PHYSICAL_DATASET_SOURCE_FILE); final FileConfig fileConfig = physicalDatasetConfig.getFormatSettings(); fileConfig.setOwner(owner); fileConfig.setTag(physicalDatasetConfig.getTag()); - final File file = File.newInstance(physicalDatasetConfig.getId(), filePath, FileFormat.getForFile(fileConfig), - datasetService.getJobsCount(filePath.toNamespaceKey()), + return File.newInstance(physicalDatasetConfig.getId(), filePath, FileFormat.getForFile(fileConfig), + datasetService.getJobsCount(filePath.toNamespaceKey(), sabotContext.getOptionManager()), false, false, fileConfig.getType() != FileType.UNKNOWN, null ); - return file; } + @WithSpan public NamespaceTree listSource( SourceName sourceName, SourceConfig sourceConfig, @@ -422,17 +440,28 @@ public NamespaceTree listSource( String refValue) throws IOException, PhysicalDatasetNotFoundException, NamespaceException { try { + final NamespaceKey sourceKey = new NamespaceKey(sourceName.getName()); + final NamespaceTree namespaceTree; final StoragePlugin plugin = checkNotNull(catalogService.getSource(sourceName.getName()), "storage plugin %s not found", sourceName); - if (plugin instanceof FileSystemPlugin) { - final NamespaceTree ns = new NamespaceTree(); - ns.setIsFileSystemSource(true); - ns.setIsImpersonationEnabled(((FileSystemPlugin) plugin).getConfig().isImpersonationEnabled()); - addToNamespaceTree(ns, ((FileSystemPlugin) plugin).list(singletonList(sourceName.getName()), userName), sourceName, sourceName.getName()); - fillInTags(ns); - return ns; + if (plugin instanceof VersionedPlugin) { + List entries = versionedPluginListEntriesHelper( + (VersionedPlugin) plugin, + sourceKey, + refType, + refValue); + namespaceTree = namespaceTreeOf(sourceName, entries); + } else if (plugin instanceof FileSystemPlugin) { + namespaceTree = new NamespaceTree(); + namespaceTree.setIsFileSystemSource(true); + namespaceTree.setIsImpersonationEnabled(((FileSystemPlugin) 
plugin).getConfig().isImpersonationEnabled()); + addToNamespaceTree(namespaceTree, ((FileSystemPlugin) plugin).list(sourceKey.getPathComponents(), userName), sourceName, sourceName.getName()); + fillInTags(namespaceTree); } else { - return newNamespaceTree(namespaceService.list(new NamespaceKey(singletonList(sourceConfig.getName()))), false, false); + namespaceTree = newNamespaceTree(namespaceService.list(sourceKey), false, false); } + + Span.current().setAttribute(LIST_SOURCE_TOTAL_COUNT_SPAN_ATTRIBUTE_NAME, namespaceTree.totalCount()); + return namespaceTree; } catch (IOException | DatasetNotFoundException e) { throw new RuntimeException(e); } @@ -462,10 +491,12 @@ public Folder getFolder( throw new SourceFolderNotFoundException(sourceName, folderPath, null); } final boolean isFileSystemPlugin = (plugin instanceof FileSystemPlugin); + Span.current().setAttribute(IS_FILE_SYSTEM_PLUGIN_SPAN_ATTRIBUTE_NAME, isFileSystemPlugin); + FolderConfig folderConfig; if (isFileSystemPlugin) { // this could be a physical dataset folder - DatasetConfig datasetConfig = null; + DatasetConfig datasetConfig; try { datasetConfig = namespaceService.getDataset(folderPath.toNamespaceKey()); if (datasetConfig.getType() != DatasetType.VIRTUAL_DATASET) { @@ -480,7 +511,7 @@ public Folder getFolder( new IllegalArgumentException(folderPath.toString() + " is a virtual dataset")); } } catch (NamespaceNotFoundException nfe) { - // folder on fileystem + // folder on filesystem folderConfig = new FolderConfig() .setFullPathList(folderPath.toPathList()) .setName(folderPath.getFolderName().getName()); @@ -496,7 +527,23 @@ public Folder getFolder( public void deleteFolder(SourceFolderPath folderPath, SourceName sourceName, String refType, String refValue) { - throw new UnsupportedOperationException("Deleting a folder in a source is not supported."); + final StoragePlugin plugin = + checkNotNull( + catalogService.getSource(sourceName.getName()), + "storage plugin %s not found", + sourceName); + final boolean isVersionedPlugin = plugin instanceof VersionedPlugin; + Span.current().setAttribute(IS_VERSIONED_PLUGIN_SPAN_ATTRIBUTE_NAME, isVersionedPlugin); + if (isVersionedPlugin) { + final VersionContext version = VersionContextUtils.parse(refType, refValue); + deleteFolderForVersionedPlugin(folderPath, (VersionedPlugin) plugin, version); + } else { + throw new UnsupportedOperationException("Deleting a folder in a source is not supported."); + } + } + + public void deleteFolderForVersionedPlugin(SourceFolderPath folderPath, VersionedPlugin plugin, VersionContext version) { + plugin.deleteFolder(folderPath.toNamespaceKey(), version); } public Folder createFolder( @@ -505,18 +552,60 @@ public Folder createFolder( String userName, String refType, String refValue) { - throw new UnsupportedOperationException("Creating folder is not supported"); + final StoragePlugin plugin = + checkNotNull( + catalogService.getSource(sourceName.getName()), + "storage plugin %s not found", + sourceName); + final boolean isVersionedPlugin = plugin instanceof VersionedPlugin; + Span.current().setAttribute(IS_VERSIONED_PLUGIN_SPAN_ATTRIBUTE_NAME, isVersionedPlugin); + + if (!isVersionedPlugin) { + throw new UnsupportedOperationException("Creating folder is not supported"); + } + + final VersionContext version = getVersionContext(refType, refValue); + FolderConfig folderConfig = getFolderConfig(folderPath); + + try { + ((VersionedPlugin) plugin).createNamespace(folderPath.toNamespaceKey(), version); + return Folder.newInstance( + sourceName, + 
folderConfig, + null); + } catch (NessieNamespaceAlreadyExistsException e) { + throw UserException.validationError(e) + .message( + "Nessie namespace %s already exists on source %s.", + folderPath.getPathWithoutRoot().toPathString(), sourceName.getName()) + .buildSilently(); + } catch (ReferenceNotFoundException e) { + throw UserException.validationError(e) + .message("Requested %s not found on source %s.", version, sourceName.getName()) + .buildSilently(); + } catch (NoDefaultBranchException e) { + throw UserException.validationError(e) + .message( + "Unable to resolve source version. Version was not specified and Source %s does not" + + " have a default branch set.", + sourceName.getName()) + .buildSilently(); + } catch (ReferenceTypeConflictException e) { + throw UserException.validationError(e) + .message( + "Requested %s in source %s is not the requested type.", version, sourceName.getName()) + .buildSilently(); + } } protected Folder newFolder(SourceFolderPath folderPath, FolderConfig folderConfig, NamespaceTree contents, boolean isQueryable, boolean isFileSystemPlugin) throws NamespaceNotFoundException { // TODO: why do we need to look up the dataset again in isPhysicalDataset? - Folder folder = Folder.newInstance(folderPath, folderConfig, null, contents, isQueryable, isFileSystemPlugin, 0); - return folder; + return Folder.newInstance(folderPath, folderConfig, null, contents, isQueryable, isFileSystemPlugin, 0); } protected NamespaceTree newNamespaceTree(List children, boolean isFileSystemSource, boolean isImpersonationEnabled) throws DatasetNotFoundException, NamespaceException { - return NamespaceTree.newInstance(datasetService, children, SOURCE, collaborationService, isFileSystemSource, isImpersonationEnabled); + return NamespaceTree.newInstance(datasetService, children, SOURCE, collaborationService, isFileSystemSource, isImpersonationEnabled, null); } public NamespaceTree listFolder( @@ -530,7 +619,16 @@ public NamespaceTree listFolder( final String prefix = folderPath.toPathString(); try { final StoragePlugin plugin = checkNotNull(catalogService.getSource(name), "storage plugin %s not found", sourceName); - if (plugin instanceof FileSystemPlugin) { + if (plugin instanceof VersionedPlugin) { + final NamespaceKey folderKey = folderPath.toNamespaceKey(); + List entries = versionedPluginListEntriesHelper( + (VersionedPlugin) plugin, + folderKey, + refType, + refValue); + + return namespaceTreeOf(sourceName, entries); + } else if (plugin instanceof FileSystemPlugin) { final NamespaceTree ns = new NamespaceTree(); ns.setIsFileSystemSource(true); ns.setIsImpersonationEnabled(((FileSystemPlugin) plugin).getConfig().isImpersonationEnabled()); @@ -552,6 +650,7 @@ public NamespaceTree listFolder(SourceName sourceName, SourceFolderPath folderPa return listFolder(sourceName, folderPath, userName, null, null); } + @WithSpan public List listPath( NamespaceKey path, boolean showDatasets, @@ -559,6 +658,24 @@ public List listPath( String refValue) throws NamespaceException, UnsupportedEncodingException { final List resources = Lists.newArrayList(); + final String sourceName = path.getRoot(); + final StoragePlugin plugin = + checkNotNull( + catalogService.getSource(sourceName), + "storage plugin %s not found", + sourceName); + final boolean isVersionedPlugin = plugin instanceof VersionedPlugin; + Span.current().setAttribute(IS_VERSIONED_PLUGIN_SPAN_ATTRIBUTE_NAME, isVersionedPlugin); + + if (isVersionedPlugin) { + List entries = versionedPluginListEntriesHelper( + (VersionedPlugin) plugin, + 
path, + refType, + refValue); + + return generateResourceTreeEntityList(path, entries); + } for (NameSpaceContainer container : namespaceService.list(path)) { if (container.getType() == Type.FOLDER) { @@ -571,6 +688,44 @@ public List listPath( return resources; } + protected VersionContext getVersionContext(String refType, String refValue) { + return VersionContextUtils.parse(refType, refValue); + } + + protected FolderConfig getFolderConfig(SourceFolderPath folderPath) { + return new FolderConfig() + .setFullPathList(folderPath.toPathList()) + .setName(folderPath.getFolderName().getName()); + } + + protected List versionedPluginListEntriesHelper( + VersionedPlugin plugin, + NamespaceKey namespaceKey, + String refType, + String refValue) { + VersionContext version = VersionContextUtils.parse(refType, refValue); + String sourceName = namespaceKey.getRoot(); + try { + return plugin.listEntries( + namespaceKey.getPathWithoutRoot(), + version) + .collect(Collectors.toList()); + } catch (ReferenceNotFoundException e) { + throw UserException.validationError(e) + .message("Requested %s not found on source %s.", version, sourceName) + .buildSilently(); + } catch (NoDefaultBranchException e) { + throw UserException.validationError(e) + .message("Unable to resolve source version. Version was not specified and Source %s does not have a default branch set.", + sourceName) + .buildSilently(); + } catch (ReferenceTypeConflictException e) { + throw UserException.validationError(e) + .message("Requested %s in source %s is not the requested type.", version, sourceName) + .buildSilently(); + } + } + // Process all items in the namespacetree and get their tags in one go private void fillInTags(NamespaceTree ns) { List files = ns.getFiles(); @@ -581,8 +736,7 @@ private void fillInTags(NamespaceTree ns) { //we populate tags not for all files ns.setCanTagsBeSkipped(tagsInfo.getCanTagsBeSkipped()); - for(int i = 0; i < files.size(); i++) { - File input = files.get(i); + for (File input : files) { CollaborationTag collaborationTag = tags.get(input.getId()); if (collaborationTag != null) { input.setTags(collaborationTag.getTagsList()); @@ -658,18 +812,18 @@ public PhysicalDatasetConfig getFilesystemPhysicalDataset(NamespacePath path, Da } } - public PhysicalDatasetConfig getFilesystemPhysicalDataset(SourceName sourceName, SourceFolderPath path) throws NamespaceException { + public PhysicalDatasetConfig getFilesystemPhysicalDataset(SourceFolderPath path) throws NamespaceException { return getFilesystemPhysicalDataset(path, DatasetType.PHYSICAL_DATASET_HOME_FOLDER); } - public PhysicalDatasetConfig getFilesystemPhysicalDataset(SourceName sourceName, SourceFilePath path) throws NamespaceException { + public PhysicalDatasetConfig getFilesystemPhysicalDataset(SourceFilePath path) throws NamespaceException { return getFilesystemPhysicalDataset(path, DatasetType.PHYSICAL_DATASET_HOME_FILE); } // For all tables including filesystem tables. 
// Physical datasets may be missing - public PhysicalDataset getPhysicalDataset(SourceName sourceName, PhysicalDatasetPath physicalDatasetPath) throws NamespaceException { + public PhysicalDataset getPhysicalDataset(PhysicalDatasetPath physicalDatasetPath) throws NamespaceException { final int jobsCount = datasetService.getJobsCount(physicalDatasetPath.toNamespaceKey()); try { final DatasetConfig datasetConfig = namespaceService.getDataset(physicalDatasetPath.toNamespaceKey()); @@ -724,15 +878,7 @@ public void deletePhysicalDataset(SourceName sourceName, PhysicalDatasetPath dat } public SourceState getSourceState(String sourceName) { - try { - SourceState state = catalogService.getSourceState(sourceName); - if(state == null) { - return SourceState.badState(String.format("Source %s could not be found, please verify the source name.", "Unable to find source.")); - } - return state; - } catch (Exception e) { - return SourceState.badState("", e); - } + return catalogService.getSourceState(sourceName); } @VisibleForTesting @@ -744,6 +890,7 @@ public StoragePlugin getStoragePlugin(String sourceName) throws SourceNotFoundEx return plugin; } + @WithSpan public List getSources() { final List sources = new ArrayList<>(); @@ -760,8 +907,7 @@ public List getSources() { public SourceConfig getById(String id) throws SourceNotFoundException, NamespaceException { try { - SourceConfig config = namespaceService.getSourceById(id); - return config; + return namespaceService.getSourceById(id); } catch (NamespaceNotFoundException e) { throw new SourceNotFoundException(id); } diff --git a/dac/backend/src/main/java/com/dremio/dac/util/BackupRestoreUtil.java b/dac/backend/src/main/java/com/dremio/dac/util/BackupRestoreUtil.java index 62f83281a8..5c3e5fc746 100644 --- a/dac/backend/src/main/java/com/dremio/dac/util/BackupRestoreUtil.java +++ b/dac/backend/src/main/java/com/dremio/dac/util/BackupRestoreUtil.java @@ -24,6 +24,7 @@ import java.io.DataOutputStream; import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -36,6 +37,7 @@ import java.time.LocalDateTime; import java.time.ZoneId; import java.util.ArrayList; +import java.util.Arrays; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; @@ -49,6 +51,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -56,6 +59,8 @@ import javax.ws.rs.core.UriBuilder; import org.apache.commons.io.IOUtils; +import org.xerial.snappy.SnappyInputStream; +import org.xerial.snappy.SnappyOutputStream; import com.dremio.common.VM; import com.dremio.common.concurrent.CloseableSchedulerThreadPool; @@ -93,6 +98,9 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import net.jpountz.lz4.LZ4BlockInputStream; +import net.jpountz.lz4.LZ4BlockOutputStream; + /** * Backup Service running only on master. 
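 * Backups can optionally be compressed: the "compression" option accepts
 * "lz4", "snappy", or "none" (empty defaults to "none"); see the Compression
 * enum below. An illustrative BackupOptions JSON body (the path is hypothetical):
 *   { "backupDir": "/tmp/dremio_backup", "binary": true, "includeProfiles": false, "compression": "snappy" }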
*/ @@ -105,11 +113,12 @@ public final class BackupRestoreUtil { PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_EXECUTE ); - private static final String BACKUP_FILE_SUFFIX_JSON = "_backup.json"; private static final String BACKUP_FILE_SUFFIX_BINARY = "_backup.pb"; private static final String BACKUP_INFO_FILE_SUFFIX = "_info.json"; + private static final String[] SUPPORTED_COMPRESSION_METHODS = {"lz4", "snappy", "none"}; + private static final Predicate BACKUP_FILES_FILTER_JSON = PathFilters.endsWith(BACKUP_FILE_SUFFIX_JSON); private static final Predicate BACKUP_FILES_FILTER_BINARY = PathFilters.endsWith(BACKUP_FILE_SUFFIX_BINARY); private static final Predicate BACKUP_INFO_FILES_FILTER = PathFilters.endsWith(BACKUP_INFO_FILE_SUFFIX); @@ -157,15 +166,15 @@ public static CheckpointInfo createCheckpoint(final BackupOptions options, FileS } private static void dumpTable(FileSystem fs, Path backupRootDir, BackupFileInfo backupFileInfo, - CoreKVStore coreKVStore, boolean binary) throws IOException { + CoreKVStore coreKVStore, boolean binary,Compression compression) throws IOException { final Path backupFile = backupRootDir.resolve(format("%s%s", backupFileInfo.getKvstoreInfo().getTablename(), binary ? BACKUP_FILE_SUFFIX_BINARY : BACKUP_FILE_SUFFIX_JSON)); final Iterator, KVStoreTuple>> iterator = coreKVStore.find().iterator(); long records = 0; if (binary) { + OutputStream fsout = compression.getOutputStream(fs.create(backupFile, true)); try ( - final OutputStream fsout = fs.create(backupFile, true); final DataOutputStream bos = new DataOutputStream(fsout); ) { while (iterator.hasNext()) { @@ -187,9 +196,9 @@ private static void dumpTable(FileSystem fs, Path backupRootDir, BackupFi backupFileInfo.setBinary(true); } } else { + OutputStream fsout = compression.getOutputStream(fs.create(backupFile, true)); final ObjectMapper objectMapper = new ObjectMapper(); try ( - final OutputStream fsout = fs.create(backupFile, true); final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fsout))) { while (iterator.hasNext()) { Document, KVStoreTuple> keyval = iterator.next(); @@ -212,9 +221,12 @@ private static void dumpTable(FileSystem fs, Path backupRootDir, BackupFi } } - private static void restoreTable(FileSystem fs, CoreKVStore coreKVStore, Path filePath, boolean binary, long records) throws IOException { + private static void restoreTable(FileSystem fs, CoreKVStore coreKVStore, Path filePath, boolean binary, long records, + BackupFileInfo.Compression compressionValue) throws IOException { if (binary) { - try(DataInputStream dis = new DataInputStream(fs.open(filePath))) { + Compression compression = Compression.valueOf(compressionValue.toString().toUpperCase()); + InputStream in = compression.getInputStream(fs.open(filePath)); + try(DataInputStream dis = new DataInputStream(in)) { for(long i =0; i < records; i++) { final KVStoreTuple key = coreKVStore.newKey(); { @@ -237,8 +249,9 @@ private static void restoreTable(FileSystem fs, CoreKVStore coreKVS } return; } - - try(final BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(filePath)));) { + Compression compression = Compression.valueOf(compressionValue.toString().toUpperCase()); + InputStream in = compression.getInputStream(fs.open(filePath)); + try(final BufferedReader reader = new BufferedReader(new InputStreamReader(in))) { final ObjectMapper objectMapper = new ObjectMapper(); String line; while ((line = reader.readLine()) != null) { @@ -337,7 +350,6 @@ private static void copyFiles(FileSystem 
srcFs, Path srcPath, FileSystem dstFs, } } - public static void restoreUploadedFiles(FileSystem fs, Path backupDir, HomeFileConf homeFileStore, BackupStats backupStats, String hostname) throws IOException { // restore uploaded files final Path uploadsBackupDir = Path.withoutSchemeAndAuthority(backupDir).resolve("uploads"); @@ -365,13 +377,16 @@ public static class BackupOptions { private final boolean binary; private final boolean includeProfiles; + private String compression; + @JsonCreator public BackupOptions(@JsonProperty("backupDir") String backupDir, @JsonProperty("binary") boolean binary, - @JsonProperty("includeProfiles") boolean includeProfiles) { + @JsonProperty("includeProfiles") boolean includeProfiles, @JsonProperty("compression") String compression) { super(); this.backupDir = backupDir; this.binary = binary; this.includeProfiles = includeProfiles; + this.compression = compression; } public String getBackupDir() { @@ -390,13 +405,23 @@ public boolean isBinary() { public boolean isIncludeProfiles() { return includeProfiles; } + + public String getCompression() { + return this.compression; + } + } public static BackupStats createBackup(FileSystem fs, BackupOptions options, LocalKVStoreProvider localKVStoreProvider, HomeFileConf homeFileStore, @Nullable CheckpointInfo checkpointInfo) throws IOException, NamespaceException { String msg = checkpointInfo == null ? "Tables and uploads" : "Tables"; - logger.info("{} Backup started", msg); + if (options.getCompression() == (null) || options.getCompression().equals("")) { + logger.info("{} Backup started.", msg); + } else { + logger.info("{} Backup started with {} compression.", msg, options.getCompression()); + } + final BackupStats backupStats = new BackupStats(); final LocalDateTime now = LocalDateTime.now(ZoneId.of("UTC")); @@ -462,7 +487,9 @@ private static CompletableFuture asFuture(Executor e, Map.Entry> futureMap = new HashMap<>(); for (String tableName : tableToInfo.keySet()) { CompletableFuture future = CompletableFuture.runAsync(() -> { - try { BackupFileInfo info = tableToInfo.get(tableName); final CoreKVStore store = localKVStoreProvider.getStore(info.getKvstoreInfo()); try { restoreTable(fs, store, tableToBackupFiles.get(tableName), - info.getBinary(), info.getRecords()); + info.getBinary(), info.getRecords(), info.getCompression()); backupStats.incrementTables(); } catch (Exception e) { throw new CompletionException( @@ -630,4 +656,46 @@ public long getFiles() { } } + private static Compression validateSupportedCompression(BackupOptions options) { + if (options.getCompression() == null || options.getCompression().equals("")) { + options.compression = "none"; + } + Compression compression; + if (Arrays.stream(SUPPORTED_COMPRESSION_METHODS).anyMatch(options.getCompression()::equals)) { + compression = Compression.valueOf(options.getCompression().toUpperCase()); + } else { + logger.warn("Compression value should be a string and can either be empty or snappy or lz4."); + throw new RuntimeException("Compression value should be a string and can either be empty or snappy or lz4."); + } + return compression; + } + + enum Compression { + NONE(outputStream -> outputStream, inputStream -> inputStream), + SNAPPY(outputStream -> new SnappyOutputStream(outputStream), inputStream -> { + try { + return new SnappyInputStream(inputStream); + } catch (IOException e) { + throw new RuntimeException(e); + } + }), + LZ4(outputStream -> new LZ4BlockOutputStream(outputStream), inputStream -> new LZ4BlockInputStream(inputStream)); + + private 
final Function outputStreamFunction; + private final Function inputStreamFunction; + + Compression(Function outputStreamFunction, Function inputStreamFunction) { + this.outputStreamFunction = outputStreamFunction; + this.inputStreamFunction = inputStreamFunction; + } + + public InputStream getInputStream(InputStream in) { + return this.inputStreamFunction.apply(in); + } + + public OutputStream getOutputStream(OutputStream out) { + return this.outputStreamFunction.apply(out); + } + } + } diff --git a/dac/backend/src/main/java/com/dremio/dac/util/DatasetsUtil.java b/dac/backend/src/main/java/com/dremio/dac/util/DatasetsUtil.java index 9137f61f66..f7f5c5c186 100644 --- a/dac/backend/src/main/java/com/dremio/dac/util/DatasetsUtil.java +++ b/dac/backend/src/main/java/com/dremio/dac/util/DatasetsUtil.java @@ -27,7 +27,7 @@ import java.util.Set; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.util.MajorTypeHelper; import com.dremio.dac.explore.DataTypeUtil; diff --git a/dac/backend/src/main/java/com/dremio/dac/util/JobUtil.java b/dac/backend/src/main/java/com/dremio/dac/util/JobUtil.java index fb0df397c8..b3af348fa6 100644 --- a/dac/backend/src/main/java/com/dremio/dac/util/JobUtil.java +++ b/dac/backend/src/main/java/com/dremio/dac/util/JobUtil.java @@ -16,7 +16,6 @@ package com.dremio.dac.util; import static com.dremio.service.jobs.JobsConstant.BYTES; -import static com.dremio.service.jobs.JobsConstant.DEFAULT; import static com.dremio.service.jobs.JobsConstant.DEFAULT_DATASET_TYPE; import static com.dremio.service.jobs.JobsConstant.EMPTY_DATASET_FIELD; import static com.dremio.service.jobs.JobsConstant.EXTERNAL_QUERY; @@ -63,45 +62,65 @@ public static List getQueriedDatasets(JobInfo jobInfo, RequestType requ return buildQueriedDatasets(jobInfo.getParentsList(), requestType, jobInfo.getDatasetPathList()); } - public static List buildQueriedDatasets(List parents, RequestType requestType, List pathList) { + public static List buildQueriedDatasets( + List parents, RequestType requestType, List pathList) { List queriedDatasets = new ArrayList<>(); if (parents != null && parents.size() > 0) { - parents.stream().forEach( - parent -> { - String datasetName = DEFAULT; - String datasetType = DEFAULT_DATASET_TYPE; - String datasetPath; - List datasetPathList = parent.getDatasetPathList(); - datasetName = datasetPathList.get(datasetPathList.size() - 1); - datasetPath = StringUtils.join(datasetPathList, "."); - if (!queriedDatasets.stream().anyMatch(dataSet -> dataSet.getDatasetPath().equals(datasetPath))) { - if (!parent.getDatasetPathList().contains(EXTERNAL_QUERY)) { - try { - datasetType = parent.getType().name(); - } catch (NullPointerException ex) { - datasetType = com.dremio.service.namespace.dataset.proto.DatasetType.values()[0].toString(); - } - } - populateQueriedDataset(queriedDatasets, datasetName, datasetType, datasetPath, parent.getDatasetPathList()); - } - } - ); + parents.stream() + .forEach( + parent -> { + final List datasetPathList = parent.getDatasetPathList(); + final String datasetName = datasetPathList.get(datasetPathList.size() - 1); + final String datasetPath = StringUtils.join(datasetPathList, "."); + final String versionContext = parent.getVersionContext(); + + if (!queriedDatasets.stream() + .anyMatch(dataSet -> dataSet.getDatasetPath().equals(datasetPath))) { + String datasetType = DEFAULT_DATASET_TYPE; + if 
(!datasetPathList.contains(EXTERNAL_QUERY)) { + try { + datasetType = parent.getType().name(); + } catch (NullPointerException ex) { + datasetType = + com.dremio.service.namespace.dataset.proto.DatasetType.values()[0] + .toString(); + } + } + + populateQueriedDataset( + queriedDatasets, + datasetName, + datasetType, + datasetPath, + datasetPathList, + versionContext); + } + }); } else if (isTruePath(pathList)) { - String datasetName = pathList.get(pathList.size() - 1); - String datasetPath = StringUtils.join(pathList, "."); - String datasetType = EMPTY_DATASET_FIELD; - populateQueriedDataset(queriedDatasets, datasetName, datasetType, datasetPath, pathList); + final String datasetName = pathList.get(pathList.size() - 1); + final String datasetPath = StringUtils.join(pathList, "."); + final String datasetType = EMPTY_DATASET_FIELD; + populateQueriedDataset(queriedDatasets, datasetName, datasetType, datasetPath, pathList, ""); } else { - populateQueriedDataset(queriedDatasets, UNAVAILABLE, EMPTY_DATASET_FIELD, EMPTY_DATASET_FIELD, new ArrayList<>()); + populateQueriedDataset( + queriedDatasets, + UNAVAILABLE, + EMPTY_DATASET_FIELD, + EMPTY_DATASET_FIELD, + new ArrayList<>(), + ""); switch (requestType) { case GET_CATALOGS: case GET_COLUMNS: case GET_SCHEMAS: case GET_TABLES: queriedDatasets.get(queriedDatasets.size() - 1).setDatasetName(METADATA); + break; default: + break; } } + return queriedDatasets; } @@ -280,15 +299,29 @@ public static boolean isComplete(JobState state) { } } - private static void populateQueriedDataset(List queriedDatasets, String datasetName, String datasetType, String datasetPath, List datasetPathList) { - queriedDatasets.add(new DataSet()); - queriedDatasets.get(queriedDatasets.size() - 1).setDatasetName(datasetName); - queriedDatasets.get(queriedDatasets.size() - 1).setDatasetPath(datasetPath); - queriedDatasets.get(queriedDatasets.size() - 1).setDatasetType(datasetType); - queriedDatasets.get(queriedDatasets.size() - 1).setDatasetPathsList(datasetPathList); + private static void populateQueriedDataset( + List queriedDatasets, + String datasetName, + String datasetType, + String datasetPath, + List datasetPathList, + String versionContext) { + final DataSet dataset = + new DataSet() + .setDatasetName(datasetName) + .setDatasetPath(datasetPath) + .setDatasetType(datasetType) + .setDatasetPathsList(datasetPathList); + + if (versionContext != null) { + dataset.setVersionContext(versionContext); + } + + queriedDatasets.add(dataset); } private static Comparator stateStartTime = new Comparator() { + @Override public int compare(final UserBitShared.AttemptEvent a1, final UserBitShared.AttemptEvent a2) { return Long.compare(a1.getStartTime(), a2.getStartTime()); } diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignBranchHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignBranchHandler.java new file mode 100644 index 0000000000..918c994ab1 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignBranchHandler.java @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static java.util.Objects.requireNonNull; + +import java.util.Collections; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNode; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils; +import com.dremio.exec.planner.sql.parser.SqlAssignBranch; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.sabot.rpc.user.UserSession; + +/** + * Handler for updating the reference to the given branch. + * + * ALTER BRANCH branchName ASSIGN + * ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue + * [ IN sourceName ] + */ +public class AssignBranchHandler extends BaseVersionHandler { + private final UserSession userSession; + public AssignBranchHandler(QueryContext context) { + super(context.getCatalog(), context.getOptions()); + this.userSession = requireNonNull(context.getSession()); + } + + @Override + public List toResult(String sql, SqlNode sqlNode) + throws ForemanSetupException { + checkFeatureEnabled("ALTER BRANCH ASSIGN syntax is not supported."); + + final SqlAssignBranch assignBranch = + requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlAssignBranch.class)); + final SqlIdentifier sourceIdentifier = assignBranch.getSourceName(); + final String sourceName = VersionedHandlerUtils.resolveSourceName( + sourceIdentifier, + userSession.getDefaultSchemaPath()); + + final VersionContext statementVersion = + ReferenceTypeUtils.map(assignBranch.getRefType(), assignBranch.getRefValue()); + final String branchName = requireNonNull(assignBranch.getBranchName()).toString(); + + final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName); + try { + versionedPlugin.assignBranch(branchName, statementVersion); + } catch (ReferenceConflictException e) { + throw UserException.validationError(e) + .message( + "Assign %s to branch %s on source %s failed with hash change.", + statementVersion, branchName, sourceName) + .buildSilently(); + } catch (ReferenceNotFoundException e) { + throw UserException.validationError(e) + .message( + "Assign %s to branch %s on source %s failed with not found.", + statementVersion, branchName, sourceName) + .buildSilently(); + } + + return Collections.singletonList( + SimpleCommandResult.successful( + "Assigned %s to branch %s on source %s.", + statementVersion, branchName, sourceName)); + } + + @Override + public Class getResultType() { + return SimpleCommandResult.class; + } +} diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignTagHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignTagHandler.java new file mode 
100644 index 0000000000..d45633b178 --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/AssignTagHandler.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static java.util.Objects.requireNonNull; + +import java.util.Collections; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNode; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils; +import com.dremio.exec.planner.sql.parser.SqlAssignTag; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.sabot.rpc.user.UserSession; + +/** + * Handler for updating the reference to the given tag. + * + * ALTER TAG tagName ASSIGN + * ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue + * [ IN sourceName ] + */ +public class AssignTagHandler extends BaseVersionHandler { + private final UserSession userSession; + + public AssignTagHandler(QueryContext context) { + super(context.getCatalog(), context.getOptions()); + this.userSession = requireNonNull(context.getSession()); + } + + @Override + public List toResult(String sql, SqlNode sqlNode) + throws ForemanSetupException { + checkFeatureEnabled("ALTER TAG ASSIGN syntax is not supported."); + + final SqlAssignTag assignTag = + requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlAssignTag.class)); + final SqlIdentifier sourceIdentifier = assignTag.getSourceName(); + final String sourceName = VersionedHandlerUtils.resolveSourceName( + sourceIdentifier, + userSession.getDefaultSchemaPath()); + + final VersionContext statementVersion = + ReferenceTypeUtils.map(assignTag.getRefType(), assignTag.getRefValue()); + final String tagName = requireNonNull(assignTag.getTagName()).toString(); + + final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName); + try { + versionedPlugin.assignTag(tagName, statementVersion); + } catch (ReferenceConflictException e) { + throw UserException.validationError(e) + .message( + "Assign %s to tag %s on source %s failed with hash change.", + statementVersion, tagName, sourceName) + .buildSilently(); + } catch (ReferenceNotFoundException e) { + throw UserException.validationError(e) + .message( + "Assign %s to tag %s on source %s failed with not found.", + statementVersion, tagName, sourceName) + .buildSilently(); + } + + return Collections.singletonList( + SimpleCommandResult.successful( + "Assigned %s to tag %s on source %s.", + statementVersion, tagName, sourceName)); + } + + 
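+  // Illustrative usage of this handler's syntax (assumes a versioned source named "nessie"):
+  //   ALTER TAG v1.0 ASSIGN BRANCH main IN nessie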
@Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/BaseVersionHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/BaseVersionHandler.java
new file mode 100644
index 0000000000..a8641b4634
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/BaseVersionHandler.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.ExecConstants;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SqlDirectHandler;
+import com.dremio.exec.proto.UserBitShared;
+import com.dremio.exec.store.StoragePlugin;
+import com.dremio.options.OptionResolver;
+import com.dremio.service.namespace.NamespaceNotFoundException;
+
+/**
+ * Base class for the version-related direct handlers: the show handlers, the create folder handler, etc.
+ */
+public abstract class BaseVersionHandler<T> implements SqlDirectHandler<T> {
+  private final Catalog catalog;
+  private OptionResolver optionResolver;
+
+  // for CREATE FOLDER (no OptionResolver, so no feature-flag check)
+  protected BaseVersionHandler(Catalog catalog) {
+    this.catalog = requireNonNull(catalog);
+  }
+
+  protected BaseVersionHandler(Catalog catalog, OptionResolver optionResolver) {
+    this(catalog);
+    this.optionResolver = requireNonNull(optionResolver);
+  }
+
+  protected void checkFeatureEnabled(String message) {
+    if (!optionResolver.getOption(ExecConstants.ENABLE_USE_VERSION_SYNTAX)) {
+      throw UserException.unsupportedError().message(message).buildSilently();
+    }
+  }
+
+  protected VersionedPlugin getVersionedPlugin(String sourceName) {
+    final StoragePlugin storagePlugin;
+    try {
+      storagePlugin = catalog.getSource(sourceName);
+    } catch (UserException e) {
+      if (e.getErrorType() != UserBitShared.DremioPBError.ErrorType.VALIDATION) {
+        // Some unknown error, rethrow
+        throw e;
+      }
+
+      if (e.getCause() instanceof NamespaceNotFoundException) {
+        throw UserException.validationError(e)
+          .message("Source %s does not exist.", sourceName)
+          .buildSilently();
+      } else {
+        // Source was not found (probably wrong type, like home)
+        throw UserException.unsupportedError(e)
+          .message("Source %s does not support versioning.", sourceName)
+          .buildSilently();
+      }
+    }
+    if (!(storagePlugin instanceof VersionedPlugin)) {
+      throw UserException.unsupportedError()
+        .message("Source %s does not support versioning.", sourceName)
+        .buildSilently();
+    }
+
+    return (VersionedPlugin) storagePlugin;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateBranchHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateBranchHandler.java
new file mode 100644
index 0000000000..530f5f5664
--- /dev/null
+++
b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateBranchHandler.java @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static java.util.Objects.requireNonNull; + +import java.util.Collections; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNode; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils; +import com.dremio.exec.planner.sql.parser.SqlCreateBranch; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionResolver; +import com.dremio.sabot.rpc.user.UserSession; + +/** + * Handler for creating a branch. 
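+ * For example (illustrative, assuming a versioned source named "nessie"):
+ *
+ *   CREATE BRANCH IF NOT EXISTS dev AT BRANCH main IN nessie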
+ *
+ * CREATE BRANCH [ IF NOT EXISTS ] branchName
+ * [ AT ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue ]
+ * [ IN sourceName ]
+ */
+public class CreateBranchHandler extends BaseVersionHandler<SimpleCommandResult> {
+  private final UserSession userSession;
+
+  public CreateBranchHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("CREATE BRANCH syntax is not supported.");
+
+    final SqlCreateBranch createBranch = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlCreateBranch.class));
+    final SqlIdentifier sourceIdentifier = createBranch.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final boolean existenceCheck = createBranch.getExistenceCheck().booleanValue();
+    final String branchName = requireNonNull(createBranch.getBranchName()).toString();
+
+    VersionContext statementSourceVersion =
+      ReferenceTypeUtils.map(createBranch.getRefType(), createBranch.getRefValue());
+    VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName);
+    VersionContext sourceVersion = statementSourceVersion.orElse(sessionVersion);
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    try {
+      versionedPlugin.createBranch(branchName, sourceVersion);
+    } catch (ReferenceAlreadyExistsException e) {
+      if (existenceCheck) {
+        throw UserException.validationError(e)
+          .message(HandlerUtils.REFERENCE_ALREADY_EXISTS_MESSAGE, branchName, sourceName)
+          .buildSilently();
+      }
+      return Collections.singletonList(
+        SimpleCommandResult.successful(
+          HandlerUtils.REFERENCE_ALREADY_EXISTS_MESSAGE,
+          branchName,
+          sourceName));
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s not found on source %s.", sourceVersion, sourceName)
+        .buildSilently();
+    } catch (NoDefaultBranchException e) {
+      throw UserException.validationError(e)
+        .message("Unable to resolve source version. Version was not specified and Source %s does not have a default branch set.", sourceName)
+        .buildSilently();
+    } catch (ReferenceTypeConflictException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s in source %s is not the requested type.", sourceVersion, sourceName)
+        .buildSilently();
+    }
+
+    String sourceVersionMessage = sourceVersion.isSpecified()
+      ? sourceVersion.toString()
+      : "the default branch";
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Branch %s has been created at %s in source %s.",
+        branchName,
+        sourceVersionMessage,
+        sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateFolderHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateFolderHandler.java
new file mode 100644
index 0000000000..248e798959
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateFolderHandler.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils;
+import com.dremio.exec.planner.sql.parser.SqlCreateFolder;
+import com.dremio.exec.planner.sql.parser.SqlGrant;
+import com.dremio.exec.store.NessieNamespaceAlreadyExistsException;
+import com.dremio.exec.store.NoDefaultBranchException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.ReferenceTypeConflictException;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.dremio.service.namespace.NamespaceKey;
+
+public class CreateFolderHandler extends BaseVersionHandler<SimpleCommandResult> {
+
+  private final Catalog catalog;
+
+  private final UserSession userSession;
+
+  public CreateFolderHandler(Catalog catalog, UserSession userSession) {
+    super(catalog);
+    this.catalog = requireNonNull(catalog);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode) throws Exception {
+    final SqlCreateFolder createFolder = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlCreateFolder.class));
+    // If the path has a single item, we add context.
+    NamespaceKey path = catalog.resolveSingle(createFolder.getPath());
+    catalog.validatePrivilege(path, SqlGrant.Privilege.ALTER);
+    String sourceName = path.getRoot();
+
+    final boolean ifNotExists = createFolder.getIfNotExists().booleanValue();
+    VersionContext statementSourceVersion =
+      ReferenceTypeUtils.map(createFolder.getRefType(), createFolder.getRefValue());
+    VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName);
+    VersionContext sourceVersion = statementSourceVersion.orElse(sessionVersion);
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+
+    try {
+      // Create at the resolved version (statement version, falling back to the session
+      // version), which is also the version reported in the error messages below.
+      versionedPlugin.createNamespace(path, sourceVersion);
+    } catch (NessieNamespaceAlreadyExistsException e) {
+      if (ifNotExists) {
+        return Collections.singletonList(
+          SimpleCommandResult.successful(e.getMessage()));
+      }
+      throw UserException.validationError(e)
+        .message(e.getMessage())
+        .buildSilently();
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s not found on source %s.", sourceVersion, sourceName)
+        .buildSilently();
+    } catch (NoDefaultBranchException e) {
+      throw UserException.validationError(e)
+        .message("Unable to resolve source version.
Version was not specified and Source %s does not have a default branch set.", sourceName) + .buildSilently(); + } catch (ReferenceTypeConflictException e) { + throw UserException.validationError(e) + .message("Requested %s in source %s is not the requested type.", sourceVersion, sourceName) + .buildSilently(); + } + + String sourceVersionMessage = sourceVersion.isSpecified() + ? sourceVersion.toString() + : "the default branch"; + return Collections.singletonList( + SimpleCommandResult.successful( + "Folder %s has been created at %s in source %s.", + path.getName(), + sourceVersionMessage, + sourceName)); + } + + @Override + public Class getResultType() { + return SimpleCommandResult.class; + } +} diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateTagHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateTagHandler.java new file mode 100644 index 0000000000..b9515437cb --- /dev/null +++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/CreateTagHandler.java @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static java.util.Objects.requireNonNull; + +import java.util.Collections; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNode; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils; +import com.dremio.exec.planner.sql.parser.SqlCreateTag; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionResolver; +import com.dremio.sabot.rpc.user.UserSession; + +/** + * Handler for creating a tag. 
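+ * For example (illustrative, assuming a versioned source named "nessie"):
+ *
+ *   CREATE TAG IF NOT EXISTS v1.0 AT BRANCH main IN nessie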
+ *
+ * CREATE TAG [ IF NOT EXISTS ] tagName
+ * [ AT ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue ]
+ * [ IN sourceName ]
+ */
+public class CreateTagHandler extends BaseVersionHandler<SimpleCommandResult> {
+  private final UserSession userSession;
+
+  public CreateTagHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("CREATE TAG syntax is not supported.");
+
+    final SqlCreateTag createTag = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlCreateTag.class));
+    final SqlIdentifier sourceIdentifier = createTag.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final boolean existenceCheck = createTag.getExistenceCheck().booleanValue();
+    final String tagName = requireNonNull(createTag.getTagName()).toString();
+
+    VersionContext statementSourceVersion =
+      ReferenceTypeUtils.map(createTag.getRefType(), createTag.getRefValue());
+    VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName);
+    VersionContext sourceVersion = statementSourceVersion.orElse(sessionVersion);
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    try {
+      versionedPlugin.createTag(tagName, sourceVersion);
+    } catch (ReferenceAlreadyExistsException e) {
+      if (existenceCheck) {
+        throw UserException.validationError(e)
+          .message(HandlerUtils.REFERENCE_ALREADY_EXISTS_MESSAGE, tagName, sourceName)
+          .buildSilently();
+      }
+      return Collections.singletonList(
+        SimpleCommandResult.successful(
+          HandlerUtils.REFERENCE_ALREADY_EXISTS_MESSAGE,
+          tagName,
+          sourceName));
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s not found on source %s.", sourceVersion, sourceName)
+        .buildSilently();
+    } catch (NoDefaultBranchException e) {
+      throw UserException.validationError(e)
+        .message("Unable to resolve source version. Version was not specified and Source %s does not have a default branch set.", sourceName)
+        .buildSilently();
+    } catch (ReferenceTypeConflictException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s in source %s is not the requested type.", sourceVersion, sourceName)
+        .buildSilently();
+    }
+
+    String sourceVersionMessage = sourceVersion.isSpecified()
+      ? sourceVersion.toString()
+      : "the default branch";
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Tag %s has been created at %s in source %s.",
+        tagName,
+        sourceVersionMessage,
+        sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java
new file mode 100644
index 0000000000..f8e5edf9e5
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java
new file mode 100644
index 0000000000..f8e5edf9e5
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropBranchHandler.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.SqlDropBranch;
+import com.dremio.exec.store.ReferenceConflictException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.google.common.base.Strings;
+
+/**
+ * Handler for dropping a branch.
+ *
+ * DROP BRANCH [ IF EXISTS ] branchName
+ * ( AT COMMIT commitHash | FORCE )
+ * [ IN sourceName ]
+ */
+public class DropBranchHandler extends BaseVersionHandler<SimpleCommandResult> {
+
+  private final UserSession userSession;
+
+  public DropBranchHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("DROP BRANCH syntax is not supported.");
+
+    final SqlDropBranch dropBranch = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlDropBranch.class));
+    final SqlIdentifier sourceIdentifier = dropBranch.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    String commitHash = (dropBranch.getCommitHash() != null)
+      ? dropBranch.getCommitHash().toString()
+      : ""; // Will imply force drop
+    final String branchName = requireNonNull(dropBranch.getBranchName()).toString();
+    final boolean forceDrop = dropBranch.getForceDrop().booleanValue();
+    final boolean existenceCheck = dropBranch.getExistenceCheck().booleanValue();
+
+    if (!forceDrop && Strings.isNullOrEmpty(commitHash)) {
+      // This shouldn't be possible, enforced by SQL parser
+      throw UserException.validationError()
+        .message("Need commit hash to drop branch %s on source %s.", branchName, sourceName)
+        .buildSilently();
+    }
+
+    // Prevent dropping current branch
+    VersionContext currentSessionVersion = userSession.getSessionVersionForSource(sourceName);
+    if (currentSessionVersion.isBranch() && currentSessionVersion.getValue().equals(branchName)) {
+      throw UserException.validationError()
+        .message("Cannot drop branch %s for source %s while it is set in the current session's reference context.", branchName, sourceName)
+        .buildSilently();
+    }
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    try {
+      versionedPlugin.dropBranch(branchName, commitHash);
+    } catch (ReferenceConflictException e) {
+      // TODO: DX-43145 Retries if forceDrop is true?
+      throw UserException.validationError(e)
+        .message("Branch %s has conflict on source %s.", branchName, sourceName)
+        .buildSilently();
+    } catch (ReferenceNotFoundException e) {
+      if (existenceCheck) {
+        throw UserException.validationError(e)
+          .message("Branch %s not found on source %s.", branchName, sourceName)
+          .buildSilently();
+      }
+      // Return success, but still give message about not found
+      return Collections.singletonList(
+        SimpleCommandResult.successful(
+          "Branch %s not found on source %s.", branchName, sourceName));
+    }
+
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Branch %s has been dropped on source %s.", branchName, sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
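DropBranchHandler above enforces two preconditions before touching the plugin: a commit hash (AT COMMIT) or FORCE must accompany the drop, and the branch the session currently points at can never be dropped. A compact sketch of those two checks, with plain strings standing in for Dremio's VersionContext types, illustrative only:

final class DropBranchGuardSketch {
  static void validate(String branchToDrop, String commitHash, boolean forceDrop, String sessionBranch) {
    // Either FORCE or an AT COMMIT hash must be supplied (the SQL parser enforces this).
    if (!forceDrop && (commitHash == null || commitHash.isEmpty())) {
      throw new IllegalArgumentException("Need commit hash to drop branch " + branchToDrop + ".");
    }
    // Never drop the branch the current session still points at.
    if (branchToDrop.equals(sessionBranch)) {
      throw new IllegalArgumentException(
          "Cannot drop branch " + branchToDrop + " while it is the session's reference context.");
    }
  }
}

The empty commit hash doubles as the force-drop signal when it reaches the plugin, which is why the handler normalizes a missing AT COMMIT clause to "".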
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropTagHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropTagHandler.java
new file mode 100644
index 0000000000..4b48cd3e84
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/DropTagHandler.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.SqlDropTag;
+import com.dremio.exec.store.ReferenceConflictException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.google.common.base.Strings;
+
+/**
+ * Handler for dropping a tag.
+ *
+ * DROP TAG [ IF EXISTS ] tagName
+ * ( AT COMMIT commitHash | FORCE )
+ * [ IN sourceName ]
+ */
+public class DropTagHandler extends BaseVersionHandler<SimpleCommandResult> {
+
+  private final UserSession userSession;
+
+  public DropTagHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("DROP TAG syntax is not supported.");
+
+    final SqlDropTag dropTag = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlDropTag.class));
+    final SqlIdentifier sourceIdentifier = dropTag.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    String commitHash = (dropTag.getCommitHash() != null)
+      ? dropTag.getCommitHash().toString()
+      : ""; // Will imply force drop
+    final String tagName = requireNonNull(dropTag.getTagName()).toString();
+    final boolean forceDrop = dropTag.getForceDrop().booleanValue();
+    final boolean existenceCheck = dropTag.getExistenceCheck().booleanValue();
+
+    if (!forceDrop && Strings.isNullOrEmpty(commitHash)) {
+      // This shouldn't be possible, enforced by SQL parser
+      throw UserException.validationError()
+        .message("Need commit hash to drop tag %s on source %s.", tagName, sourceName)
+        .buildSilently();
+    }
+
+    // Prevent dropping current tag
+    VersionContext currentSessionVersion = userSession.getSessionVersionForSource(sourceName);
+    if (currentSessionVersion.isTag() && currentSessionVersion.getValue().equals(tagName)) {
+      throw UserException.validationError()
+        .message("Cannot drop tag %s for source %s while it is set in the current session's reference context.", tagName, sourceName)
+        .buildSilently();
+    }
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    try {
+      versionedPlugin.dropTag(tagName, commitHash);
+    } catch (ReferenceConflictException e) {
+      // TODO: DX-43145 Retries if forceDrop is true?
+      throw UserException.validationError(e)
+        .message("Tag %s has conflict on source %s.", tagName, sourceName)
+        .buildSilently();
+    } catch (ReferenceNotFoundException e) {
+      if (existenceCheck) {
+        throw UserException.validationError(e)
+          .message("Tag %s not found on source %s.", tagName, sourceName)
+          .buildSilently();
+      }
+      // Return success, but still give message about not found
+      return Collections.singletonList(
+        SimpleCommandResult.successful(
+          "Tag %s not found on source %s.", tagName, sourceName));
+    }
+
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Tag %s has been dropped on source %s.", tagName, sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/HandlerUtils.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/HandlerUtils.java
new file mode 100644
index 0000000000..77b9a02818
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/HandlerUtils.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+public class HandlerUtils {
+  protected static final String REFERENCE_ALREADY_EXISTS_MESSAGE = "Reference %s already exists in Source %s.";
+
+}
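MergeBranchHandler below resolves its merge target in a fixed order: an explicit target branch in the statement wins, otherwise the session's current branch is used, otherwise the statement is rejected. A minimal sketch of that resolution, with Optional-wrapped strings standing in for the actual SqlMergeBranch and VersionContext types:

import java.util.Optional;

// Illustrative only; not Dremio's actual class hierarchy.
final class MergeTargetSketch {
  static String resolveTarget(Optional<String> explicitTarget, Optional<String> sessionBranch) {
    return explicitTarget.orElseGet(() ->
        sessionBranch.orElseThrow(() ->
            new IllegalStateException("No target branch to merge into.")));
  }
}

So "MERGE BRANCH dev INTO main" always merges into main, while a bare "MERGE BRANCH dev" only works when the session reference is itself a branch.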
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/MergeBranchHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/MergeBranchHandler.java
new file mode 100644
index 0000000000..f672d2b014
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/MergeBranchHandler.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.ops.QueryContext;
+import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.SqlMergeBranch;
+import com.dremio.exec.store.ReferenceConflictException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.sabot.rpc.user.UserSession;
+
+/** Handler for merging a branch. */
+public class MergeBranchHandler extends BaseVersionHandler<SimpleCommandResult> {
+  private final UserSession userSession;
+
+  public MergeBranchHandler(QueryContext context) {
+    super(context.getCatalog(), context.getOptions());
+    this.userSession = requireNonNull(context.getSession());
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("MERGE BRANCH syntax is not supported.");
+
+    final SqlMergeBranch mergeBranch = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlMergeBranch.class));
+    final SqlIdentifier sourceIdentifier = mergeBranch.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    final String sourceBranchName = requireNonNull(mergeBranch.getSourceBranchName()).toString();
+    final VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName);
+
+    String targetBranchName;
+    if (mergeBranch.getTargetBranchName() != null) {
+      targetBranchName = mergeBranch.getTargetBranchName().toString();
+    } else if (sessionVersion.isBranch()) {
+      targetBranchName = sessionVersion.getValue();
+    } else {
+      throw UserException.validationError()
+        .message("No target branch to merge into.")
+        .buildSilently();
+    }
+
+    try {
+      versionedPlugin.mergeBranch(sourceBranchName, targetBranchName);
+    } catch (ReferenceConflictException e) {
+      throw UserException.validationError(e)
+        .message(
+          "Merge branch %s into branch %s failed due to commit conflict on source %s.",
+          sourceBranchName, targetBranchName, sourceName)
+        .buildSilently();
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError(e)
+        .message(
+          "Merge branch %s into branch %s failed due to missing branch on source %s.",
+          sourceBranchName, targetBranchName, sourceName)
+        .buildSilently();
+    }
+
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Branch %s has been merged into %s on source %s.",
+        sourceBranchName, targetBranchName, sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+}
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowBranchesHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowBranchesHandler.java
new file mode 100644
index 0000000000..587a20df42
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowBranchesHandler.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.SqlShowBranches;
+import com.dremio.exec.store.ReferenceInfo;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+
+/**
+ * Handler to show source's branches.
+ *
+ * SHOW BRANCHES [ IN sourceName ]
+ */
+public class ShowBranchesHandler extends BaseVersionHandler<ReferenceInfo> {
+
+  private final UserSession userSession;
+
+  public ShowBranchesHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<ReferenceInfo> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("SHOW BRANCHES syntax is not supported.");
+
+    final SqlShowBranches showBranches = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlShowBranches.class));
+    final SqlIdentifier sourceIdentifier = showBranches.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    return versionedPlugin.listBranches().collect(Collectors.toList());
+  }
+
+  @Override
+  public Class<ReferenceInfo> getResultType() {
+    return ReferenceInfo.class;
+  }
+}
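All of these handlers share the same contract from their BaseVersionHandler parent: each declares the row type it produces (SimpleCommandResult for commands, ReferenceInfo for branch and tag listings, ChangeInfo for logs), returns a List of that type from toResult, and reports the type via getResultType so the result set schema can be derived. A minimal sketch of that contract with stand-in interfaces, not Dremio's actual class hierarchy:

import java.util.List;

// Illustrative shape of the handler contract; the real BaseVersionHandler also
// carries the catalog, option resolver, and feature-flag check.
interface VersionHandlerSketch<T> {
  List<T> toResult(String sql, Object sqlNode) throws Exception;
  Class<T> getResultType();
}

This is also why the generic parameters matter in the class declarations: ShowBranchesHandler and ShowTagsHandler bind T to ReferenceInfo, ShowLogsHandler to ChangeInfo, and the remaining handlers to SimpleCommandResult.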
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowLogsHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowLogsHandler.java
new file mode 100644
index 0000000000..fdf14a1cfd
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowLogsHandler.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils;
+import com.dremio.exec.planner.sql.parser.SqlShowLogs;
+import com.dremio.exec.store.ChangeInfo;
+import com.dremio.exec.store.NoDefaultBranchException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.ReferenceTypeConflictException;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+
+/**
+ * Handler to show logs for a specific ref.
+ *
+ * SHOW LOGS
+ * [ AT ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue ]
+ * [ IN sourceName ]
+ */
+public class ShowLogsHandler extends BaseVersionHandler<ChangeInfo> {
+  private final UserSession userSession;
+
+  public ShowLogsHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<ChangeInfo> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("SHOW LOGS syntax is not supported.");
+
+    final SqlShowLogs showLogs = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlShowLogs.class));
+    final SqlIdentifier sourceIdentifier = showLogs.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final VersionContext statementVersion =
+      ReferenceTypeUtils.map(showLogs.getRefType(), showLogs.getRefValue());
+    final VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName);
+    final VersionContext version = statementVersion.orElse(sessionVersion);
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    try {
+      return versionedPlugin.listChanges(version).collect(Collectors.toList());
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s not found in source %s.", version, sourceName)
+        .buildSilently();
+    } catch (NoDefaultBranchException e) {
+      throw UserException.validationError(e)
+        .message("Unable to resolve requested version. Version was not specified and Source %s does not have a default branch set.", sourceName)
+        .buildSilently();
+    } catch (ReferenceTypeConflictException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s in source %s is not the requested type.", version, sourceName)
+        .buildSilently();
+    }
+  }
+
+  @Override
+  public Class<ChangeInfo> getResultType() {
+    return ChangeInfo.class;
+  }
+}
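As with CREATE TAG, the AT clause on SHOW LOGS overrides any session reference. A hypothetical usage sketch, again reusing the runQuery(String) test helper added later in this diff; nessie1 and dev are invented names:

  private void showLogsExamples() {
    runQuery("SHOW LOGS IN nessie1");               // log for the session (or default-branch) version
    runQuery("SHOW LOGS AT BRANCH dev IN nessie1"); // explicit AT clause overrides the session version
  }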
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowTagsHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowTagsHandler.java
new file mode 100644
index 0000000000..27303f4a51
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/ShowTagsHandler.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.SqlShowTags;
+import com.dremio.exec.store.ReferenceInfo;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+
+/**
+ * Handler to show source's tags.
+ *
+ * SHOW TAGS [ IN sourceName ]
+ */
+public class ShowTagsHandler extends BaseVersionHandler<ReferenceInfo> {
+
+  private final UserSession userSession;
+
+  public ShowTagsHandler(Catalog catalog, OptionResolver optionResolver, UserSession userSession) {
+    super(catalog, optionResolver);
+    this.userSession = requireNonNull(userSession);
+  }
+
+  @Override
+  public List<ReferenceInfo> toResult(String sql, SqlNode sqlNode)
+      throws ForemanSetupException {
+    checkFeatureEnabled("SHOW TAGS syntax is not supported.");
+
+    final SqlShowTags showTags = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlShowTags.class));
+    final SqlIdentifier sourceIdentifier = showTags.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    return versionedPlugin.listTags().collect(Collectors.toList());
+  }
+
+  @Override
+  public Class<ReferenceInfo> getResultType() {
+    return ReferenceInfo.class;
+  }
+}
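UseVersionHandler below is the piece that stores a validated version on the session. It resolves the requested reference first, and it probes bare commits separately because resolveVersionContext skips existence checks for plain commit hashes. A hypothetical usage sketch with the same invented names as above:

  private void useVersionExamples() {
    runQuery("USE BRANCH dev IN nessie1"); // later statements against nessie1 resolve via branch dev
    runQuery("SHOW LOGS IN nessie1");      // now lists the commits on dev
  }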
diff --git a/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/UseVersionHandler.java b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/UseVersionHandler.java
new file mode 100644
index 0000000000..c0064cf993
--- /dev/null
+++ b/dac/backend/src/main/java/com/dremio/exec/planner/sql/handlers/UseVersionHandler.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.ResolvedVersionContext;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.catalog.VersionedPlugin;
+import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult;
+import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil;
+import com.dremio.exec.planner.sql.parser.ReferenceTypeUtils;
+import com.dremio.exec.planner.sql.parser.SqlUseVersion;
+import com.dremio.exec.store.NoDefaultBranchException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.ReferenceTypeConflictException;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionResolver;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.google.common.base.Preconditions;
+
+public class UseVersionHandler extends BaseVersionHandler<SimpleCommandResult> {
+  private final UserSession userSession;
+
+  public UseVersionHandler(Catalog catalog, UserSession userSession, OptionResolver optionResolver) {
+    super(catalog, optionResolver);
+    this.userSession = Preconditions.checkNotNull(userSession);
+  }
+
+  @Override
+  public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode) throws ForemanSetupException {
+    final SqlUseVersion useVersion = requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlUseVersion.class));
+
+    checkFeatureEnabled(String.format("USE %s syntax is not supported.", useVersion.getRefType()));
+
+    final SqlIdentifier sourceIdentifier = useVersion.getSourceName();
+    final String sourceName = VersionedHandlerUtils.resolveSourceName(
+      sourceIdentifier,
+      userSession.getDefaultSchemaPath());
+
+    VersionContext requestedVersion =
+      ReferenceTypeUtils.map(useVersion.getRefType(), useVersion.getRefValue());
+    if (!requestedVersion.isSpecified()) {
+      // Defensive, this shouldn't be possible
+      throw new IllegalStateException("Must request a real version.");
+    }
+
+    // Validate that the requested version exists
+    final VersionedPlugin versionedPlugin = getVersionedPlugin(sourceName);
+    final ResolvedVersionContext resolvedVersionContext;
+    try {
+      resolvedVersionContext = versionedPlugin.resolveVersionContext(requestedVersion);
+    } catch (ReferenceNotFoundException e) {
+      throw UserException.validationError()
+        .message("Requested %s not found in source %s.", requestedVersion, sourceName)
+        .buildSilently();
+    } catch (NoDefaultBranchException e) {
+      // This only happens if we try to resolve VersionContext.NOT_SPECIFIED,
+      // which should not be possible here.
+      throw new IllegalStateException(e);
+    } catch (ReferenceTypeConflictException e) {
+      throw UserException.validationError(e)
+        .message("Requested %s in source %s is not the requested type.", requestedVersion, sourceName)
+        .buildSilently();
+    }
+
+    // Resolving a bare commit does not validate existence for performance reasons. Check explicitly
+    // so that we can fail early and inform the user.
+    if (resolvedVersionContext.isBareCommit() && !versionedPlugin.commitExists(resolvedVersionContext.getCommitHash())) {
+      throw UserException.validationError()
+        .message("Commit %s not found in source %s.", resolvedVersionContext.getCommitHash(), sourceName)
+        .buildSilently();
+    }
+
+    userSession.setSessionVersionForSource(sourceName, requestedVersion);
+
+    return Collections.singletonList(
+      SimpleCommandResult.successful(
+        "Current version context set to %s in source %s.",
+        requestedVersion,
+        sourceName));
+  }
+
+  @Override
+  public Class<SimpleCommandResult> getResultType() {
+    return SimpleCommandResult.class;
+  }
+
+}
diff --git a/dac/backend/src/main/java/com/dremio/file/FileName.java b/dac/backend/src/main/java/com/dremio/file/FileName.java
index 7b4612efbd..cb87955ca2 100644
--- a/dac/backend/src/main/java/com/dremio/file/FileName.java
+++ b/dac/backend/src/main/java/com/dremio/file/FileName.java
@@ -33,6 +33,7 @@
   public FileName(String name) {
     this.name = name;
   }
 
+  @Override
   @Pattern(regexp = "^[^@:{/.][^@:{/]*$", message = "File name cannot start with a period, contain a colon, forward slash, at sign, or open curly bracket.")
   public String getName() {
     return super.getName();
diff --git a/dac/backend/src/main/proto/backup.proto b/dac/backend/src/main/proto/backup.proto
index b1cb114fb5..9b3d67f935 100644
--- a/dac/backend/src/main/proto/backup.proto
+++ b/dac/backend/src/main/proto/backup.proto
@@ -21,8 +21,14 @@ option java_package = "com.dremio.dac.proto.model.backup";
 option optimize_for = SIZE;
 
 message BackupFileInfo {
+  enum Compression {
+    NONE = 0;
+    LZ4 = 1;
+    SNAPPY = 2;
+  }
   required com.dremio.datastore.KVStoreInfo kvstoreInfo = 1;
   required int64 checksum = 2; // crc32 checksum
   required int64 records = 3;
   optional bool binary = 4 [default = false];
+  optional Compression compression = 5 [default = NONE];
 }
diff --git a/dac/backend/src/main/resources/rest/profile/profile.ftl b/dac/backend/src/main/resources/rest/profile/profile.ftl
index a629dbd9fa..0478072b98 100644
--- a/dac/backend/src/main/resources/rest/profile/profile.ftl
+++ b/dac/backend/src/main/resources/rest/profile/profile.ftl
@@ -201,7 +201,8 @@ <#list layoutList as k, v> <#assign dsName = k.getDataset().getName()> <#assign dsPath = k.toParentPath()> -
  • ${dsName} (${dsPath})
  • + <#assign dsVersion = k.getVersion()> +
  • ${dsName}${dsVersion}(${dsPath})
    • <#list v as layout> <#if layout.name?? && layout.name?trim?has_content > @@ -279,7 +280,7 @@ Reflection Id: ${layout.getLayoutId()}, Materialization Id: ${layout.getMaterializationId()}
      Expiration: ${layout.materializationExpirationTimestamp?number_to_datetime?iso_utc}
      <#if model.accelerationDetails?? && model.accelerationDetails.hasRelationship(layout.layoutId) > - Dataset: ${model.accelerationDetails.getReflectionDatasetPath(layout.layoutId)}
      + Dataset: ${model.accelerationDetails.getReflectionDatasetPath(layout.layoutId)}${model.accelerationDetails.getReflectionDatasetVersion(layout.layoutId)}
      Age: ${(model.getPerdiodFromStart(model.accelerationDetails.getRefreshChainStartTime(layout.layoutId)))}
      <#if layout.snowflake?has_content && layout.snowflake> @@ -422,6 +423,40 @@ <#if planPhase.hasSizeStats()>

      ${planPhase.sizeStats}

      +

      Rule Execution Times

      +
      +
      + +
      +
      + + <#list model.profile.planPhasesList as planPhase> + <#if planPhase.timeBreakdownPerRuleMap?size gt 0> + + + + + <#list planPhase.timeBreakdownPerRuleMap?keys as k> + + + + + + + +
      + ${planPhase.getPhaseName()} +
${k}${planPhase.timeBreakdownPerRuleMap[k]} ms
      +
      +
      +
      +
      <#else>

      No planning phase information to show

      @@ -464,6 +499,20 @@
      ${model.getCommandPoolWaitMillis()}
      Total Query Time:
      ${model.getTotalTime()}
      + <#if model.profile.hasNumJoinsInUserQuery() > +
      # Joins in user query:
      +
      ${model.profile.getNumJoinsInUserQuery()}
      + + <#if model.profile.hasNumJoinsInFinalPrel() > +
      # Joins in final plan:
      +
      ${model.profile.getNumJoinsInFinalPrel()}
      + +
      Considered Reflections:
      +
      ${model.getConsideredReflectionsCount()}
      +
      Matched Reflections:
      +
      ${model.getMatchedReflectionsCount()}
      +
Chosen Reflections:
      +
      ${model.getChosenReflectionsCount()}
      <#if model.getPlanCacheUsed() != 0 >
      Cached plan was used
      diff --git a/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedHomeAPIs.java b/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedHomeAPIs.java new file mode 100644 index 0000000000..352522d8b8 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedHomeAPIs.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.api; + +import static com.dremio.options.OptionValue.OptionType.SYSTEM; + +import javax.ws.rs.client.Entity; +import javax.ws.rs.core.Response; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.dremio.dac.server.BaseTestServer; +import com.dremio.exec.catalog.CatalogOptions; +import com.dremio.options.OptionValue; +import com.dremio.service.namespace.file.proto.TextFileConfig; + +/** + * Tests for {@link com.dremio.dac.resource.HomeResource} when Home space is disabled + */ +public class TestBlockedHomeAPIs extends BaseTestServer { + private static String HOME_API_PATH = "/home"; + + private static boolean saveArsEnabled; + + @BeforeClass + public static void init() throws Exception { + BaseTestServer.init(); + saveArsEnabled = getSabotContext().getOptionManager().getOption(CatalogOptions.CATALOG_ARS_ENABLED); + + // Enable CATALOG_ARS_ENABLED + getSabotContext().getOptionManager().setOption( + OptionValue.createBoolean(SYSTEM, CatalogOptions.CATALOG_ARS_ENABLED.getOptionName(), true)); + } + + @AfterClass + public static void tearDown() throws Exception { + // Restore CATALOG_ARS_ENABLED + getSabotContext().getOptionManager().setOption( + OptionValue.createBoolean(SYSTEM, CatalogOptions.CATALOG_ARS_ENABLED.getOptionName(), saveArsEnabled)); + } + + @Test + public void testBlockedHomeApis() { + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file")).buildDelete()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("folder")).buildDelete()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file_format")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("folder")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file_preview_unsaved")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file_preview")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("folder")).buildPost(null)); + 
expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("upload_cancel")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("upload_finish")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("upload_start")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(HOME_API_PATH).path("@dremio").path("file_format")).buildPut(Entity.entity(new TextFileConfig(), JSON))); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedSpaceAPIs.java b/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedSpaceAPIs.java new file mode 100644 index 0000000000..595b1cf027 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/api/TestBlockedSpaceAPIs.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.api; + +import static com.dremio.options.OptionValue.OptionType.SYSTEM; + +import javax.ws.rs.core.Response; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.dremio.dac.server.BaseTestServer; +import com.dremio.exec.catalog.CatalogOptions; +import com.dremio.options.OptionValue; + +public class TestBlockedSpaceAPIs extends BaseTestServer { + private static final String SPACE_API_PATH = "/space"; + + private static boolean saveArsEnabled; + + @BeforeClass + public static void init() throws Exception { + BaseTestServer.init(); + saveArsEnabled = getSabotContext().getOptionManager().getOption(CatalogOptions.CATALOG_ARS_ENABLED); + + // Enable CATALOG_ARS_ENABLED + getSabotContext().getOptionManager().setOption( + OptionValue.createBoolean(SYSTEM, CatalogOptions.CATALOG_ARS_ENABLED.getOptionName(), true)); + } + + @AfterClass + public static void tearDown() throws Exception { + // Restore CATALOG_ARS_ENABLED + getSabotContext().getOptionManager().setOption( + OptionValue.createBoolean(SYSTEM, CatalogOptions.CATALOG_ARS_ENABLED.getOptionName(), saveArsEnabled)); + } + + @Test + public void testBlockedSpaceAPIs() { + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(SPACE_API_PATH).path("dummy")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(SPACE_API_PATH).path("dummy/dataset/dummy")).buildGet()); + } + + @Test + public void testBlockedFolderAPIs() { + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(SPACE_API_PATH).path("dummy/folder/dummy")).buildGet()); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(SPACE_API_PATH).path("dummy/folder/dummy")).buildPost(null)); + expectStatus(Response.Status.NOT_FOUND, getBuilder(getAPIv2().path(SPACE_API_PATH).path("dummy/folder/dummy")).buildDelete()); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/dac/api/TestCatalogResource.java 
b/dac/backend/src/test/java/com/dremio/dac/api/TestCatalogResource.java index 2a5449cae2..11d4cc9ad1 100644 --- a/dac/backend/src/test/java/com/dremio/dac/api/TestCatalogResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/api/TestCatalogResource.java @@ -39,13 +39,14 @@ import javax.ws.rs.core.GenericType; import javax.ws.rs.core.Response; -import org.apache.commons.io.FileUtils; import org.junit.After; import org.junit.AfterClass; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.TemporaryFolder; +import com.dremio.common.exceptions.UserException; import com.dremio.common.util.TestTools; import com.dremio.common.utils.PathUtils; import com.dremio.dac.explore.model.DatasetPath; @@ -53,6 +54,8 @@ import com.dremio.dac.model.common.Field; import com.dremio.dac.model.sources.SourceUI; import com.dremio.dac.server.BaseTestServer; +import com.dremio.dac.server.FamilyExpectation; +import com.dremio.dac.server.ValidationErrorMessage; import com.dremio.dac.service.catalog.CatalogServiceHelper; import com.dremio.dac.util.DatasetsUtil; import com.dremio.exec.catalog.CatalogServiceImpl; @@ -184,34 +187,84 @@ public void testSpace() throws Exception { @Test public void testFoldersInSpace() throws Exception { - // create a new space - Space newSpace = new Space(null, "final frontier", null, null, null); - Space space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newSpace)), new GenericType() {}); + for (boolean deleteFolderFirst : new boolean[] { false, true }) { + // create a new space + Space newSpace = new Space(null, "final frontier", null, null, null); + Space space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newSpace)), new GenericType() {}); - // no children at this point - assertNull(space.getChildren()); + // no children at this point + assertNull(space.getChildren()); - // add a folder - Folder newFolder = getFolderConfig(Arrays.asList(space.getName(), "myFolder")); - Folder folder = createFolder(newFolder); - assertEquals(newFolder.getPath(), folder.getPath()); + // add a folder + Folder newFolder = getFolderConfig(Arrays.asList(space.getName(), "myFolder")); + Folder folder = createFolder(newFolder); + assertEquals(newFolder.getPath(), folder.getPath()); - // make sure folder shows up under space - space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), new GenericType() {}); + // make sure folder shows up under space + space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), new GenericType() {}); - // make sure that trying to create the folder again fails - expectStatus(Response.Status.CONFLICT, getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newFolder))); + // make sure that trying to create the folder again fails + expectStatus(Response.Status.CONFLICT, getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newFolder))); - // one child at this point - assertEquals(space.getChildren().size(), 1); - assertEquals(space.getChildren().get(0).getId(), folder.getId()); + // one child at this point + assertEquals(space.getChildren().size(), 1); + assertEquals(space.getChildren().get(0).getId(), folder.getId()); - // delete the folder - expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(folder.getId())).buildDelete()); - space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), 
new GenericType() {}); - assertEquals(space.getChildren().size(), 0); + if (deleteFolderFirst) { + // delete the folder + expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(folder.getId())).buildDelete()); + space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), new GenericType() {}); + assertEquals(space.getChildren().size(), 0); - newNamespaceService().deleteSpace(new NamespaceKey(space.getName()), space.getTag()); + newNamespaceService().deleteSpace(new NamespaceKey(space.getName()), space.getTag()); + } else { + newNamespaceService().deleteSpace(new NamespaceKey(space.getName()), space.getTag()); + + // delete the folder + expectError( + FamilyExpectation.CLIENT_ERROR, + getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(folder.getId())).buildDelete(), + ValidationErrorMessage.class); + } + } + } + + @Test + public void testFunctionsInSpace() throws Exception { + for (boolean deleteFunctionFirst : new boolean[] { false, true }) { + // create a new space + Space newSpace = new Space(null, "mySpace123", null, null, null); + Space space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newSpace)), new GenericType() {}); + + // no children at this point + assertNull(space.getChildren()); + + // add a function + runQuery("CREATE FUNCTION mySpace123.foo()\n" + + "RETURNS int\n" + + "RETURN 6"); + + // make sure function shows up under space + space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), new GenericType() {}); + assertEquals(space.getChildren().size(), 1); + + if (deleteFunctionFirst) { + runQuery("DROP FUNCTION mySpace123.foo"); + + space = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(space.getId())).buildGet(), new GenericType() {}); + assertEquals(space.getChildren().size(), 0); + + newNamespaceService().deleteSpace(new NamespaceKey(space.getName()), space.getTag()); + } else { + newNamespaceService().deleteSpace(new NamespaceKey(space.getName()), space.getTag()); + try { + runQuery("DROP FUNCTION mySpace123.foo"); + Assert.fail("Should not be able to drop a function when we already dropped the space."); + } catch (UserException ue) { + // We expect a user exception, since we deleted the space. 
+ } + } + } } @Test @@ -1122,189 +1175,6 @@ public void testSourcePromoting() throws Exception { } - @Test - public void testSourceMetadataRefresh() throws Exception { - - // create a temporary copy of some test data - TemporaryFolder tempFolder = new TemporaryFolder(); - tempFolder.create(); - tempFolder.newFolder("json"); - - // Copys test data - String numbers_src = TestTools.getWorkingPath() + "/src/test/resources/json/numbers.json"; - java.io.File numbers = new java.io.File(numbers_src); - java.io.File numbers_copy = tempFolder.newFile("json/numbers.json"); - FileUtils.copyFile(numbers, numbers_copy); - - NASConf nasConf = new NASConf(); - nasConf.path = tempFolder.getRoot().getAbsolutePath(); - Source newSource = new Source(); - newSource.setName("catalog-test"); - newSource.setType("NAS"); - newSource.setConfig(nasConf); - - Source source = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(newSource)), new GenericType() {}); - - // browse to the json directory - String id = getFolderIdByName(source.getChildren(), "json"); - assertNotNull(id, "Failed to find json directory"); - - // load the json dir - Folder folder = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(com.dremio.common.utils.PathUtils.encodeURIComponent(id))).buildGet(), new GenericType() {}); - assertEquals(folder.getChildren().size(), 1); - - String fileId = null; - - for (CatalogItem item : folder.getChildren()) { - List path = item.getPath(); - // get the numbers.json file - if (item.getType() == CatalogItem.CatalogItemType.FILE && path.get(path.size() - 1).equals("numbers.json")) { - fileId = item.getId(); - break; - } - } - - assertNotNull(fileId, "Failed to find numbers.json file"); - - // load the file - File file = expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(com.dremio.common.utils.PathUtils.encodeURIComponent(fileId))).buildGet(), new GenericType() { - }); - - // promote the file (dac/backend/src/test/resources/json/numbers.json) - Dataset dataset = createPDS(CatalogServiceHelper.getPathFromInternalId(file.getId()), new JsonFileConfig()); - - dataset = expectSuccess( - getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(fileId))) - .buildPost(Entity.json(dataset)), - new GenericType() { }); - - // load the dataset - dataset = expectSuccess( - getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(dataset.getId())).buildGet(), - new GenericType() { }); - - // verify listing - folder = expectSuccess( - getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(id))) - .buildGet(), - new GenericType() { }); - assertEquals(folder.getChildren().size(), 1); - - // test metadata/refresh endpoint - CatalogResource.MetadataRefreshResponse response = new CatalogResource.MetadataRefreshResponse(false, false); - - // test with wrong ID type (expect BAD_REQUEST) - expectStatus(Response.Status.BAD_REQUEST, - getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(id)) - .path("metadata/refresh")) - .buildPost(Entity.json(response))); - - // test with bad ID (expect NOT_FOUND) - expectStatus(Response.Status.NOT_FOUND, - getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent("asdfasdf")) - .path("metadata/refresh")) - .buildPost(Entity.json(response))); - - /*** test with promoted data ***/ - response = 
expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertFalse(response.getDeleted()); - - // test forceUpdate - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("forceUpdate", "true")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertFalse(response.getDeleted()); - - // test deleteWhenMissing - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("deleteWhenMissing", "true")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertFalse(response.getDeleted()); - - // test autoPromotion - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("autoPromotion", "true")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertFalse(response.getDeleted()); - - // test all query params - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("forceUpdate", "true") - .queryParam("deleteWhenMissing", "true") - .queryParam("autoPromotion", "true")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertFalse(response.getDeleted()); - - // now delete the temporary folder - numbers_copy.delete(); - - // test keep missing metadata - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("forceUpdate", "false") - .queryParam("deleteWhenMissing", "false") - .queryParam("autoPromotion", "false")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertFalse(response.getChanged()); - assertFalse(response.getDeleted()); - - // test enabling metadata deletion when missing - response = expectSuccess(getBuilder(getPublicAPI(3) - .path(CATALOG_PATH) - .path(com.dremio.common.utils.PathUtils.encodeURIComponent(dataset.getId())) - .path("metadata/refresh") - .queryParam("forceUpdate", "false") - .queryParam("deleteWhenMissing", "true") - .queryParam("autoPromotion", "false")) - .buildPost(Entity.json(response)), - new GenericType() { }); - assertTrue(response.getChanged()); - assertTrue(response.getDeleted()); - - // cleanup - tempFolder.delete(); - expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH).path(source.getId())).buildDelete()); - } - @Test public void testSourceEditWithoutSecret() throws Exception { // fakesource only works if password is the same as the name, else with fail to create @@ -1640,4 +1510,12 @@ private Folder createFolder(List path) { private Folder createFolder(Folder folder) { return expectSuccess(getBuilder(getPublicAPI(3).path(CATALOG_PATH)).buildPost(Entity.json(folder)), new GenericType() {}); } + + private void runQuery(String query) { 
+ submitJobAndWaitUntilCompletion( + JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME)) + .setQueryType(QueryType.UI_INTERNAL_RUN) + .build()); + } } diff --git a/dac/backend/src/test/java/com/dremio/dac/api/TestCollaborationResource.java b/dac/backend/src/test/java/com/dremio/dac/api/TestCollaborationResource.java index e2e4973647..a2f5734c1c 100644 --- a/dac/backend/src/test/java/com/dremio/dac/api/TestCollaborationResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/api/TestCollaborationResource.java @@ -17,7 +17,6 @@ import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import static javax.ws.rs.core.Response.Status.CONFLICT; -import static javax.ws.rs.core.Response.Status.NOT_FOUND; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -64,8 +63,9 @@ public void testGetTags() throws Exception { createSpaceAndVDS(spacePath, vdsPath); DatasetConfig dataset = newNamespaceService().getDataset(new NamespaceKey(vdsPath)); - // no tags initially, so expect a 404 - expectStatus(NOT_FOUND, getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("tag")).buildGet()); + // Test no tags + Tags noTags = expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("tag")).buildGet(), Tags.class); + assertEquals(noTags.getTags().size(), 0); CollaborationHelper collaborationHelper = l(CollaborationHelper.class); @@ -74,7 +74,6 @@ public void testGetTags() throws Exception { Tags newTags = new Tags(tagList, null); collaborationHelper.setTags(dataset.getId().getId(), newTags); - // tags exist now Tags tags = expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("tag")).buildGet(), Tags.class); assertEquals(tags.getTags().size(), 2); assertTrue(tags.getTags().containsAll(tagList)); @@ -110,7 +109,7 @@ public void testSetTags() throws Exception { assertNotNull(tags.getVersion()); // clear out tags - tagList = Arrays.asList(); + tagList = Collections.emptyList(); newTags = new Tags(tagList, tags.getVersion()); tags = expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("tag")).buildPost(Entity.json(newTags)), Tags.class); @@ -171,15 +170,14 @@ public void testGetWiki() throws Exception { createSpaceAndVDS(spacePath, vdsPath); DatasetConfig dataset = newNamespaceService().getDataset(new NamespaceKey(vdsPath)); - // no tags initially, so expect a 404 - expectStatus(NOT_FOUND, getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("wiki")).buildGet()); + Wiki emptyWiki = expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("wiki")).buildGet(), Wiki.class); + assertEquals(emptyWiki.getText(), ""); CollaborationHelper collaborationHelper = l(CollaborationHelper.class); Wiki newWiki = new Wiki("sample wiki text", null); collaborationHelper.setWiki(dataset.getId().getId(), newWiki); - // tags exist now Wiki wiki = expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(dataset.getId().getId()).path("collaboration").path("wiki")).buildGet(), Wiki.class); assertEquals(wiki.getText(), newWiki.getText()); @@ -314,14 +312,14 @@ private void addWiki(List path, String text) throws Exception { final NameSpaceContainer container = 
newNamespaceService().getEntities(Collections.singletonList(new NamespaceKey(path))).get(0); final CollaborationHelper collaborationHelper = l(CollaborationHelper.class); - collaborationHelper.setWiki(NamespaceUtils.getId(container), new Wiki(text, null)); + collaborationHelper.setWiki(NamespaceUtils.getIdOrNull(container), new Wiki(text, null)); } private void addTags(List path, List tags) throws Exception { final NameSpaceContainer container = newNamespaceService().getEntities(Collections.singletonList(new NamespaceKey(path))).get(0); final CollaborationHelper collaborationHelper = l(CollaborationHelper.class); - collaborationHelper.setTags(NamespaceUtils.getId(container), new Tags(tags, null)); + collaborationHelper.setTags(NamespaceUtils.getIdOrNull(container), new Tags(tags, null)); } private String createSpaceAndVDS(NamespaceKey spacePath, List vdsPath) throws NamespaceException { @@ -339,7 +337,7 @@ private void createVDS(List vdsPath) { final VirtualDataset virtualDataset = new VirtualDataset(); virtualDataset.setSql("select * from sys.version"); - DatasetConfig datasetConfig = new DatasetConfig(); + final DatasetConfig datasetConfig = new DatasetConfig(); datasetConfig.setName(vdsPath.get(vdsPath.size() - 1)); datasetConfig.setFullPathList(vdsPath); datasetConfig.setType(DatasetType.VIRTUAL_DATASET); diff --git a/dac/backend/src/test/java/com/dremio/dac/api/TestSourceResource.java b/dac/backend/src/test/java/com/dremio/dac/api/TestDepreciatedSourceResource.java similarity index 85% rename from dac/backend/src/test/java/com/dremio/dac/api/TestSourceResource.java rename to dac/backend/src/test/java/com/dremio/dac/api/TestDepreciatedSourceResource.java index 73701c7588..64eaadaf2a 100644 --- a/dac/backend/src/test/java/com/dremio/dac/api/TestSourceResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/api/TestDepreciatedSourceResource.java @@ -43,29 +43,29 @@ import com.dremio.test.DremioTest; /** - * Tests {@link SourceResource} API + * Tests {@link DepreciatedSourceResource} API */ -public class TestSourceResource extends BaseTestServer { +public class TestDepreciatedSourceResource extends BaseTestServer { private static final String SOURCES_PATH = "/source/"; private final ConnectionReader reader = ConnectionReader.of(DremioTest.CLASSPATH_SCAN_RESULT, DremioTest.DEFAULT_SABOT_CONFIG); @Test public void testListSources() throws Exception { - ResponseList sources = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildGet(), new GenericType>() {}); + ResponseList sources = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildGet(), new GenericType>() {}); assertEquals(sources.getData().size(), newSourceService().getSources().size()); } @Test public void testAddSource() throws Exception { - SourceResource.SourceDeprecated newSource = new SourceResource.SourceDeprecated(); + DepreciatedSourceResource.SourceDeprecated newSource = new DepreciatedSourceResource.SourceDeprecated(); newSource.setName("Foopy"); newSource.setType("NAS"); NASConf config = new NASConf(); config.path = "/"; newSource.setConfig(config); - SourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildPost(Entity.entity(newSource, JSON)), SourceResource.SourceDeprecated.class); + DepreciatedSourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildPost(Entity.entity(newSource, JSON)), DepreciatedSourceResource.SourceDeprecated.class); assertEquals(source.getName(), newSource.getName()); 
assertNotNull(source.getState()); assertEquals(CatalogService.DEFAULT_REFRESH_MILLIS, source.getMetadataPolicy().getDatasetRefreshAfterMs()); @@ -76,7 +76,7 @@ public void testAddSource() throws Exception { @Test public void testAddSourceWithMetadataPolicy() throws Exception { - SourceResource.SourceDeprecated newSource = new SourceResource.SourceDeprecated(); + DepreciatedSourceResource.SourceDeprecated newSource = new DepreciatedSourceResource.SourceDeprecated(); newSource.setName("Src" + System.currentTimeMillis()); newSource.setType("NAS"); NASConf config = new NASConf(); @@ -87,7 +87,7 @@ public void testAddSourceWithMetadataPolicy() throws Exception { policy.setDatasetUpdateMode(UpdateMode.PREFETCH_QUERIED.name()); newSource.setMetadataPolicy(policy); - SourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildPost(Entity.entity(newSource, JSON)), SourceResource.SourceDeprecated.class); + DepreciatedSourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH)).buildPost(Entity.entity(newSource, JSON)), DepreciatedSourceResource.SourceDeprecated.class); assertEquals(source.getName(), newSource.getName()); assertNotNull(source.getState()); assertEquals(CatalogService.DEFAULT_REFRESH_MILLIS, source.getMetadataPolicy().getDatasetRefreshAfterMs()); @@ -100,7 +100,7 @@ public void testAddSourceWithMetadataPolicy() throws Exception { @Test public void testAddSourceErrors() throws Exception { // test invalid sources - SourceResource.SourceDeprecated newSource = new SourceResource.SourceDeprecated(); + DepreciatedSourceResource.SourceDeprecated newSource = new DepreciatedSourceResource.SourceDeprecated(); // no config newSource.setName("Foobar"); @@ -125,10 +125,10 @@ public void testUpdateSource() throws Exception { .setMethod(RefreshMethod.FULL) .setRefreshPeriod(TimeUnit.HOURS.toMillis(2)) .setGracePeriod(TimeUnit.HOURS.toMillis(6)); - SourceResource.SourceDeprecated updatedSource = new SourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); + DepreciatedSourceResource.SourceDeprecated updatedSource = new DepreciatedSourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); updatedSource.setDescription("Desc"); - SourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildPut(Entity.entity(updatedSource, JSON)), SourceResource.SourceDeprecated.class); + DepreciatedSourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildPut(Entity.entity(updatedSource, JSON)), DepreciatedSourceResource.SourceDeprecated.class); assertEquals("Desc", source.getDescription()); assertNotNull(source.getState()); @@ -178,7 +178,7 @@ public void testUpdateSourceErrors() throws Exception { .setMethod(RefreshMethod.FULL) .setRefreshPeriod(TimeUnit.HOURS.toMillis(2)) .setGracePeriod(TimeUnit.HOURS.toMillis(6)); - SourceResource.SourceDeprecated updatedSource = new SourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); + DepreciatedSourceResource.SourceDeprecated updatedSource = new DepreciatedSourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); // test updating non-existent source expectStatus(Response.Status.NOT_FOUND, getBuilder(getPublicAPI(3).path(SOURCES_PATH).path("badid")).buildPut(Entity.entity(updatedSource, JSON))); @@ -215,12 +215,12 @@ public void 
testUpdateSourceBoundaryValues() throws Exception { .setRefreshPeriod(TimeUnit.HOURS.toMillis(2)) .setGracePeriod(TimeUnit.HOURS.toMillis(6)); - SourceResource.SourceDeprecated updatedSource = new SourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); + DepreciatedSourceResource.SourceDeprecated updatedSource = new DepreciatedSourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); updatedSource.getMetadataPolicy().setDatasetRefreshAfterMs(MetadataPolicy.ONE_MINUTE_IN_MS); updatedSource.getMetadataPolicy().setAuthTTLMs(MetadataPolicy.ONE_MINUTE_IN_MS); updatedSource.getMetadataPolicy().setNamesRefreshMs(MetadataPolicy.ONE_MINUTE_IN_MS); - SourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildPut(Entity.entity(updatedSource, JSON)), SourceResource.SourceDeprecated.class); + DepreciatedSourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildPut(Entity.entity(updatedSource, JSON)), DepreciatedSourceResource.SourceDeprecated.class); assertEquals(source.getMetadataPolicy().getAuthTTLMs(), updatedSource.getMetadataPolicy().getAuthTTLMs()); assertEquals(source.getMetadataPolicy().getDatasetRefreshAfterMs(), updatedSource.getMetadataPolicy().getDatasetRefreshAfterMs()); assertEquals(source.getMetadataPolicy().getNamesRefreshMs(), updatedSource.getMetadataPolicy().getNamesRefreshMs()); @@ -274,7 +274,7 @@ public void testGetSource() throws Exception { sourceConfig.setConfig(nasConfig.toBytesString()); SourceConfig createdSourceConfig = newSourceService().registerSourceWithRuntime(sourceConfig); - SourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildGet(), SourceResource.SourceDeprecated.class); + DepreciatedSourceResource.SourceDeprecated source = expectSuccess(getBuilder(getPublicAPI(3).path(SOURCES_PATH).path(createdSourceConfig.getId().getId())).buildGet(), DepreciatedSourceResource.SourceDeprecated.class); assertEquals(source.getName(), sourceConfig.getName()); assertNotNull(source.getState()); @@ -319,8 +319,8 @@ public void testRemovingSensitiveFields() throws Exception { priv.password = "hello"; config.setConnectionConf(priv); - SourceResource sourceResource = new SourceResource(newSourceService(), null); - SourceResource.SourceDeprecated source = sourceResource.fromSourceConfig(config); + DepreciatedSourceResource depreciatedSourceResource = new DepreciatedSourceResource(newSourceService(), null); + DepreciatedSourceResource.SourceDeprecated source = depreciatedSourceResource.fromSourceConfig(config); APrivateSource newConfig = (APrivateSource) source.getConfig(); // make sure the sensitive fields have been removed @@ -406,7 +406,7 @@ private void testMetadataPolicyWithInvalidValues(MetadataPolicy policy) throws E .setRefreshPeriod(TimeUnit.HOURS.toMillis(2)) .setGracePeriod(TimeUnit.HOURS.toMillis(6)); - SourceResource.SourceDeprecated updatedSource = new SourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); + DepreciatedSourceResource.SourceDeprecated updatedSource = new DepreciatedSourceResource.SourceDeprecated(createdSourceConfig, settings, reader, null); updatedSource.getMetadataPolicy().setDatasetRefreshAfterMs(policy.getDatasetRefreshAfterMs()); updatedSource.getMetadataPolicy().setDatasetExpireAfterMs(policy.getDatasetExpireAfterMs()); 
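A note on the policy tests above: testUpdateSourceBoundaryValues pins each metadata-policy interval (dataset refresh, auth TTL, names refresh) at MetadataPolicy.ONE_MINUTE_IN_MS, the smallest accepted value, while testMetadataPolicyWithInvalidValues drives the same update path with values below that floor. A minimal sketch of the check these tests exercise, assuming a hypothetical checkPolicyFloor helper (the real validation lives inside DepreciatedSourceResource and is not shown in this diff):

    // Hypothetical sketch only; checkPolicyFloor is illustrative, not Dremio API.
    static void checkPolicyFloor(long intervalMs) {
      if (intervalMs < MetadataPolicy.ONE_MINUTE_IN_MS) {
        throw new IllegalArgumentException(
            "metadata policy interval must be at least one minute, got " + intervalMs + " ms");
      }
    }

Under this assumption the boundary test passes exactly at the floor and the invalid-value cases are rejected.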
diff --git a/dac/backend/src/test/java/com/dremio/dac/daemon/TestSystemStoragePluginInitializer.java b/dac/backend/src/test/java/com/dremio/dac/daemon/TestSystemStoragePluginInitializer.java index f0e632ff1c..d3dc8d23a1 100644 --- a/dac/backend/src/test/java/com/dremio/dac/daemon/TestSystemStoragePluginInitializer.java +++ b/dac/backend/src/test/java/com/dremio/dac/daemon/TestSystemStoragePluginInitializer.java @@ -237,7 +237,7 @@ public void close() throws Exception { () -> sabotContext, () -> new LocalSchedulerService(1), () -> new SystemTablePluginConfigProvider(), - () -> new SysFlightPluginConfigProvider(() -> sabotContext.getEndpoint()), + () -> new SysFlightPluginConfigProvider(), () -> fabricService, () -> ConnectionReader.of(sabotContext.getClasspathScan(), sabotConfig), () -> allocator, diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/FakeVersionedPlugin.java b/dac/backend/src/test/java/com/dremio/dac/explore/FakeVersionedPlugin.java deleted file mode 100644 index 3fbcc5a99b..0000000000 --- a/dac/backend/src/test/java/com/dremio/dac/explore/FakeVersionedPlugin.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.dac.explore; - -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; - -import com.dremio.connector.ConnectorException; -import com.dremio.connector.metadata.DatasetHandle; -import com.dremio.connector.metadata.DatasetMetadata; -import com.dremio.connector.metadata.EntityPath; -import com.dremio.connector.metadata.GetDatasetOption; -import com.dremio.connector.metadata.GetMetadataOption; -import com.dremio.connector.metadata.ListPartitionChunkOption; -import com.dremio.connector.metadata.PartitionChunkListing; -import com.dremio.exec.catalog.DataplaneTableInfo; -import com.dremio.exec.catalog.DataplaneViewInfo; -import com.dremio.exec.catalog.ResolvedVersionContext; -import com.dremio.exec.catalog.VersionContext; -import com.dremio.exec.catalog.VersionedPlugin; -import com.dremio.exec.planner.logical.ViewTable; -import com.dremio.exec.store.SchemaConfig; -import com.dremio.exec.store.StoragePlugin; -import com.dremio.exec.store.StoragePluginRulesFactory; -import com.dremio.plugins.ExternalNamespaceEntry; -import com.dremio.service.catalog.Schema; -import com.dremio.service.catalog.SearchQuery; -import com.dremio.service.catalog.Table; -import com.dremio.service.catalog.TableSchema; -import com.dremio.service.catalog.View; -import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.SourceState; -import com.dremio.service.namespace.capabilities.SourceCapabilities; -import com.dremio.service.namespace.dataset.proto.DatasetConfig; - -/** - * Fake Versioned Plugin class for test - */ -public class FakeVersionedPlugin implements VersionedPlugin, StoragePlugin { - @Override - public Optional getDatasetHandle(EntityPath datasetPath, GetDatasetOption... 
options) throws ConnectorException { - return Optional.empty(); - } - - @Override - public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, ListPartitionChunkOption... options) throws ConnectorException { - return null; - } - - @Override - public DatasetMetadata getDatasetMetadata(DatasetHandle datasetHandle, PartitionChunkListing chunkListing, GetMetadataOption... options) throws ConnectorException { - return null; - } - - @Override - public boolean containerExists(EntityPath containerPath) { - return false; - } - - @Override - public ResolvedVersionContext resolveVersionContext(VersionContext versionContext) { - return null; - } - - @Override - public Stream listTablesIncludeNested(List catalogPath, VersionContext version) { - return null; - } - - @Override - public Stream listViewsIncludeNested(List catalogPath, VersionContext version) { - return null; - } - - @Override - public EntityType getType(List key, ResolvedVersionContext version) { - return null; - } - - @Override - public Stream getAllTableInfo() { - return null; - } - - @Override - public Stream getAllViewInfo() { - return null; - } - - @Override - public Stream getAllInformationSchemaTableInfo(SearchQuery searchQuery) { - return null; - } - - @Override - public Stream getAllInformationSchemaViewInfo(SearchQuery searchQuery) { - return null; - } - - @Override - public Stream getAllInformationSchemaSchemataInfo(SearchQuery searchQuery) { - return null; - } - - @Override - public Stream getAllInformationSchemaColumnInfo(SearchQuery searchQuery) { - return null; - } - - @Override - public boolean hasAccessPermission(String user, NamespaceKey key, DatasetConfig datasetConfig) { - return false; - } - - @Override - public SourceState getState() { - return null; - } - - @Override - public SourceCapabilities getSourceCapabilities() { - return null; - } - - @Override - public ViewTable getView(List tableSchemaPath, SchemaConfig schemaConfig) { - return null; - } - - @Override - public Class getRulesFactoryClass() { - return null; - } - - @Override - public void start() throws IOException { - - } - - @Override - public void close() throws Exception { - - } -} diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestAccelerationSettingsFromAPI.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestAccelerationSettingsFromAPI.java new file mode 100644 index 0000000000..4ea0e9e909 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestAccelerationSettingsFromAPI.java @@ -0,0 +1,258 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.explore; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.ws.rs.core.SecurityContext; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.server.BufferAllocatorFactory; +import com.dremio.dac.service.collaboration.CollaborationHelper; +import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.dac.service.errors.ClientErrorException; +import com.dremio.dac.service.errors.DatasetNotFoundException; +import com.dremio.dac.service.reflection.ReflectionServiceHelper; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.service.jobs.JobsService; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.dataset.proto.AccelerationSettings; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.DatasetType; +import com.dremio.service.namespace.dataset.proto.RefreshMethod; +import com.dremio.service.namespace.physicaldataset.proto.AccelerationSettingsDescriptor; +import com.dremio.service.reflection.ReflectionSettings; +import com.dremio.test.DremioTest; + +/** + * Tests for acceleration settings. 
+ */ +public class TestAccelerationSettingsFromAPI extends DremioTest { + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.LENIENT); + + @Mock private DatasetVersionMutator datasetService; + @Mock private JobsService jobsService; + @Mock private SecurityContext securityContext; + @Mock private ReflectionServiceHelper reflectionServiceHelper; + @Mock private NamespaceService namespaceService; + @Mock private CollaborationHelper collaborationService; + @Mock private BufferAllocatorFactory bufferAllocatorFactory; + @Mock private ReflectionSettings reflectionSettings; + @Mock private Catalog catalog; + @Mock private DataplanePlugin dataplanePlugin; + @Mock private DremioTable dremioTable; + @Mock private DatasetPath datasetPath; + + private List path = Arrays.asList("versioned", "table"); + + private AccelerationSettings accelerationSettings = + new AccelerationSettings() + .setMethod(RefreshMethod.FULL) + .setRefreshPeriod(TimeUnit.HOURS.toMillis(1)) + .setGracePeriod(TimeUnit.HOURS.toMillis(2)) + .setNeverExpire(false) + .setNeverRefresh(false); + + private AccelerationSettingsDescriptor accelerationSettingsDescriptor = + new AccelerationSettingsDescriptor() + .setMethod(RefreshMethod.FULL) + .setAccelerationRefreshPeriod(TimeUnit.HOURS.toMillis(1)) + .setAccelerationGracePeriod(TimeUnit.HOURS.toMillis(2)) + .setAccelerationNeverExpire(false) + .setAccelerationNeverRefresh(false); + + @InjectMocks private DatasetResource datasetResource; + + @Before + public void setup() { + when(datasetService.getCatalog()).thenReturn(catalog); + when(catalog.getSource(anyString())).thenReturn(dataplanePlugin); + when(datasetPath.toPathList()).thenReturn(path); + when(datasetPath.toPathString()).thenReturn(String.join(".", path)); + when(datasetPath.toNamespaceKey()).thenReturn(new NamespaceKey(path)); + } + + @Test + public void getAccelerationSettingsWithNullReference() throws Exception { + assertThatThrownBy(() -> datasetResource.getAccelerationSettings(null, null)) + .isInstanceOf(ClientErrorException.class) + .hasMessageContaining("Missing a versionType/versionValue"); + } + + @Test + public void getAccelerationSettingsNotFoundTable() throws Exception { + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(null); + + assertThatThrownBy(() -> datasetResource.getAccelerationSettings("BRANCH", "main")) + .isInstanceOf(DatasetNotFoundException.class) + .hasMessageContaining("not found"); + } + + @Test + public void getAccelerationSettingsForView() throws Exception { + final DatasetConfig datasetConfig = new DatasetConfig().setType(DatasetType.VIRTUAL_DATASET); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + assertThatThrownBy(() -> datasetResource.getAccelerationSettings("BRANCH", "main")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("only to physical dataset"); + } + + @Test + public void getAccelerationSettings() throws Exception { + final DatasetConfig datasetConfig = new DatasetConfig().setType(DatasetType.PHYSICAL_DATASET); + + when(reflectionServiceHelper.getReflectionSettings()).thenReturn(reflectionSettings); + when(reflectionSettings.getReflectionSettings(any(CatalogEntityKey.class))) + .thenReturn(accelerationSettings); + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + 
when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + final AccelerationSettingsDescriptor descriptor = + datasetResource.getAccelerationSettings("BRANCH", "main"); + + assertThat(descriptor).isNotNull(); + assertThat(descriptor.getMethod()).isEqualTo(RefreshMethod.FULL); + assertThat(descriptor.getAccelerationNeverRefresh()).isFalse(); + assertThat(descriptor.getAccelerationRefreshPeriod()).isEqualTo(TimeUnit.HOURS.toMillis(1)); + assertThat(descriptor.getAccelerationNeverExpire()).isFalse(); + assertThat(descriptor.getAccelerationGracePeriod()).isEqualTo(TimeUnit.HOURS.toMillis(2)); + } + + @Test + public void updateAccelerationSettingsNullDescriptor() throws Exception { + assertThatThrownBy(() -> datasetResource.updateAccelerationSettings(null, null, null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("descriptor is required"); + } + + @Test + public void updateAccelerationSettingsNonRefreshPeriod() throws Exception { + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + new AccelerationSettingsDescriptor(), null, null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("refreshPeriod is required"); + } + + @Test + public void updateAccelerationSettingsNonGracePeriod() throws Exception { + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + new AccelerationSettingsDescriptor() + .setAccelerationRefreshPeriod(TimeUnit.HOURS.toMillis(1)), + null, + null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("gracePeriod is required"); + } + + @Test + public void updateAccelerationSettingsInvalidPeriod() throws Exception { + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + new AccelerationSettingsDescriptor() + .setAccelerationRefreshPeriod(TimeUnit.HOURS.toMillis(2)) + .setAccelerationGracePeriod(TimeUnit.HOURS.toMillis(1)), + null, + null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("refreshPeriod must be less than gracePeriod"); + } + + @Test + public void updateAccelerationSettingsNullReference() throws Exception { + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + accelerationSettingsDescriptor, null, null)) + .isInstanceOf(ClientErrorException.class) + .hasMessageContaining("Missing a versionType/versionValue"); + } + + @Test + public void updateAccelerationSettingsNotFoundTable() throws Exception { + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(null); + + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + accelerationSettingsDescriptor, "BRANCH", "main")) + .isInstanceOf(DatasetNotFoundException.class) + .hasMessageContaining("not found"); + } + + @Test + public void updateAccelerationSettingsForView() throws Exception { + final DatasetConfig datasetConfig = new DatasetConfig().setType(DatasetType.VIRTUAL_DATASET); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + assertThatThrownBy( + () -> + datasetResource.updateAccelerationSettings( + accelerationSettingsDescriptor, "BRANCH", "main")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("only to physical dataset"); + } + + @Test + public void updateAccelerationSettings() throws Exception { + final DatasetConfig datasetConfig = new 
DatasetConfig().setType(DatasetType.PHYSICAL_DATASET); + + when(reflectionServiceHelper.getReflectionSettings()).thenReturn(reflectionSettings); + when(reflectionSettings.getReflectionSettings(any(CatalogEntityKey.class))) + .thenReturn(accelerationSettings); + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + datasetResource.updateAccelerationSettings(accelerationSettingsDescriptor, "BRANCH", "main"); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetResource.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetResource.java new file mode 100644 index 0000000000..35c20da39a --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetResource.java @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.explore; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.server.BufferAllocatorFactory; +import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.planner.sql.parser.SqlGrant; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.service.namespace.NamespaceKey; + +/** + * Unit Tests for {@link DatasetResource} + */ +public class TestDatasetResource { + + @Rule + public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + @Mock + private Catalog catalog; + + @Mock + private DatasetVersionMutator datasetService; + + @Mock + private BufferAllocatorFactory bufferAllocatorFactory; + + @Mock + private DataplanePlugin dataplanePlugin; + + private DatasetResource datasetResource; + + private DatasetPath datasetPath; + + @Before + public void setup() { + datasetPath = new DatasetPath(Arrays.asList("source", "v1")); + datasetResource = new DatasetResource( + null, datasetService, null, null, null, null, datasetPath, bufferAllocatorFactory); + } + + @Test + public void testValidatePrivilegeWithinDroppingViewForVersionedSource() { + when(datasetService.getCatalog()).thenReturn(catalog); + when(catalog.getSource(Mockito.anyString())).thenReturn(dataplanePlugin); + try { + datasetResource.deleteDataset(null, "BRANCH", "main"); + } catch (Exception ex) { + // Ignoring this exception: the test only verifies the catalog.validatePrivilege call below + } + verify(catalog).validatePrivilege(new NamespaceKey(datasetPath.toPathList()), SqlGrant.Privilege.ALTER); + } +} diff --git
a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetTool.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetTool.java index d5c6d68093..ac9377d043 100644 --- a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetTool.java +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetTool.java @@ -21,10 +21,14 @@ import static org.mockito.Mockito.when; import java.security.Principal; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.ws.rs.core.SecurityContext; @@ -34,6 +38,8 @@ import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.History; import com.dremio.dac.explore.model.VersionContextReq; +import com.dremio.dac.proto.model.dataset.From; +import com.dremio.dac.proto.model.dataset.FromTable; import com.dremio.dac.proto.model.dataset.NameDatasetRef; import com.dremio.dac.proto.model.dataset.Transform; import com.dremio.dac.proto.model.dataset.TransformType; @@ -42,6 +48,11 @@ import com.dremio.dac.service.datasets.DatasetVersionMutator; import com.dremio.dac.service.errors.DatasetVersionNotFoundException; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.exec.catalog.VersionedPlugin; import com.dremio.exec.store.StoragePlugin; import com.dremio.service.jobs.JobsService; import com.dremio.service.jobs.JobsVersionContext; @@ -88,8 +99,7 @@ public void testRewriteHistory() throws Exception { VirtualDatasetUI newTipDataset = buildDataset(newDatasetPath, tip, null); // Set previous version to null newTipDataset.setPreviousVersion(tipDataset.getPreviousVersion()); - VirtualDatasetUI newHistory1Dataset = buildDataset(newDatasetPath, history1, history2.getVersion()); - VirtualDatasetUI newHistory2Dataset = buildDataset(newDatasetPath, history2, null); + VirtualDatasetUI newHistory1Dataset = buildDataset(newDatasetPath, history1, history2.getVersion()); VirtualDatasetUI newHistory2Dataset = buildDataset(newDatasetPath, history2, null); DatasetVersionMutator datasetVersionMutator = mock(DatasetVersionMutator.class); when(datasetVersionMutator.getVersion(datasetPath, tip)).thenReturn(tipDataset); @@ -129,6 +139,40 @@ public void testSourceVersionMapping() { assertThat(datasetTool.createSourceVersionMapping(references)).usingRecursiveComparison().isEqualTo(expectedSourceVersionMapping); } + @Test + public void testUpdateVirtualDatasetId() { + final Catalog catalog = mock(Catalog.class); + final StoragePlugin plugin = mock(FakeVersionedPlugin.class); + final String sourceName = "source1"; + final List tableKey = + Stream.of(sourceName, "table").collect(Collectors.toCollection(ArrayList::new)); + final DatasetPath datasetPath = DatasetTool.TMP_DATASET_PATH; + final String contentId = "8d43f534-b97e-48e8-9b39-35e6309ed110"; + final From from = new FromTable(String.join(".", tableKey)).wrap(); + final Map references = + Collections.singletonMap( + sourceName, + new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, "branch")); + final Map versionContextMapping = + DatasetResourceUtils.createSourceVersionMapping(references); + final VersionedDatasetId versionedDatasetId = + VersionedDatasetId.newBuilder() + .setTableKey(tableKey) + 
.setContentId(contentId) + .setTableVersionContext(new TableVersionContext(TableVersionType.BRANCH, "branch")) + .build(); + + when(catalog.getSource(sourceName)).thenReturn(plugin); + when(catalog.resolveCatalog(versionContextMapping)).thenReturn(catalog); + when(catalog.getDatasetId(any())).thenReturn(versionedDatasetId.asString()); + + final VirtualDatasetUI vds = + DatasetTool.newDatasetBeforeQueryMetadata( + datasetPath, null, from, null, null, catalog, references); + + assertThat(vds.getId()).isEqualTo(versionedDatasetId.asString()); + } + private DatasetTool buildDatasetTool(DatasetVersionMutator datasetVersionMutator) { final Catalog catalog = mock(Catalog.class); when(datasetVersionMutator.getCatalog()).thenReturn(catalog); @@ -186,4 +230,10 @@ private VirtualDatasetUI buildDataset(DatasetPath datasetPath, DatasetVersion ve return dataset; } + + /** + * Fake Versioned Plugin interface for test + */ + private interface FakeVersionedPlugin extends VersionedPlugin, StoragePlugin { + } } diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetUI.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetUI.java new file mode 100644 index 0000000000..ea6295e910 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetUI.java @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.explore; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.service.namespace.dataset.DatasetVersion; +import com.dremio.service.namespace.dataset.proto.DatasetType; + +public class TestDatasetUI { + private final List<String> homeDatasetFullPath = Arrays.asList("@dremio", "table1"); + private final List<String> spaceDatasetFullPath = Arrays.asList("view1"); + private final List<String> sourceDatasetFullPath = Arrays.asList("other", "table1"); + private final List<String> arcticPhysicalDatasetFullPath = Arrays.asList("versioned", "table1"); + private final List<String> arcticVirtualDatasetFullPath = Arrays.asList("versioned", "view1"); + + private final String branchName = "main"; + private final DatasetVersion datasetVersion = DatasetVersion.MAX_VERSION; + private final TableVersionContext versionContext = + TableVersionContext.of(VersionContext.ofBranch(branchName)); + private final VersionedDatasetId arcticPhysicalDatasetId = + VersionedDatasetId.newBuilder() + .setTableKey(arcticPhysicalDatasetFullPath) + .setContentId("5befad6b-9d77-4e36-a26c-a4b0c4eb0d08") + .setTableVersionContext(versionContext) + .build(); + private final VersionedDatasetId arcticVirtualDatasetId = + VersionedDatasetId.newBuilder() + .setTableKey(arcticVirtualDatasetFullPath) + .setContentId("5befad6b-9d77-4e36-a26c-a4b0c4eb0d09") + .setTableVersionContext(versionContext) + .build(); + + @Test + public void testCreateLinksForArcticPhysicalDataset() throws Exception { + final Map<String, String> linksMap = + DatasetUI.createLinks( + arcticPhysicalDatasetFullPath, + arcticPhysicalDatasetFullPath, + datasetVersion, + false, + arcticPhysicalDatasetId.asString(), + DatasetType.PHYSICAL_DATASET); + + assertThat(linksMap.get("edit")) + .isEqualTo("/source/versioned/table1?mode=edit&version=7fffffffffffffff"); + assertThat(linksMap.get("self")).isEqualTo("/source/versioned/table1?version=7fffffffffffffff"); + } + + @Test + public void testCreateLinksForArcticVirtualDataset() throws Exception { + final Map<String, String> linksMap = + DatasetUI.createLinks( + arcticVirtualDatasetFullPath, + arcticVirtualDatasetFullPath, + datasetVersion, + false, + arcticVirtualDatasetId.asString(), + DatasetType.VIRTUAL_DATASET); + + assertThat(linksMap.get("edit")) + .isEqualTo("/source/versioned/view1?mode=edit&version=7fffffffffffffff"); + assertThat(linksMap.get("self")).isEqualTo("/source/versioned/view1?version=7fffffffffffffff"); + } + + @Test + public void testCreateLinksForHomePhysicalDataset() throws Exception { + final Map<String, String> linksMap = + DatasetUI.createLinks( + homeDatasetFullPath, + homeDatasetFullPath, + datasetVersion, + true, + null, + DatasetType.PHYSICAL_DATASET); + + assertThat(linksMap.get("edit")) + .isEqualTo("/home/%40dremio/table1?mode=edit&version=7fffffffffffffff"); + assertThat(linksMap.get("self")).isEqualTo("/home/%40dremio/table1?version=7fffffffffffffff"); + } + + @Test + public void testCreateLinksForSourcePhysicalDataset() throws Exception { + final Map<String, String> linksMap = + DatasetUI.createLinks( + sourceDatasetFullPath, + sourceDatasetFullPath, + datasetVersion, + true, + null, + DatasetType.PHYSICAL_DATASET); + + assertThat(linksMap.get("edit")) + .isEqualTo("/source/other/table1?mode=edit&version=7fffffffffffffff");
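+ // 7fffffffffffffff in these expected links is Long.MAX_VALUE in hex, i.e. the rendering of DatasetVersion.MAX_VERSION assigned to datasetVersion above.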
+ assertThat(linksMap.get("self")).isEqualTo("/source/other/table1?version=7fffffffffffffff"); + } + + @Test + public void testCreateLinksForSpaceVirtualDataset() throws Exception { + final Map<String, String> linksMap = + DatasetUI.createLinks( + spaceDatasetFullPath, + spaceDatasetFullPath, + datasetVersion, + false, + null, + DatasetType.VIRTUAL_DATASET); + + assertThat(linksMap.get("edit")).isEqualTo("/space/view1/?mode=edit&version=7fffffffffffffff"); + assertThat(linksMap.get("self")).isEqualTo("/space/view1/?version=7fffffffffffffff"); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetVersionResource.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetVersionResource.java index bed7a19d0a..00746dcf47 100644 --- a/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetVersionResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestDatasetVersionResource.java @@ -15,6 +15,7 @@ */ package com.dremio.dac.explore; +import static com.dremio.dac.server.JobsServiceTestUtils.submitJobAndGetData; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; @@ -30,6 +31,7 @@ import javax.ws.rs.core.GenericType; import javax.ws.rs.core.Response; +import org.apache.arrow.memory.BufferAllocator; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -41,6 +43,7 @@ import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.explore.model.NewUntitledFromParentRequest; import com.dremio.dac.explore.model.VersionContextReq; +import com.dremio.dac.model.job.JobDataFragment; import com.dremio.dac.proto.model.dataset.SourceVersionReference; import com.dremio.dac.proto.model.dataset.TransformUpdateSQL; import com.dremio.dac.proto.model.dataset.VersionContext; @@ -50,6 +53,9 @@ import com.dremio.dac.server.BaseTestServer; import com.dremio.dac.service.datasets.DatasetVersionMutator; import com.dremio.dac.service.errors.InvalidQueryException; +import com.dremio.service.jobs.JobRequest; +import com.dremio.service.jobs.JobsService; +import com.dremio.service.jobs.SqlQuery; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.DatasetVersion; import com.dremio.service.namespace.space.proto.SpaceConfig; @@ -203,6 +209,66 @@ public void testPreviewApiWithReferences() throws Exception { assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); } + @Test + public void testAsyncTransformAndPreviewApiWithReferences() throws Exception { + // create a VDS in the space + Dataset newVDS = createVDS(Arrays.asList("dsvTest", "preview_VDS"),"select * from sys.version"); + Dataset vds = expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(newVDS)), new GenericType<Dataset>() {}); + + // create a derivation of the VDS + String parentDataset = String.join(".", vds.getPath()); + DatasetVersion datasetVersion = DatasetVersion.newVersion(); + WebTarget target = getAPIv2() + .path("datasets") + .path("new_untitled") + .queryParam("parentDataset", parentDataset) + .queryParam("newVersion", datasetVersion) + .queryParam("limit", 120); + Map<String, VersionContextReq> references = new HashMap<>(); + references.put("source1", new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, "branch")); + references.put("source2", new VersionContextReq(VersionContextReq.VersionContextType.TAG, "tag")); + references.put("source3", new
VersionContextReq(VersionContextReq.VersionContextType.COMMIT, "d0628f078890fec234b98b873f9e1f3cd140988a")); + InitialPreviewResponse initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(new NewUntitledFromParentRequest(references))), + new GenericType<InitialPreviewResponse>() {}); + assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); + + // save the derivation as a new VDS + target = getAPIv2() + .path("dataset") + .path("tmp.UNTITLED") + .path("version") + .path(datasetVersion.getVersion()) + .path("save") + .queryParam("as", "dsvTest.preview_VDS2"); + DatasetUIWithHistory dswh = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<DatasetUIWithHistory>() {}); + + // modify the sql of the new VDS by doing a transform + DatasetVersion datasetVersion2 = DatasetVersion.newVersion(); + String dsPath = String.join(".", dswh.getDataset().getFullPath()); + List<SourceVersionReference> sourceVersionReferenceList = new ArrayList<>(); + VersionContext versionContext1 = new VersionContext(VersionContextType.BRANCH, "branch"); + VersionContext versionContext2 = new VersionContext(VersionContextType.TAG, "tag"); + VersionContext versionContext3 = new VersionContext(VersionContextType.COMMIT, "d0628f078890fec234b98b873f9e1f3cd140988a"); + sourceVersionReferenceList.add(new SourceVersionReference("source1", versionContext1)); + sourceVersionReferenceList.add(new SourceVersionReference("source2", versionContext2)); + sourceVersionReferenceList.add(new SourceVersionReference("source3", versionContext3)); + + target = getAPIv2() + .path("dataset") + .path(dsPath) + .path("version") + .path(dswh.getDataset().getDatasetVersion().getVersion()) + .path("transform_and_preview") + .queryParam("newVersion", datasetVersion2); + + TransformUpdateSQL transformSql = new TransformUpdateSQL(); + transformSql.setSql("SELECT \"version\" FROM dsvTest.preview_VDS"); + transformSql.setReferencesList(sourceVersionReferenceList); + + initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(transformSql)), new GenericType<InitialPreviewResponse>() {}); + assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); + } + @Test public void testTransformAndRunApiWithReferences() throws Exception { Dataset newVDS = createVDS(Arrays.asList("dsvTest", "transformAndRunVDS"),"select * from sys.version"); @@ -276,6 +342,79 @@ public void testTransformAndRunApiWithReferences() throws Exception { assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); } + @Test + public void testAsyncTransformAndRunApiWithReferences() throws Exception { + Dataset newVDS = createVDS(Arrays.asList("dsvTest", "transformAndRun_VDS"),"select * from sys.version"); + Dataset vds = expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(newVDS)), new GenericType<Dataset>() {}); + + // create a derivation of the VDS + String parentDataset = String.join(".", vds.getPath()); + DatasetVersion datasetVersion = DatasetVersion.newVersion(); + WebTarget target = getAPIv2() + .path("datasets") + .path("new_untitled") + .queryParam("parentDataset", parentDataset) + .queryParam("newVersion", datasetVersion) + .queryParam("limit", 120); + expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<InitialPreviewResponse>() {}); + + target = getAPIv2() + .path("dataset") + .path("tmp.UNTITLED") + .path("version") + .path(datasetVersion.getVersion()) + .path("save") + .queryParam("as",
"dsvTest.transformAndRun_VDS2"); + DatasetUIWithHistory dswh = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType() {}); + String dsPath = String.join(".", dswh.getDataset().getFullPath()); + + //set references payload + datasetVersion = DatasetVersion.newVersion(); + target = getAPIv2() + .path("dataset") + .path(dsPath) + .path("version") + .path(dswh.getDataset().getDatasetVersion().getVersion()) + .path("transform_and_run") + .queryParam("newVersion", datasetVersion); + List sourceVersionReferenceList = new ArrayList<>(); + VersionContext versionContext1 = new VersionContext(VersionContextType.BRANCH, "branch"); + VersionContext versionContext2 = new VersionContext(VersionContextType.TAG, "tag"); + VersionContext versionContext3 = new VersionContext(VersionContextType.COMMIT, "d0628f078890fec234b98b873f9e1f3cd140988a"); + sourceVersionReferenceList.add(new SourceVersionReference("source1", versionContext1)); + sourceVersionReferenceList.add(new SourceVersionReference("source2", versionContext2)); + sourceVersionReferenceList.add(new SourceVersionReference("source3", versionContext3)); + + //set references payload + TransformUpdateSQL transformSql1 = new TransformUpdateSQL(); + transformSql1.setSql("SELECT \"version\" FROM dsvTest.transformAndRun_VDS"); + transformSql1.setReferencesList(sourceVersionReferenceList); + + InitialPreviewResponse initialPreviewResponse = expectSuccess(getBuilder(target).buildPost( + Entity.json(transformSql1)), new GenericType() {}); + + Map references = new HashMap<>(); + references.put("source1", new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, "branch")); + references.put("source2", new VersionContextReq(VersionContextReq.VersionContextType.TAG, "tag")); + references.put("source3", new VersionContextReq(VersionContextReq.VersionContextType.COMMIT, "d0628f078890fec234b98b873f9e1f3cd140988a")); + assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); + + //set null references payload + references = new HashMap<>(); + TransformUpdateSQL transformSql2 = new TransformUpdateSQL(); + transformSql2.setSql("SELECT \"version\" FROM dsvTest.transformAndRun_VDS"); + initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(transformSql2)), new GenericType() {}); + assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); + + //set empty references payload + references = new HashMap<>(); + TransformUpdateSQL transformSql3 = new TransformUpdateSQL(); + transformSql3.setSql("SELECT \"version\" FROM dsvTest.transformAndRun_VDS"); + transformSql3.setReferencesList(new ArrayList<>()); + initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(transformSql3)), new GenericType() {}); + assertThat(initialPreviewResponse.getDataset().getReferences()).usingRecursiveComparison().isEqualTo(references); + } + @Test public void testVersionHistory() throws Exception { // Test for DX-12601 @@ -562,6 +701,27 @@ public void testSaveAfterTransformWithInvalidSql() throws Exception { } + /** + * Views created through the Catalog API (as opposed to CREATE VIEW DDL statement) are allowed to contain + * duplicate column names. Verify that we can still select from such a view. 
See DX-63350 + * @throws Exception + */ + @Test + public void testDuplicateColumns() { + BufferAllocator allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); + Dataset newVDS = createVDS(Arrays.asList("dsvTest", "testDuplicateColumns"),"select n_name, n_name from cp.\"tpch/nation.parquet\" order by 1 asc"); + expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(newVDS)), new GenericType<Dataset>() {}); + + try (final JobDataFragment data = submitJobAndGetData(l(JobsService.class), + JobRequest.newBuilder().setSqlQuery(new SqlQuery("select * from dsvTest.testDuplicateColumns", DEFAULT_USERNAME)).build(), 0, 20, allocator)) { + assertEquals(20, data.getReturnedRowCount()); + assertEquals("ALGERIA", data.extractValue("n_name", 0).toString()); + assertEquals("ALGERIA", data.extractValue("n_name0", 0).toString()); + } finally { + allocator.close(); + } + } + private Dataset createVDS(List<String> path, String sql) { return new Dataset( null, diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGenerator.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGenerator.java index 75685e03ec..bca2230318 100644 --- a/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGenerator.java +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGenerator.java @@ -819,7 +819,7 @@ public void testReplaceRange() { private TransformResult transform(TransformBase tb, VirtualDatasetState state) { QueryExecutor executor = new QueryExecutor(null, null, null){ @Override - public List<String> getColumnList(String username, DatasetPath path, List<SourceVersionReference> sourceVersionReferenceList) { + public List<String> getColumnList(DatasetPath path, List<SourceVersionReference> sourceVersionReferenceList) { return asList("bar", "baz"); } }; diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGeneratorForVersionedSources.java b/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGeneratorForVersionedSources.java index 9a32c5d75c..0ee1b1424d 100644 --- a/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGeneratorForVersionedSources.java +++ b/dac/backend/src/test/java/com/dremio/dac/explore/TestSQLGeneratorForVersionedSources.java @@ -19,6 +19,7 @@ import static com.dremio.dac.proto.model.dataset.MeasureType.Sum; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.ArrayList; @@ -28,7 +29,6 @@ import org.junit.Rule; import org.junit.Test; import org.mockito.Mock; -import org.mockito.Mockito; import org.mockito.junit.MockitoJUnit; import org.mockito.junit.MockitoRule; import org.mockito.quality.Strictness; @@ -64,6 +64,7 @@ import com.dremio.dac.proto.model.dataset.VersionContext; import com.dremio.dac.proto.model.dataset.VersionContextType; import com.dremio.dac.proto.model.dataset.VirtualDatasetState; +import com.dremio.exec.catalog.VersionedPlugin; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.StoragePlugin; @@ -99,8 +100,8 @@ public void testJoin() { .setFrom(ref) .setReferenceList(sourceVersionReferenceListForRight); - when(catalogService.getSource("versioned_source1")).thenReturn(new FakeVersionedPlugin()); - when(catalogService.getSource("versioned_source2")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source1")).thenReturn(mock(FakeVersionedPlugin.class)); +
when(catalogService.getSource("versioned_source2")).thenReturn(mock(FakeVersionedPlugin.class)); when(catalogService.getSource("non_versioned_source")).thenReturn(storagePlugin); //Both left and right tables are versioned sources @@ -152,7 +153,7 @@ public void testJoin() { public void testOrderBy() { List sourceVersionReferenceList = new ArrayList<>(); sourceVersionReferenceList.add(new SourceVersionReference("versioned_source", new VersionContext(VersionContextType.BRANCH, "branch"))); - when(catalogService.getSource("versioned_source")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source")).thenReturn(mock(FakeVersionedPlugin.class)); VirtualDatasetState state = new VirtualDatasetState() .setFrom(new FromTable("versioned_source.tables.1234.test").wrap()) .setReferenceList(sourceVersionReferenceList) @@ -166,7 +167,7 @@ public void testFilters() { List sourceVersionReferenceList = new ArrayList<>(); sourceVersionReferenceList.add(new SourceVersionReference("versioned_source", new VersionContext(VersionContextType.BRANCH, "branch"))); From fromTable = new FromTable("versioned_source.parentDS").wrap(); - when(catalogService.getSource("versioned_source")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source")).thenReturn(mock(FakeVersionedPlugin.class)); VirtualDatasetState state = new VirtualDatasetState() .setFrom(fromTable) .setReferenceList(sourceVersionReferenceList); @@ -196,7 +197,7 @@ public void testReplaceValue() { List sourceVersionReferenceList = new ArrayList<>(); sourceVersionReferenceList.add(new SourceVersionReference("versioned_source", new VersionContext(VersionContextType.BRANCH, "branch"))); From fromTable = new FromTable("versioned_source.parentDS").wrap(); - when(catalogService.getSource("versioned_source")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source")).thenReturn(mock(FakeVersionedPlugin.class)); VirtualDatasetState state = new VirtualDatasetState() .setFrom(fromTable) .setReferenceList(sourceVersionReferenceList); @@ -227,7 +228,7 @@ public void testExcludeValue() { List sourceVersionReferenceList = new ArrayList<>(); sourceVersionReferenceList.add(new SourceVersionReference("versioned_source", new VersionContext(VersionContextType.BRANCH, "branch"))); From fromTable = new FromTable("versioned_source.parentDS").wrap(); - when(catalogService.getSource("versioned_source")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source")).thenReturn(mock(FakeVersionedPlugin.class)); VirtualDatasetState state = new VirtualDatasetState() .setFrom(fromTable) .setReferenceList(sourceVersionReferenceList); @@ -270,7 +271,7 @@ public void testGroupBy() { List sourceVersionReferenceList = new ArrayList<>(); sourceVersionReferenceList.add(new SourceVersionReference("versioned_source", new VersionContext(VersionContextType.BRANCH, "branch"))); From fromTable = new FromTable("versioned_source.parentDS").wrap(); - when(catalogService.getSource("versioned_source")).thenReturn(new FakeVersionedPlugin()); + when(catalogService.getSource("versioned_source")).thenReturn(mock(FakeVersionedPlugin.class)); VirtualDatasetState state = new VirtualDatasetState() .setFrom(fromTable) .setReferenceList(sourceVersionReferenceList); @@ -309,7 +310,7 @@ public void testGroupBy() { private TransformResult transform(TransformBase tb, VirtualDatasetState state) { QueryExecutor executor = new QueryExecutor(null, null, null){ @Override - public List 
getColumnList(String username, DatasetPath path, List sourceVersionReferenceList) { + public List getColumnList(DatasetPath path, List sourceVersionReferenceList) { return asList("bar", "baz"); } }; @@ -317,7 +318,7 @@ public List getColumnList(String username, DatasetPath path, List getColumnList(String username, DatasetPath path, List sourceVersionReferenceList) { + public List getColumnList(DatasetPath path, List sourceVersionReferenceList) { return asList("bar", "baz"); } }; diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/bi/TestTableauMessageBodyGenerator.java b/dac/backend/src/test/java/com/dremio/dac/explore/bi/TestTableauMessageBodyGenerator.java index 61e9186584..c430bf9eed 100644 --- a/dac/backend/src/test/java/com/dremio/dac/explore/bi/TestTableauMessageBodyGenerator.java +++ b/dac/backend/src/test/java/com/dremio/dac/explore/bi/TestTableauMessageBodyGenerator.java @@ -47,7 +47,6 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; import org.glassfish.jersey.media.multipart.ContentDisposition; -import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; @@ -100,8 +99,9 @@ public static Object[] getTestCases() { private final String tableName; private final String customProperties; + @SuppressWarnings("checkstyle:VisibilityModifier") @Mock - private Configuration configuration; + protected Configuration configuration; @SuppressWarnings("checkstyle:VisibilityModifier") @Mock protected OptionManager optionManager; @@ -115,18 +115,14 @@ public TestTableauMessageBodyGenerator(String testName, String path, String tabl this.customProperties = customProperties; } - @Before - public void setUp() { - when(optionManager.getOption(EXTRA_CONNECTION_PROPERTIES)).thenReturn(customProperties); - } - - protected TableauMessageBodyGenerator getGenerator() { + protected TableauMessageBodyGenerator buildGenerator() { return new TableauMessageBodyGenerator(configuration, ENDPOINT, optionManager, config); } @Test public void verifyOutput() throws IOException, SAXException, ParserConfigurationException, ParseException { + when(optionManager.getOption(EXTRA_CONNECTION_PROPERTIES)).thenReturn(customProperties); when(optionManager.getOption(TABLEAU_EXPORT_TYPE)) .thenReturn(TableauExportType.ODBC.toString()); final DatasetConfig datasetConfig = new DatasetConfig(); @@ -138,7 +134,7 @@ public void verifyOutput() final MultivaluedMap httpHeaders = new MultivaluedHashMap<>(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final TableauMessageBodyGenerator generator = new TableauMessageBodyGenerator(configuration, ENDPOINT, optionManager, config); + final TableauMessageBodyGenerator generator = buildGenerator(); assertTrue(generator.isWriteable(datasetConfig.getClass(), null, null, WebServer.MediaType.APPLICATION_TDS_TYPE)); generator.writeTo(datasetConfig, DatasetConfig.class, null, new Annotation[] {}, WebServer.MediaType.APPLICATION_TDS_TYPE, httpHeaders, baos); @@ -154,10 +150,13 @@ public void verifyOutput() final Element connection = (Element) connections.item(0); assertEquals("genericodbc", connection.getAttribute("class")); assertEquals("Dremio Connector", connection.getAttribute("odbc-driver")); + + String expectedExtras = "AUTHENTICATIONTYPE=Basic Authentication;CONNECTIONTYPE=Direct;HOST=" + generator.getEndpoint().getAddress(); + String actualExtras = connection.getAttribute("odbc-connect-string-extras"); if (customProperties.isEmpty()) { - 
assertEquals("AUTHENTICATIONTYPE=Basic Authentication;CONNECTIONTYPE=Direct;HOST=foo", connection.getAttribute("odbc-connect-string-extras")); + assertEquals(expectedExtras, actualExtras); } else { - assertEquals(customProperties + ";AUTHENTICATIONTYPE=Basic Authentication;CONNECTIONTYPE=Direct;HOST=foo", connection.getAttribute("odbc-connect-string-extras")); + assertEquals(customProperties + ";" + expectedExtras, actualExtras); } assertEquals("DREMIO", connection.getAttribute("dbname")); assertEquals(path.toParentPath(), connection.getAttribute("schema")); @@ -263,7 +262,7 @@ protected Element verifySdkOutput(String properties, String sslmode) final MultivaluedMap httpHeaders = new MultivaluedHashMap<>(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final TableauMessageBodyGenerator tableauMessageBodyGenerator = getGenerator(); + final TableauMessageBodyGenerator tableauMessageBodyGenerator = buildGenerator(); assertTrue(tableauMessageBodyGenerator.isWriteable(datasetConfig.getClass(), null, null, WebServer.MediaType.APPLICATION_TDS_TYPE)); tableauMessageBodyGenerator.writeTo(datasetConfig, DatasetConfig.class, null, new Annotation[] {}, WebServer.MediaType.APPLICATION_TDS_TYPE, httpHeaders, baos); @@ -314,7 +313,7 @@ protected Element verifyFlightOutput(String properties, String sslmode) final MultivaluedMap httpHeaders = new MultivaluedHashMap<>(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final TableauMessageBodyGenerator tableauMessageBodyGenerator = getGenerator(); + final TableauMessageBodyGenerator tableauMessageBodyGenerator = buildGenerator(); assertTrue(tableauMessageBodyGenerator.isWriteable(datasetConfig.getClass(), null, null, WebServer.MediaType.APPLICATION_TDS_TYPE)); tableauMessageBodyGenerator.writeTo(datasetConfig, DatasetConfig.class, null, new Annotation[] {}, WebServer.MediaType.APPLICATION_TDS_TYPE, httpHeaders, baos); @@ -368,7 +367,7 @@ public void verifyNativeOutput() final MultivaluedMap httpHeaders = new MultivaluedHashMap<>(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final TableauMessageBodyGenerator generator = new TableauMessageBodyGenerator(configuration, ENDPOINT, optionManager, config); + final TableauMessageBodyGenerator generator = buildGenerator(); assertTrue(generator.isWriteable(datasetConfig.getClass(), null, null, WebServer.MediaType.APPLICATION_TDS_DRILL_TYPE)); generator.writeTo(datasetConfig, DatasetConfig.class, null, new Annotation[] {}, WebServer.MediaType.APPLICATION_TDS_DRILL_TYPE, httpHeaders, baos); @@ -383,8 +382,8 @@ public void verifyNativeOutput() assertEquals("drill", connection.getAttribute("class")); assertEquals("Direct", connection.getAttribute("connection-type")); - assertEquals("foo", connection.getAttribute("server")); - assertEquals("12345", connection.getAttribute("port")); + assertEquals(generator.getEndpoint().getAddress(), connection.getAttribute("server")); + assertEquals(generator.getEndpoint().getUserPort(), Integer.parseInt(connection.getAttribute("port"))); assertEquals(path.toParentPath(), connection.getAttribute("schema")); verifyRelationElement(connection); diff --git a/dac/backend/src/test/java/com/dremio/dac/explore/model/TestDatasetSummary.java b/dac/backend/src/test/java/com/dremio/dac/explore/model/TestDatasetSummary.java new file mode 100644 index 0000000000..9247f56890 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/explore/model/TestDatasetSummary.java @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.dac.explore.model;
+
+import java.util.Collections;
+
+import org.junit.Test;
+
+import com.dremio.service.namespace.dataset.proto.DatasetConfig;
+import com.dremio.service.namespace.dataset.proto.DatasetType;
+
+public class TestDatasetSummary {
+
+  @Test
+  public void testCreateNewInstance() {
+    DatasetSummary.newInstance(
+      new DatasetConfig().setType(DatasetType.PHYSICAL_DATASET),
+      0,
+      0,
+      Collections.emptyMap(),
+      Collections.emptyList(),
+      false,
+      null,
+      null);
+  }
+}
diff --git a/dac/backend/src/test/java/com/dremio/dac/metadata/TestFieldOriginExtractor.java b/dac/backend/src/test/java/com/dremio/dac/metadata/TestFieldOriginExtractor.java
index 772ec270c8..63ba236b9e 100644
--- a/dac/backend/src/test/java/com/dremio/dac/metadata/TestFieldOriginExtractor.java
+++ b/dac/backend/src/test/java/com/dremio/dac/metadata/TestFieldOriginExtractor.java
@@ -18,6 +18,7 @@ import static org.junit.Assert.assertEquals;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.rel.RelNode;
@@ -76,7 +77,8 @@ public void planValidated(RelDataType rowType, SqlNode node, long millisTaken) {
   }
 
   @Override
-  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) {
+  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after,
+      long millisTaken, Map<String, Long> timeBreakdownPerRule) {
     if (phase == PlannerPhase.LOGICAL) {
       fields = FieldOriginExtractor.getFieldOrigins(before, rowType);
     }
diff --git a/dac/backend/src/test/java/com/dremio/dac/model/sources/TestFormatTools.java b/dac/backend/src/test/java/com/dremio/dac/model/sources/TestFormatTools.java
index 37e4707d09..42c130801e 100644
--- a/dac/backend/src/test/java/com/dremio/dac/model/sources/TestFormatTools.java
+++ b/dac/backend/src/test/java/com/dremio/dac/model/sources/TestFormatTools.java
@@ -48,7 +48,7 @@
  * Test FormatTools via REST api explored by {@SourceResource}
  */
 public class TestFormatTools extends BaseTestServer {
-  private static ch.qos.logback.classic.Logger rootLogger = ((ch.qos.logback.classic.Logger)org.slf4j.LoggerFactory.getLogger("com.dremio"));
+  private static final ch.qos.logback.classic.Logger rootLogger = ((ch.qos.logback.classic.Logger)org.slf4j.LoggerFactory.getLogger("com.dremio"));
   private static Level originalLogLevel;
 
   @BeforeClass
diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetDependencyChanges.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetDependencyChanges.java
index 17ee4d8088..617e1d165a 100644
--- a/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetDependencyChanges.java
+++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetDependencyChanges.java
@@ -21,6 +21,7 @@ import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.Invocation;
+import javax.ws.rs.core.GenericType;
 
 import org.junit.Before;
 import
org.junit.Test; @@ -30,7 +31,6 @@ import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetUI; import com.dremio.dac.explore.model.InitialDataPreviewResponse; -import com.dremio.dac.model.spaces.Space; import com.dremio.dac.server.ApiErrorModel; import com.dremio.dac.server.BaseTestServer; import com.dremio.dac.server.FamilyExpectation; @@ -188,8 +188,7 @@ public static Iterable data() { @Test public void testDatasetDependencyChange() { // Create initial dataset - expectSuccess(getBuilder(getAPIv2().path("space/spaceCreateDataset")) - .buildPut(Entity.json(new Space(null, "spaceCreateDataset", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "spaceCreateDataset", null, null, null))), new GenericType() {}); DatasetUI ds1 = createDatasetFromSQLAndSave(new DatasetPath("spaceCreateDataset.ds1"), datasetDefOne, Arrays.asList("cp")); diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetResource.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetResource.java index fcd2d4946f..8171ccfc7b 100644 --- a/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestDatasetResource.java @@ -20,6 +20,8 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import java.io.File; +import java.io.PrintWriter; import java.util.concurrent.TimeUnit; import javax.ws.rs.client.Entity; @@ -40,7 +42,6 @@ import com.dremio.dac.service.source.SourceService; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.NASConf; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.DatasetType; @@ -55,9 +56,12 @@ */ public class TestDatasetResource extends BaseTestServer { private static final String SOURCE_NAME = "mysrc"; + private static final String DATASET_NAME = "ds1.json"; + private static final String DATASET_NAME_2 = "ds2.json"; private static final long DEFAULT_REFRESH_PERIOD = TimeUnit.HOURS.toMillis(4); private static final long DEFAULT_GRACE_PERIOD = TimeUnit.HOURS.toMillis(12); - private static final DatasetPath DATASET_PATH = new DatasetPath(ImmutableList.of(SOURCE_NAME, "ds1")); + private static final DatasetPath DATASET_PATH = new DatasetPath(ImmutableList.of(SOURCE_NAME, DATASET_NAME)); + private static final DatasetPath DATASET_PATH_2 = new DatasetPath(ImmutableList.of(SOURCE_NAME, DATASET_NAME_2)); @Rule public final TemporaryFolder folder = new TemporaryFolder(); @@ -72,29 +76,40 @@ protected SourceService getSourceService() { return Preconditions.checkNotNull(service, "source service is required"); } - public void addPhysicalDataset(final DatasetPath path, final DatasetType type) throws Exception { - NamespaceKey datasetPath = path.toNamespaceKey(); - final DatasetConfig datasetConfig = new DatasetConfig(); - datasetConfig.setName(datasetPath.getName()); - datasetConfig.setType(type); - datasetConfig.setPhysicalDataset(new PhysicalDataset()); - getNamespaceService().tryCreatePhysicalDataset(datasetPath, datasetConfig); + private void createNewFile(String datasetName) throws Exception { + final File file2 = folder.newFile(datasetName); + try (PrintWriter writer = new PrintWriter(file2)) { + writer.print("{ \"key\" : \"A\", 
\"value\" : 0 }"); + } } @Before public void setup() throws Exception { final NASConf nas = new NASConf(); nas.path = folder.getRoot().getPath(); - SourceUI source = new SourceUI(); + + final SourceUI source = new SourceUI(); source.setName(SOURCE_NAME); source.setCtime(System.currentTimeMillis()); source.setAccelerationRefreshPeriod(DEFAULT_REFRESH_PERIOD); source.setAccelerationGracePeriod(DEFAULT_GRACE_PERIOD); - // Please note: if this source is ever refreshed, the physical dataset added below will disappear - source.setMetadataPolicy(UIMetadataPolicy.of(CatalogService.NEVER_REFRESH_POLICY)); + source.setMetadataPolicy( + UIMetadataPolicy.of(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE)); source.setConfig(nas); getSourceService().registerSourceWithRuntime(source); - addPhysicalDataset(DATASET_PATH, DatasetType.PHYSICAL_DATASET); + + createNewFile(DATASET_NAME); + getPreview(DATASET_PATH); + + final DatasetConfig datasetConfig = new DatasetConfig(); + datasetConfig.setName(DATASET_PATH_2.getLeaf().getName()); + datasetConfig.setFullPathList(DATASET_PATH_2.toPathList()); + datasetConfig.setType(DatasetType.PHYSICAL_DATASET); + datasetConfig.setPhysicalDataset(new PhysicalDataset()); + getNamespaceService().addOrUpdateDataset(DATASET_PATH_2.toNamespaceKey(), datasetConfig); + + createNewFile(DATASET_NAME_2); + getPreview(DATASET_PATH_2); } @After @@ -175,11 +190,20 @@ public void testAccelerationSettingsRefreshLessthanExpire() throws Exception { goodDescriptor.setAccelerationNeverExpire(false); goodDescriptor.setAccelerationNeverRefresh(false); - expectSuccess( getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))) .buildPut(Entity.entity(goodDescriptor, JSON))); + final AccelerationSettingsDescriptor descriptor = new AccelerationSettingsDescriptor() + .setAccelerationRefreshPeriod(DEFAULT_REFRESH_PERIOD) + .setAccelerationGracePeriod(DEFAULT_GRACE_PERIOD) + .setMethod(RefreshMethod.FULL); + + expectSuccess( + getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))) + .buildPut(Entity.entity(descriptor, JSON))); + + final AccelerationSettingsDescriptor badDescriptor = expectSuccess( getBuilder(getAPIv2().path(endpoint)).buildGet(), AccelerationSettingsDescriptor.class @@ -231,11 +255,11 @@ public void testUpdateSettingsInIncrementalMode() throws Exception { .setRefreshField("test-field"); expectSuccess( - getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))) + getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH_2.toPathString()))) .buildPut(Entity.entity(descriptor, JSON))); final AccelerationSettingsDescriptor newDescriptor = expectSuccess( - getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))).buildGet(), + getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH_2.toPathString()))).buildGet(), AccelerationSettingsDescriptor.class ); @@ -256,7 +280,7 @@ public void testValidation() throws Exception { .setMethod(RefreshMethod.INCREMENTAL); expectStatus(Response.Status.BAD_REQUEST, - getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))) + getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH_2.toPathString()))) .buildPut(Entity.entity(descriptor, JSON))); } @@ -273,9 +297,6 @@ public void testValidation() 
throws Exception { } { - final DatasetPath path2 = new DatasetPath(ImmutableList.of(SOURCE_NAME, "ds2")); - addPhysicalDataset(path2, DatasetType.PHYSICAL_DATASET_SOURCE_FILE); - final AccelerationSettingsDescriptor descriptor = new AccelerationSettingsDescriptor() .setAccelerationRefreshPeriod(DEFAULT_REFRESH_PERIOD) .setAccelerationGracePeriod(DEFAULT_GRACE_PERIOD) @@ -283,7 +304,7 @@ public void testValidation() throws Exception { .setRefreshField("some-field"); expectStatus(Response.Status.BAD_REQUEST, - getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", path2.toPathString()))) + getBuilder(getAPIv2().path(String.format("/dataset/%s/acceleration/settings", DATASET_PATH.toPathString()))) .buildPut(Entity.entity(descriptor, JSON))); } @@ -296,7 +317,7 @@ public void testValidation() throws Exception { { expectStatus(Response.Status.BAD_REQUEST, getBuilder(getAPIv2().path(String.format("/dataset/%s/rename/", DATASET_PATH.toPathString())) - .queryParam("renameTo", DATASET_PATH.getLeaf().toString())) + .queryParam("renameTo", DATASET_PATH_2.getLeaf().toString())) .build("POST")); } diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceApi.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceApi.java new file mode 100644 index 0000000000..0ec2cc03e6 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceApi.java @@ -0,0 +1,292 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.resource; + +import static com.dremio.exec.ExecConstants.NESSIE_SOURCE_API; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.ALTERNATIVE_BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.store.DataplanePluginOptions.DATAPLANE_PLUGIN_ENABLED; +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.options.OptionValue.OptionType.SYSTEM; +import static javax.ws.rs.core.Response.Status.NOT_FOUND; + +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collections; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.api.io.TempDir; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.ext.NessieApiVersion; +import org.projectnessie.client.ext.NessieApiVersions; +import org.projectnessie.client.ext.NessieClientCustomizer; +import org.projectnessie.client.ext.NessieClientFactory; +import org.projectnessie.client.ext.NessieClientResolver; +import org.projectnessie.client.ext.NessieClientUri; +import org.projectnessie.client.http.HttpClientBuilder; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.jaxrs.tests.BaseTestNessieRest; +import org.projectnessie.tools.compatibility.api.NessieBaseUri; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; + +import com.dremio.common.AutoCloseables; +import com.dremio.dac.server.BaseTestServerJunit5; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.store.CatalogService; +import com.dremio.options.OptionValue; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.plugins.dataplane.store.NessiePluginConfig; +import com.dremio.plugins.s3.store.S3FileSystem; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.source.proto.SourceConfig; +import com.dremio.service.users.SystemUser; +import com.google.common.base.Preconditions; + +import io.findify.s3mock.S3Mock; + +@ExtendWith(OlderNessieServersExtension.class) +public class TestNessieSourceApi extends BaseTestServerJunit5 { + + @TempDir + static File temporaryDirectory; + @NessieBaseUri + private static URI nessieUri; + private static int S3_PORT; + private static S3Mock s3Mock; + private static Path bucketPath; + private static NessieApiV2 nessieClient; + private static DataplanePlugin dataplanePlugin; + private static Catalog catalog; + private static NamespaceService namespaceService; + + private static String 
createNessieURIString() { + return nessieUri.toString() + "v2"; + } + + @BeforeAll + public static void setup() throws Exception { + setUpS3Mock(); + setUpNessie(); + setUpDataplanePlugin(); + } + + @AfterAll + public static void arcticCleanUp() throws Exception { + AutoCloseables.close( + dataplanePlugin, + nessieClient); + if (s3Mock != null) { + s3Mock.shutdown(); + s3Mock = null; + } + } + + protected static void setUpS3Mock() throws IOException { + bucketPath = Paths.get(temporaryDirectory.getAbsolutePath(), BUCKET_NAME); + Files.createDirectory(bucketPath); + Files.createDirectory(Paths.get(temporaryDirectory.getAbsolutePath(), ALTERNATIVE_BUCKET_NAME)); + + Preconditions.checkState(s3Mock == null); + s3Mock = + new S3Mock.Builder() + .withPort(0) + .withFileBackend(temporaryDirectory.getAbsolutePath()) + .build(); + S3_PORT = s3Mock.start().localAddress().getPort(); + } + + protected static void setUpNessie() { + nessieClient = + HttpClientBuilder.builder() + .withUri(createNessieURIString()) + .fromConfig(Collections.singletonMap("nessie.force-url-connection-client", "true")::get) + .build(NessieApiV2.class); + } + + protected static void setUpDataplanePlugin() { + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, DATAPLANE_PLUGIN_ENABLED.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, NESSIE_PLUGIN_ENABLED.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, NESSIE_SOURCE_API.getOptionName(), true)); + + CatalogServiceImpl catalogImpl = (CatalogServiceImpl) getSabotContext().getCatalogService(); + + SourceConfig sourceConfig = + new SourceConfig() + .setConnectionConf(prepareConnectionConf(BUCKET_NAME)) + .setName(DATAPLANE_PLUGIN_NAME) + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + catalogImpl.getSystemUserCatalog().createSource(sourceConfig); + dataplanePlugin = catalogImpl.getSystemUserCatalog().getSource(DATAPLANE_PLUGIN_NAME); + catalog = catalogImpl.getSystemUserCatalog(); + + namespaceService = getSabotContext().getNamespaceService(SystemUser.SYSTEM_USERNAME); + } + + private static NessiePluginConfig prepareConnectionConf(String bucket) { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = createNessieURIString(); + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = bucket; + + // S3Mock settings + nessiePluginConfig.propertyList = + Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property("fs.s3a.connection.ssl.enabled", "false"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true")); + + return nessiePluginConfig; + } + + @Test + public void testWrongAPI() { + expectStatus(NOT_FOUND, getBuilder(getNessieProxy().path(String.format("/source/%s/treez", DATAPLANE_PLUGIN_NAME))).buildGet()); + } + + @Test + public void testInvalidSource() { + expectStatus(NOT_FOUND, getBuilder(getNessieProxy().path(String.format("/v2/source/%s/trees", "invalidSource"))).buildGet()); + } + + @Nested + @NessieApiVersions(versions = NessieApiVersion.V2) + class NessieApiTest extends BaseTestNessieRest { + + @RegisterExtension + 
private NessieClientResolverImpl proxyResolver = new NessieClientResolverImpl(TestNessieSourceApi.this); + + @Override + @Disabled + // Disabled because NaaS proxy only proxies /trees endpoints, so /config is not available + public void config() throws NessieNotFoundException {} + + @Override + @Disabled + // Disabled because NaaS proxy only proxies /trees endpoints, so /config is not available + public void specVersion() {} + } + + private static class NessieClientResolverImpl extends NessieClientResolver + implements ParameterResolver { + + private static String nessieSourcePath = String.format("v2/source/%s", DATAPLANE_PLUGIN_NAME); + private TestNessieSourceApi base; + + public NessieClientResolverImpl(TestNessieSourceApi base) { + this.base = base; + } + + private boolean isNessieClient(ParameterContext parameterContext) { + return parameterContext.getParameter().getType().isAssignableFrom(NessieClientFactory.class); + } + + @Override + public boolean supportsParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return isNessieClient(parameterContext) || isNessieUri(parameterContext); + } + + @Override + protected URI getBaseUri(ExtensionContext extensionContext) { + return getNessieSourceUri(); + } + + private boolean isNessieUri(ParameterContext parameterContext) { + return parameterContext.isAnnotated(NessieClientUri.class); + } + + private URI getNessieSourceUri() { + return getNessieProxy().getUriBuilder().path(nessieSourcePath).build(); + } + + @Override + public Object resolveParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + + if (isNessieUri(parameterContext)) { + return getNessieSourceUri(); + } + + if (isNessieClient(parameterContext)) { + return clientFactoryForThisNessieSource(); + } + + throw new IllegalStateException("Unsupported parameter: " + parameterContext); + } + + private NessieClientFactory clientFactoryForThisNessieSource() { + return new NessieClientResolverImpl.ClientFactory(getNessieSourceUri()); + } + + private static final class ClientFactory implements NessieClientFactory, Serializable { + + private URI nessieUri; + + private ClientFactory(URI nessieUri) { + this.nessieUri = nessieUri; + } + + @Override + public NessieApiVersion apiVersion() { + return NessieApiVersion.V2; + } + + @Override + public NessieApiV2 make(NessieClientCustomizer customizer) { + return HttpClientBuilder + .builder() + .withUri(nessieUri) + .withEnableApiCompatibilityCheck(false) // Nessie API proxy in the source does not have the /config endpoint + .build(NessieApiV2.class); + } + } + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceResource.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceResource.java new file mode 100644 index 0000000000..2289fb54f6 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestNessieSourceResource.java @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.resource; + +import static com.dremio.exec.ExecConstants.NESSIE_SOURCE_API; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.projectnessie.client.api.NessieApiV2; + +import com.dremio.common.exceptions.UserException; +import com.dremio.dac.service.errors.NessieSourceNotValidException; +import com.dremio.dac.service.errors.SourceNotFoundException; +import com.dremio.exec.store.CatalogService; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.plugins.s3.store.S3StoragePlugin; +import com.dremio.services.nessie.proxy.ProxyV2TreeResource; + +@ExtendWith(MockitoExtension.class) +public class TestNessieSourceResource { + + @Mock + private CatalogService catalogService; + + @InjectMocks + private NessieSourceResource nessieSourceResource; + + @Mock + private DataplanePlugin dataplanePlugin; + + @Mock + private NessieApiV2 nessieApiV2; + + @Mock + private S3StoragePlugin s3StoragePlugin; + + @Mock + private UserException userException; + + @Mock + private OptionManager optionManager; + + private final String sourceName = "MY_SOURCE"; + + @Test + public void testNessieSourceNotFound() { + when(catalogService.getSource(sourceName)).thenThrow(userException); + when(optionManager.getOption(NESSIE_SOURCE_API)).thenReturn(true); + assertThatThrownBy(()->nessieSourceResource.handle(sourceName)) + .isInstanceOf(SourceNotFoundException.class); + } + + + @Test + public void testNessieSourceSuccess() { + when(catalogService.getSource(sourceName)).thenReturn(dataplanePlugin); + when(dataplanePlugin.getNessieApi()).thenReturn(nessieApiV2); + when(optionManager.getOption(NESSIE_SOURCE_API)).thenReturn(true); + ProxyV2TreeResource expected = nessieSourceResource.handle(sourceName); + assertTrue(expected instanceof ProxyV2TreeResource); + } + + @Test + public void testNessieSourceNotAcceptable() { + when(catalogService.getSource(sourceName)).thenReturn(s3StoragePlugin); + when(optionManager.getOption(NESSIE_SOURCE_API)).thenReturn(true); + assertThatThrownBy(()->nessieSourceResource.handle(sourceName)) + .isInstanceOf(NessieSourceNotValidException.class); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestSQLResource.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestSQLResource.java index 9a183a5faa..bcccf17460 100644 --- a/dac/backend/src/test/java/com/dremio/dac/resource/TestSQLResource.java +++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestSQLResource.java @@ -16,12 +16,10 @@ package com.dremio.dac.resource; import static java.util.Arrays.asList; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; @@ -30,8 +28,6 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.calcite.sql.parser.SqlParserUtil; -import org.apache.calcite.sql.parser.StringAndPos; import org.junit.After; 
import org.junit.Assert; import org.junit.Before; @@ -41,19 +37,22 @@ import org.junit.rules.TemporaryFolder; import com.dremio.dac.api.Folder; -import com.dremio.dac.explore.model.AnalyzeRequest; import com.dremio.dac.explore.model.DatasetPath; -import com.dremio.dac.explore.model.SuggestionResponse; -import com.dremio.dac.explore.model.ValidationResponse; -import com.dremio.dac.model.job.QueryError; import com.dremio.dac.model.sources.SourceUI; import com.dremio.dac.model.sources.UIMetadataPolicy; -import com.dremio.dac.model.spaces.Space; import com.dremio.dac.server.BaseTestServer; +import com.dremio.dac.server.FamilyExpectation; +import com.dremio.dac.server.GenericErrorMessage; import com.dremio.dac.service.source.SourceService; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.NASConf; import com.dremio.service.autocomplete.AutocompleteRequestImplementation; +import com.dremio.service.autocomplete.AutocompleteV2Request; +import com.dremio.service.autocomplete.AutocompleteV2RequestType; +import com.dremio.service.autocomplete.ColumnSuggestions; +import com.dremio.service.autocomplete.ContainerSuggestions; +import com.dremio.service.autocomplete.SuggestionEntity; +import com.dremio.service.autocomplete.SuggestionsType; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; @@ -61,13 +60,11 @@ import com.dremio.service.namespace.dataset.proto.PhysicalDataset; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; /** * Tests {@link com.dremio.dac.resource.SQLResource} API */ public class TestSQLResource extends BaseTestServer { - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSQLResource.class); private static final String SOURCE_NAME = "mysrc"; private static final long DEFAULT_REFRESH_PERIOD = TimeUnit.HOURS.toMillis(4); private static final long DEFAULT_GRACE_PERIOD = TimeUnit.HOURS.toMillis(12); @@ -114,12 +111,14 @@ public void setup() throws Exception { addPhysicalDataset(DATASET_PATH_TWO, DatasetType.PHYSICAL_DATASET); addPhysicalDataset(DATASET_PATH_THREE, DatasetType.PHYSICAL_DATASET); - expectSuccess(getBuilder(getAPIv2().path("space/testSpace")).buildPut(Entity.json(new Space(null, "testSpace", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "testSpace", null, null, null))), new GenericType() {}); + DatasetPath d1Path = new DatasetPath("testSpace.supplier"); createDatasetFromSQLAndSave(d1Path, "select s_name, s_phone from cp.\"tpch/supplier.parquet\"", asList("cp")); Folder newFolder = new Folder(null, Arrays.asList("testSpace", "myFolder"), null, null); - expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(newFolder)), new GenericType() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(newFolder)), new GenericType() { + }); DatasetPath d2Path = new DatasetPath("testSpace.myFolder.supplier"); createDatasetFromSQLAndSave(d2Path, "select s_name, s_phone from cp.\"tpch/supplier.parquet\"", asList("cp")); @@ -130,519 +129,391 @@ public void clear() throws Exception { clearAllDataExceptUser(); } - /** - * Logs the returned suggestions or validation errors - * - * @param advisorResponse The SuggestionResponse or ValidationResponse containing 
suggestions or validation errors. - */ - private void logAdvisorResponse(T advisorResponse) { - if (advisorResponse == null || !logger.isTraceEnabled()) { - return; - } - - StringBuilder sb = new StringBuilder(); - sb.append("SQLAnalyzer response:\n"); - sb.append(advisorResponse.toString()); - logger.trace(sb.toString()); + @Test + public void testBasicAutocomplete() throws Exception { + String query = "SELECT "; + String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); } @Test - public void testSpaceFullSchemaCompletion() throws Exception { - final String partialQuery = "SELECT * from ^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(23, returnedSuggestions.getSuggestions().size()); + public void testCatalogEntryCompletionWithContext() { + String query = "SELECT * FROM "; + String autocompleteResponse = testAutocomplete(query, query.length(), ImmutableList.of("testSpace")); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("CatalogEntry")); + Assert.assertTrue(autocompleteResponse.contains("supplier")); } @Test - public void testSpaceVDSPartialSchemaCompletion() throws Exception { - final String partialQuery = "SELECT * from t^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - ArrayList expectedTables = Lists.newArrayList(); - expectedTables.addAll( - asList( - "INFORMATION_SCHEMA.\"TABLES\"", - "cp.\"tpch/supplier.parquet\"", - "testSpace.supplier")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(7, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 4; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("TABLE")) { - assertTrue(expectedTables.contains(suggestion.getName())); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("testSpace", suggestion.getName()); - } else if (suggestion.getType().equals("KEYWORD")) { - assertEquals("TABLE", suggestion.getName()); - } - } + public void testColumnCompletion() { + String query = "SELECT FROM testSpace.supplier"; + String autocompleteResponse = testAutocomplete(query, 7, new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("Column")); } @Test - public void spacePDSPartialSchemaCompletion() throws Exception { - final String partialQuery = "SELECT * from c^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - ArrayList expectedTables = Lists.newArrayList(); - expectedTables.addAll( - asList( - "INFORMATION_SCHEMA.CATALOGS", - "INFORMATION_SCHEMA.COLUMNS", - "cp.\"tpch/supplier.parquet\"" - )); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - 
assertEquals((expectedTables.size() + 1), returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < (expectedTables.size() + 1); i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("TABLE")) { - assertTrue(expectedTables.contains(suggestion.getName())); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("cp", suggestion.getName()); - } - } + @Ignore + public void testColumnCompletionWithNessie() { + String query = "SELECT FROM testSpace.supplier AT BRANCH branchA"; + String autocompleteResponse = testAutocomplete(query, 7, new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("Column")); } @Test - public void testSpaceVDSFullDatasetCompletionNoPeriod() throws Exception { - final String partialQuery = "SELECT * from testSpace^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(4, returnedSuggestions.getSuggestions().size()); + public void testColumnCompletionWithContext() throws Exception { + String query = "SELECT FROM supplier"; + String autocompleteResponse = testAutocomplete(query, 7, ImmutableList.of("testSpace", "myFolder")); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("Column")); + Assert.assertTrue(autocompleteResponse.contains("supplier")); + Assert.assertTrue(autocompleteResponse.contains("s_name")); } @Test - public void testPDSFullDatasetCompletionNoPeriod() throws Exception { - final String partialQuery = "SELECT * from cp^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(2, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 2; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("TABLE")) { - assertEquals("cp.\"tpch/supplier.parquet\"", suggestion.getName()); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("cp", suggestion.getName()); - } - } + public void testFunctionCompletion() throws Exception { + String query = "SELECT * FROM testSpace.supplier WHERE AB"; + String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("Function")); } @Test - public void testSpaceVDSFullDatasetCompletionWPeriod() throws Exception { - final String partialQuery = "SELECT * from testSpace.^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(1, returnedSuggestions.getSuggestions().size()); - SuggestionResponse.Suggestion suggestion = 
returnedSuggestions.getSuggestions().get(0); - assertEquals("TABLE", suggestion.getType()); - assertEquals("testSpace.supplier", suggestion.getName()); + public void testFunctionCompletion2() throws Exception { + String query = "SELECT * FROM testSpace.supplier WHERE ASCII("; + String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("Column")); } @Test - public void testPDSFullDatasetCompletionWPeriod() throws Exception { - final String partialQuery = "SELECT * from cp.^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(1, returnedSuggestions.getSuggestions().size()); - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(0); - assertEquals("TABLE", suggestion.getType()); - assertEquals("cp.\"tpch/supplier.parquet\"", suggestion.getName()); + public void testFolderRegression() throws Exception { + String query = "SELECT * FROM testSpace."; + String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); + Assert.assertNotNull(autocompleteResponse); + Assert.assertTrue(autocompleteResponse.contains("CatalogEntry")); + Assert.assertTrue(autocompleteResponse.contains("myFolder")); + Assert.assertTrue(autocompleteResponse.contains("Folder")); } @Test - public void testSpaceVDSPartialDatasetCompletion() throws Exception { - final String partialQuery = "SELECT * from testSpace.sup^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(1, returnedSuggestions.getSuggestions().size()); - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(0); - assertEquals("TABLE", suggestion.getType()); - assertEquals("testSpace.supplier", suggestion.getName()); + public void testNullCatalogEntityKeys() throws Exception { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = null; + final List queryContext = Collections.emptyList(); + testAutocompleteV2Error(prefix, type, catalogEntityKeys, queryContext); } @Test - public void testSpacePDSPartialDatasetCompletion() throws Exception { - final String partialQuery = "SELECT * from cp.t^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(1, returnedSuggestions.getSuggestions().size()); - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(0); - assertEquals("TABLE", suggestion.getType()); - assertEquals("cp.\"tpch/supplier.parquet\"", suggestion.getName()); + public void testCatalogEntityKeysForContainerTypeIsSize2() throws Exception { + final String prefix = 
""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST, Arrays.asList("@dremio", "mySpace")); + final List queryContext = Collections.emptyList(); + testAutocompleteV2Error(prefix, type, catalogEntityKeys, queryContext); } - @Test - public void testPartialColumnCompletionWithAlias() throws Exception { - final String partialQuery = "SELECT t1.s^ from testSpace.supplier t1"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - ArrayList expectedColumns = Lists.newArrayList(); - expectedColumns.addAll( - asList( - "s_name", - "s_phone")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(2, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 2; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("COLUMN")) { - assertTrue(expectedColumns.contains(suggestion.getName())); - } - } + public void testCatalogEntityKeysForColumnTypeHasEmpty() throws Exception { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Collections.emptyList(); + testAutocompleteV2Error(prefix, type, catalogEntityKeys, queryContext); } @Test - public void testSQLAnalyzeSuggestInfoSchema() throws Exception { - final String partialQuery = "select * from i^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("@dremio")); - - ArrayList expectedTables = Lists.newArrayList(); - expectedTables.addAll( - asList( - "INFORMATION_SCHEMA.CATALOGS", - "INFORMATION_SCHEMA.COLUMNS", - "INFORMATION_SCHEMA.SCHEMATA", - "INFORMATION_SCHEMA.\"TABLES\"", - "INFORMATION_SCHEMA.VIEWS")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(6, returnedSuggestions.getSuggestions().size()); - - for (SuggestionResponse.Suggestion suggestion : returnedSuggestions.getSuggestions()) { - if (suggestion.getType().equals("TABLE")) { - assertTrue(expectedTables.contains(suggestion.getName())); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("INFORMATION_SCHEMA", suggestion.getName()); - } - } + public void testCatalogEntityKeysForBranchTypeHasEmpty() throws Exception { + final String prefix = "foo"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.REFERENCE; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Collections.emptyList(); + testAutocompleteV2Error(prefix, type, catalogEntityKeys, queryContext); } @Test - public void testSuggestFromPartialSchema() throws Exception { - final String partialQuery = "Select * from m^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - ArrayList expectedTables = Lists.newArrayList(); - expectedTables.addAll( - asList( - "mysrc.ds1", - "mysrc.ds2", - "mysrc.ds3")); - - 
logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals((expectedTables.size() + 1), returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < (expectedTables.size() + 1); i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("TABLE")) { - assertTrue(expectedTables.contains(suggestion.getName())); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("mysrc", suggestion.getName()); - } - } + public void testTopLevelContainersWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Arrays.asList( + new SuggestionEntity("[@dremio]","home"), + new SuggestionEntity("[testSpace]", "space"), + new SuggestionEntity("[cp]","source"), + new SuggestionEntity("[mysrc]", "source"), + new SuggestionEntity("[INFORMATION_SCHEMA]","source"), + new SuggestionEntity("[sys]","source"), + new SuggestionEntity("[testSpace, myFolder]","folder"), + new SuggestionEntity("[testSpace, supplier]","virtual")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testSuggestFromFullSchema() throws Exception { - final String partialQuery = "Select * from mysrc^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(4, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 4; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - if (suggestion.getType().equals("TABLE")) { - assertEquals(String.format("mysrc.ds%d", i + 1), suggestion.getName()); - } else if (suggestion.getType().equals("SCHEMA")) { - assertEquals("mysrc", suggestion.getName()); - } - } + public void testTopLevelContainersWithPrefix() { + final String prefix = "s"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Arrays.asList( + new SuggestionEntity("[sys]","source"), + new SuggestionEntity("[testSpace, supplier]","virtual")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testSuggestFromSchemaSeparator() throws Exception { - final String partialQuery = "Select * from mysrc.^"; - final StringAndPos 
stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, new ArrayList()); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(3, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 3; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - assertEquals("TABLE", suggestion.getType()); - assertEquals(String.format("mysrc.ds%d", i + 1), suggestion.getName()); - } + public void testTopLevelContainersWithPrefixMatchingQueryContext() { + final String prefix = "test"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Arrays.asList( + new SuggestionEntity("[testSpace]", "space")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testSuggestFromPartialDataset() throws Exception { - final String partialQuery = "Select * from mysrc.d^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(3, returnedSuggestions.getSuggestions().size()); - for (int i = 0; i < 3; i++) { - SuggestionResponse.Suggestion suggestion = returnedSuggestions.getSuggestions().get(i); - assertEquals("TABLE", suggestion.getType()); - assertEquals(String.format("mysrc.ds%d", i + 1), suggestion.getName()); - } - } - - @Test // Could improve to suggest Dremio specific keywords - public void testSuggestSelectList() throws Exception { - final String partialQuery = "Select ^ from mysrc.ds1"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("mysrc")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - for (int i = 0; i < returnedSuggestions.getSuggestions().size(); i++) { - assertEquals("KEYWORD", returnedSuggestions.getSuggestions().get(i).getType()); - } + public void testTopLevelContainersWithPrefixHappenToBeQueryContext() { + final String prefix = "testSpace"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Arrays.asList( + new SuggestionEntity("[testSpace]", "space")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + 
Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testSuggestColumn() throws Exception { - final String partialQuery = "SELECT t.^ FROM testSpace.supplier t"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("cp")); - - ArrayList expectedColumns = Lists.newArrayList(); - expectedColumns.addAll( - asList( - "s_name", - "s_phone")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(3, returnedSuggestions.getSuggestions().size()); - for (SuggestionResponse.Suggestion suggestion : returnedSuggestions.getSuggestions()) { - if (suggestion.getType().equals("COLUMN")) { - assertTrue(expectedColumns.contains(suggestion.getName())); - } else if (suggestion.getType().equals("KEYWORD")) { - assertEquals("*", suggestion.getName()); - } - } - } - - @Test // Suggestions for partial require update to Calcite - public void testSuggestColumnPartial() throws Exception { - final String partialQuery = "SELECT t.s^ FROM testSpace.supplier t"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final SuggestionResponse returnedSuggestions = testSuggestSQL(stringAndPos.sql, stringAndPos.cursor, asList("cp")); - - ArrayList expectedColumns = Lists.newArrayList(); - expectedColumns.addAll( - asList( - "s_name", - "s_phone")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getSuggestions()); - assertEquals(2, returnedSuggestions.getSuggestions().size()); - for (SuggestionResponse.Suggestion suggestion : returnedSuggestions.getSuggestions()) { - assertTrue(expectedColumns.contains(suggestion.getName())); - } - } - - @Test // Range can be improved - public void testErrorUnrecognizedTable() throws Exception { - final String partialQuery = "Select * from m^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final ValidationResponse returnedSuggestions = testValidateSQL(stringAndPos.sql, stringAndPos.cursor, asList("@dremio")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getErrors()); - QueryError firstError = returnedSuggestions.getErrors().get(0); - assertContains("'M' not found", firstError.getMessage()); - assertEquals(new QueryError.Range(1,15,2,16), firstError.getRange()); - } - - @Test // Error message identical to current. (unrecognized * intead of missing keyword FROM) Can be improved. - public void testErrorIncompleteFrom() throws Exception { - final String partialQuery = "Select * fro^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final ValidationResponse returnedSuggestions = testValidateSQL(stringAndPos.sql, stringAndPos.cursor, asList("@dremio")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getErrors()); - QueryError firstError = returnedSuggestions.getErrors().get(0); - assertEquals("Unknown identifier '*'", firstError.getMessage()); - assertEquals(new QueryError.Range(1,8,2,9), firstError.getRange()); - } - - @Test // Current error-handling wraps this error in a generic parse error. 
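// Sketch (illustrative only): every autocomplete V2 test in this hunk builds
// the same three input shapes before calling the success/error helpers. The
// types used below (AutocompleteV2RequestType, SuggestionEntity) are the ones
// this file now imports; the method and variable names are assumptions made
// for the sketch, not part of the patch.
private void autocompleteV2InputShapes() {
  // Each catalog entity key is a path of name segments, so the parameter is a
  // list of paths; an empty inner list stands for the top level of the catalog.
  final List<List<String>> catalogEntityKeys =
      Arrays.asList(Collections.emptyList(), Arrays.asList("testSpace", "myFolder"));

  // The query context scopes name resolution the same way a SQL context does.
  final List<String> queryContext = Collections.singletonList("testSpace");

  // Suggestions pair a bracketed path with an entity-type label; the tests
  // compare these arrays directly via Assert.assertArrayEquals.
  final List<SuggestionEntity> expected =
      Arrays.asList(new SuggestionEntity("[testSpace, myFolder]", "folder"));

  // CONTAINER asks "what can extend this path?"; COLUMN and REFERENCE are the
  // other request types exercised by these tests.
  final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER;
}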
- public void testErrorIncompleteSelect() throws Exception { - final String partialQuery = "Sel^"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final ValidationResponse returnedSuggestions = testValidateSQL(stringAndPos.sql, stringAndPos.cursor, asList("@dremio")); - - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getErrors()); - QueryError firstError = returnedSuggestions.getErrors().get(0); - assertEquals("Non-query expression encountered in illegal context", firstError.getMessage()); - assertEquals(new QueryError.Range(1,1,2,4), firstError.getRange()); + public void testTopLevelContainersWithPrefixIgnoredCase() { + final String prefix = "info"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Arrays.asList( + new SuggestionEntity("[INFORMATION_SCHEMA]","source")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testErrorUnrecognizedColumn() throws Exception { - final String partialQuery = "SELECT testCol^ FROM testSpace.supplier"; - final StringAndPos stringAndPos = SqlParserUtil.findPos(partialQuery); - final ValidationResponse returnedSuggestions = testValidateSQL(stringAndPos.sql, stringAndPos.cursor, asList("cp")); + public void testTopLevelContainersWithoutMatch() { + final String prefix = "dremio"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Collections.EMPTY_LIST); + final List queryContext = Arrays.asList("testSpace"); + final List expected = Collections.EMPTY_LIST; + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); + } - logAdvisorResponse(returnedSuggestions); - assertNotNull(returnedSuggestions); - assertNotNull(returnedSuggestions.getErrors()); - QueryError firstError = returnedSuggestions.getErrors().get(0); - assertEquals("Column 'TESTCOL' not found in any table", firstError.getMessage()); - assertEquals(new QueryError.Range(1,8,2,15), firstError.getRange()); + /** + * Home space @dremio is empty + */ + @Test + public void testContainersInHomeWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List> catalogEntityKeys = Arrays.asList(Arrays.asList("@dremio")); + final List queryContext = Collections.EMPTY_LIST; + final List expected = Collections.EMPTY_LIST; + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + 
Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testBasicAutocomplete() throws Exception { - String query = "SELECT "; - String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); + public void testContainersInHomeWithPrefix() { + final String prefix = "no-match"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("@dremio")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Collections.EMPTY_LIST; + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testCatalogEntryCompletionWithContext() { - String query = "SELECT * FROM "; - String autocompleteResponse = testAutocomplete(query, query.length(), ImmutableList.of("testSpace")); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("CatalogEntry")); - Assert.assertTrue(autocompleteResponse.contains("supplier")); + public void testContainersInSpaceWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, myFolder]","folder"), + new SuggestionEntity("[testSpace, supplier]","virtual")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testColumnCompletion() { - String query = "SELECT FROM testSpace.supplier"; - String autocompleteResponse = testAutocomplete(query, 7, new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("Column")); + public void testContainersInSpaceWithPrefix() { + final String prefix = "My"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, myFolder]","folder")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - @Ignore - public void testColumnCompletionWithNessie() { - String query = "SELECT 
FROM testSpace.supplier AT BRANCH branchA"; - String autocompleteResponse = testAutocomplete(query, 7, new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("Column")); + public void testContainersInSpaceFolderWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace", "myFolder")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, myFolder, supplier]","virtual")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testColumnCompletionWithContext() throws Exception { - String query = "SELECT FROM supplier"; - String autocompleteResponse = testAutocomplete(query, 7, ImmutableList.of("testSpace", "myFolder")); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("Column")); - Assert.assertTrue(autocompleteResponse.contains("supplier")); - Assert.assertTrue(autocompleteResponse.contains("s_name")); + public void testContainersInSourceWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("INFORMATION_SCHEMA")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS]", "direct"), + new SuggestionEntity("[INFORMATION_SCHEMA, COLUMNS]", "direct"), + new SuggestionEntity("[INFORMATION_SCHEMA, SCHEMATA]", "direct"), + new SuggestionEntity("[INFORMATION_SCHEMA, TABLES]", "direct"), + new SuggestionEntity("[INFORMATION_SCHEMA, VIEWS]", "direct")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testFunctionCompletion() throws Exception { - String query = "SELECT * FROM testSpace.supplier WHERE AB"; - String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("Function")); + public void testContainersInSourceWithPrefix() { + final String prefix = "c"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.CONTAINER; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("INFORMATION_SCHEMA")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS]", "direct"), + new SuggestionEntity("[INFORMATION_SCHEMA, COLUMNS]", "direct")); + ContainerSuggestions containerSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ContainerSuggestions.class); + 
Assert.assertNotNull(containerSuggestions); + Assert.assertTrue(containerSuggestions.getSuggestionsType().equals(SuggestionsType.CONTAINER.getType())); + Assert.assertArrayEquals(expected.toArray(), containerSuggestions.getContainers().toArray()); } @Test - public void testFunctionCompletion2() throws Exception { - String query = "SELECT * FROM testSpace.supplier WHERE ASCII("; - String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("Column")); + public void testColumnsWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace", "supplier")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, supplier, s_name]", "TEXT"), + new SuggestionEntity("[testSpace, supplier, s_phone]", "TEXT")); + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), columnSuggestions.getColumns().toArray()); } @Test - public void testFolderRegression() throws Exception { - String query = "SELECT * FROM testSpace."; - String autocompleteResponse = testAutocomplete(query, query.length(), new ArrayList<>()); - Assert.assertNotNull(autocompleteResponse); - Assert.assertTrue(autocompleteResponse.contains("CatalogEntry")); - Assert.assertTrue(autocompleteResponse.contains("myFolder")); - Assert.assertTrue(autocompleteResponse.contains("Folder")); + public void testColumnsWithPrefix() { + final String prefix = "S_n"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace", "supplier")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, supplier, s_name]", "TEXT")); + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), columnSuggestions.getColumns().toArray()); } - public SuggestionResponse testSuggestSQL(String queryString, int cursorPos, List<String> context) throws Exception { - final String endpoint = "/sql/analyze/suggest"; + @Test + public void testColumnsWithQueryContextIgnored() { + final String prefix = "S_n"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("testSpace", "supplier")); + final List<String> queryContext = Arrays.asList("testSpace"); + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[testSpace, supplier, s_name]", "TEXT")); + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), 
columnSuggestions.getColumns().toArray()); + } - return expectSuccess( - getBuilder(getAPIv2().path(endpoint)).buildPost(Entity.entity(new AnalyzeRequest(queryString, context, cursorPos), MediaType.APPLICATION_JSON_TYPE)), - SuggestionResponse.class - ); + @Test + public void testColumnsInInformationSchemaSourceWithoutPrefix() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("INFORMATION_SCHEMA", "CATALOGS")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS, CATALOG_NAME]", "TEXT"), + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS, CATALOG_DESCRIPTION]", "TEXT"), + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS, CATALOG_CONNECT]", "TEXT")); + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), columnSuggestions.getColumns().toArray()); } - public ValidationResponse testValidateSQL(String queryString, int cursorPos, List<String> context) { - final String endpoint = "/sql/analyze/validate"; + @Test + public void testColumnsInInformationSchemaSourceWithPrefix() { + final String prefix = "catalog_con"; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("INFORMATION_SCHEMA", "CATALOGS")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Arrays.asList( + new SuggestionEntity("[INFORMATION_SCHEMA, CATALOGS, CATALOG_CONNECT]", "TEXT")); + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), columnSuggestions.getColumns().toArray()); + } - return expectSuccess( - getBuilder(getAPIv2().path(endpoint)).buildPost(Entity.entity(new AnalyzeRequest(queryString, context, cursorPos), MediaType.APPLICATION_JSON_TYPE)), - ValidationResponse.class - ); + @Test + public void testColumnsInNonDatasetContainer() { + final String prefix = ""; + final AutocompleteV2RequestType type = AutocompleteV2RequestType.COLUMN; + final List<List<String>> catalogEntityKeys = Arrays.asList(Arrays.asList("INFORMATION_SCHEMA")); + final List<String> queryContext = Collections.EMPTY_LIST; + final List<SuggestionEntity> expected = Collections.EMPTY_LIST; + ColumnSuggestions columnSuggestions = testAutocompleteV2Success(prefix, type, catalogEntityKeys, queryContext, ColumnSuggestions.class); + Assert.assertNotNull(columnSuggestions); + Assert.assertTrue(columnSuggestions.getSuggestionsType().equals(SuggestionsType.COLUMN.getType())); + Assert.assertArrayEquals(expected.toArray(), columnSuggestions.getColumns().toArray()); } private String testAutocomplete(String queryString, int cursorPos, List<String> context) { @@ -657,4 +528,35 @@ private String testAutocomplete(String queryString, int cursorPos, List<String> String json = response.readEntity(String.class); return json; } + + private void testAutocompleteV2Error( + String prefix, + AutocompleteV2RequestType type, + List<List<String>> catalogEntityKeys, + List<String> queryContext) { + 
final String endpoint = "/sql/autocomplete/v2"; + + expectError( + FamilyExpectation.CLIENT_ERROR, + getBuilder(getAPIv2().path(endpoint)) + .buildPost( + Entity.entity(new AutocompleteV2Request(prefix, type, catalogEntityKeys, queryContext, null, null), MediaType.APPLICATION_JSON_TYPE)), + GenericErrorMessage.class); + } + + private <T> T testAutocompleteV2Success( + String prefix, + AutocompleteV2RequestType type, + List<List<String>> catalogEntityKeys, + List<String> queryContext, + Class<T> entityType) { + final String endpoint = "/sql/autocomplete/v2"; + + Response response = expectSuccess( + getBuilder(getAPIv2().path(endpoint)) + .buildPost( + Entity.entity(new AutocompleteV2Request(prefix, type, catalogEntityKeys, queryContext, null, null), MediaType.APPLICATION_JSON_TYPE))); + T suggestions = response.readEntity(entityType); + return suggestions; + } } diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestSpaceResource.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestSpaceResource.java deleted file mode 100644 index 1e265ef883..0000000000 --- a/dac/backend/src/test/java/com/dremio/dac/resource/TestSpaceResource.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.dac.resource; - -import static com.dremio.dac.server.FamilyExpectation.CLIENT_ERROR; -import static javax.ws.rs.core.Response.Status.CONFLICT; -import static org.junit.Assert.assertEquals; - -import javax.ws.rs.client.Entity; - -import org.junit.Test; - -import com.dremio.dac.model.spaces.Space; -import com.dremio.dac.server.BaseTestServer; -import com.dremio.dac.server.GenericErrorMessage; - -/** - * Tests for APIs in {@link SpaceResource} - */ -public class TestSpaceResource extends BaseTestServer { - - @Test - public void testCreateUpdateDeleteSpace() throws Exception { - doc("create space"); - expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildPut(Entity.json(new Space(null, "s1", null, null, null, 0, null))), Space.class); - - doc("get space"); - Space s1 = expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildGet(), Space.class); - - doc("update space - add some description"); - Space newS1 = new Space(s1.getId(), s1.getName(), "I am s1", s1.getVersion(), null, 0, s1.getCtime()); - s1 = expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildPut(Entity.json(newS1)), Space.class); - - assertEquals("s1", s1.getName()); - assertEquals("I am s1", s1.getDescription()); - - doc("delete with bad version"); - long badVersion = 1234L; - String expectedErrorMessage = String.format("Cannot delete space \"%s\", version provided \"%s\" is different from version found \"%s\"", - s1.getName(), badVersion, s1.getVersion()); - final GenericErrorMessage errorDelete2 = expectStatus(CONFLICT, - getBuilder(getAPIv2().path("space/s1").queryParam("version", badVersion)).buildDelete(), - GenericErrorMessage.class); - assertErrorMessage(errorDelete2, expectedErrorMessage); - - doc("delete space"); - expectSuccess(getBuilder(getAPIv2().path("space/s1").queryParam("version", s1.getVersion())).buildDelete(), Space.class); - - doc("try to fetch the space - expect a failure"); - expectError(CLIENT_ERROR, getBuilder(getAPIv2().path("space/s1")).buildGet(), GenericErrorMessage.class); - } -} diff --git a/dac/backend/src/test/java/com/dremio/dac/resource/TestV2TreeResource.java b/dac/backend/src/test/java/com/dremio/dac/resource/TestV2TreeResource.java new file mode 100644 index 0000000000..8f69157267 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/resource/TestV2TreeResource.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.resource; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import javax.ws.rs.WebApplicationException; + +import org.junit.Test; + +public class TestV2TreeResource { + + @Test + public void testDisableAssignReference() { + assertThatThrownBy(() -> new V2TreeResource(null).assignReference(null, "main", null)) + .isInstanceOf(WebApplicationException.class); + } + + @Test + public void testDisableTransplantCommitsIntoBranch() { + assertThatThrownBy(() -> new V2TreeResource(null).transplantCommitsIntoBranch("main", null)) + .isInstanceOf(WebApplicationException.class); + } + + @Test + public void testDisableCommitMultipleOperations() { + assertThatThrownBy(() -> new V2TreeResource(null).commitMultipleOperations("main", null)) + .isInstanceOf(WebApplicationException.class); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServer.java b/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServer.java index 104640415f..f0dd760c68 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServer.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServer.java @@ -31,6 +31,7 @@ import static org.awaitility.Awaitility.await; import static org.glassfish.jersey.CommonProperties.FEATURE_AUTO_DISCOVERY_DISABLE; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; import java.io.File; import java.io.FileNotFoundException; @@ -93,6 +94,7 @@ import com.dremio.dac.explore.model.ViewFieldTypeMixin; import com.dremio.dac.model.folder.FolderPath; import com.dremio.dac.model.job.JobDataFragment; +import com.dremio.dac.model.job.JobUI; import com.dremio.dac.model.spaces.HomeName; import com.dremio.dac.model.spaces.SpaceName; import com.dremio.dac.model.spaces.SpacePath; @@ -126,6 +128,7 @@ import com.dremio.service.BindingProvider; import com.dremio.service.conduit.server.ConduitServer; import com.dremio.service.job.proto.JobId; +import com.dremio.service.job.proto.JobState; import com.dremio.service.job.proto.JobSubmission; import com.dremio.service.job.proto.QueryType; import com.dremio.service.jobs.JobNotFoundException; @@ -162,6 +165,8 @@ public abstract class BaseTestServer extends BaseClientUtils { private static final String API_LOCATION = "apiv2"; private static final String PUBLIC_API_LOCATION = "api"; private static final String SCIM_V2_API_LOCATION = "scim/v2"; + private static final String OAUTH_API_LOCATION = "oauth"; + protected static final String DEFAULT_USERNAME = SampleDataPopulator.DEFAULT_USER_NAME; protected static final String DEFAULT_PASSWORD = SampleDataPopulator.PASSWORD; @@ -278,6 +283,7 @@ public void resetDefaultUser() { private static WebTarget publicAPI; private static WebTarget masterPublicAPI; private static WebTarget scimV2API; + private static WebTarget oAuthApi; private static DACDaemon executorDaemon; private static DACDaemon currentDremioDaemon; private static DACDaemon masterDremioDaemon; @@ -309,6 +315,10 @@ protected static WebTarget getScimAPIv2() { return scimV2API; } + protected static WebTarget getOAuthApi() { + return oAuthApi; + } + protected static WebTarget getMetricsEndpoint() { return metricsEndpoint; } @@ -419,6 +429,8 @@ private static void initClient(ObjectMapper mapper) throws Exception { apiV2 = rootTarget.path(API_LOCATION); publicAPI = rootTarget.path(PUBLIC_API_LOCATION); scimV2API = rootTarget.path(SCIM_V2_API_LOCATION); + oAuthApi = rootTarget.path(OAUTH_API_LOCATION); + if (isMultinode()) { masterApiV2 = 
client.target("http://localhost:" + masterDremioDaemon.getWebServer().getPort()).path(API_LOCATION); masterPublicAPI = client.target("http://localhost:" + masterDremioDaemon.getWebServer().getPort()).path(PUBLIC_API_LOCATION); @@ -1000,6 +1012,34 @@ protected InitialPreviewResponse createDatasetFromSQL(String sql, List c InitialPreviewResponse.class); // <= receiving } + // Wait for the job complete + protected void waitForJobComplete(String jobId) { + int retry = 20; + while (retry-- >= 0) { + try { + Thread.sleep(1000); + JobUI job = expectSuccess( + getBuilder(getAPIv2().path(getPathJoiner().join("job", jobId))).buildGet(), // => sending + JobUI.class); // <= receiving + if (job.getJobAttempt().getState() == JobState.COMPLETED) { + break; + } else if (job.getJobAttempt().getState() == JobState.FAILED || + job.getJobAttempt().getState() == JobState.CANCELED || + job.getJobAttempt().getState() == JobState.CANCELLATION_REQUESTED) { + fail(String.format("Job (%s) failed.", jobId)); + } + } catch (InterruptedException e) { + fail(e.getMessage()); + } catch (Exception e) { + // Ignore, retry + } + } + + if (retry == 0) { + fail(String.format("Job (%s) timed out.", jobId)); + } + } + protected InitialPreviewResponse createDatasetFromParent(String parentDataset) { final Invocation invocation = getBuilder( getAPIv2() diff --git a/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServerJunit5.java b/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServerJunit5.java new file mode 100644 index 0000000000..9f03ceedaa --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/server/BaseTestServerJunit5.java @@ -0,0 +1,1357 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.server; + +import static com.dremio.common.utils.PathUtils.getKeyJoiner; +import static com.dremio.common.utils.PathUtils.getPathJoiner; +import static com.dremio.dac.server.FamilyExpectation.CLIENT_ERROR; +import static com.dremio.dac.server.JobsServiceTestUtils.submitJobAndGetData; +import static com.dremio.dac.server.test.SampleDataPopulator.DEFAULT_USER_NAME; +import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG; +import static com.dremio.exec.ExecConstants.VERSIONED_VIEW_ENABLED; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.fullyQualifiedTableName; +import static com.dremio.exec.planner.physical.PlannerSettings.UNLIMITED_SPLITS_SUPPORT; +import static com.dremio.service.namespace.dataset.DatasetVersion.newVersion; +import static java.lang.String.format; +import static java.util.Arrays.asList; +import static javax.ws.rs.client.Entity.entity; +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; +import static org.glassfish.jersey.CommonProperties.FEATURE_AUTO_DISCOVERY_DISABLE; +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.net.InetAddress; +import java.net.ServerSocket; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.Principal; +import java.time.Duration; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.function.Function; + +import javax.inject.Provider; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.Invocation; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response.Status; +import javax.ws.rs.core.SecurityContext; + +import org.apache.arrow.memory.BufferAllocator; +import org.assertj.core.api.AssertionsForClassTypes; +import org.eclipse.jetty.http.HttpHeader; +import org.glassfish.jersey.media.multipart.MultiPartFeature; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.io.TempDir; + +import com.dremio.common.AutoCloseables; +import com.dremio.common.SentinelSecure; +import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.common.perf.Timer; +import com.dremio.common.perf.Timer.TimedBlock; +import com.dremio.common.util.TestTools; +import com.dremio.config.DremioConfig; +import com.dremio.dac.daemon.DACDaemon; +import com.dremio.dac.daemon.DACDaemon.ClusterMode; +import com.dremio.dac.daemon.DACDaemonModule; +import com.dremio.dac.daemon.DACModule; +import com.dremio.dac.daemon.ZkServer; +import com.dremio.dac.explore.model.CreateFromSQL; +import com.dremio.dac.explore.model.DataPOJO; +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.DatasetSearchUIs; +import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.dac.explore.model.DatasetUIWithHistory; +import com.dremio.dac.explore.model.DatasetVersionResourcePath; +import com.dremio.dac.explore.model.InitialDataPreviewResponse; +import com.dremio.dac.explore.model.InitialPendingTransformResponse; +import com.dremio.dac.explore.model.InitialPreviewResponse; +import com.dremio.dac.explore.model.TransformBase; +import com.dremio.dac.explore.model.VersionContextReq; +import 
com.dremio.dac.explore.model.ViewFieldTypeMixin; +import com.dremio.dac.model.folder.FolderPath; +import com.dremio.dac.model.job.JobDataFragment; +import com.dremio.dac.model.spaces.HomeName; +import com.dremio.dac.model.spaces.SpaceName; +import com.dremio.dac.model.spaces.SpacePath; +import com.dremio.dac.model.usergroup.UserLogin; +import com.dremio.dac.model.usergroup.UserLoginSession; +import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; +import com.dremio.dac.server.test.SampleDataPopulator; +import com.dremio.dac.service.collaboration.CollaborationHelper; +import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.dac.service.errors.DatasetNotFoundException; +import com.dremio.dac.service.errors.DatasetVersionNotFoundException; +import com.dremio.dac.service.reflection.ReflectionServiceHelper; +import com.dremio.dac.service.source.SourceService; +import com.dremio.dac.util.JSONUtil; +import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.exec.ExecConstants; +import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.catalog.ConnectionReader; +import com.dremio.exec.client.DremioClient; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.rpc.RpcException; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.util.TestUtilities; +import com.dremio.file.FilePath; +import com.dremio.options.OptionManager; +import com.dremio.options.OptionValidator; +import com.dremio.sabot.rpc.user.UserServer; +import com.dremio.service.Binder; +import com.dremio.service.BindingProvider; +import com.dremio.service.conduit.server.ConduitServer; +import com.dremio.service.job.proto.JobId; +import com.dremio.service.job.proto.JobSubmission; +import com.dremio.service.job.proto.QueryType; +import com.dremio.service.jobs.JobNotFoundException; +import com.dremio.service.jobs.JobRequest; +import com.dremio.service.jobs.JobStatusListener; +import com.dremio.service.jobs.JobsService; +import com.dremio.service.jobs.SqlQuery; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.dataset.DatasetVersion; +import com.dremio.service.namespace.dataset.proto.ViewFieldType; +import com.dremio.service.namespace.space.proto.FolderConfig; +import com.dremio.service.namespace.space.proto.SpaceConfig; +import com.dremio.service.users.SimpleUserService; +import com.dremio.service.users.UserService; +import com.dremio.services.fabric.api.FabricService; +import com.dremio.test.DremioTest; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; +import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +public abstract class BaseTestServerJunit5 extends BaseClientUtils { + private static final org.slf4j.Logger logger = + org.slf4j.LoggerFactory.getLogger(BaseTestServerJunit5.class); + private static final String API_LOCATION = "apiv2"; + private static final String NESSIE_PROXY = "nessie-proxy"; + private static final String PUBLIC_API_LOCATION = "api"; 
+ private static final String SCIM_V2_API_LOCATION = "scim/v2"; + protected static final String DEFAULT_USERNAME = SampleDataPopulator.DEFAULT_USER_NAME; + protected static final String DEFAULT_PASSWORD = SampleDataPopulator.PASSWORD; + + private PrintWriter docLog; + private UserLoginSession uls; + + public static final String USERNAME = SampleDataPopulator.TEST_USER_NAME; + + private static boolean defaultUser = true; + private static boolean testApiEnabled = true; + private static boolean inMemoryStorage = true; + private static boolean addDefaultUser = false; + + protected static boolean isDefaultUserEnabled() { + return defaultUser; + } + + protected static void enableDefaultUser(boolean enabled) { + defaultUser = enabled; + } + + protected static void enableTestAPi(boolean enabled) { + testApiEnabled = enabled; + } + + protected static void inMemoryStorage(boolean enabled) { + inMemoryStorage = enabled; + } + + protected static void addDefaultUser(boolean enabled) { + addDefaultUser = enabled; + } + + protected void doc(String message) { + docLog.println("[doc] " + message); + } + + protected static boolean isComplexTypeSupport() { + return PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT.getDefault().getBoolVal(); + } + + @BeforeEach + public void resetDefaultUser() { + if (defaultUser) { + try { + SampleDataPopulator.addDefaultFirstUser(l(UserService.class), newNamespaceService()); + } catch (Exception e) { + throw new RuntimeException(e); + } + + login(); + } + } + + protected static final MediaType JSON = MediaType.APPLICATION_JSON_TYPE; + private static Client client; + private static DremioClient dremioClient; + private static WebTarget rootTarget; + private static WebTarget metricsEndpoint; + private static WebTarget apiV2; + private static WebTarget nessieProxy; + private static WebTarget masterApiV2; + private static WebTarget publicAPI; + private static WebTarget masterPublicAPI; + private static WebTarget scimV2API; + private static DACDaemon executorDaemon; + private static DACDaemon currentDremioDaemon; + private static DACDaemon masterDremioDaemon; + private static Binder dremioBinder; + private static SampleDataPopulator populator; + private static boolean executorDaemonClosed = false; + + public static void setClient(Client client) { + BaseTestServerJunit5.client = client; + } + + public static void setCurrentDremioDaemon(DACDaemon currentDremioDaemon) { + BaseTestServerJunit5.currentDremioDaemon = currentDremioDaemon; + } + + public static void setMasterDremioDaemon(DACDaemon masterDremioDaemon) { + BaseTestServerJunit5.masterDremioDaemon = masterDremioDaemon; + } + + public static void setPopulator(SampleDataPopulator populator) { + BaseTestServerJunit5.populator = populator; + } + + protected static WebTarget getAPIv2() { + return apiV2; + } + + protected static WebTarget getNessieProxy() { + return nessieProxy; + } + + protected static WebTarget getScimAPIv2() { + return scimV2API; + } + + protected static WebTarget getMetricsEndpoint() { + return metricsEndpoint; + } + + protected static WebTarget getMasterAPIv2() { + return masterApiV2; + } + + public static WebTarget getPublicAPI(Integer version) { + return publicAPI.path("v" + version); + } + + public static WebTarget getMasterPublicAPI(Integer version) { + return masterPublicAPI.path("v" + version); + } + + protected static DACDaemon getCurrentDremioDaemon() { + return currentDremioDaemon; + } + + protected static DACDaemon getMasterDremioDaemon() { + return masterDremioDaemon; + } + + protected static DACDaemon 
getExecutorDaemon() { + return executorDaemon; + } + + public static boolean isMultinode() { + return System.getProperty("dremio_multinode", null) != null; + } + + protected DremioClient getRpcClient() throws RpcException { + if (dremioClient == null) { + dremioClient = new DremioClient(true); + dremioClient.connect( + new Properties() { + { + put("direct", "localhost:" + l(UserServer.class).getPort()); + put("user", "dremio"); + put("password", "dremio123"); + } + }); + } + return dremioClient; + } + + @TempDir static File folder0; + @TempDir static File folder1; + @TempDir static File folder2; + @TempDir static File folder3; + + private static ObjectMapper configureObjectMapper() { + ObjectMapper objectMapper = JSONUtil.prettyMapper(); + JSONUtil.registerStorageTypes( + objectMapper, + DremioTest.CLASSPATH_SCAN_RESULT, + ConnectionReader.of(DremioTest.CLASSPATH_SCAN_RESULT, DremioTest.DEFAULT_SABOT_CONFIG)); + objectMapper + .registerModule( + new SimpleModule() + .addDeserializer( + JobDataFragment.class, + new JsonDeserializer<JobDataFragment>() { + @Override + public JobDataFragment deserialize( + JsonParser jsonParser, DeserializationContext deserializationContext) + throws IOException { + return jsonParser.readValueAs(DataPOJO.class); + } + })) + .addMixIn(ViewFieldType.class, ViewFieldTypeMixin.class); + objectMapper.setFilterProvider( + new SimpleFilterProvider() + .addFilter(SentinelSecure.FILTER_NAME, SentinelSecureFilter.TEST_ONLY)); + return objectMapper; + } + + protected static void initClient() throws Exception { + initClient(configureObjectMapper()); + } + + private static void initClient(ObjectMapper mapper) throws Exception { + setBinder(createBinder(currentDremioDaemon.getBindingProvider())); + + final Path path = new File(folder0.getAbsolutePath() + "/testplugins").toPath(); + if (!Files.exists(path)) { + TestUtilities.addDefaultTestPlugins(l(CatalogService.class), path.toString()); + } + + setPopulator( + new SampleDataPopulator( + l(SabotContext.class), + newSourceService(), + newDatasetVersionMutator(), + l(UserService.class), + newNamespaceService(), + DEFAULT_USERNAME, + l(CollaborationHelper.class))); + + final JacksonJaxbJsonProvider provider = new JacksonJaxbJsonProvider(); + provider.setMapper(mapper); + + client = + ClientBuilder.newBuilder() + .property(FEATURE_AUTO_DISCOVERY_DISABLE, true) + .register(provider) + .register(MultiPartFeature.class) + .build(); + rootTarget = client.target("http://localhost:" + currentDremioDaemon.getWebServer().getPort()); + final WebTarget livenessServiceTarget = + client.target( + "http://localhost:" + currentDremioDaemon.getLivenessService().getLivenessPort()); + metricsEndpoint = livenessServiceTarget.path("metrics"); + apiV2 = rootTarget.path(API_LOCATION); + nessieProxy = rootTarget.path(NESSIE_PROXY); + publicAPI = rootTarget.path(PUBLIC_API_LOCATION); + scimV2API = rootTarget.path(SCIM_V2_API_LOCATION); + if (isMultinode()) { + masterApiV2 = + client + .target("http://localhost:" + masterDremioDaemon.getWebServer().getPort()) + .path(API_LOCATION); + masterPublicAPI = + client + .target("http://localhost:" + masterDremioDaemon.getWebServer().getPort()) + .path(PUBLIC_API_LOCATION); + } else { + masterApiV2 = apiV2; + masterPublicAPI = publicAPI; + } + } + + private static void startCurrentDaemon() throws Exception { + currentDremioDaemon.init(); + } + + @BeforeAll + public static void init() throws Exception { + try (TimedBlock b = Timer.time("BaseTestServerJunit5.@BeforeAll")) { + initializeCluster(new DACDaemonModule()); + } + } + + 
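For orientation, a concrete suite would extend BaseTestServerJunit5 and lean on the inherited lifecycle: the @BeforeAll init() above boots the daemon(s) and wires the JAX-RS client, and the @BeforeEach resetDefaultUser() logs in before every test. A minimal sketch of such a subclass (the class and test names here are hypothetical, not part of this change):

public class TestServerSmoke extends BaseTestServerJunit5 {

  @org.junit.jupiter.api.Test
  public void testLoginSessionEstablished() {
    // getUls() returns the UserLoginSession created by the per-test login;
    // requests built via getBuilder(getAPIv2().path(...)) attach its token
    // in the Authorization header.
    org.junit.jupiter.api.Assertions.assertNotNull(getUls().getToken());
  }
}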
protected static void initializeCluster(DACModule dacModule) throws Exception { + initializeCluster(dacModule, o -> o); + } + + protected static void initializeCluster( + DACModule dacModule, Function<ObjectMapper, ObjectMapper> mapperUpdate) throws Exception { + final String hostname = InetAddress.getLocalHost().getCanonicalHostName(); + Provider<Integer> jobsPortProvider = + () -> currentDremioDaemon.getBindingProvider().lookup(ConduitServer.class).getPort(); + + // Turning on flag for NaaS + System.setProperty("nessie.source.resource.testing.enabled", "true"); + + if (isMultinode()) { + logger.info("Running tests in multinode mode"); + + // run all tests on remote coordinator node relying on additional remote executor node + + // we'll share the same file:/// writepath for all nodes. Pdfs causes + // problems as it uses static variables so resolution doesn't work in a + // single ClassLoader + final String distpath = "file://" + folder0.getAbsolutePath(); + // jobsResultsStore stored in /results and accelerator store in + // /accelerator, both of which + // need to exist + Files.createDirectories(new File(folder0.getAbsolutePath() + "/results").toPath()); + Files.createDirectories(new File(folder0.getAbsolutePath() + "/accelerator").toPath()); + Files.createDirectories(new File(folder0.getAbsolutePath() + "/scratch").toPath()); + Files.createDirectories(new File(folder0.getAbsolutePath() + "/metadata").toPath()); + Files.createDirectories(new File(folder0.getAbsolutePath() + "/gandiva").toPath()); + + // Get a random port + int port; + try (ServerSocket socket = new ServerSocket(0)) { + socket.setReuseAddress(true); + port = socket.getLocalPort(); + } + // create master node. + masterDremioDaemon = + DACDaemon.newDremioDaemon( + DACConfig.newDebugConfig(DremioTest.DEFAULT_SABOT_CONFIG) + .autoPort(true) + .allowTestApis(testApiEnabled) + .serveUI(false) + .jobServerEnabled(true) + .inMemoryStorage(inMemoryStorage) + .writePath(folder1.getAbsolutePath()) + .with(DremioConfig.DIST_WRITE_PATH_STRING, distpath) + .with(DremioConfig.ENABLE_EXECUTOR_BOOL, false) + .with(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_PORT_INT, port) + .with(DremioConfig.FLIGHT_SERVICE_ENABLED_BOOLEAN, false) + .with(DremioConfig.NESSIE_SERVICE_ENABLED_BOOLEAN, true) + .with(DremioConfig.NESSIE_SERVICE_IN_MEMORY_BOOLEAN, true) + .clusterMode(ClusterMode.DISTRIBUTED), + DremioTest.CLASSPATH_SCAN_RESULT, + dacModule); + masterDremioDaemon.init(); + + // remote coordinator node + int zkPort = masterDremioDaemon.getBindingProvider().lookup(ZkServer.class).getPort(); + currentDremioDaemon = + DACDaemon.newDremioDaemon( + DACConfig.newDebugConfig(DremioTest.DEFAULT_SABOT_CONFIG) + .isMaster(false) + .autoPort(true) + .allowTestApis(testApiEnabled) + .serveUI(false) + .inMemoryStorage(inMemoryStorage) + .jobServerEnabled(true) + .writePath(folder2.getAbsolutePath()) + .with(DremioConfig.DIST_WRITE_PATH_STRING, distpath) + .with(DremioConfig.FLIGHT_SERVICE_ENABLED_BOOLEAN, false) + .clusterMode(ClusterMode.DISTRIBUTED) + .localPort( + masterDremioDaemon.getBindingProvider().lookup(FabricService.class).getPort() + + 1) + .isRemote(true) + .with(DremioConfig.ENABLE_EXECUTOR_BOOL, false) + .with(DremioConfig.NESSIE_SERVICE_ENABLED_BOOLEAN, true) + .with(DremioConfig.NESSIE_SERVICE_IN_MEMORY_BOOLEAN, true) + .zk("localhost:" + zkPort), + DremioTest.CLASSPATH_SCAN_RESULT, + dacModule); + startCurrentDaemon(); + + // remote executor node + executorDaemon = + DACDaemon.newDremioDaemon( + DACConfig.newDebugConfig(DremioTest.DEFAULT_SABOT_CONFIG) + .autoPort(true) + 
.allowTestApis(testApiEnabled) + .serveUI(false) + .inMemoryStorage(inMemoryStorage) + .with(DremioConfig.ENABLE_COORDINATOR_BOOL, false) + .writePath(folder3.getAbsolutePath()) + .with(DremioConfig.DIST_WRITE_PATH_STRING, distpath) + .clusterMode(ClusterMode.DISTRIBUTED) + .localPort( + masterDremioDaemon.getBindingProvider().lookup(FabricService.class).getPort() + + 1) + .isRemote(true) + .zk("localhost:" + zkPort), + DremioTest.CLASSPATH_SCAN_RESULT, + dacModule); + executorDaemonClosed = false; + executorDaemon.init(); + } else { + logger.info("Running tests in local mode"); + final String distpath = "file://" + folder0.getAbsolutePath(); + currentDremioDaemon = + DACDaemon.newDremioDaemon( + DACConfig.newDebugConfig(DremioTest.DEFAULT_SABOT_CONFIG) + .autoPort(true) + .allowTestApis(testApiEnabled) + .serveUI(false) + .addDefaultUser(addDefaultUser) + .inMemoryStorage(inMemoryStorage) + .writePath(folder1.getAbsolutePath()) + .with(DremioConfig.METADATA_PATH_STRING, distpath + "/metadata") + .with(DremioConfig.ACCELERATOR_PATH_STRING, distpath + "/accelerator") + .with(DremioConfig.GANDIVA_CACHE_PATH_STRING, distpath + "/gandiva") + .with(DremioConfig.FLIGHT_SERVICE_ENABLED_BOOLEAN, false) + .with(DremioConfig.NESSIE_SERVICE_ENABLED_BOOLEAN, true) + .with(DremioConfig.NESSIE_SERVICE_IN_MEMORY_BOOLEAN, true) + .clusterMode(DACDaemon.ClusterMode.LOCAL), + DremioTest.CLASSPATH_SCAN_RESULT, + dacModule); + masterDremioDaemon = null; + startCurrentDaemon(); + } + + initClient(mapperUpdate.apply(configureObjectMapper())); + } + + protected static NamespaceService newNamespaceService() { + return l(NamespaceService.class); + } + + protected static CatalogService newCatalogService() { + return l(CatalogService.class); + } + + protected static SourceService newSourceService() { + return l(SourceService.class); + } + + protected static ReflectionServiceHelper newReflectionServiceHelper() { + return l(ReflectionServiceHelper.class); + } + + protected static DatasetVersionMutator newDatasetVersionMutator() { + return l(DatasetVersionMutator.class); + } + + protected static SabotContext getSabotContext() { + return l(SabotContext.class); + } + + protected static <T> T l(Class<T> clazz) { + return dremioBinder.lookup(clazz); + } + + protected static <T> Provider<T> p(Class<T> clazz) { + return dremioBinder.provider(clazz); + } + + protected static <T> Provider<T> pMaster(Class<T> clazz) { + if (masterDremioDaemon != null) { + return masterDremioDaemon.getBindingProvider().provider(clazz); + } + + return p(clazz); + } + + protected void deleteSource(String name) { + ((CatalogServiceImpl) l(CatalogService.class)).deleteSource(name); + } + + protected static void setBinder(Binder binder) { + dremioBinder = binder; + } + + public static Binder createBinder(BindingProvider dremioBindingProvider) { + Binder dremioBinder = dremioBindingProvider.newChild(); + dremioBinder.bind( + SecurityContext.class, + new SecurityContext() { + @Override + public Principal getUserPrincipal() { + return new Principal() { + @Override + public String getName() { + return DEFAULT_USERNAME; + } + }; + } + + @Override + public boolean isUserInRole(String role) { + return true; // admin + } + + @Override + public boolean isSecure() { + return true; + } + + @Override + public String getAuthenticationScheme() { + return null; + } + }); + SabotContext context = dremioBinder.lookup(SabotContext.class); + dremioBinder.bind(OptionManager.class, context.getOptionManager()); + return dremioBinder; + } + + protected static void closeExecutorDaemon() throws 
Exception { + if (!executorDaemonClosed) { + executorDaemon.close(); + executorDaemonClosed = true; + } + } + + private static int getResourceAllocatorCount() { + return l(BufferAllocatorFactory.class).getBaseAllocator().getChildAllocators().size(); + } + + private static int getQueryPlanningAllocatorCount() { + final BufferAllocator queryPlanningAllocator = + l(SabotContext.class).getQueryPlanningAllocator(); + if (queryPlanningAllocator == null) { + return 0; + } + return queryPlanningAllocator.getChildAllocators().size(); + } + + @AfterAll + public static void serverClose() throws Exception { + if (dremioBinder == null) { + return; + } + try (TimedBlock b = Timer.time("BaseTestServerJunit5.@AfterAll")) { + + await() + .atMost(Duration.ofSeconds(50)) + .untilAsserted( + () -> + assertEquals( + String.format("Not all the resource/query planning allocators were closed. ResourceAllocator count = %d; QueryPlanningAllocatorCount = %d", getResourceAllocatorCount(), getQueryPlanningAllocatorCount()), + 0, + getResourceAllocatorCount() + getQueryPlanningAllocatorCount())); + + defaultUser = + true; // in case another test disables the default user and forgets to enable it back + // again at the end + AutoCloseables.close( + new AutoCloseable() { + @Override + public void close() throws Exception { + if (dremioClient != null) { + // since the client is only created when needed, make sure we don't re-close an old + // client. + DremioClient localClient = dremioClient; + dremioClient = null; + localClient.close(); + } + } + }, + new AutoCloseable() { + @Override + public void close() throws Exception { + if (client != null) { + client.close(); + } + } + }, + (executorDaemonClosed ? null : executorDaemon), + currentDremioDaemon, + masterDremioDaemon, + populator); + executorDaemonClosed = true; + dremioClient = null; + } + } + + protected void login() { + login(DEFAULT_USERNAME, DEFAULT_PASSWORD); + } + + protected void login(final String userName, final String password) { + UserLogin userLogin = new UserLogin(userName, password); + this.setUls( + expectSuccess( + getAPIv2().path("/login").request(JSON).buildPost(Entity.json(userLogin)), + UserLoginSession.class)); + } + + protected static SampleDataPopulator getPopulator() { + return populator; + } + + protected String getAuthHeaderName() { + return HttpHeader.AUTHORIZATION.toString(); + } + + protected String getAuthHeaderValue() { + return "_dremio" + getUls().getToken(); + } + + protected Invocation.Builder getBuilder(WebTarget webTarget) { + return webTarget.request(JSON).header(getAuthHeaderName(), getAuthHeaderValue()); + } + + protected Invocation.Builder getBuilder(String apiUrl) { + return getBuilder( + client.target( + "http://localhost:" + + currentDremioDaemon.getWebServer().getPort() + + "/" + + API_LOCATION + + apiUrl)); + } + + public static void assertContains(String expectedContains, String string) { + assertThat(string).contains(expectedContains); + } + + public static void assertNotContains(String expectedNotContains, String string) { + assertThat(string).doesNotContain(expectedNotContains); + } + + protected UserLoginSession getUls() { + return uls; + } + + protected void setUls(UserLoginSession uls) { + this.uls = uls; + } + + protected static void populateInitialData() + throws DatasetNotFoundException, DatasetVersionNotFoundException, ExecutionSetupException, + NamespaceException, IOException { + populator.populateInitialData(); + } + + public static void clearAllDataExceptUser() throws IOException, NamespaceException { + 
@SuppressWarnings("resource") + DACDaemon daemon = isMultinode() ? getMasterDremioDaemon() : getCurrentDremioDaemon(); + TestUtilities.clear( + daemon.getBindingProvider().lookup(CatalogService.class), + daemon.getBindingProvider().lookup(LegacyKVStoreProvider.class), + ImmutableList.of(SimpleUserService.USER_STORE), + ImmutableList.of("cp")); + if (isMultinode()) { + ((CatalogServiceImpl) + getCurrentDremioDaemon().getBindingProvider().lookup(CatalogService.class)) + .synchronizeSources(); + } + } + + protected void setSpace() throws NamespaceException, IOException { + clearAllDataExceptUser(); + final SpaceConfig foo = new SpaceConfig().setName("spacefoo"); + SpacePath spacePath = new SpacePath(new SpaceName(foo.getName())); + newNamespaceService().addOrUpdateSpace(spacePath.toNamespaceKey(), foo); + newNamespaceService() + .addOrUpdateFolder( + new FolderPath("spacefoo.folderbar").toNamespaceKey(), + new FolderConfig() + .setName("folderbar") + .setFullPathList(asList("spacefoo", "folderbar"))); + newNamespaceService() + .addOrUpdateFolder( + new FolderPath("spacefoo.folderbar.folderbaz").toNamespaceKey(), + new FolderConfig() + .setName("folderbaz") + .setFullPathList(asList("spacefoo", "folderbar", "folderbaz"))); + } + + protected DatasetPath getDatasetPath(DatasetUI datasetUI) { + return new DatasetPath(datasetUI.getFullPath()); + } + + protected DatasetVersionResourcePath getDatasetVersionPath(DatasetUI datasetUI) { + return new DatasetVersionResourcePath(getDatasetPath(datasetUI), datasetUI.getDatasetVersion()); + } + + protected String versionedResourcePath(DatasetUI datasetUI) { + return getPathJoiner() + .join( + "/dataset", + getKeyJoiner().join(datasetUI.getFullPath()), + "version", + datasetUI.getDatasetVersion()); + } + + protected String resourcePath(DatasetUI datasetUI) { + return getPathJoiner().join("/dataset", getKeyJoiner().join(datasetUI.getFullPath())); + } + + protected String getName(DatasetUI datasetUI) { + return datasetUI.getFullPath().get(datasetUI.getFullPath().size() - 1); + } + + protected String getRoot(DatasetUI datasetUI) { + return datasetUI.getFullPath().get(0); + } + + protected Invocation getDatasetInvocation(DatasetPath datasetPath) { + return getBuilder(getAPIv2().path("dataset/" + datasetPath.toString())).buildGet(); + } + + protected DatasetUI getDataset(DatasetPath datasetPath) { + return expectSuccess(getDatasetInvocation(datasetPath), DatasetUI.class); + } + + protected DatasetUI getVersionedDataset(DatasetVersionResourcePath datasetVersionPath) { + final Invocation invocation = + getBuilder(getAPIv2().path(getPathJoiner().join(datasetVersionPath.toString(), "preview"))) + .buildGet(); + + return expectSuccess(invocation, InitialPreviewResponse.class).getDataset(); + } + + protected InitialPreviewResponse getPreview(DatasetUI datasetUI) { + final Invocation invocation = + getBuilder( + getAPIv2().path(getPathJoiner().join(versionedResourcePath(datasetUI), "preview"))) + .buildGet(); + + return expectSuccess(invocation, InitialPreviewResponse.class); + } + + /** + * Get the preview response for given dataset path. Dataset can be physical or virutal. 
+ * + * @param datasetPath + * @return + */ + protected InitialDataPreviewResponse getPreview(DatasetPath datasetPath) { + final Invocation invocation = + getBuilder(getAPIv2().path("dataset/" + datasetPath.toPathString() + "/preview")) + .buildGet(); + + return expectSuccess(invocation, InitialDataPreviewResponse.class); + } + + protected JobDataFragment getData(String paginationUrl, long offset, long limit) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(paginationUrl) + .queryParam("offset", offset) + .queryParam("limit", limit)) + .buildGet(); + + return expectSuccess(invocation, JobDataFragment.class); + } + + protected InitialPreviewResponse transform(DatasetUI datasetUI, TransformBase transformBase) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path( + getPathJoiner() + .join(versionedResourcePath(datasetUI), "transformAndPreview")) + .queryParam("newVersion", newVersion())) + .buildPost(entity(transformBase, JSON)); + + return expectSuccess(invocation, InitialPreviewResponse.class); + } + + protected DatasetSearchUIs search(String filter) { + final Invocation invocation = + getBuilder(getAPIv2().path("datasets/search").queryParam("filter", filter)).buildGet(); + + return expectSuccess(invocation, DatasetSearchUIs.class); + } + + protected Invocation reapplyInvocation(DatasetVersionResourcePath versionResourcePath) { + return getBuilder(getAPIv2().path(versionResourcePath.toString() + "/editOriginalSql")) + .buildPost(null); + } + + protected InitialPreviewResponse reapply(DatasetVersionResourcePath versionResourcePath) { + return expectSuccess(reapplyInvocation(versionResourcePath), InitialPreviewResponse.class); + } + + protected DatasetUIWithHistory save(DatasetUI datasetUI, String saveVersion) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("savedTag", saveVersion)) + .buildPost(entity("", JSON)); + + return expectSuccess(invocation, DatasetUIWithHistory.class); + } + + protected DatasetUI rename(DatasetPath datasetPath, String newName) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path("dataset/" + datasetPath.toString() + "/rename") + .queryParam("renameTo", newName)) + .buildPost(null); + + return expectSuccess(invocation, DatasetUI.class); + } + + protected DatasetUI move(DatasetPath currentPath, DatasetPath newPath) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path("dataset/" + currentPath.toString() + "/moveTo/" + newPath.toString())) + .buildPost(null); + + return expectSuccess(invocation, DatasetUI.class); + } + + protected DatasetUIWithHistory saveAs(DatasetUI datasetUI, DatasetPath newName) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("as", newName)) + .buildPost(entity("", JSON)); + + return expectSuccess(invocation, DatasetUIWithHistory.class); + } + + protected DatasetUIWithHistory saveAsInBranch(DatasetUI datasetUI, DatasetPath newName, String savedTag, String branchName) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("as", newName) + .queryParam("branchName", branchName) + .queryParam("savedTag", savedTag)) + + .buildPost(entity("", JSON)); + + return expectSuccess(invocation, DatasetUIWithHistory.class); + } + + protected DatasetUIWithHistory saveInBranch(DatasetUI datasetUI, String saveVersion, String branchName) { + final Invocation invocation = + 
getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("savedTag", saveVersion) + .queryParam("branchName", branchName)) + .buildPost(entity("", JSON)); + + return expectSuccess(invocation, DatasetUIWithHistory.class); + } + + protected UserExceptionMapper.ErrorMessageWithContext saveAsInBranchExpectError(DatasetUI datasetUI, DatasetPath newName, String savedTag, String branchName) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("as", newName) + .queryParam("branchName", branchName) + .queryParam("savedTag", savedTag)) + + .buildPost(entity("", JSON)); + + return expectError(CLIENT_ERROR, invocation, UserExceptionMapper.ErrorMessageWithContext.class); + } + + protected void saveAsExpectError(DatasetUI datasetUI, DatasetPath newName) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("as", newName)) + .buildPost(entity("", JSON)); + + expectError(CLIENT_ERROR, invocation, ValidationErrorMessage.class); + } + + protected DatasetUI delete(String datasetResourcePath, String savedVersion) { + final Invocation invocation = + getBuilder(getAPIv2().path(datasetResourcePath).queryParam("savedTag", savedVersion)) + .buildDelete(); + + return expectSuccess(invocation, DatasetUI.class); + } + + protected void saveExpectConflict(DatasetUI datasetUI, String saveVersion) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/save") + .queryParam("savedTag", saveVersion)) + .buildPost(entity("", JSON)); + + expectStatus(Status.CONFLICT, invocation); + } + + protected InitialPendingTransformResponse transformPeek( + DatasetUI datasetUI, TransformBase transform) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path(versionedResourcePath(datasetUI) + "/transformPeek") + .queryParam("newVersion", newVersion())) + .buildPost(entity(transform, JSON)); + + return expectSuccess(invocation, InitialPendingTransformResponse.class); + } + + protected DatasetUI createDatasetFromSQLAndSave( + DatasetPath datasetPath, String sql, List<String> context) { + InitialPreviewResponse datasetCreateResponse = createDatasetFromSQL(sql, context); + return saveAs(datasetCreateResponse.getDataset(), datasetPath).getDataset(); + } + + protected DatasetUI createVersionedDatasetFromSQLAndSave( + DatasetPath datasetPath, String sql, List<String> context, String pluginName, String branchName) { + final Map<String, VersionContextReq> references = new HashMap<>(); + references.put(pluginName, new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, branchName)); + InitialPreviewResponse datasetCreateResponse = createVersionedDatasetFromSQL(sql, context, pluginName, branchName); + return saveAsInBranch(datasetCreateResponse.getDataset(), datasetPath, null, branchName).getDataset(); + } + + protected InitialPreviewResponse createDatasetFromSQL(String sql, List<String> context) { + return expectSuccess( + getBuilder( + getAPIv2().path("datasets/new_untitled_sql").queryParam("newVersion", newVersion())) + .buildPost(entity(new CreateFromSQL(sql, context), JSON)), // => sending + InitialPreviewResponse.class); // <= receiving + } + + protected InitialPreviewResponse createDatasetFromParent(String parentDataset) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path("datasets/new_untitled") + .queryParam("newVersion", newVersion()) + .queryParam("parentDataset", parentDataset)) + .buildPost(null); + + return 
+ protected InitialPreviewResponse createDatasetFromSQL(String sql, List<String> context) { + return expectSuccess( + getBuilder( + getAPIv2().path("datasets/new_untitled_sql").queryParam("newVersion", newVersion())) + .buildPost(entity(new CreateFromSQL(sql, context), JSON)), // => sending + InitialPreviewResponse.class); // <= receiving + } + + protected InitialPreviewResponse createDatasetFromParent(String parentDataset) { + final Invocation invocation = + getBuilder( + getAPIv2() + .path("datasets/new_untitled") + .queryParam("newVersion", newVersion()) + .queryParam("parentDataset", parentDataset)) + .buildPost(null); + + return expectSuccess(invocation, InitialPreviewResponse.class); + } + + protected InitialPreviewResponse createVersionedDatasetFromSQL(String sql, List<String> context, String pluginName, String branchName) { + final Map<String, VersionContextReq> references = new HashMap<>(); + references.put(pluginName, new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, branchName)); + + return expectSuccess( + getBuilder( + getAPIv2().path("datasets/new_untitled_sql").queryParam("newVersion", newVersion())) + .buildPost(entity(new CreateFromSQL(sql, context, references), JSON)), // => sending + InitialPreviewResponse.class); // <= receiving + } + + protected DatasetUI createDatasetFromParentAndSave( + DatasetPath newDatasetPath, String parentDataset) { + InitialPreviewResponse response = createDatasetFromParent(parentDataset); + + return saveAs(response.getDataset(), newDatasetPath).getDataset(); + } + + protected DatasetUI createDatasetFromParentAndSave(String newDataSetName, String parentDataset) + throws Exception { + setSpace(); + InitialPreviewResponse response = createDatasetFromParent(parentDataset); + + return saveAs( + response.getDataset(), + new DatasetPath("spacefoo.folderbar.folderbaz." + newDataSetName)) + .getDataset(); + } + + protected SqlQuery getQueryFromConfig(DatasetUI config) { + return new SqlQuery(config.getSql(), config.getContext(), DEFAULT_USERNAME); + } + + protected SqlQuery getQueryFromConfig(VirtualDatasetUI config) { + return new SqlQuery(config.getSql(), config.getState().getContextList(), DEFAULT_USERNAME); + } + + protected SqlQuery getQueryFromSQL(String sql) { + return new SqlQuery(sql, DEFAULT_USERNAME); + } + + protected void assertErrorMessage(final GenericErrorMessage error, final String errorMessage) { + assertEquals( + "error message should be '" + errorMessage + "'", errorMessage, error.getErrorMessage()); + } + + protected void assertErrorMessage( + final GenericErrorMessage error, final String errorMessage, final String expectedMoreInfo) { + assertEquals( + "error message should be '" + errorMessage + "'", errorMessage, error.getErrorMessage()); + assertThat(error.getMoreInfo()).contains(expectedMoreInfo); + } + + protected JobId submitAndWaitUntilSubmitted(JobRequest request, JobStatusListener listener) { + return JobsServiceTestUtils.submitAndWaitUntilSubmitted( + l(JobsService.class), request, listener); + } + + protected JobId submitAndWaitUntilSubmitted(JobRequest request) { + return JobsServiceTestUtils.submitAndWaitUntilSubmitted(l(JobsService.class), request); + } + + protected JobId submitJobAndWaitUntilCompletion(JobRequest request, JobStatusListener listener) { + return JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), request, listener) + .getJobId(); + } + + protected boolean submitJobAndCancelOnTimeOut(JobRequest request, long timeOutInMillis) + throws Exception { + return JobsServiceTestUtils.submitJobAndCancelOnTimeout( + l(JobsService.class), request, timeOutInMillis); + } + + protected static JobId submitJobAndWaitUntilCompletion(JobRequest request) { + return JobsServiceTestUtils.submitJobAndWaitUntilCompletion(l(JobsService.class), request); + } + + protected JobSubmission getJobSubmissionAfterJobCompletion(JobRequest request) { + return JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), request, JobStatusListener.NO_OP); + } +
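+ // Usage sketch (illustrative only; the SQL is arbitrary): the helpers above wrap + // JobsServiceTestUtils so a test can run a statement synchronously, e.g. + // + // JobId jobId = submitJobAndWaitUntilCompletion( + // JobRequest.newBuilder() + // .setSqlQuery(new SqlQuery("SELECT 1", DEFAULT_USERNAME)) + // .build()); +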
+ protected void runQuery( + JobsService jobsService, + String name, + int rows, + int columns, + FolderPath parent, + BufferAllocator allocator) + throws JobNotFoundException { + FilePath filePath; + if (parent == null) { + filePath = + new FilePath( + ImmutableList.of(HomeName.getUserHomePath(DEFAULT_USER_NAME).getName(), name)); + } else { + List<String> path = Lists.newArrayList(parent.toPathList()); + path.add(name); + filePath = new FilePath(path); + } + try (final JobDataFragment truncData = + submitJobAndGetData( + jobsService, + JobRequest.newBuilder() + .setSqlQuery( + new SqlQuery( + format("select * from %s", filePath.toPathString()), DEFAULT_USER_NAME)) + .build(), + 0, + rows + 1, + allocator)) { + assertEquals(rows, truncData.getReturnedRowCount()); + assertEquals(columns, truncData.getColumns().size()); + } + } + + protected static UserBitShared.QueryProfile getQueryProfile(JobRequest request) throws Exception { + return JobsServiceTestUtils.getQueryProfile(l(JobsService.class), request); + } + + protected static void setSystemOption(final OptionValidator option, final String value) { + setSystemOption(option.getOptionName(), value); + } + + protected static void setSystemOption(String optionName, String optionValue) { + JobsServiceTestUtils.setSystemOption(l(JobsService.class), optionName, optionValue); + } + + protected static void resetSystemOption(String optionName) { + JobsServiceTestUtils.resetSystemOption(l(JobsService.class), optionName); + } + + protected static AutoCloseable enableDeltaLake() { + setSystemOption(PlannerSettings.ENABLE_DELTALAKE.getOptionName(), "true"); + return () -> + setSystemOption( + PlannerSettings.ENABLE_DELTALAKE.getOptionName(), + PlannerSettings.ENABLE_DELTALAKE.getDefault().getBoolVal().toString()); + } + + protected static AutoCloseable enableRowCountStat(boolean enableStatAfterTest) { + setSystemOption(PlannerSettings.USE_ROW_COUNT_STATISTICS.getOptionName(), "true"); + setSystemOption(PlannerSettings.USE_STATISTICS.getOptionName(), "false"); + return () -> { + setSystemOption(PlannerSettings.USE_ROW_COUNT_STATISTICS.getOptionName(), "false"); + setSystemOption( + PlannerSettings.USE_STATISTICS.getOptionName(), String.valueOf(enableStatAfterTest)); + }; + } + + protected static AutoCloseable withSystemOption(String optionName, String optionValue) { + setSystemOption(optionName, optionValue); + return () -> resetSystemOption(optionName); + } + + protected static UserBitShared.QueryProfile getTestProfile() throws Exception { + String query = + "select sin(val_int_64) + 10 from cp" + + ".\"parquet/decimals/mixedDecimalsInt32Int64FixedLengthWithStats.parquet\""; + + UserBitShared.QueryProfile queryProfile = + getQueryProfile( + JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME)) + .setQueryType(QueryType.UI_INTERNAL_RUN) + .setDatasetPath(DatasetPath.NONE.toNamespaceKey()) + .setDatasetVersion(DatasetVersion.NONE) + .build()); + return queryProfile; + } + + protected static AutoCloseable enableIcebergTables() { + setSystemOption(ENABLE_ICEBERG.getOptionName(), "true"); + setSystemOption(ExecConstants.CTAS_CAN_USE_ICEBERG.getOptionName(), "true"); + return () -> { + setSystemOption( + ENABLE_ICEBERG.getOptionName(), ENABLE_ICEBERG.getDefault().getBoolVal().toString()); + setSystemOption( + ExecConstants.CTAS_CAN_USE_ICEBERG.getOptionName(), + ExecConstants.CTAS_CAN_USE_ICEBERG.getDefault().getBoolVal().toString()); + }; + } + + protected static AutoCloseable enableVersionedViews() { + setSystemOption(VERSIONED_VIEW_ENABLED.getOptionName(), "true"); + return () -> { + setSystemOption( + VERSIONED_VIEW_ENABLED.getOptionName(), + VERSIONED_VIEW_ENABLED.getDefault().getBoolVal().toString()); + }; + } +
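+ // Usage sketch (illustrative only; "some.option.name" is a hypothetical option): each + // enable*/with* helper returns an AutoCloseable that restores the option, so a test can + // scope a flag with try-with-resources: + // + // try (AutoCloseable ignored = withSystemOption("some.option.name", "true")) { + // // statements that need the option enabled + // } +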
+ protected static AutoCloseable enableUnlimitedSplitsSupportFlags() { + setSystemOption(PlannerSettings.UNLIMITED_SPLITS_SUPPORT, "true"); + setSystemOption(ExecConstants.ENABLE_ICEBERG, "true"); + + return () -> { + setSystemOption( + PlannerSettings.UNLIMITED_SPLITS_SUPPORT, + PlannerSettings.UNLIMITED_SPLITS_SUPPORT.getDefault().getBoolVal().toString()); + setSystemOption( + ExecConstants.ENABLE_ICEBERG, + ExecConstants.ENABLE_ICEBERG.getDefault().getBoolVal().toString()); + }; + } + + protected static AutoCloseable disableUnlimitedSplitsSupport() { + setSystemOption(UNLIMITED_SPLITS_SUPPORT.getOptionName(), "false"); + setSystemOption(ENABLE_ICEBERG.getOptionName(), "false"); + return () -> { + try { + setSystemOption( + UNLIMITED_SPLITS_SUPPORT.getOptionName(), + UNLIMITED_SPLITS_SUPPORT.getDefault().getBoolVal().toString()); + setSystemOption( + ENABLE_ICEBERG.getOptionName(), ENABLE_ICEBERG.getDefault().getBoolVal().toString()); + } catch (Exception e) { + // ignore + } + }; + } + + protected static String readResourceAsString(String fileName) { + return TestTools.readTestResourceAsString(fileName); + } + + protected static JobRequest createNewJobRequestFromSql(String sql) { + return JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(sql, DEFAULT_USERNAME)) + .build(); + } + + protected static JobRequest createJobRequestFromSqlAndSessionId(String sql, String sessionId) { + return JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(sql, ImmutableList.of(), DEFAULT_USERNAME, + null, sessionId)) + .build(); + } + + protected void runQueryInSession(String sql, String sessionId) { + String executedSessionId = getJobSubmissionAfterJobCompletion( + createJobRequestFromSqlAndSessionId(sql, sessionId)).getSessionId().getId(); + AssertionsForClassTypes.assertThat(executedSessionId).isEqualTo(sessionId); + } + + protected String runQueryAndGetSessionId(String sql) { + return getJobSubmissionAfterJobCompletion(createNewJobRequestFromSql(sql)).getSessionId().getId(); + } + + protected void runQueryCheckResults( + JobsService jobsService, + String sourcePluginName, + List<String> tablePath, + int rows, + int columns, + BufferAllocator allocator, + String sessionId) + throws JobNotFoundException { + + try (final JobDataFragment truncData = + submitJobAndGetData( + jobsService, + JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(format("select * from %s", + fullyQualifiedTableName(sourcePluginName, tablePath)), + ImmutableList.of(), + DEFAULT_USERNAME, + null, + sessionId)) + .build(), + 0, + rows + 1, + allocator)) { + assertEquals(rows, truncData.getReturnedRowCount()); + assertEquals(columns, truncData.getColumns().size()); + } + } + + protected static JobId runQuery(String query) { + return submitJobAndWaitUntilCompletion( + JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(query, SampleDataPopulator.DEFAULT_USER_NAME)) + .setQueryType(QueryType.UI_INTERNAL_RUN) + .build()); + } +
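+ // Usage sketch (illustrative only; the SQL is arbitrary): the session helpers above pin + // several statements to one job session: + // + // String sessionId = runQueryAndGetSessionId("USE cp"); + // runQueryInSession("SELECT 1", sessionId); // asserts the same session is reused +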
+ protected JobId runQuery(String query, String sessionId) { + final SqlQuery sqlQuery = + (sessionId != null) + ? new SqlQuery(query, Collections.emptyList(), DEFAULT_USERNAME, null, sessionId) + : new SqlQuery(query, DEFAULT_USERNAME); + return submitJobAndWaitUntilCompletion( + JobRequest.newBuilder() + .setSqlQuery(sqlQuery) + .setQueryType(QueryType.UI_RUN) + .setDatasetPath(DatasetPath.NONE.toNamespaceKey()) + .build()); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestDatasetProfiles.java b/dac/backend/src/test/java/com/dremio/dac/server/TestDatasetProfiles.java index db52c16ae8..9d8243b7c1 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestDatasetProfiles.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestDatasetProfiles.java @@ -27,6 +27,7 @@ import java.util.Collections; import javax.ws.rs.client.Entity; +import javax.ws.rs.core.GenericType; import org.apache.arrow.vector.types.Types.MinorType; import org.apache.arrow.vector.types.pojo.Field; @@ -37,7 +38,6 @@ import org.junit.rules.TemporaryFolder; import com.dremio.dac.explore.model.DatasetPath; -import com.dremio.dac.model.spaces.Space; import com.dremio.exec.proto.UserBitShared.QueryProfile; import com.dremio.exec.record.BatchSchema; import com.dremio.service.job.proto.QueryType; @@ -112,8 +112,7 @@ public void testVds() throws Exception { try { getNamespaceService().getSpace(new NamespaceKey(spaceName)); } catch (NamespaceNotFoundException e) { - expectSuccess(getBuilder(getAPIv2().path("space/" + spaceName)) - .buildPut(Entity.json(new Space(null, spaceName, null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, spaceName, null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); } final DatasetPath vdsPath = new DatasetPath(spaceName + "." + vdsName); @@ -133,8 +132,7 @@ public void testJoinVds() throws Exception { try { getNamespaceService().getSpace(new NamespaceKey(spaceName)); } catch (NamespaceNotFoundException e) { - expectSuccess(getBuilder(getAPIv2().path("space/" + spaceName)) - .buildPut(Entity.json(new Space(null, spaceName, null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, spaceName, null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); } final DatasetPath vdsPath = new DatasetPath(spaceName + "." + vdsName); diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestDropView.java b/dac/backend/src/test/java/com/dremio/dac/server/TestDropView.java new file mode 100644 index 0000000000..6ce7b8cd30 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestDropView.java @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.dac.server; + +import static org.junit.jupiter.api.Assertions.assertThrowsExactly; + +import java.util.Arrays; +import java.util.List; + +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.GenericType; + +import org.junit.BeforeClass; +import org.junit.Test; + +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.dac.server.test.SampleDataPopulator; +import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.service.job.proto.QueryType; +import com.dremio.service.jobs.JobRequest; +import com.dremio.service.jobs.SqlQuery; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.space.proto.SpaceConfig; + + +public class TestDropView extends BaseTestServer { + + @BeforeClass + public static void init() throws Exception { + BaseTestServer.init(); + + // setup a space + NamespaceKey key = new NamespaceKey("space"); + SpaceConfig spaceConfig = new SpaceConfig(); + spaceConfig.setName("space"); + newNamespaceService().addOrUpdateSpace(key, spaceConfig); + } + + @Test + public void testDropView() throws NamespaceException { + DatasetConfig dsConfig = createView("space", "vds1"); + dropView(dsConfig.getFullPathList()); + verifyViewDeleted(dsConfig.getFullPathList()); + } + + @Test + public void testDeleteView() throws NamespaceException { + DatasetConfig dsConfig = createView("space", "vds2"); + deleteView(dsConfig.getFullPathList(), dsConfig.getTag()); + verifyViewDeleted(dsConfig.getFullPathList()); + } + + @Test + public void testDropViewWithMissingHistory() throws NamespaceException { + DatasetConfig dsConfig = createView("space", "vds3"); + final LegacyKVStoreProvider provider = l(LegacyKVStoreProvider.class); + DatasetVersionMutator.deleteDatasetVersion(provider, dsConfig.getFullPathList(), dsConfig.getVirtualDataset().getVersion().getVersion()); + dropView(dsConfig.getFullPathList()); + verifyViewDeleted(dsConfig.getFullPathList()); + } + + @Test + public void testDeleteViewWithMissingHistory() throws NamespaceException { + DatasetConfig dsConfig = createView("space", "vds4"); + final LegacyKVStoreProvider provider = l(LegacyKVStoreProvider.class); + DatasetVersionMutator.deleteDatasetVersion(provider, dsConfig.getFullPathList(), dsConfig.getVirtualDataset().getVersion().getVersion()); + deleteView(dsConfig.getFullPathList(), dsConfig.getTag()); + verifyViewDeleted(dsConfig.getFullPathList()); + } + + private DatasetConfig createView(String space, String name) throws NamespaceException { + NamespaceKey datasetPath = new NamespaceKey(Arrays.asList(space, name)); + final String query = "CREATE VIEW " + datasetPath.getSchemaPath() + " AS SELECT * FROM INFORMATION_SCHEMA.\"tables\""; + submitJob(query); + + return newNamespaceService().getDataset(datasetPath); + } + + private void dropView(List<String> path) { + final String query = "DROP VDS " + String.join(".", path); + submitJob(query); + } + + private void submitJob(String query) { + submitJobAndWaitUntilCompletion( + JobRequest.newBuilder() + .setSqlQuery(new SqlQuery(query, SampleDataPopulator.DEFAULT_USER_NAME)) + .setQueryType(QueryType.UI_PREVIEW) + .setDatasetPath(DatasetPath.NONE.toNamespaceKey()) + .build()); + } +
+ private void deleteView(List<String> path, String tag) { + WebTarget target = getAPIv2() + .path("dataset") + .path(String.join(".", path)) + .queryParam("savedTag", tag); + expectSuccess(getBuilder(target).buildDelete(), new GenericType<DatasetUI>() {}); + } + + private void verifyViewDeleted(List<String> path) throws NamespaceException { + NamespaceKey datasetPath = new NamespaceKey(path); + assertThrowsExactly(NamespaceNotFoundException.class, () -> newNamespaceService().getDataset(datasetPath)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFileStoragePlugin.java b/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFileStoragePlugin.java index 7244049b83..1514b404a6 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFileStoragePlugin.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFileStoragePlugin.java @@ -22,6 +22,7 @@ import java.util.List; import javax.ws.rs.client.Entity; +import javax.ws.rs.core.GenericType; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -31,7 +32,6 @@ import com.dremio.connector.metadata.EntityPath; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.homefiles.HomeFileSystemStoragePlugin; -import com.dremio.dac.model.spaces.Space; import com.dremio.exec.catalog.CatalogServiceImpl; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.DatasetRetrievalOptions; @@ -75,8 +75,7 @@ public void testGetDatasetHandle() throws Exception { try { getNamespaceService().getSpace(new NamespaceKey(spaceName)); } catch (NamespaceNotFoundException e) { - expectSuccess(getBuilder(getAPIv2().path("space/" + spaceName)) - .buildPut(Entity.json(new Space(null, spaceName, null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, spaceName, null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); } final DatasetPath vdsPath = new DatasetPath(spaceName + "."
+ vdsName); diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFiles.java b/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFiles.java index 14afcdf24f..77d307af1b 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFiles.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestHomeFiles.java @@ -57,7 +57,6 @@ import com.dremio.common.utils.SqlUtils; import com.dremio.dac.api.Dataset; import com.dremio.dac.explore.model.FileFormatUI; -import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.homefiles.HomeFileConf; import com.dremio.dac.homefiles.HomeFileSystemStoragePlugin; import com.dremio.dac.homefiles.HomeFileTool; @@ -91,7 +90,6 @@ import com.dremio.service.namespace.dataset.proto.DatasetType; import com.dremio.service.namespace.file.FileFormat; import com.dremio.service.namespace.file.proto.ExcelFileConfig; -import com.dremio.service.namespace.file.proto.FileConfig; import com.dremio.service.namespace.file.proto.FileType; import com.dremio.service.namespace.file.proto.JsonFileConfig; import com.dremio.service.namespace.file.proto.TextFileConfig; @@ -120,7 +118,7 @@ public class TestHomeFiles extends BaseTestServer { public void setup() throws Exception { clearAllDataExceptUser(); getPopulator().populateTestUsers(); - this.fs = l(HomeFileTool.class).getConf().getFilesystemAndCreatePaths(getCurrentDremioDaemon().getDACConfig().thisNode); + this.fs = l(HomeFileTool.class).getConfForBackup().getFilesystemAndCreatePaths(getCurrentDremioDaemon().getDACConfig().thisNode); allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); } @@ -250,26 +248,6 @@ public void testHome() throws Exception { assertEquals(2, truncData.getColumns().size()); } - doc("creating dataset from home file"); - InitialPreviewResponse response = expectSuccess(getBuilder(getAPIv2().path( - "/home/" + HOME_NAME + "/new_untitled_from_file/file1")).buildPost(Entity.json("")), InitialPreviewResponse.class); - assertEquals(2, response.getData().getColumns().size()); - - doc("renaming file"); - File file3 = expectSuccess(getBuilder(getAPIv2().path("home/" + HOME_NAME + "/file_rename/file1").queryParam("renameTo", "file1r")) - .buildPost(Entity.json(new FileConfig())), File.class); - FileFormat file3Format = file3.getFileFormat().getFileFormat(); - - assertEquals("file1r", file3Format.getName()); - assertEquals(asList(HOME_NAME, "file1r"), file3Format.getFullPath()); - assertEquals(FileType.JSON, file3Format.getFileType()); - - expectSuccess(getBuilder(getAPIv2().path("home/" + HOME_NAME + "/file/file1r")).buildGet(), File.class); - expectError(CLIENT_ERROR, getBuilder(getAPIv2().path("home/" + HOME_NAME + "/file/file1")).buildGet(), NotFoundErrorMessage.class); - - Home home1 = expectSuccess(getBuilder(getAPIv2().path("home/" + HOME_NAME)).buildGet(), Home.class); - assertEquals(1, home1.getContents().getFiles().size()); - doc("creating a folder"); String folderPath = "home/" + HOME_NAME + "/folder/"; @@ -384,11 +362,6 @@ private void testUploadExcelFile(final boolean isXLS) throws Exception { JobDataFragment data = expectSuccess(getBuilder(getAPIv2().path("/home/" + HOME_NAME + "/file_preview/excel")).buildPost(Entity.json(file2Format)), JobDataFragment.class); assertEquals(5, data.getReturnedRowCount()); assertEquals(5, data.getColumns().size()); - - doc("creating dataset from excel file"); - InitialPreviewResponse previewResponse = expectSuccess(getBuilder(getAPIv2().path( - "/home/" + HOME_NAME + 
"/new_untitled_from_file/excel")).buildPost(Entity.json("")), InitialPreviewResponse.class); - assertEquals(5, previewResponse.getData().getColumns().size()); } public static void uploadFile(HomeFileConf homeFileStore, Path inputFile, String name, String extension ,FileFormat fileFormat, FolderPath parent) throws Exception { diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestMasterDown.java b/dac/backend/src/test/java/com/dremio/dac/server/TestMasterDown.java index 04e0f51926..7b2597b6ea 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestMasterDown.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestMasterDown.java @@ -35,6 +35,7 @@ import org.junit.Assume; import org.junit.BeforeClass; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -271,6 +272,7 @@ private void sanityCheck() throws Exception { expectSuccess((currentApiV2.path("space/DG/folder/").request(JSON).header(authHeader, authToken)).buildPost(Entity.json("{\"name\": \""+folderName+"\"}")), Folder.class); } + @Ignore("DX-61484") @Test public void testMasterDown() throws Exception { final long timeoutMs = 5_000; // Timeout when checking if a node reached a given status @@ -326,7 +328,7 @@ public void run() { return factory.get(new ReflectionContext(DEFAULT_USER_NAME, true)); }); - CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), sabotContext, mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class)); + CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class), sabotContext.getUserService()); SampleDataPopulator populator = new SampleDataPopulator( sabotContext, new SourceService( diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestMultiMaster.java b/dac/backend/src/test/java/com/dremio/dac/server/TestMultiMaster.java index e6be10e8c8..18bd5f2f37 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestMultiMaster.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestMultiMaster.java @@ -40,6 +40,7 @@ import org.junit.After; import org.junit.Assume; import org.junit.Before; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -336,6 +337,7 @@ public void onFailure(Throwable t) { return promise; } + @Ignore("DX-61484") @Test public void testMasterFailover() throws Exception { currentDremioDaemon.startPreServices(); @@ -425,7 +427,7 @@ public DACDaemon call() { }); DACSecurityContext dacSecurityContext = new DACSecurityContext(new UserName(SystemUser.SYSTEM_USERNAME), SystemUser.SYSTEM_USER, null); - CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), sabotContext, mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class)); + CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class), sabotContext.getUserService()); SampleDataPopulator populator = new SampleDataPopulator( sabotContext, new SourceService( @@ -469,6 +471,7 @@ public DACDaemon call() { } } + @Ignore("DX-61484") @Test public void testMasterFailoverOnZkSessionLost() throws Exception { currentDremioDaemon.startPreServices(); @@ -557,7 +560,7 @@ 
public DACDaemon call() { return factory.get(new ReflectionContext(DEFAULT_USER_NAME, true)); }); - CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), sabotContext, mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class)); + CollaborationHelper collaborationService = new CollaborationHelper(mp.lookup(LegacyKVStoreProvider.class), mp.lookup(NamespaceService.class), dacSecurityContext, mp.lookup(SearchService.class), sabotContext.getUserService()); SampleDataPopulator populator = new SampleDataPopulator( sabotContext, new SourceService( diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestServer.java b/dac/backend/src/test/java/com/dremio/dac/server/TestServer.java index fe16e2fa44..7a5b1a1c80 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestServer.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestServer.java @@ -42,6 +42,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation; @@ -64,7 +65,6 @@ import com.dremio.dac.explore.DatasetsResource; import com.dremio.dac.explore.model.Column; import com.dremio.dac.explore.model.CreateFromSQL; -import com.dremio.dac.explore.model.DatasetDetails; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetSummary; import com.dremio.dac.explore.model.DatasetUI; @@ -83,7 +83,6 @@ import com.dremio.dac.model.spaces.Space; import com.dremio.dac.model.spaces.SpaceName; import com.dremio.dac.model.spaces.SpacePath; -import com.dremio.dac.model.spaces.Spaces; import com.dremio.dac.model.usergroup.UserLogin; import com.dremio.dac.model.usergroup.UserLoginSession; import com.dremio.dac.proto.model.dataset.DataType; @@ -200,7 +199,7 @@ public void testInvalidSpace() throws Exception { @Test public void testValidSpace() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/AB")).buildPut(Entity.json(new Space(null, "AB", null, null, null, 0, null)))); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "AB", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); } @Test @@ -246,10 +245,8 @@ public void testSpaces() throws Exception { final Space space2 = expectSuccess(getBuilder(getAPIv2().path("space/space2")).buildGet(), Space.class); assertEquals(config2.getName(), space2.getName()); - final Space config3 = new Space(null, "space3", "dremio eng", null, null, 0, null); - final Space space3 = expectSuccess(getBuilder(getAPIv2().path("space/space3")).buildPut(Entity.json(config3)), Space.class); + final com.dremio.dac.api.Space space3 = expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space3", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); assertEquals("space3", space3.getName()); - assertEquals(config3.getDescription(), space3.getDescription()); final UserService userService = l(UserService.class); User dt = SimpleUser.newBuilder().setUserName("user").setCreatedAt(System.currentTimeMillis()).
@@ -257,21 +254,13 @@ public void testSpaces() throws Exception { dt = userService.createUser(dt, "user1234"); UserLoginSession uls = expectSuccess(getAPIv2().path("/login").request(JSON).buildPost(Entity.json(new UserLogin("user", "user1234"))), UserLoginSession.class); - final Space config4 = new Space(null, "space4", "different user space", null, null, 0, null); - final Space space4 = expectSuccess(getBuilder(getAPIv2().path("space/space4")).buildPut(Entity.json(config4)), Space.class); + final com.dremio.dac.api.Space space4 = expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space4", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); assertEquals("space4", space4.getName()); - assertEquals(config4.getDescription(), space4.getDescription()); - final Space config5 = new Space(null, "test1", "different space name", null, null, 0, null); - @SuppressWarnings("unused") - final Space space5 = expectSuccess(getBuilder(getAPIv2().path("space/test1")).buildPut(Entity.json(config5)), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "test1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); - final Space config6 = new Space(null, "test2", "different space name", null, null, 0, null); - @SuppressWarnings("unused") - final Space space6 = expectSuccess(getBuilder(getAPIv2().path("space/test2")).buildPut(Entity.json(config6)), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "test2", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); - final Spaces spaces1 = expectSuccess(getBuilder(getAPIv2().path("spaces")).buildGet(), Spaces.class); - assertEquals(spaces1.toString(), 6, spaces1.getSpaces().size()); final JobFilterItems spaces2 = expectSuccess(getBuilder(getAPIv2().path("jobs/filters/spaces").queryParam("filter", "test")).buildGet(), JobFilterItems.class); assertEquals(spaces2.toString(), 2, spaces2.getItems().size()); @@ -297,7 +286,9 @@ public void testDataGrid() throws Exception { ).buildGet(), InitialPreviewResponse.class); - JobDataFragment dataA = previewResponseA.getData(); + waitForJobComplete(previewResponseA.getJobId().getId()); + final JobDataFragment dataA = getData(previewResponseA.getPaginationUrl(), 0, INITIAL_RESULTSET_SIZE); + assertEquals(10, dataA.getReturnedRowCount()); assertEquals(4, dataA.getColumns().size()); assertEquals(asList( @@ -318,7 +309,8 @@ public void testDataGrid() throws Exception { ).buildGet(), InitialPreviewResponse.class); - final JobDataFragment dataB = previewResponseB.getData(); + waitForJobComplete(previewResponseB.getJobId().getId()); + final JobDataFragment dataB = getData(previewResponseB.getPaginationUrl(), 0, INITIAL_RESULTSET_SIZE); assertEquals(INITIAL_RESULTSET_SIZE, dataB.getReturnedRowCount()); assertEquals(2, dataB.getColumns().size()); @@ -341,7 +333,7 @@ public void testDataGrid() throws Exception { @Test public void testFolderOCC() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildPut(Entity.json(new Space(null, "s1", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "s1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); String spaceResource = "space/s1/folder/f1"; doc("create folder 1"); @@ -371,9 +363,10 @@ public void testFolderOCC() throws Exception { public void testFolder() throws
Exception { // create spaces. doc("create spaces"); - expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildPut(Entity.json(new Space(null, "s1", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/s2")).buildPut(Entity.json(new Space(null, "s2", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/s3")).buildPut(Entity.json(new Space(null, "s3", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "s1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "s2", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "s3", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + doc("create folders"); expectSuccess(getBuilder(getAPIv2().path("space/s1/folder/")).buildPost(Entity.json("{\"name\": \"f1\"}")), Folder.class); @@ -470,26 +463,11 @@ public void testFolder() throws Exception { assertEquals(1, lists3.getFolders().size()); assertNull(expectSuccess(getBuilder(getAPIv2().path("space/s1").queryParam("includeContents", false)).buildGet(), Space.class).getContents()); - - /* Renames are disabled in beta1 - expectSuccess(getBuilder(getAPIv2().path("space/s1/rename_folder/f1").queryParam("renameTo", "f1r")).buildPost(Entity.json(new FolderConfig())), Folder.class); - lists1f1 = expectSuccess(getBuilder(getAPIv2().path("space/s1/folder/f1r")).buildGet(), Folder.class).getContents(); - assertEquals(2, lists1f1.getDatasets().size()); - assertEquals(2, lists1f1.getFolders().size()); - expectError(FamilyExpectation.CLIENT_ERROR, getBuilder(getAPIv2().path("space/s1/folder/f1")).buildGet(), NotFoundErrorMessage.class); - - expectSuccess(getBuilder(getAPIv2().path("space/s1/rename").queryParam("renameTo", "s1r")).buildPost(Entity.json(new SpaceConfig())), Space.class); - lists1 = expectSuccess(getBuilder(getAPIv2().path("space/s1r")).buildGet(), Space.class).getContents(); - assertEquals(1, lists1.getDatasets().size()); - assertEquals(1, lists1.getFolders().size()); - - expectError(FamilyExpectation.CLIENT_ERROR, getBuilder(getAPIv2().path("space/s1")).buildGet(), NotFoundErrorMessage.class); - */ } @Test public void testFolderParentNotFound() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/s1")).buildPut(Entity.json(new Space(null, "s1", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "s1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); expectSuccess(getBuilder(getAPIv2().path("space/s1/folder/")).buildPost(Entity.json("{\"name\": \"f1\"}")), Folder.class); expectStatus(Status.BAD_REQUEST, getBuilder(getAPIv2().path("space/s1/folder/wrongfolder/")).buildPost(Entity.json("{\"name\": \"f1\"}"))); @@ -586,10 +564,6 @@ public void testDatasetJobCount() throws Exception { NamespaceTree nst = home.getContents(); assertEquals(1, nst.getFolders().size()); - doc("list all spaces"); - final Spaces spaces = expectSuccess(getBuilder(getAPIv2().path("spaces")).buildGet(), Spaces.class); - assertEquals(1, spaces.getSpaces().size()); - doc("get space"); final Space space1 = expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildGet(), Space.class); assertEquals(2,
space1.getDatasetCount()); @@ -612,6 +586,13 @@ public void testDatasetSummary() throws Exception { assertEquals(0, (int)summary.getJobCount()); assertEquals(3, summary.getFields().size()); assertEquals(Arrays.asList("tag1", "tag2"), summary.getTags()); + assertEquals(summary.getEntityId(), UUID.fromString(summary.getEntityId()).toString()); + assertFalse(summary.getHasReflection()); + assertEquals("dremio", summary.getOwnerName()); + assertEquals("dremio@dremio.test", summary.getOwnerEmail()); + assertEquals("dremio", summary.getLastModifyingUserName()); + assertEquals("dremio@dremio.test", summary.getLastModifyingUserEmail()); + assertTrue(summary.getCreatedAt() <= summary.getLastModified()); doc("get dataset summary for virtual dataset DG.dsg4 with empty tags"); summary = expectSuccess(getBuilder(getAPIv2().path("/datasets/summary/DG/dsg4")).buildGet(), DatasetSummary.class); assertEquals(new ArrayList<>(), summary.getTags()); @@ -622,6 +603,13 @@ public void testDatasetSummary() throws Exception { assertEquals(0, (int) summary.getJobCount()); assertEquals(3, summary.getFields().size()); assertEquals(Arrays.asList("tag3", "tag4"), summary.getTags()); + assertEquals(summary.getEntityId(), UUID.fromString(summary.getEntityId()).toString()); + assertFalse(summary.getHasReflection()); + assertEquals("dremio", summary.getOwnerName()); + assertEquals("dremio@dremio.test", summary.getOwnerEmail()); + assertEquals("dremio", summary.getLastModifyingUserName()); + assertEquals("dremio@dremio.test", summary.getLastModifyingUserEmail()); + assertTrue(summary.getCreatedAt() <= summary.getLastModified()); doc("get dataset summary for physical dataset with empty tags"); summary = expectSuccess(getBuilder(getAPIv2().path("/datasets/summary/LocalFS2/dac-sample2.json")).buildGet(), DatasetSummary.class); assertEquals(new ArrayList<>(), summary.getTags()); @@ -638,6 +626,13 @@ public void testDatasetSummaryWithReferences() throws Exception { assertEquals(6, (int) summary.getDescendants()); assertEquals(0, (int)summary.getJobCount()); assertEquals(3, summary.getFields().size()); + assertEquals(summary.getEntityId(), UUID.fromString(summary.getEntityId()).toString()); + assertFalse(summary.getHasReflection()); + assertEquals("dremio", summary.getOwnerName()); + assertEquals("dremio@dremio.test", summary.getOwnerEmail()); + assertEquals("dremio", summary.getLastModifyingUserName()); + assertEquals("dremio@dremio.test", summary.getLastModifyingUserEmail()); + assertTrue(summary.getCreatedAt() <= summary.getLastModified()); references = new HashMap<>(); references.put("DG", new VersionContextReq(VersionContextReq.VersionContextType.TAG, "tag")); @@ -646,6 +641,13 @@ public void testDatasetSummaryWithReferences() throws Exception { assertEquals(6, (int) summary.getDescendants()); assertEquals(0, (int)summary.getJobCount()); assertEquals(3, summary.getFields().size()); + assertEquals(summary.getEntityId(), UUID.fromString(summary.getEntityId()).toString()); + assertFalse(summary.getHasReflection()); + assertEquals("dremio", summary.getOwnerName()); + assertEquals("dremio@dremio.test", summary.getOwnerEmail()); + assertEquals("dremio", summary.getLastModifyingUserName()); + assertEquals("dremio@dremio.test", summary.getLastModifyingUserEmail()); + assertTrue(summary.getCreatedAt() <= summary.getLastModified()); references = new HashMap<>(); references.put("DG", new VersionContextReq(VersionContextReq.VersionContextType.COMMIT, "d0628f078890fec234b98b873f9e1f3cd140988a")); @@ -654,22 +656,19 @@ public void 
testDatasetSummaryWithReferences() throws Exception { assertEquals(6, (int) summary.getDescendants()); assertEquals(0, (int)summary.getJobCount()); assertEquals(3, summary.getFields().size()); + assertEquals(summary.getEntityId(), UUID.fromString(summary.getEntityId()).toString()); + assertFalse(summary.getHasReflection()); + assertEquals("dremio", summary.getOwnerName()); + assertEquals("dremio@dremio.test", summary.getOwnerEmail()); + assertEquals("dremio", summary.getLastModifyingUserName()); + assertEquals("dremio@dremio.test", summary.getLastModifyingUserEmail()); + assertTrue(summary.getCreatedAt() <= summary.getLastModified()); assertThat(summary.getReferences()).usingRecursiveComparison().isEqualTo(references); webTarget = getAPIv2().path("/datasets/summary/DG/dsg3").queryParam("refType", "INVALID").queryParam("refValue", "invalid"); expectStatus(Status.BAD_REQUEST, getBuilder(webTarget).buildGet()); } - @Test - public void testDatasetDetails() throws Exception { - populateInitialData(); - doc("get dataset summary for virtual dataset DG.dsg3"); - DatasetDetails details = expectSuccess(getBuilder(getAPIv2().path("/datasets/context/space/DG/dsg3")).buildGet(), DatasetDetails.class); - assertEquals(6, (int) details.getDescendants()); - assertEquals(0, (int) details.getJobCount()); - doc("get dataset summary for physical dataset"); - } - @Test @SuppressWarnings("unchecked") public void testDatasetParents() throws Exception { diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestServerExplore.java b/dac/backend/src/test/java/com/dremio/dac/server/TestServerExplore.java index 59bc8116dd..51d0853896 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestServerExplore.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestServerExplore.java @@ -189,9 +189,7 @@ import com.dremio.exec.store.dfs.NASConf; import com.dremio.options.OptionManager; import com.dremio.options.OptionValue; -import com.dremio.service.jobs.HybridJobsService; import com.dremio.service.jobs.JobRequest; -import com.dremio.service.jobs.JobsService; import com.dremio.service.jobs.SqlQuery; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; @@ -285,8 +283,11 @@ public void testGetDataset() throws Exception { assertEquals(dataString, INITIAL_RESULTSET_SIZE, data.getReturnedRowCount()); // Preview the data in initial dataset - InitialPreviewResponse previewResponse = getPreview(dataset); - data = previewResponse.getData(); + final InitialPreviewResponse previewResponse = getPreview(dataset); + waitForJobComplete(previewResponse.getJobId().getId()); + + data = getData(previewResponse.getPaginationUrl(), 0, INITIAL_RESULTSET_SIZE); + dataString = JSONUtil.toString(data); assertEquals(dataString, 7, data.getColumns().size()); assertEquals(dataString, "s_suppkey", data.getColumns().get(0).getName()); @@ -309,10 +310,8 @@ public void testPagination() throws Exception { createDatasetFromParentAndSave(datasetPath, "cp.\"json/mixed_example.json\""); DatasetUI dataset = getDataset(datasetPath); - InitialPreviewResponse previewResponse = getPreview(dataset); - - // Initial response contains limited records - assertEquals(INITIAL_RESULTSET_SIZE, previewResponse.getData().getReturnedRowCount()); + final InitialPreviewResponse previewResponse = getPreview(dataset); + waitForJobComplete(previewResponse.getJobId().getId()); JobDataFragment data1 = getData(previewResponse.getPaginationUrl(), 0, 200); assertEquals(105, data1.getReturnedRowCount()); @@ -366,6 
+365,7 @@ public void previewDataForPhysicalDataset() throws Exception { sourceService.registerSourceWithRuntime(source); InitialDataPreviewResponse resp = getPreview(new DatasetPath(asList("testNAS", "users.json"))); + assertEquals(3, resp.getData().getReturnedRowCount()); assertEquals(2, resp.getData().getColumns().size()); @@ -1199,7 +1199,7 @@ private JobDataFragment testConvert(String expected, FieldTransformationBase t, public void testSaveDataset() throws Exception { setSpace(); - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); DatasetPath datasetPath = new DatasetPath("spacefoo.folderbar.folderbaz.datasetbuzz"); doc("creating dataset"); DatasetUI dataset = createDatasetFromParentAndSave(datasetPath, "cp.\"tpch/supplier.parquet\""); @@ -1267,7 +1267,7 @@ public void testSaveDataset() throws Exception { public void testSaveDatasetWrongFolder() throws Exception { setSpace(); - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); DatasetPath datasetPath = new DatasetPath("spacefoo.folderbar.folderblahz.datasetbuzz"); InitialPreviewResponse preview = createDatasetFromParent("cp.\"tpch/supplier.parquet\""); @@ -1280,9 +1280,8 @@ public void testSaveDatasetWrongFolder() throws Exception { } @Test - public void testVirtualDatasetWithNotNullFields() throws Exception { - final HybridJobsService jobsService = (HybridJobsService) l(JobsService.class); - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); + public void testVirtualDatasetWithNotNullFields() { + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); final String pathName = "space1.v1"; final DatasetPath numbersJsonPath = new DatasetPath(pathName); DatasetUI numbersJsonVD = createDatasetFromSQLAndSave(numbersJsonPath, @@ -1293,8 +1292,7 @@ public void testVirtualDatasetWithNotNullFields() throws Exception { @Test public void testVirtualDatasetWithTimestampDiff() throws Exception { - final HybridJobsService jobsService = (HybridJobsService) l(JobsService.class); - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); final String pathName = "space1.v1"; final DatasetPath datetimePath = new DatasetPath(pathName); DatasetUI dateTimeVD = createDatasetFromSQLAndSave(datetimePath, @@ -1304,9 +1302,8 @@ public void testVirtualDatasetWithTimestampDiff() throws Exception { } @Test - public void testVirtualDatasetWithChar() throws Exception { - final HybridJobsService jobsService = (HybridJobsService) l(JobsService.class); - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null,"space1", null,
null, null, 0, null))), Space.class); + public void testVirtualDatasetWithChar() { + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); final String pathName = "space1.v1"; final DatasetPath numbersJsonPath = new DatasetPath(pathName); DatasetUI numbersJsonVD = createDatasetFromSQLAndSave(numbersJsonPath, @@ -1320,8 +1317,8 @@ public void testVirtualDatasetWithChar() { @Test public void testReapplyDataset() { - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/space2")).buildPut(Entity.json(new Space(null, "space2", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space2", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); DatasetPath d1Path = new DatasetPath("space1.ds1"); DatasetPath d2Path = new DatasetPath("space1.ds2"); @@ -1359,7 +1356,7 @@ public void testReapplyDataset() { @Test public void testReapplyAndSave(){ - expectSuccess(getBuilder(getAPIv2().path("space/reapplyAndSave")).buildPut(Entity.json(new Space(null, "reapplyAndSave", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "reapplyAndSave", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); DatasetPath d1Path = new DatasetPath("reapplyAndSave.ds1"); createDatasetFromSQLAndSave(d1Path, "select s_name, s_phone from cp.\"tpch/supplier.parquet\"", asList("cp")); final DatasetUI d2 = createDatasetFromParent("reapplyAndSave.ds1").getDataset(); @@ -1375,7 +1372,7 @@ public void testReapplyAndSave(){ @Test public void testReapplyAndSaveWrongFolder(){ - expectSuccess(getBuilder(getAPIv2().path("space/reapplyAndSave")).buildPut(Entity.json(new Space(null, "reapplyAndSave", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "reapplyAndSave", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); DatasetPath d1Path = new DatasetPath("reapplyAndSave.ds1"); createDatasetFromSQLAndSave(d1Path, "select s_name, s_phone from cp.\"tpch/supplier.parquet\"", asList("cp")); final DatasetUI d2 = createDatasetFromParent("reapplyAndSave.ds1").getDataset(); @@ -1387,8 +1384,8 @@ public void testReapplyAndSaveWrongFolder(){ @Test public void testRenameDataset() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/space2")).buildPut(Entity.json(new Space(null, "space2", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space2", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); doc("creating dataset"); DatasetPath datasetPath = new
DatasetPath("space1.ds1"); @@ -1548,25 +1545,12 @@ public void testGroupByCountStar() throws Exception { @Test public void testDatasets() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/space2")).buildPut(Entity.json(new Space(null, "space2", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/space3")).buildPut(Entity.json(new Space(null, "space3", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space2", null, null, null))), new GenericType() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space3", null, null, null))), new GenericType() {}); expectError(CLIENT_ERROR, getDatasetInvocation(new DatasetPath("space.myspace")), NotFoundErrorMessage.class); - /** - * TODO: This doesn't seem like valid. Creating datasets directly in namespace - // create few datasets - VirtualDatasetUI vds = newDataSetFromParent(new DatasetPath("space1.ds12"), "cp.\"tpch/supplier.parquet\""); - namespaceService.addOrUpdateDataset(new DatasetPath("space1.ds1").toNamespaceKey(), - toVirtualDatasetVersion(ProtostuffUtil.copy(vds).setName("ds1").setLegacyTag(DatasetVersion.newVersion())).getDataset()); - namespaceService.addOrUpdateDataset(new DatasetPath("space1.ds2").toNamespaceKey(), - toVirtualDatasetVersion(ProtostuffUtil.copy(vds).setName("ds2").setLegacyTag(DatasetVersion.newVersion())).getDataset()); - - getDatasetService().deleteDataset(new DatasetPath("space1.ds1"), 0); - getDatasetService().deleteDataset(new DatasetPath("space1.ds2"), 0); - */ - createDatasetFromSQLAndSave(new DatasetPath("space1.ds2"), "select s.s_name from cp.\"tpch/supplier.parquet\" s", asList("cp")); createDatasetFromSQLAndSave(new DatasetPath("space2.ds1"), @@ -1590,7 +1574,7 @@ public void testDatasets() throws Exception { @Test public void testCreateDatasets() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/spaceCreateDataset")).buildPut(Entity.json(new Space(null, "spaceCreateDataset", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "spaceCreateDataset", null, null, null))), new GenericType() {}); DatasetPath datasetPath = new DatasetPath("spaceCreateDataset.ds1"); DatasetUI ds1 = createDatasetFromSQLAndSave(datasetPath, "select s.s_name from cp.\"tpch/supplier.parquet\" s", asList("cp")); @@ -1605,7 +1589,7 @@ public void testCreateDatasets() throws Exception { @Test public void canReapplyIsCorrect(){ - expectSuccess(getBuilder(getAPIv2().path("space/canReapplyDataset")).buildPut(Entity.json(new Space(null, "canReapplyDataset", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "canReapplyDataset", null, null, null))), new GenericType() {}); InitialPreviewResponse createFromPhysical = createDatasetFromParent("cp.\"tpch/supplier.parquet\""); List displayFullPath = createFromPhysical.getDataset().getDisplayFullPath(); 
// datasets directly derived from other datasets should have their display path set to their parent @@ -1676,7 +1660,8 @@ public void testRecommendedJoins() throws Exception { DatasetUI ds = getDataset(getDatasetPath(dataset)); doc("Get data for join_reco so that there's a job for it"); - getPreview(ds); + final InitialPreviewResponse previewResponse = getPreview(ds); + waitForJobComplete(previewResponse.getJobId().getId()); doc("Get dataset join recommendations join_reco"); @@ -1685,7 +1670,8 @@ public void testRecommendedJoins() throws Exception { assertEquals(0, recommendations.getRecommendations().size()); DatasetUI djointest = createDatasetFromParentAndSave(new DatasetPath(getRoot(dataset) + ".djointest"), getDatasetPath(dataset).toString()); - getPreview(djointest); + final InitialPreviewResponse previewResponseA = getPreview(djointest); + waitForJobComplete(previewResponseA.getJobId().getId()); DatasetUI sibling = getDataset(getDatasetPath(djointest)); @@ -2100,8 +2086,9 @@ public void testCellTruncation() throws Exception { DatasetUI dataset = createDatasetFromParentAndSave("cellTrunc", "cp.\"json/cell_truncation.json\""); - InitialPreviewResponse previewResponse = getPreview(dataset); - DataPOJO data = (DataPOJO) previewResponse.getData(); + final InitialPreviewResponse previewResponse = getPreview(dataset); + waitForJobComplete(previewResponse.getJobId().getId()); + final DataPOJO data = (DataPOJO) getData(previewResponse.getPaginationUrl(), 0, INITIAL_RESULTSET_SIZE); List row0Cells = data.getRows().get(0).getRow(); fetchAndVerifyFullCellValue(row0Cells.get(1).getUrl(), @@ -2226,8 +2213,8 @@ public void previewTableInSubschemaExposedAsTopLevelSchema() throws Exception { @Test public void testReapplyForCopiedRenamedMovedDataset() throws Exception { - expectSuccess(getBuilder(getAPIv2().path("space/space1")).buildPut(Entity.json(new Space(null, "space1", null, null, null, 0, null))), Space.class); - expectSuccess(getBuilder(getAPIv2().path("space/space2")).buildPut(Entity.json(new Space(null, "space2", null, null, null, 0, null))), Space.class); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space1", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "space2", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); //create dataset DatasetPath datasetPath = new DatasetPath("space1.ds1"); diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestServerJobs.java b/dac/backend/src/test/java/com/dremio/dac/server/TestServerJobs.java index e647924fdd..d8649d0344 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestServerJobs.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestServerJobs.java @@ -34,6 +34,7 @@ import com.dremio.dac.daemon.TestSpacesStoragePlugin; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.model.job.JobDetailsUI; import com.dremio.dac.model.job.JobListItem; import com.dremio.dac.model.job.JobUI; @@ -272,7 +273,9 @@ public void testJobsPerUser() throws Exception { JobsService jobsService = l(JobsService.class); TestSpacesStoragePlugin.setup(); // run at least one job - getPreview(getDataset(new DatasetPath("testB.dsB1"))); // This triggers a job + final InitialPreviewResponse previewResponse = getPreview(getDataset(new
DatasetPath("testB.dsB1"))); // This triggers a job + waitForJobComplete(previewResponse.getJobId().getId()); + doc("getting list of all jobs"); JobsUI allJobs = expectSuccess(getBuilder(getAPIv2().path("jobs")).buildGet(), JobsUI.class); int dsB1Jobs = 0; @@ -296,7 +299,9 @@ public void testJobsPerUser() throws Exception { dsB1Jobs, jobsUI.getJobs().size()); assertEquals(dsB1Jobs, jobsUI.getJobs().size()); - getPreview(getDataset(new DatasetPath("testB.dsB1"))); // this triggers a job + final InitialPreviewResponse previewResponseA = getPreview(getDataset(new DatasetPath("testB.dsB1"))); // this triggers a job + waitForJobComplete(previewResponseA.getJobId().getId()); + jobsUI = expectSuccess(getBuilder(getAPIv2().path("jobs").queryParam("filter", "ds==testB.dsB1")).buildGet(), JobsUI.class); // getting the data 2x on the same version does not create a new job assertEquals(dsB1Jobs, jobsUI.getJobs().size()); @@ -392,7 +397,9 @@ public void testJobs() throws Exception { JobsService jobsService = l(JobsService.class); TestSpacesStoragePlugin.setup(); // run at least one job - getPreview(getDataset(new DatasetPath("testB.dsB1"))); // This triggers a job + final InitialPreviewResponse previewResponse = getPreview(getDataset(new DatasetPath("testB.dsB1"))); // This triggers a job + waitForJobComplete(previewResponse.getJobId().getId()); + doc("getting list of all jobs"); JobsUI allJobs = expectSuccess(getBuilder(getAPIv2().path("jobs")).buildGet(), JobsUI.class); int dsB1Jobs = 0; @@ -416,7 +423,9 @@ public void testJobs() throws Exception { dsB1Jobs, jobsUI.getJobs().size()); assertEquals(dsB1Jobs, jobsUI.getJobs().size()); - getPreview(getDataset(new DatasetPath("testB.dsB1"))); // this triggers a job + final InitialPreviewResponse previewResponseA = getPreview(getDataset(new DatasetPath("testB.dsB1"))); // this triggers a job + waitForJobComplete(previewResponseA.getJobId().getId()); + jobsUI = expectSuccess(getBuilder(getAPIv2().path("jobs").queryParam("filter", "ds==testB.dsB1")).buildGet(), JobsUI.class); // getting the data 2x on the same version does not create a new job assertEquals(dsB1Jobs, jobsUI.getJobs().size()); @@ -523,7 +532,9 @@ private DatasetUI setupIteratorTests(String datasetName) throws Exception{ DatasetUI dataset = getDataset(new DatasetPath(datasetName)); // run dataset twice. We do a run and a preview since subsequent previews won't actually rerun... 
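Every preview in these jobs tests now captures the `InitialPreviewResponse` and explicitly waits for the job it spawns, because previews run asynchronously and the assertions that follow count completed jobs. A sketch of the idiom repeated above and below, assuming the `waitForJobComplete(String jobId)` helper from the test base; the `getPreviewAndWait` wrapper itself is hypothetical:

```java
// Hypothetical wrapper over the preview-then-wait sequence each call site repeats.
private InitialPreviewResponse getPreviewAndWait(DatasetUI dataset) throws Exception {
  final InitialPreviewResponse response = getPreview(dataset);
  // Block until the backing job finishes so the job listings and pagination
  // assertions that follow observe a completed job rather than a racing one.
  waitForJobComplete(response.getJobId().getId());
  return response;
}
```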
- getPreview(dataset); + final InitialPreviewResponse previewResponse = getPreview(dataset); + waitForJobComplete(previewResponse.getJobId().getId()); + submitJobAndWaitUntilCompletion( JobRequest.newBuilder() .setSqlQuery(getQueryFromConfig(dataset)) diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestServerWithInitialData.java b/dac/backend/src/test/java/com/dremio/dac/server/TestServerWithInitialData.java index 026c8ba2e2..8555a53d07 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestServerWithInitialData.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestServerWithInitialData.java @@ -31,9 +31,7 @@ import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.DatasetUI; -import com.dremio.dac.explore.model.DatasetVersionResourcePath; import com.dremio.dac.explore.model.ExtractPreviewReq; -import com.dremio.dac.explore.model.History; import com.dremio.dac.explore.model.HistoryItem; import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.explore.model.TransformBase; @@ -43,7 +41,6 @@ import com.dremio.dac.model.sources.SourceUI; import com.dremio.dac.model.sources.UIMetadataPolicy; import com.dremio.dac.model.spaces.Space; -import com.dremio.dac.model.spaces.Spaces; import com.dremio.dac.proto.model.dataset.ConvertCase; import com.dremio.dac.proto.model.dataset.ExtractCard; import com.dremio.dac.proto.model.dataset.IndexType; @@ -64,7 +61,6 @@ import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.NASConf; import com.dremio.service.job.proto.JobState; -import com.google.common.collect.Lists; /** * Server test with initial data @@ -96,14 +92,6 @@ public void testSortExistingDataset() throws Exception { initData(); doc("get spaces"); expectSuccess(getBuilder(getAPIv2().path("space/Sales-Sample")).buildGet(), Space.class); - final Spaces spaces = expectSuccess(getBuilder(getAPIv2().path("spaces")).buildGet(), Spaces.class); - boolean found = false; - for (Space space : spaces.getSpaces()) { - if (space.getName().equals("Sales-Sample")) { - found = true; - } - } - assertTrue("Sales-Sample found in " + spaces, found); final DatasetUI dsGet = getDataset(new DatasetPath("Sales-Sample.ds4")); @@ -309,134 +297,6 @@ public void testHistory() throws Exception { // Job is always in completed state. 
assertEquals(JSONUtil.toString(lastHistoryItem), JobState.COMPLETED, lastItemState); assertEquals("SQL Edited to: select * from \"Sales-Sample\".ds3", lastHistoryItem.getTransformDescription()); - - doc("get history"); - History history = expectSuccess( - getBuilder(getAPIv2().path(historyItems.get(0).getVersionedResourcePath() + "/history") - .queryParam("tipVersion", historyItems.get(1).getDatasetVersion())) - .buildGet(), History.class); - - assertEquals(history.toString(), 2, history.getItems().size()); - assertEquals("Expected current version is not correct", history.getCurrentDatasetVersion(), historyItems.get(0).getDatasetVersion()); - HistoryItem actual = history.getItems().get(history.getItems().size() - 1); - assertEquals(lastHistoryItem.getVersionedResourcePath(), actual.getVersionedResourcePath()); - assertEquals("SQL Edited to: select * from \"Sales-Sample\".ds3", actual.getTransformDescription()); - HistoryItem previous = history.getItems().get(history.getItems().size() - 2); - assertEquals(getDatasetVersionPath(dsGet), previous.getVersionedResourcePath()); - } - - - @Test - public void testHistoryRewrittenForUntitled() throws Exception { - - initData(); - DatasetUI dataset = - createDatasetFromSQL("Select * from \"Prod-Sample\".ds1 limit 1", asList("Prod-Sample")).getDataset(); - - doc("update SQL"); - InitialPreviewResponse transformResponse = transformAndValidate( - dataset, - new TransformUpdateSQL("select * from \"Sales-Sample\".ds3").setSqlContextList(asList("Prod-Sample"))); - - List historyItems = transformResponse.getHistory().getItems(); - assertEquals(historyItems.toString(), 2, historyItems.size()); - HistoryItem lastHistoryItem = historyItems.get(1); - assertEquals(getDatasetVersionPath(transformResponse.getDataset()), lastHistoryItem.getVersionedResourcePath()); - JobState lastItemState = lastHistoryItem.getState(); - - // Job is always in completed state. 
- assertEquals(JSONUtil.toString(lastHistoryItem), JobState.COMPLETED, lastItemState); - assertEquals("SQL Edited to: select * from \"Sales-Sample\".ds3", lastHistoryItem.getTransformDescription()); - - doc("get history"); - History history = expectSuccess( - getBuilder(getAPIv2().path(historyItems.get(0).getVersionedResourcePath() + - "/history").queryParam("tipVersion", historyItems.get(1).getDatasetVersion())) - .buildGet(), History.class); - - assertEquals(history.toString(), 2, history.getItems().size()); - assertEquals("Expected current version is not correct", history.getCurrentDatasetVersion(), historyItems.get(0).getDatasetVersion()); - HistoryItem actual = history.getItems().get(history.getItems().size() - 1); - assertEquals(lastHistoryItem.getVersionedResourcePath(), actual.getVersionedResourcePath()); - assertEquals("SQL Edited to: select * from \"Sales-Sample\".ds3", actual.getTransformDescription()); - HistoryItem previous = history.getItems().get(history.getItems().size() - 2); - assertEquals(getDatasetVersionPath(dataset), previous.getVersionedResourcePath()); - DatasetUI saved = saveAs( - transformResponse.getDataset(), new DatasetPath(Lists.newArrayList("Prod-Sample", "testHistoryRewrittenForUntitled")) - ).getDataset(); - - doc("get history after save"); - History historyAfterSave = expectSuccess( - getBuilder(getAPIv2().path(new DatasetVersionResourcePath(new DatasetPath(saved.getFullPath()), saved.getDatasetVersion()) + - "/history").queryParam("tipVersion", saved.getDatasetVersion())) - .buildGet(), History.class); - assertEquals(history.toString(), 2, history.getItems().size()); - assertEquals("Expected current version is not correct", historyItems.get(1).getDatasetVersion(), historyAfterSave.getItems().get(1).getDatasetVersion()); - assertEquals(new DatasetPath(saved.getFullPath()), historyAfterSave.getItems().get(1).getDataset()); - assertEquals("Expected current version is not correct", historyItems.get(0).getDatasetVersion(), historyAfterSave.getItems().get(0).getDatasetVersion()); - assertEquals(new DatasetPath(saved.getFullPath()), historyAfterSave.getItems().get(0).getDataset()); - - InitialPreviewResponse transformResponse2 = transformAndValidate( - saved, - new TransformUpdateSQL("select * from \"Sales-Sample\".ds3 limit 1").setSqlContextList(asList("Prod-Sample"))); - - doc("get history after save and further transform"); - History historyAfterSaveAndLaterTransform = expectSuccess( - getBuilder(getAPIv2().path(new DatasetVersionResourcePath(new DatasetPath(saved.getFullPath()), transformResponse2.getDataset().getDatasetVersion()) + - "/history").queryParam("tipVersion", transformResponse2.getDataset().getDatasetVersion())) - .buildGet(), History.class); - assertEquals(history.toString(), 3, historyAfterSaveAndLaterTransform.getItems().size()); - assertEquals(new DatasetPath(saved.getFullPath()), historyAfterSaveAndLaterTransform.getItems().get(2).getDataset()); - - - DatasetUI saved2 = saveAs( - transformResponse2.getDataset(), new DatasetPath(Lists.newArrayList("Prod-Sample", "testHistoryRewrittenForUntitled_2")) - ).getDataset(); - - History historyAfterSecondSave = expectSuccess( - getBuilder(getAPIv2().path(new DatasetVersionResourcePath(new DatasetPath(saved2.getFullPath()), saved2.getDatasetVersion()) + - "/history").queryParam("tipVersion", saved2.getDatasetVersion())) - .buildGet(), History.class); - assertEquals(history.toString(), 3, historyAfterSecondSave.getItems().size()); - assertEquals( - "Expected current version is not correct", - 
historyAfterSecondSave.getItems().get(2).getDatasetVersion(), - historyAfterSaveAndLaterTransform.getItems().get(2).getDatasetVersion()); - // TODO - to make this history item easily searchable, we need to remove dataset path from the key - // for now save-as is just duplicating the history item with two different dataset paths after - // using save-as on top of a transform sequence on top of a named dataset - assertEquals( - new DatasetPath(saved2.getFullPath()), - historyAfterSecondSave.getItems().get(2).getDataset()); - assertEquals( - new DatasetPath(saved2.getFullPath()), - historyAfterSecondSave.getItems().get(1).getDataset()); - assertEquals( - new DatasetPath(saved2.getFullPath()), - historyAfterSecondSave.getItems().get(0).getDataset()); - - // test the "review" API, used by the UI when refreshing the page or navigating to a UI link saved - // by a user - InitialPreviewResponse responseForReview = expectSuccess( - getBuilder(getAPIv2().path(new DatasetVersionResourcePath(new DatasetPath(saved.getFullPath()), saved.getDatasetVersion()) + - "/review").queryParam("jobId", transformResponse.getJobId().getId()).queryParam("tipVersion", saved2.getDatasetVersion())) - .buildGet(), InitialPreviewResponse.class); - History historyInPreviewRequest = responseForReview.getHistory(); - assertEquals(history.toString(), 3, historyInPreviewRequest.getItems().size()); - assertEquals( - "Expected current version is not correct", - historyInPreviewRequest.getItems().get(2).getDatasetVersion(), - historyAfterSaveAndLaterTransform.getItems().get(2).getDatasetVersion()); - assertEquals(saved.getDatasetVersion(), historyInPreviewRequest.getCurrentDatasetVersion()); - assertEquals( - new DatasetPath(saved.getFullPath()), - historyInPreviewRequest.getItems().get(2).getDataset()); - assertEquals( - new DatasetPath(saved.getFullPath()), - historyInPreviewRequest.getItems().get(1).getDataset()); - assertEquals( - new DatasetPath(saved.getFullPath()), - historyInPreviewRequest.getItems().get(0).getDataset()); } /** diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteDisabled.java b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteDisabled.java index cc9f26feab..2986dfd074 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteDisabled.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteDisabled.java @@ -28,7 +28,7 @@ public class TestSystemAutoPromoteDisabled extends TestSystemAutoPromotionBase { public static void init() throws Exception { // Set the system property before starting dremio so it gets picked up // by com.dremio.exec.store.DatasetRetrievalOptions.DEFAULT_AUTO_PROMOTE_OPTIONAL - System.setProperty("dremio.datasets.auto_promote", "false"); + PROPERTIES.set("dremio.datasets.auto_promote", "false"); TestSystemAutoPromotionBase.init(); } diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteEnabled.java b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteEnabled.java index b515a6c5b0..9766541d42 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteEnabled.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromoteEnabled.java @@ -28,7 +28,7 @@ public class TestSystemAutoPromoteEnabled extends TestSystemAutoPromotionBase { public static void init() throws Exception { // Set the system property before starting dremio so it gets picked up // by 
com.dremio.exec.store.DatasetRetrievalOptions.DEFAULT_AUTO_PROMOTE_OPTIONAL - System.setProperty("dremio.datasets.auto_promote", "true"); + PROPERTIES.set("dremio.datasets.auto_promote", "true"); TestSystemAutoPromotionBase.init(); } diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromotionBase.java b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromotionBase.java index ef6be92301..d5e64fb2d4 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromotionBase.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestSystemAutoPromotionBase.java @@ -23,6 +23,7 @@ import javax.ws.rs.core.GenericType; import org.junit.BeforeClass; +import org.junit.ClassRule; import com.dremio.common.util.TestTools; import com.dremio.dac.api.MetadataPolicy; @@ -32,12 +33,16 @@ import com.dremio.service.job.proto.QueryType; import com.dremio.service.jobs.JobRequest; import com.dremio.service.jobs.SqlQuery; +import com.dremio.test.TemporarySystemProperties; /** * Base test class */ public class TestSystemAutoPromotionBase extends BaseTestServer { + @ClassRule + public static final TemporarySystemProperties PROPERTIES = new TemporarySystemProperties(); + @BeforeClass public static void init() throws Exception { BaseTestServer.init(); diff --git a/dac/backend/src/test/java/com/dremio/dac/server/TestViewCreator.java b/dac/backend/src/test/java/com/dremio/dac/server/TestViewCreator.java index 728f97628d..133ba1a300 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/TestViewCreator.java +++ b/dac/backend/src/test/java/com/dremio/dac/server/TestViewCreator.java @@ -19,13 +19,13 @@ import static org.junit.Assert.assertTrue; import javax.ws.rs.client.Entity; +import javax.ws.rs.core.GenericType; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.dremio.dac.model.folder.Folder; -import com.dremio.dac.model.spaces.Space; import com.dremio.service.job.JobSummary; import com.dremio.service.job.JobSummaryRequest; import com.dremio.service.job.proto.JobId; @@ -49,7 +49,7 @@ public void setup() throws Exception { public void createQueryDrop() throws Exception { JobsService jobsService = l(JobsService.class); - expectSuccess(getBuilder(getAPIv2().path("space/mySpace")).buildPut(Entity.json(new Space(null, "mySpace", null, null, null, 0, null)))); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "mySpace", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); expectSuccess(getBuilder(getAPIv2().path("space/mySpace/folder/")).buildPost(Entity.json("{\"name\": \"myFolder\"}")), Folder.class); @@ -92,7 +92,7 @@ public void createQueryDrop() throws Exception { @Test public void createQueryDDLSql() { enableVersionedViews(); - expectSuccess(getBuilder(getAPIv2().path("space/mySpace")).buildPut(Entity.json(new Space(null, "mySpace", null, null, null, 0, null)))); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, "mySpace", null, null, null))), new GenericType<com.dremio.dac.api.Space>() {}); expectSuccess(getBuilder(getAPIv2().path("space/mySpace/folder/")).buildPost(Entity.json("{\"name\": \"myFolder\"}")), Folder.class); diff --git a/dac/backend/src/test/java/com/dremio/dac/server/test/TestJobResource.java b/dac/backend/src/test/java/com/dremio/dac/server/test/TestJobResource.java index 5799530945..adb04146b9 100644 --- a/dac/backend/src/test/java/com/dremio/dac/server/test/TestJobResource.java +++
b/dac/backend/src/test/java/com/dremio/dac/server/test/TestJobResource.java @@ -76,6 +76,7 @@ public TestJobResource( * @throws JobResourceNotFoundException * @throws JobNotFoundException */ + @Override @GET @Path("download") @Consumes(MediaType.APPLICATION_JSON) diff --git a/dac/backend/src/test/java/com/dremio/dac/service/TestCatalogServiceHelperForVersioned.java b/dac/backend/src/test/java/com/dremio/dac/service/TestCatalogServiceHelperForVersioned.java new file mode 100644 index 0000000000..b689dcee78 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/service/TestCatalogServiceHelperForVersioned.java @@ -0,0 +1,216 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.service; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Optional; +import java.util.UUID; + +import javax.ws.rs.core.SecurityContext; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.dac.api.CatalogEntity; +import com.dremio.dac.api.Dataset; +import com.dremio.dac.homefiles.HomeFileTool; +import com.dremio.dac.service.catalog.CatalogServiceHelper; +import com.dremio.dac.service.datasets.DatasetVersionMutator; +import com.dremio.dac.service.errors.ClientErrorException; +import com.dremio.dac.service.reflection.ReflectionServiceHelper; +import com.dremio.dac.service.search.SearchService; +import com.dremio.dac.service.source.SourceService; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.server.SabotContext; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.DatasetType; +import com.dremio.service.namespace.proto.EntityId; +import com.dremio.service.reflection.ReflectionSettings; +import com.dremio.test.DremioTest; + +/** + * Tests for catalog service helper for arctic + */ +public class TestCatalogServiceHelperForVersioned extends DremioTest { + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.LENIENT); + + @Mock private ReflectionSettings reflectionSettings; + @Mock private DataplanePlugin dataplanePlugin; + @Mock private DremioTable dremioTable; + + @Mock private Catalog catalog; + @Mock private 
SecurityContext securityContext; + @Mock private SourceService sourceService; + @Mock private NamespaceService namespaceService; + @Mock private SabotContext sabotContext; + @Mock private ReflectionServiceHelper reflectionServiceHelper; + @Mock private HomeFileTool homeFileTool; + @Mock private DatasetVersionMutator datasetVersionMutator; + @Mock private SearchService searchService; + + private static final String datasetId = UUID.randomUUID().toString(); + + @InjectMocks private CatalogServiceHelper catalogServiceHelper; + + @Before + public void setup() { + when(catalog.getSource(anyString())).thenReturn(dataplanePlugin); + when(reflectionServiceHelper.getReflectionSettings()).thenReturn(reflectionSettings); + when(reflectionSettings.getStoredReflectionSettings(any(CatalogEntityKey.class))) + .thenReturn(Optional.empty()); + } + + @Test + public void getCatalogEntityByPathWithNullReference() throws Exception { + assertThatThrownBy( + () -> + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "table"), null, null, null, null)) + .isInstanceOf(ClientErrorException.class) + .hasMessageContaining("Missing a versionType/versionValue"); + } + + @Test + public void getCatalogEntityByPathNotFound() throws Exception { + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(null); + + final Optional<CatalogEntity> catalogEntity = + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "table"), null, null, "BRANCH", "main"); + + assertThat(catalogEntity.isPresent()).isFalse(); + } + + @Test + public void getCatalogEntityByPathForTable() throws Exception { + final DatasetConfig datasetConfig = + new DatasetConfig() + .setType(DatasetType.PHYSICAL_DATASET) + .setId(new EntityId(datasetId)) + .setFullPathList(Collections.singletonList("table")); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + final Optional<CatalogEntity> catalogEntity = + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "table"), null, null, "BRANCH", "main"); + + assertThat(catalogEntity.isPresent()).isTrue(); + assertThat(catalogEntity.get()).isInstanceOf(Dataset.class); + + final Dataset dataset = (Dataset) catalogEntity.get(); + + assertThat(dataset.getId()).isEqualTo(datasetId); + assertThat(dataset.getType()).isEqualTo(Dataset.DatasetType.PHYSICAL_DATASET); + } + + @Test + public void getCatalogEntityByPathForSnapshot() throws Exception { + final DatasetConfig datasetConfig = + new DatasetConfig() + .setType(DatasetType.PHYSICAL_DATASET) + .setId(new EntityId(datasetId)) + .setFullPathList(Collections.singletonList("table")); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + final Optional<CatalogEntity> catalogEntity = + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "table"), null, null, "SNAPSHOT", "1128544236092645872"); + + assertThat(catalogEntity.isPresent()).isTrue(); + assertThat(catalogEntity.get()).isInstanceOf(Dataset.class); + + final Dataset dataset = (Dataset) catalogEntity.get(); + + assertThat(dataset.getId()).isEqualTo(datasetId); + assertThat(dataset.getType()).isEqualTo(Dataset.DatasetType.PHYSICAL_DATASET); + } + + @Test + public void getCatalogEntityByPathForTimestamp() throws Exception { + final DatasetConfig datasetConfig = + new
DatasetConfig() + .setType(DatasetType.PHYSICAL_DATASET) + .setId(new EntityId(datasetId)) + .setFullPathList(Collections.singletonList("table")); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + final Optional<CatalogEntity> catalogEntity = + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "table"), null, null, "TIMESTAMP", "1679029735226"); + + assertThat(catalogEntity.isPresent()).isTrue(); + assertThat(catalogEntity.get()).isInstanceOf(Dataset.class); + + final Dataset dataset = (Dataset) catalogEntity.get(); + + assertThat(dataset.getId()).isEqualTo(datasetId); + assertThat(dataset.getType()).isEqualTo(Dataset.DatasetType.PHYSICAL_DATASET); + } + + @Test + public void getCatalogEntityByPathForView() throws Exception { + final DatasetConfig datasetConfig = + new DatasetConfig() + .setType(DatasetType.VIRTUAL_DATASET) + .setId(new EntityId(datasetId)) + .setFullPathList(Collections.singletonList("table")); + + when(catalog.getTableSnapshot(any(NamespaceKey.class), any(TableVersionContext.class))) + .thenReturn(dremioTable); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + + final Optional<CatalogEntity> catalogEntity = + catalogServiceHelper.getCatalogEntityByPath( + Arrays.asList("arctic", "view"), null, null, "BRANCH", "main"); + + assertThat(catalogEntity.isPresent()).isTrue(); + assertThat(catalogEntity.get()).isInstanceOf(Dataset.class); + + final Dataset dataset = (Dataset) catalogEntity.get(); + + assertThat(dataset.getId()).isEqualTo(datasetId); + assertThat(dataset.getType()).isEqualTo(Dataset.DatasetType.VIRTUAL_DATASET); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/service/TestPhysicalDatasets.java b/dac/backend/src/test/java/com/dremio/dac/service/TestPhysicalDatasets.java index 3f2ff903a6..76e83be59f 100644 --- a/dac/backend/src/test/java/com/dremio/dac/service/TestPhysicalDatasets.java +++ b/dac/backend/src/test/java/com/dremio/dac/service/TestPhysicalDatasets.java @@ -39,20 +39,17 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Test; import com.dremio.common.util.FileUtils; import com.dremio.common.utils.PathUtils; import com.dremio.dac.explore.model.DatasetPath; import com.dremio.dac.explore.model.FileFormatUI; -import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.model.folder.Folder; import com.dremio.dac.model.job.JobDataFragment; import com.dremio.dac.model.namespace.NamespaceTree; import com.dremio.dac.model.sources.FormatTools; import com.dremio.dac.model.sources.PhysicalDataset; -import com.dremio.dac.model.sources.SourcePath; import com.dremio.dac.model.sources.SourceUI; import com.dremio.dac.model.sources.UIMetadataPolicy; import com.dremio.dac.server.BaseTestServer; @@ -92,7 +89,7 @@ * in oss/dac/backend/src/test/java/com/dremio/dac/api/TestPromotion.java.
*/ public class TestPhysicalDatasets extends BaseTestServer { - private static ch.qos.logback.classic.Logger rootLogger = ((ch.qos.logback.classic.Logger)org.slf4j.LoggerFactory.getLogger("com.dremio")); + private static final ch.qos.logback.classic.Logger rootLogger = ((ch.qos.logback.classic.Logger)org.slf4j.LoggerFactory.getLogger("com.dremio")); private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestPhysicalDatasets.class); private static Level originalLogLevel; private BufferAllocator allocator; @@ -120,7 +117,6 @@ public void setup() throws Exception { source.setConfig(nas); source.setMetadataPolicy(UIMetadataPolicy.of(CatalogService.DEFAULT_METADATA_POLICY_WITH_AUTO_PROMOTE)); sourceService.registerSourceWithRuntime(source); -// namespaceService.addOrUpdateSource(new SourcePath(new SourceName(nas.getName())).toNamespaceKey(), nas.asSourceConfig()); } allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); } @@ -193,13 +189,7 @@ public void testJsonFile() throws Exception { assertEquals(3, data.getReturnedRowCount()); assertEquals(2, data.getColumns().size()); - doc("creating dataset from source file"); - InitialPreviewResponse createResponse = expectSuccess(getBuilder(getAPIv2().path( - "source/dacfs_test/new_untitled_from_file/" + getUrlPath("/datasets/users.json"))).buildPost(Entity.json("")), - InitialPreviewResponse.class); - assertEquals(2, createResponse.getData().getColumns().size()); - - checkCounts(fileParentUrlPath, "users.json", true, 2, 0, 0); + checkCounts(fileParentUrlPath, "users.json", true, 1, 0, 0); } } @@ -235,7 +225,6 @@ public void testCommaSeparatedCsv() throws Exception { assertEquals(3, data.getColumns().size()); fileConfig.setExtractHeader(true); -// fileConfig.setLegacyTag(0L); expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/file_format/" + fileUrlPath)).buildPut(Entity.json(fileConfig))); data = expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/file_preview/"+ fileUrlPath)).buildPost(Entity.json(fileConfig)), JobDataFragment.class); assertEquals(3, data.getReturnedRowCount()); @@ -264,7 +253,6 @@ public void testCommaSeparatedCsvWindowsLineEndings() throws Exception { assertEquals(3, data.getColumns().size()); fileConfig.setExtractHeader(true); -// fileConfig.setLegacyTag(0L); expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/file_format/" + fileUrlPath)).buildPut(Entity.json(fileConfig))); data = expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/file_preview/"+ fileUrlPath)).buildPost(Entity.json(fileConfig)), JobDataFragment.class); assertEquals(3, data.getReturnedRowCount()); @@ -467,47 +455,12 @@ public void testQueryOnFolder() throws Exception { expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/folder_format/" + filePath)).buildPut(Entity.json(fileConfig))); - doc("creating dataset from source folder"); - expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/new_untitled_from_folder/" + filePath)).buildPost(Entity.json("")), InitialPreviewResponse.class); - checkCounts(fileParentPath, "folderdataset", true, 1, 0, 0); } finally { resetSystemOption("dac.format.preview.batch_size"); } } - private void noop() { - } - /* - @Test - public void testSubSchemaListing() throws Exception { - final StoragePluginRegistry pluginRegistry = getCurrentDremioDaemon().getDremio().getStoragePluginRegistry(); - final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin("dacfs_test"); - - SchemaPlus rootSchema = 
CalciteSchema.createRootSchema(false, false).plus(); - Constructor config = SchemaConfig.class.getDeclaredConstructor(String.class, SchemaConfigInfoProvider.class, - boolean.class); - config.setAccessible(true); - SchemaConfig schemaConfig = config.newInstance("test_user", null, true); - plugin.registerSchemas(schemaConfig, rootSchema); - assertEquals(0, rootSchema.getSubSchema("dacfs_test").getSubSchemaNames().size()); - assertEquals(0, rootSchema.getSubSchema("dacfs_test").getTableNames().size()); - - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath("dacfs_test.tmp.foo1").toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig().setType(DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER))); - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath("dacfs_test.home.bar").toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig().setType(DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER))); - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath("dacfs_test.tmp.foo2").toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig().setType(DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER))); - - rootSchema = CalciteSchema.createRootSchema(false, false).plus(); - config.setAccessible(true); - plugin.registerSchemas(schemaConfig, rootSchema); - - assertEquals(0, rootSchema.getSubSchema("dacfs_test").getSubSchemaNames().size()); - assertEquals(3, rootSchema.getSubSchema("dacfs_test").getTableNames().size()); - } - */ @Test public void listSource() { @@ -597,8 +550,6 @@ public void testPhysicalDatasetSourceFolders() throws Exception { TextFileConfig fileConfig = new TextFileConfig(); fileConfig.setComment("#"); fileConfig.setFieldDelimiter("|"); - //fileConfig.setSkipFirstLine(true); - //fileConfig.setExtractHeader(true); fileConfig.setName("fff"); fileConfig.setVersion(null); @@ -696,55 +647,6 @@ public void testPhysicalDatasetSourceFolders() throws Exception { } - @Test - @Ignore("DX-10523") - public void testPhysicalDatasetSourceFiles() throws Exception { - final NASConf config = new NASConf(); - config.path = "/"; - SourceUI source = new SourceUI(); - source.setName("src"); - source.setCtime(1000L); - source.setConfig(config); - - NamespaceService namespaceService = newNamespaceService(); - namespaceService.addOrUpdateSource(new SourcePath("src").toNamespaceKey(), source.asSourceConfig()); - - TextFileConfig fileConfig = new TextFileConfig(); - fileConfig.setComment("#"); - fileConfig.setFieldDelimiter("|"); - fileConfig.setExtractHeader(true); - fileConfig.setName("fff"); - - doc("create physical dataset from source file/ set format settings on a file in source"); - expectSuccess(getBuilder(getAPIv2().path( - "/source/src/file_format/file1")).buildPut(Entity.json(fileConfig))); - - doc("get physical dataset config from source file/get format settings on a file in source"); - TextFileConfig format1 = (TextFileConfig) expectSuccess(getBuilder(getAPIv2().path( - "/source/src/file_format/file1")).buildGet(), FileFormatUI.class).getFileFormat(); - - assertEquals(fileConfig.getName(), format1.getName()); - assertEquals(fileConfig.getFieldDelimiter(), format1.getFieldDelimiter()); - assertEquals(fileConfig.getExtractHeader(), format1.getExtractHeader()); - assertEquals(fileConfig.asFileConfig().getType(), format1.asFileConfig().getType()); - assertEquals(fileConfig.asFileConfig().getOwner(), format1.asFileConfig().getOwner()); - - doc("delete with bad version"); - long badVersion = 1234L; - String expectedErrorMessage = String.format("Cannot delete file 
format \"%s\", version provided \"%s\" is different from version found \"%s\"", - "file1", badVersion, format1.getVersion()); - final GenericErrorMessage errorDelete2 = expectStatus(CONFLICT, - getBuilder(getAPIv2().path("/source/src/file_format/file1").queryParam("version", badVersion)).buildDelete(), - GenericErrorMessage.class); - assertErrorMessage(errorDelete2, expectedErrorMessage); - - doc("delete physical dataset for source file/delete format settings on a file in source"); - expectSuccess(getBuilder(getAPIv2().path("/source/src/file_format/file1").queryParam("version", format1.getVersion())).buildDelete()); - - FileFormat fileFormat = expectSuccess(getBuilder(getAPIv2().path("/source/src/file_format/file1")).buildGet(), FileFormatUI.class).getFileFormat(); - assertEquals(FileType.UNKNOWN, fileFormat.getFileType()); - } - @Test public void testExcelWithHeaderAndMergeCellExpansion() throws Exception { Invocation inv = getExcelTestQueryInvocation(getUrlPath("/testfiles/excel.xlsx"), "sheet 1", true, true); @@ -1100,60 +1002,4 @@ public void testSchemaChangeNewColumn() throws Exception { assertEquals(2, jobData4.getColumns().size()); } } - - /* - @Test - public void testInfoSchema() throws Exception { - TextFileConfig fileConfig = new TextFileConfig(); - fileConfig.setName("blah"); - - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath(getSchemaPath("/json")).toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig() - .setName("json") - .setFormatSettings(fileConfig.asFileConfig()) - .setType(DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER))); - getNamespaceService().addOrUpdateDataset(new PhysicalDatasetPath(getSchemaPath("/nation_ctas")).toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig() - .setName("nation_ctas") - .setFormatSettings(fileConfig.asFileConfig()) - .setType(DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER))); - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath(getSchemaPath("/datasets/csv/comma.csv")).toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig() - .setName("comma.csv") - .setFormatSettings(fileConfig.asFileConfig()) - .setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE))); - - String query = "select * from INFORMATION_SCHEMA.\"TABLES\" where TABLE_SCHEMA like '%dacfs_test%'"; - Job job1 = l(JobsService.class).submitExternalJob(new SqlQuery(query, "test_user"), QueryType.UNKNOWN); - JobData job1Data = job1.getData().trunc(500); - assertEquals(3, job1Data.getReturnedRowCount()); - - getNamespaceService().tryCreatePhysicalDataset(new PhysicalDatasetPath(getSchemaPath("/datasets/text/comma.txt")).toNamespaceKey(), - toDatasetConfig(new PhysicalDatasetConfig() - .setName("comma.txt") - .setFormatSettings(fileConfig.asFileConfig()) - .setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE))); - - Job job2 = l(JobsService.class).submitExternalJob(new SqlQuery(query, "test_user"), QueryType.UNKNOWN); - JobData job2Data = job2.getData().trunc(500); - assertEquals(4, job2Data.getReturnedRowCount()); - - SchemaPlus rootSchema = CalciteSchema.createRootSchema(false, false).plus(); - Constructor config = SchemaConfig.class.getDeclaredConstructor(String.class, SchemaConfigInfoProvider.class, - boolean.class); - config.setAccessible(true); - SchemaConfig schemaConfig = config.newInstance("test_user", null, true); - getCurrentDremioDaemon().getDremio().getStoragePluginRegistry().getSchemaFactory().registerSchemas(schemaConfig, rootSchema); - - for (int i = 0; i < 4; ++i) { - 
assertNotNull(rootSchema.getSubSchema((job2Data.extractValue("TABLE_SCHEMA", i)).toString()). - getTable((job2Data.extractValue("TABLE_NAME", i)).toString())); - } - getNamespaceService().deleteDataset(new PhysicalDatasetPath(getSchemaPath("/nation_ctas")).toNamespaceKey(), 0); - getNamespaceService().deleteDataset(new PhysicalDatasetPath(getSchemaPath("/json")).toNamespaceKey(), 0); - Job job3 = l(JobsService.class).submitExternalJob(new SqlQuery(query, "test_user"), QueryType.UNKNOWN); - JobData job3Data = job3.getData().trunc(500); - assertEquals(2, job3Data.getReturnedRowCount()); - } - */ } diff --git a/dac/backend/src/test/java/com/dremio/dac/service/TestQueryProfileParser.java b/dac/backend/src/test/java/com/dremio/dac/service/TestQueryProfileParser.java index b8f7fa87a2..b5b093dc45 100644 --- a/dac/backend/src/test/java/com/dremio/dac/service/TestQueryProfileParser.java +++ b/dac/backend/src/test/java/com/dremio/dac/service/TestQueryProfileParser.java @@ -25,6 +25,7 @@ import com.dremio.dac.daemon.TestSpacesStoragePlugin; import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.model.job.JobUI; import com.dremio.dac.server.BaseTestServer; import com.dremio.resource.GroupResourceInformation; @@ -53,7 +54,9 @@ public class TestQueryProfileParser extends BaseTestServer { public void testQueryParser() throws Exception { TestSpacesStoragePlugin.setup(); - getPreview(getDataset(new DatasetPath("testA.dsA1"))); + final InitialPreviewResponse previewResponse = getPreview(getDataset(new DatasetPath("testA.dsA1"))); + waitForJobComplete(previewResponse.getJobId().getId()); + final SearchJobsRequest searchJobsRequest = SearchJobsRequest.newBuilder() .setDataset(VersionedDatasetPath.newBuilder() .addAllPath(new DatasetPath("testA.dsA1").toPathList()) diff --git a/dac/backend/src/test/java/com/dremio/dac/service/datasets/ITTestDatasetMutatorForVersionedViews.java b/dac/backend/src/test/java/com/dremio/dac/service/datasets/ITTestDatasetMutatorForVersionedViews.java new file mode 100644 index 0000000000..59e33407f5 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/service/datasets/ITTestDatasetMutatorForVersionedViews.java @@ -0,0 +1,145 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.dac.service.datasets; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithSource; +import static java.util.Collections.emptyList; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.arrow.memory.BufferAllocator; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; + +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.dac.explore.model.InitialPreviewResponse; +import com.dremio.dac.explore.model.VersionContextReq; +import com.dremio.dac.server.UserExceptionMapper; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.dataplane.ITBaseTestVersioned; +import com.dremio.service.jobs.JobsService; + +@ExtendWith(OlderNessieServersExtension.class) +public class ITTestDatasetMutatorForVersionedViews extends ITBaseTestVersioned { + private BufferAllocator allocator; + @BeforeEach + public void setUp() throws Exception { + allocator = + getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); + } + + @AfterEach + public void cleanUp() throws Exception { + allocator.close(); + } + + @Test + public void testSaveAsOnNewView() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String viewName = generateUniqueViewName(); + final List<String> viewPath = tablePathWithFolders(viewName); + final String newViewName = generateUniqueViewName(); + final List<String> newViewPath = tablePathWithFolders(newViewName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + runQuery(createEmptyTableQuery(tablePath)); + runQuery(insertTableQuery(tablePath)); + runQueryCheckResults(l(JobsService.class), DATAPLANE_PLUGIN_NAME, tablePath, 3, 3, allocator, null); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + createFolders(newViewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + DatasetPath targetViewPath = new DatasetPath(tablePathWithSource(DATAPLANE_PLUGIN_NAME, newViewPath)); + saveAsVersionedDataset(targetViewPath, selectStarQuery(tablePath), emptyList(), DATAPLANE_PLUGIN_NAME, DEFAULT_BRANCH_NAME); + } + + @Test + public void testSaveAsOnExistingView() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String viewName = generateUniqueViewName(); + final List<String>
viewPath = tablePathWithFolders(viewName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + runQuery(createEmptyTableQuery(tablePath)); + runQuery(insertTableQuery(tablePath)); + runQueryCheckResults(l(JobsService.class), DATAPLANE_PLUGIN_NAME, tablePath, 3, 3, allocator, null); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + + DatasetPath targetViewPath = new DatasetPath(tablePathWithSource(DATAPLANE_PLUGIN_NAME, viewPath)); + final DatasetUI createdDatasetUI = createVersionedDatasetFromSQLAndSave(targetViewPath, selectStarQuery(tablePath), emptyList(), DATAPLANE_PLUGIN_NAME, DEFAULT_BRANCH_NAME); + UserExceptionMapper.ErrorMessageWithContext errorMessage = saveAsVersionedDatasetExpectError(targetViewPath, selectStarQuery(tablePath), emptyList(), DATAPLANE_PLUGIN_NAME, DEFAULT_BRANCH_NAME); + assertContains("The specified location already contains a view", errorMessage.getErrorMessage()); + } + + @Test + public void testSaveAsWithConcurrentUpdates() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String viewName = generateUniqueViewName(); + final List<String> viewPath = tablePathWithFolders(viewName); + final String newViewName = generateUniqueViewName(); + final List<String> newViewPath = tablePathWithFolders(newViewName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + runQuery(createEmptyTableQuery(tablePath)); + runQuery(insertTableQuery(tablePath)); + runQueryCheckResults(l(JobsService.class), DATAPLANE_PLUGIN_NAME, tablePath, 3, 3, allocator, null); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + createFolders(newViewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + DatasetPath targetViewPath = new DatasetPath(tablePathWithSource(DATAPLANE_PLUGIN_NAME, newViewPath)); + final Map<String, VersionContextReq> references = new HashMap<>(); + references.put(DATAPLANE_PLUGIN_NAME, new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, DEFAULT_BRANCH_NAME)); + InitialPreviewResponse datasetCreateResponse1 = createVersionedDatasetFromSQL(selectStarQuery(tablePath), emptyList(), DATAPLANE_PLUGIN_NAME, DEFAULT_BRANCH_NAME); + InitialPreviewResponse datasetCreateResponse2 = createVersionedDatasetFromSQL(selectCountQuery(tablePath, "c1"), emptyList(), DATAPLANE_PLUGIN_NAME, DEFAULT_BRANCH_NAME); + + //Assert + //First save should work + saveAsInBranch(datasetCreateResponse1.getDataset(), targetViewPath, null, DEFAULT_BRANCH_NAME); + //Conflicting save with null tag should fail + saveAsInBranchExpectError(datasetCreateResponse2.getDataset(), targetViewPath, null, DEFAULT_BRANCH_NAME); + } + + protected UserExceptionMapper.ErrorMessageWithContext saveAsVersionedDatasetExpectError( + DatasetPath datasetPath, String sql, List<String> context, String pluginName, String branchName) { + final Map<String, VersionContextReq> references = new HashMap<>(); + references.put(pluginName, new VersionContextReq(VersionContextReq.VersionContextType.BRANCH, branchName)); + InitialPreviewResponse datasetCreateResponse = createVersionedDatasetFromSQL(sql, context, pluginName, branchName); + return saveAsInBranchExpectError(datasetCreateResponse.getDataset(), datasetPath, null, branchName); + + } + + protected void saveAsVersionedDataset( + DatasetPath datasetPath, String sql, List<String> context, String pluginName, String branchName) { + final Map<String, VersionContextReq> references = new HashMap<>(); + references.put(pluginName, new
VersionContextReq(VersionContextReq.VersionContextType.BRANCH, branchName)); + InitialPreviewResponse datasetCreateResponse = createVersionedDatasetFromSQL(sql, context, pluginName, branchName); + saveAsInBranch(datasetCreateResponse.getDataset(), datasetPath, null, branchName); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/service/datasets/TestDatasetVersionMutator.java b/dac/backend/src/test/java/com/dremio/dac/service/datasets/TestDatasetVersionMutator.java index 44f3173b02..97ca986727 100644 --- a/dac/backend/src/test/java/com/dremio/dac/service/datasets/TestDatasetVersionMutator.java +++ b/dac/backend/src/test/java/com/dremio/dac/service/datasets/TestDatasetVersionMutator.java @@ -20,17 +20,21 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import java.io.IOException; import java.util.List; +import java.util.stream.Collectors; import org.junit.Before; import org.junit.Test; import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.DatasetUI; import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.proto.model.dataset.VirtualDatasetUI; import com.dremio.dac.server.BaseTestServer; import com.dremio.dac.service.errors.DatasetVersionNotFoundException; import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.dataset.DatasetVersion; import com.google.common.collect.Lists; @@ -63,7 +67,7 @@ public void testDeleteDatasetVersion() { } @Test - public void testDeleteDatasetVersion_deleteOtherSamePath_willNotChangeHistory() { + public void testDeleteDatasetVersion_deleteOtherSamePath_willNotChangeHistory() throws NamespaceException { final InitialPreviewResponse showSchemasResponse = createDatasetFromSQL("SHOW SCHEMAS", emptyList()); final List<String> path = showSchemasResponse.getDataset().getFullPath(); final String version = showSchemasResponse.getDataset().getDatasetVersion().getVersion(); @@ -82,4 +86,26 @@ public void testDeleteDatasetVersion_deleteOtherSamePath_willNotChangeHistory() assertEquals("Dataset must have just one version", 1, allVersions.size()); } + @Test + public void testGetDatasetVersionWithDifferentCase() throws NamespaceException, IOException { + setSpace(); + final DatasetPath datasetPath = new DatasetPath("spacefoo.folderbar.tEsTcAsE"); + final DatasetUI datasetUI = createDatasetFromSQLAndSave(datasetPath, "SHOW SCHEMAS", emptyList()); + final DatasetVersion datasetVersion = datasetUI.getDatasetVersion(); + final VirtualDatasetUI vds = service.getVersion(datasetPath, datasetVersion); + assertNotNull("Dataset cannot be null after its creation", vds); + + // All upper case + final List<String> upperCasePath = datasetPath.toPathList().stream().map(String::toUpperCase).collect(Collectors.toList()); + final DatasetPath uppercaseDatasetPath = new DatasetPath(upperCasePath); + final VirtualDatasetUI vds1 = service.getVersion(uppercaseDatasetPath, datasetVersion); + assertNotNull("Must be able to get Dataset version using all upper case path", vds1); + + // All lower case + final List<String> lowerCasePath = datasetPath.toPathList().stream().map(String::toLowerCase).collect(Collectors.toList()); + final DatasetPath lowercaseDatasetPath = new DatasetPath(lowerCasePath); + final VirtualDatasetUI vds2 = service.getVersion(lowercaseDatasetPath, datasetVersion); + assertNotNull("Must be able to get Dataset version using all lower case path", vds2); + } + } diff --git
a/dac/backend/src/test/java/com/dremio/dac/service/reflection/TestReflectionServiceHelper.java b/dac/backend/src/test/java/com/dremio/dac/service/reflection/TestReflectionServiceHelper.java new file mode 100644 index 0000000000..7289c30e6b --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/dac/service/reflection/TestReflectionServiceHelper.java @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.service.reflection; + +import static com.dremio.exec.catalog.CatalogOptions.REFLECTION_ARCTIC_ENABLED; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.Mockito.when; + +import java.util.UUID; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.options.OptionManager; + +/** + * Unit Test class for {@link ReflectionServiceHelper} + */ +public class TestReflectionServiceHelper { + @Rule + public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock + private OptionManager optionManager; + + private ReflectionServiceHelper reflectionServiceHelper; + + @Before + public void setup() { + reflectionServiceHelper = new ReflectionServiceHelper(null, null, optionManager); + } + + @Test + public void testIsVersionedSourceEnabledForVersionedSourceThrowsUnsupportedError() { + String datasetId = "{\"tableKey\":[\"nessie_without_auth\",\"test\"],\"contentId\":\"cf3c730a-98c0-43a1-855d-02fb97a046c6" + + "\",\"versionContext\":{\"type\":\"BRANCH\",\"value\":\"main\"}}"; + assertThatThrownBy(() -> reflectionServiceHelper.isVersionedSourceEnabled(datasetId)) + .hasMessageContaining("does not support reflection") + .isInstanceOf(UnsupportedOperationException.class); + } + + @Test + public void testIsVersionedSourceEnabledForVersionedSource() { + String datasetId = "{\"tableKey\":[\"nessie_without_auth\",\"test\"],\"contentId\":\"cf3c730a-98c0-43a1-855d-02fb97a046c6" + + "\",\"versionContext\":{\"type\":\"BRANCH\",\"value\":\"main\"}}"; + when(optionManager.getOption(REFLECTION_ARCTIC_ENABLED)).thenReturn(true); + assertDoesNotThrow(() -> reflectionServiceHelper.isVersionedSourceEnabled(datasetId)); + } + + @Test + public void testIsVersionedSourceEnabledForNonVersionedSource() { + String datasetId = UUID.randomUUID().toString(); + assertDoesNotThrow(() -> reflectionServiceHelper.isVersionedSourceEnabled(datasetId)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/dac/service/source/TestSourceService.java b/dac/backend/src/test/java/com/dremio/dac/service/source/TestSourceService.java index 52b29fc3a3..507e124b93 100644 --- a/dac/backend/src/test/java/com/dremio/dac/service/source/TestSourceService.java +++ 
b/dac/backend/src/test/java/com/dremio/dac/service/source/TestSourceService.java @@ -15,34 +15,106 @@ */ package com.dremio.dac.service.source; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertFalse; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import java.security.Principal; +import java.util.Arrays; +import java.util.Collections; import java.util.List; import javax.inject.Provider; import javax.ws.rs.core.SecurityContext; +import org.junit.Rule; import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; import com.dremio.common.exceptions.UserException; +import com.dremio.dac.explore.QueryExecutor; +import com.dremio.dac.explore.model.Dataset; +import com.dremio.dac.explore.model.VersionContextReq; import com.dremio.dac.homefiles.HomeFileConf; +import com.dremio.dac.model.folder.Folder; +import com.dremio.dac.model.folder.FolderName; +import com.dremio.dac.model.namespace.NamespaceTree; +import com.dremio.dac.model.sources.FormatTools; +import com.dremio.dac.model.sources.PhysicalDataset; +import com.dremio.dac.model.sources.SourceName; +import com.dremio.dac.resource.SourceResource; +import com.dremio.dac.server.BufferAllocatorFactory; import com.dremio.dac.service.collaboration.CollaborationHelper; import com.dremio.dac.service.datasets.DatasetVersionMutator; import com.dremio.dac.service.reflection.ReflectionServiceHelper; import com.dremio.exec.catalog.ConnectionReader; +import com.dremio.exec.catalog.SourceCatalog; import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.catalog.conf.ConnectionConf; +import com.dremio.exec.server.ContextService; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.NessieNamespaceAlreadyExistsException; +import com.dremio.exec.store.ReferenceNotFoundException; import com.dremio.exec.store.StoragePlugin; import com.dremio.exec.store.dfs.InternalFileConf; import com.dremio.exec.store.dfs.PDFSConf; import com.dremio.exec.store.sys.SystemPluginConf; +import com.dremio.file.File; +import com.dremio.plugins.ExternalNamespaceEntry; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.service.namespace.BoundedDatasetCount; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.SourceState; +import com.dremio.service.namespace.proto.EntityId; +import com.dremio.service.namespace.source.proto.SourceConfig; +import com.dremio.service.reflection.ReflectionAdministrationService; +import com.dremio.service.reflection.ReflectionSettings; import com.google.common.collect.ImmutableList; public class TestSourceService { + private static final String SOURCE_NAME = 
"sourceName"; + private static final String NAMESPACE = "NAMESPACE"; + private static final String ICEBERG_TABLE = "ICEBERG_TABLE"; + private static final String DEFAULT_REF_TYPE = VersionContextReq.VersionContextType.BRANCH.toString(); + private static final String DEFAULT_BRANCH_NAME = "somebranch"; + private static final VersionContext DEFAULT_VERSION_CONTEXT = + VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + private static final String FOLDER_NAME_1 = "folder1"; + private static final String FOLDER_NAME_2 = "folder2"; + private static final String TABLE_NAME_1 = "table1"; + private static final List DEFAULT_ENTRIES = Arrays.asList( + ExternalNamespaceEntry.of(NAMESPACE, Collections.singletonList(FOLDER_NAME_1)), + ExternalNamespaceEntry.of(NAMESPACE, Collections.singletonList(FOLDER_NAME_2)), + ExternalNamespaceEntry.of(ICEBERG_TABLE, Collections.singletonList(TABLE_NAME_1))); + + @Rule + public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock + private NamespaceService namespaceService; + @Mock private DataplanePlugin dataplanePlugin; + @Mock private ConnectionReader connectionReader; + @Mock private ReflectionAdministrationService.Factory reflectionService; + @Mock private SecurityContext securityContext; + @Mock private CatalogService catalogService; private static class NonInternalConf extends ConnectionConf { @@ -84,6 +156,216 @@ private void testConnectionConfs(List> validConnectionConfs } } + @Test + public void getSource() throws Exception { + // Arrange + complexMockSetup(); + when(dataplanePlugin.listEntries(any(), eq(DEFAULT_VERSION_CONTEXT))).thenReturn(DEFAULT_ENTRIES.stream()); + SourceResource sourceResource = makeSourceResource(); + + // Act + NamespaceTree contents = sourceResource + .getSource(true, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME) + .getContents(); + + // Assert + assertMatchesDefaultEntries(contents); + } + + @Test + public void getFolder() throws Exception { + // Arrange + when(namespaceService.getDataset(any())) + .thenThrow(NamespaceNotFoundException.class); + + when(dataplanePlugin.listEntries(any(), eq(DEFAULT_VERSION_CONTEXT))) + .thenReturn(DEFAULT_ENTRIES.stream()); + + SourceResource sourceResource = makeSourceResource(); + + // Act + NamespaceTree contents = sourceResource + .getFolder("folder", true, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME) + .getContents(); + + // Assert + assertMatchesDefaultEntries(contents); + } + + @Test + public void createFolder() { + SourceResource sourceResource = makeSourceResource(); + + doNothing() + .when(dataplanePlugin) + .createNamespace(any(), eq(DEFAULT_VERSION_CONTEXT)); + + Folder folder = + sourceResource.createFolder( + null, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(FOLDER_NAME_1)); + + assertThat(folder.getName()).isEqualTo(FOLDER_NAME_1); + assertThat(folder.getIsPhysicalDataset()).isFalse(); + } + + @Test + public void createFolderThrownNessieNamespaceAlreadyExistsException() { + SourceResource sourceResource = makeSourceResource(); + + doThrow(NessieNamespaceAlreadyExistsException.class) + .doNothing() + .when(dataplanePlugin) + .createNamespace(any(), eq(DEFAULT_VERSION_CONTEXT)); + + assertThatThrownBy( + () -> + sourceResource.createFolder( + null, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(FOLDER_NAME_1))) + .isInstanceOf(UserException.class) + .hasMessageContaining("already exists"); + } + + @Test + public void createFolderThrownReferenceNotFoundException() { + SourceResource sourceResource = makeSourceResource(); + + 
doThrow(ReferenceNotFoundException.class) + .doNothing() + .when(dataplanePlugin) + .createNamespace(any(), eq(DEFAULT_VERSION_CONTEXT)); + + assertThatThrownBy( + () -> + sourceResource.createFolder( + null, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(FOLDER_NAME_1))) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found"); + } + + @Test + public void createFolderWithSpaceInFolderName() { + final String folderNameWithSpace = "folder with space"; + SourceResource sourceResource = makeSourceResource(); + + Folder folder = + sourceResource.createFolder( + null, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(folderNameWithSpace)); + + verify(dataplanePlugin) + .createNamespace( + new NamespaceKey(Arrays.asList(SOURCE_NAME, folderNameWithSpace)), + DEFAULT_VERSION_CONTEXT); + assertThat(folder.getName()).isEqualTo(folderNameWithSpace); + assertThat(folder.getIsPhysicalDataset()).isFalse(); + } + + @Test + public void createFolderWithSpaceInFolderNameWithinNestedFolder() { + final String rootFolderNameWithSpace = "folder with space"; + final String leafFolderNameWithSpace = "folder with another space"; + final String path = "folder with space/"; + SourceResource sourceResource = makeSourceResource(); + + Folder folder = + sourceResource.createFolder( + path, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(leafFolderNameWithSpace)); + + verify(dataplanePlugin) + .createNamespace( + new NamespaceKey(Arrays.asList(SOURCE_NAME, + rootFolderNameWithSpace, + leafFolderNameWithSpace)), + DEFAULT_VERSION_CONTEXT); + assertThat(folder.getName()).isEqualTo(leafFolderNameWithSpace); + assertThat(folder.getIsPhysicalDataset()).isFalse(); + } + + @Test + public void deleteFolder() { + final String rootFolder = "rootFolder"; + final String path = ""; + SourceResource sourceResource = makeSourceResource(); + + sourceResource.createFolder( + path, DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME, new FolderName(rootFolder)); + + sourceResource.deleteFolder("rootFolder/", DEFAULT_REF_TYPE, DEFAULT_BRANCH_NAME); + verify(dataplanePlugin) + .deleteFolder(new NamespaceKey(Arrays.asList(SOURCE_NAME, rootFolder)), + DEFAULT_VERSION_CONTEXT); + } + + private void assertMatchesDefaultEntries(NamespaceTree contents) { + + List<Folder> folders = contents.getFolders(); + List<PhysicalDataset> physicalDatasets = contents.getPhysicalDatasets(); + List<File> files = contents.getFiles(); + List<Dataset> virtualDatasets = contents.getDatasets(); + + assertThat(folders).hasSize(2); + assertThat(folders.get(0).getName()).isEqualTo(FOLDER_NAME_1); + assertThat(folders.get(0).getIsPhysicalDataset()).isFalse(); + + assertThat(physicalDatasets).hasSize(1); + assertThat(physicalDatasets.get(0).getDatasetName().getName()).isEqualTo(TABLE_NAME_1); + + assertThat(files).isEmpty(); + assertThat(virtualDatasets).isEmpty(); + + assertThat(contents.totalCount()).isEqualTo(3); + } + + private SourceResource makeSourceResource() { + when(catalogService.getSource(anyString())).thenReturn(dataplanePlugin); + + final Principal principal = mock(Principal.class); + when(principal.getName()).thenReturn("username"); + when(securityContext.getUserPrincipal()).thenReturn(principal); + + final SourceService sourceService = new SourceService( + mock(SabotContext.class), + namespaceService, + mock(DatasetVersionMutator.class), + catalogService, + mock(ReflectionServiceHelper.class), + mock(CollaborationHelper.class), + connectionReader, + mock(SecurityContext.class)); + + return new SourceResource( + namespaceService, + reflectionService, + sourceService, + new 
SourceName(SOURCE_NAME), + mock(QueryExecutor.class), + securityContext, + connectionReader, + mock(SourceCatalog.class), + mock(FormatTools.class), + mock(ContextService.class), + mock(BufferAllocatorFactory.class)); + } + + private void complexMockSetup() throws NamespaceException { + final SourceConfig sourceConfig = new SourceConfig() + .setName(SOURCE_NAME) + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY) + .setCtime(100L) + .setId(new EntityId().setId("1")); + when(connectionReader.getConnectionConf(sourceConfig)).thenReturn(mock(ConnectionConf.class)); + when(namespaceService.getSource(any())).thenReturn(sourceConfig); + when(namespaceService.getDatasetCount(any(), anyLong(), anyInt())).thenReturn(new BoundedDatasetCount(0, false, false)); + when(catalogService.getSourceState(SOURCE_NAME)).thenReturn(SourceState.GOOD); + + ReflectionSettings reflectionSettings = mock(ReflectionSettings.class); + when(reflectionSettings.getReflectionSettings((NamespaceKey) any())).thenReturn(null); + + ReflectionAdministrationService reflectionAdministrationService = mock(ReflectionAdministrationService.class); + when(reflectionAdministrationService.getReflectionSettings()).thenReturn(reflectionSettings); + + when(reflectionService.get(any())).thenReturn(reflectionAdministrationService); + } + @Test public void testValidConnectionConfs() { testConnectionConfs(validConnectionConfs, true); diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/AlterTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/AlterTestCases.java new file mode 100644 index 0000000000..606ddc774a --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/AlterTestCases.java @@ -0,0 +1,464 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableAddColumnsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableAddPrimaryKeyQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableChangeColumnQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableDropColumnQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableDropPrimaryKeyQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableWithColDefsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.truncateTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedAlterTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ + +public class AlterTestCases { + private ITDataplanePluginTestSetup base; + + AlterTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + //Tests adding columns to existing table + @Test + void alterTableAddOneColumn() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final List columnDefinition = Collections.singletonList("col1 int"); + final List addedColDef = Collections.singletonList("col2 int"); + final List columnValuesBeforeAdd = Collections.singletonList("(1)"); + final List columnValuesAfterAdd = Collections.singletonList("(2,2)"); + //Setup + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + //Insert + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd)); + //Select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() 
+ .baselineColumns("col1") + .baselineValues(1) + .go(); + + //Add single column + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef)); + //Select again + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2") + .baselineValues(1, null) + .go(); + + //Insert a new row + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd)); + //Select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2") + .baselineValues(1, null) + .baselineValues(2, 2) + .go(); + + //cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + void alterTableAddMultipleColumns() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final List columnDefinition = Collections.singletonList("col1 int"); + final List addedColDef1 = Arrays.asList("col2 int", "col3 int", "col4 varchar"); + final List columnValuesBeforeAdd = Collections.singletonList("(1)"); + final List columnValuesAfterAdd = Arrays.asList("(2,2,2,'two')", "(3,3,3,'three')"); + + //Setup + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + //Insert + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd)); + + //Act + //Add 3 columns + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef1)); + + //Assert + //Insert a new row + base.test(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd)); + //Select the new rows from new columns + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2", "col3", "col4") + .baselineValues(1, null, null, null) + .baselineValues(2, 2, 2, "two") + .baselineValues(3, 3, 3, "three") + .go(); + + //cleanup + base.test(dropTableQuery(tablePath)); + } + + @Test + void alterTableDropOneColumn() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final List columnDefinition = Arrays.asList("col1 int", "col2 int", "col3 int", "col4 varchar"); + final List dropColumn = Collections.singletonList("col3"); + final List columnValues = Collections.singletonList("(1,1,1,'one')"); + final List columnsValuesAfterDrop = Arrays.asList("(2,2,'two')", "(3,3,'three')"); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + base.runSQL(insertTableWithValuesQuery(tablePath, columnValues)); + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2", "col3", "col4") + .baselineValues(1, 1, 1, "one") + .go(); + + //Act + //Drop column + base.runSQL(alterTableDropColumnQuery(tablePath, dropColumn)); + + //Assert + //select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2", "col4") + .baselineValues(1, 1, "one") + .go(); + //Insert again + base.runSQL(insertTableWithValuesQuery(tablePath, columnsValuesAfterDrop)); + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2", "col4") + .baselineValues(1, 1, "one") + .baselineValues(2, 2, "two") + .baselineValues(3, 3, "three") + .go(); + + //Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + void alterTableAddDropColumns() throws Exception { 
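+ // Rows inserted before an ADD COLUMNS are expected to read back NULL for the
+ // newly added columns, and a dropped column should disappear from every
+ // subsequent SELECT while the surviving columns keep their data.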
+ + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final List columnDefinition = Arrays.asList("col1 int", "col2 int"); + final List addedColDef = Arrays.asList("col3 int", "col4 int"); + final List dropColumns = Collections.singletonList("col2"); + final List columnValues = Collections.singletonList("(1,1)"); + final List columnValuesAfterAdd = Collections.singletonList("(2,2,2,2)"); + final List columnValuesAfterDrop = Arrays.asList("(3,3,3)", "(4,4,4)"); + //Setup + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + base.test(insertTableWithValuesQuery(tablePath, columnValues)); + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2") + .baselineValues(1, 1) + .go(); + + //Act + //Add columns + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef)); + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd)); + //Drop columns + base.runSQL(alterTableDropColumnQuery(tablePath, dropColumns)); + //Insert again + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterDrop)); + + //Assert + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col3", "col4") + .baselineValues(1, null, null) + .baselineValues(2, 2, 2) + .baselineValues(3, 3, 3) + .baselineValues(4, 4, 4) + .go(); + + //Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + void alterTableChangeColumn() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final List columnDefinition = Arrays.asList("col1 int", "col2 float"); + final List changeColDef = Collections.singletonList("col2 col3 double"); + final List columnValuesBeforeChange = Collections.singletonList("(1, cast(1.0 as float))"); + final List columnValuesAfterChange = Collections.singletonList("(2,cast(2.0 as double))"); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + //Insert + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeChange)); + //Select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2") + .baselineValues(1, new Float("1.0")) + .go(); + + //Act + //Change column + base.runSQL(alterTableChangeColumnQuery(tablePath, changeColDef)); + + //Assert + //Insert a new row + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterChange)); + //Select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col3") + .baselineValues(1, 1.0) + .baselineValues(2, 2.0) + .go(); + + //cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + void truncateTableInDiffBranches() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String devBranch = generateUniqueBranchName(); + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + // Verify with select + base.assertTableHasExpectedNumRows(tablePath, 5); + // Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, 
DEFAULT_BRANCH_NAME)); + // Switch to dev + base.runSQL(useBranchQuery(devBranch)); + + //Act + base.runSQL(truncateTableQuery(tablePath)); + + //Assert + base.assertTableHasExpectedNumRows(tablePath, 0); + //Check that main context still has the table + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.assertTableHasExpectedNumRows(tablePath, 5); + + //cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void testDropInvalidPrimaryKey() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + try { + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // Try to drop a primary key when there is none added. + assertThatThrownBy(() -> base.runSQL(alterTableDropPrimaryKeyQuery(tablePath))) + .hasMessageContaining("No primary key to drop"); + } finally { + // Cleanup + try { + base.runSQL(dropTableQuery(tablePath)); + } catch (Exception ignore) { + } + } + } + + @Test + public void testAddAndDropPrimaryKey() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + try { + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(alterTableAddPrimaryKeyQuery(tablePath, Collections.singletonList("id"))); + base.runSQL(alterTableDropPrimaryKeyQuery(tablePath)); + } finally { + // Cleanup + try { + base.runSQL(dropTableQuery(tablePath)); + } catch (Exception ignore) { + } + } + } + + @Test + public void testAddInvalidPrimaryKey() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + try { + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // Try to add a non-existent primary key. 
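+ // The key below matches no column in the empty table, so the ALTER is expected
+ // to fail; the finally block still drops the table so a failed assertion
+ // cannot leak it into later tests.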
+ final String primaryKey = "blah_blah"; + assertThatThrownBy(() -> base.runSQL(alterTableAddPrimaryKeyQuery(tablePath, Collections.singletonList(primaryKey)))) + .hasMessageContaining(String.format("Column %s not found", primaryKey)); + } finally { + // Cleanup + try { + base.runSQL(dropTableQuery(tablePath)); + } catch (Exception ignore) { + } + } + } + + @Test + public void testAddColumnAgnosticOfSourceBucket() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final List columnDefinition = Collections.singletonList("col1 int"); + final List addedColDef = Collections.singletonList("col2 int"); + final List columnValuesBeforeAdd = Collections.singletonList("(1)"); + + //Setup + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition)); + + //Insert + base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd)); + + //Select + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1") + .baselineValues(1) + .go(); + + //Add single column + base.runWithAlternateSourcePath(alterTableAddColumnsQuery(tablePath, addedColDef)); + //Select again + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("col1", "col2") + .baselineValues(1, null) + .go(); + } + + + @Test + public void addColumnWithTagSet() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Act and Assert + base.assertQueryThrowsExpectedError(createTableAsQuery(tablePath, 5), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } + + @Test + public void changeColumnWithTagSet() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Act and Assert + base.assertQueryThrowsExpectedError(createTableAsQuery(tablePath, 5), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } + + @Test + public void dropColumnWithTagSet() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Act and Assert + base.assertQueryThrowsExpectedError(createTableAsQuery(tablePath, 5), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. 
%s is not a branch.", + tag)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/BranchTagTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/BranchTagTestCases.java new file mode 100644 index 0000000000..3ab083cb0d --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/BranchTagTestCases.java @@ -0,0 +1,363 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterBranchAssignBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterBranchAssignCommitQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterBranchAssignTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTagAssignBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTagAssignCommitQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTagAssignTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtSpecifierQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.quoted; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.showBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.showTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.List; +import java.util.Set; + +import 
org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; +import com.google.common.collect.Sets; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedBranchTagTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ + +public class BranchTagTestCases { + private ITDataplanePluginTestSetup base; + + BranchTagTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + void createBranchAtCommit() throws Exception { + // Arrange + // Make some commit on another branch to make sure we're not working with any defaults + final String temporaryBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + base.runSQL(createBranchAtBranchQuery(temporaryBranchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(temporaryBranchName)); + createFolders(tablePath, VersionContext.ofBranch(temporaryBranchName)); + base.runSQL(createEmptyTableQuery(tablePath)); + String commitHashTemporaryBranch = base.getCommitHashForBranch(temporaryBranchName); + + final String branchName = generateUniqueBranchName(); + + // Act + base.runSQL(createBranchAtSpecifierQuery(branchName, "COMMIT " + quoted(commitHashTemporaryBranch))); + + // Assert + assertThat(base.getCommitHashForBranch(branchName)).isEqualTo(commitHashTemporaryBranch); + } + + @Test + public void alterBranchAssignBranch() throws Exception { + // Arrange + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + + // Act + base.runSQL(alterBranchAssignBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + + } + + @Test + public void alterBranchAssignTag() throws Exception { + // Arrange + final String tagName = generateUniqueTagName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + + // Act + base.runSQL(alterBranchAssignTagQuery(DEFAULT_BRANCH_NAME, tagName)); + base.runSQL(useTagQuery(tagName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + } + + @Test + public void alterBranchAssignCommit() throws Exception { + // Arrange + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); 
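+ // Capture the commit hash that main currently points at; ALTER BRANCH ... ASSIGN
+ // accepts a raw commit, so the dev branch can be pinned to this exact commit
+ // rather than to a named branch or tag.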
+ String commitHash = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + + // Act + base.runSQL(alterBranchAssignCommitQuery(branchName, quoted(commitHash))); + base.runSQL(useBranchQuery(branchName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + } + + //This validation check is added as a part of 0.52.3 Nessie versions + //PR For reference: https://github.com/projectnessie/nessie/pull/6224. + @Test + public void alterBranchAssignCommitUsingTag() throws Exception { + // Arrange + final String tagName = generateUniqueTagName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + String commitHash = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + + // Act + assertThatThrownBy(() -> base.runSQL(alterBranchAssignCommitQuery(tagName, quoted(commitHash)))) + .hasMessageContaining("Expected reference type BRANCH does not match existing reference TagName"); + } + + @Test + public void alterTagAssignTag() throws Exception { + // Arrange + final String tagName = generateUniqueTagName(); + final String tagName2 = generateUniqueTagName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.runSQL(createTagQuery(tagName2, DEFAULT_BRANCH_NAME)); + + // Act + base.runSQL(alterTagAssignTagQuery(tagName, tagName2)); + base.runSQL(useTagQuery(tagName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + } + + @Test + public void alterTagAssignBranch() throws Exception { + // Arrange + final String tagName = generateUniqueTagName(); + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + createFolders(mainTablePath, VersionContext.ofBranch(branchName)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + + // Act + base.runSQL(alterTagAssignBranchQuery(tagName, branchName)); + base.runSQL(useTagQuery(tagName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + } + + @Test + public void alterTagAssignCommit() throws Exception { + // Arrange + final String tagName = generateUniqueTagName(); + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + createFolders(mainTablePath, VersionContext.ofBranch(branchName)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + 
String commitHash = base.getCommitHashForBranch(branchName); + + // Act + base.runSQL(alterTagAssignCommitQuery(tagName, quoted(commitHash))); + base.runSQL(useTagQuery(tagName)); + + // ASSERT + base.assertTableHasExpectedNumRows(mainTablePath, 0); + } + + @Test + public void showNewlyCreatedBranch() throws Exception { + final String branchName = generateUniqueBranchName(); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + List<List<String>> branchresults = base.runSqlWithResults(showBranchQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(branchresults.stream().filter(row -> row.contains(branchName))).isNotEmpty(); + } + + @Test + public void showBranchAfterDelete() throws Exception { + final String branchName1 = generateUniqueBranchName(); + final String branchName2 = generateUniqueBranchName(); + final String branchName3 = generateUniqueBranchName(); + Set<String> branchSetBeforeDelete = Sets.newHashSet(branchName1, branchName2, branchName3); + Set<String> branchSetAfterDelete = Sets.newHashSet(branchName1, branchName3); + base.runSQL(createBranchAtBranchQuery(branchName1, DEFAULT_BRANCH_NAME)); + base.runSQL(createBranchAtBranchQuery(branchName2, DEFAULT_BRANCH_NAME)); + base.runSQL(createBranchAtBranchQuery(branchName3, DEFAULT_BRANCH_NAME)); + + List<List<String>> branchresults = base.runSqlWithResults(showBranchQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(branchresults.stream().filter(row -> branchSetBeforeDelete.contains(row.get(1))).count()).isEqualTo(3); + base.runSQL(dropBranchQuery(branchName2)); + branchresults = base.runSqlWithResults(showBranchQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(branchresults.stream().filter(row -> branchSetAfterDelete.contains(row.get(1))).count()).isEqualTo(2); + assertThat(branchresults.stream().filter(row -> row.contains(branchName2)).count()).isEqualTo(0); + } + + @Test + public void showMultipleCreatedBranches() throws Exception { + Set<String> branchSet = Sets.newHashSet(); + String branchName; + for (int i = 0; i < 100; i++) { + branchName = generateUniqueBranchName(); + branchSet.add(branchName); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + } + + List<List<String>> branchresults = base.runSqlWithResults(showBranchQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(branchresults.stream().filter(row -> branchSet.contains(row.get(1))).count()).isEqualTo(100); + } + + @Test + public void showNewlyCreatedTag() throws Exception { + final String tagName = generateUniqueTagName(); + base.runSQL(createTagQuery(tagName, DEFAULT_BRANCH_NAME)); + List<List<String>> tagResults = base.runSqlWithResults(showTagQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(tagResults.stream().filter(row -> row.contains(tagName))).isNotEmpty(); + } + + + @Test + public void showTagAfterDelete() throws Exception { + final String tagName1 = generateUniqueBranchName(); + final String tagName2 = generateUniqueBranchName(); + final String tagName3 = generateUniqueBranchName(); + Set<String> tagSetBeforeDelete = Sets.newHashSet(tagName1, tagName2, tagName3); + Set<String> tagSetAfterDelete = Sets.newHashSet(tagName1, tagName3); + base.runSQL(createTagQuery(tagName1, DEFAULT_BRANCH_NAME)); + base.runSQL(createTagQuery(tagName2, DEFAULT_BRANCH_NAME)); + base.runSQL(createTagQuery(tagName3, DEFAULT_BRANCH_NAME)); + + List<List<String>> tagresults = base.runSqlWithResults(showTagQuery(DATAPLANE_PLUGIN_NAME)); + assertThat(tagresults.stream().filter(row -> tagSetBeforeDelete.contains(row.get(1))).count()).isEqualTo(3); + base.runSQL(dropTagQuery(tagName2)); + tagresults = base.runSqlWithResults(showTagQuery(DATAPLANE_PLUGIN_NAME)); + 
assertThat(tagresults.stream().filter(row -> tagSetAfterDelete.contains(row.get(1))).count()).isEqualTo(2); + assertThat(tagresults.stream().filter(row -> row.contains(tagName2)).count()).isEqualTo(0); + } + + @Test + void dropCurrentBranch() throws Exception { + // Arrange + final String branchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(branchName)); + + + //Act and Assert + base.runSQL(useBranchQuery(branchName)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertThatThrownBy(() -> base.runSQL(dropBranchQuery(branchName))) + .hasMessageContaining(String.format("Cannot drop branch %s for source %s while it is set in the current session's reference context", + branchName, + DATAPLANE_PLUGIN_NAME)); + } + + @Test + void dropCurrentTag() throws Exception { + // Arrange + final String tagName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + final String branchName = generateUniqueBranchName(); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + createFolders(tablePath, VersionContext.ofBranch(branchName)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(createTagQuery(tagName, branchName)); + + // Act and Assert + base.runSQL(useTagQuery(tagName)); + assertThatThrownBy(() -> base.runSQL(dropTagQuery(tagName))) + .hasMessageContaining(String.format("Cannot drop tag %s for source %s while it is set in the current session's reference context", + tagName, + DATAPLANE_PLUGIN_NAME)); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ContainerEntity.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ContainerEntity.java new file mode 100644 index 0000000000..6364fba0dc --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ContainerEntity.java @@ -0,0 +1,216 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.google.common.base.Joiner; +import com.google.common.collect.Streams; + +/** + * Helper class for {@link InfoSchemaCombinationTestCases}. + * + * Represents a container (source or folder) that may contain tables, views, and + * other folders. In the combination test, we are creating a similar set of + * tables and views, so this class helps us be consistent about that creation. 
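+ *
+ * For example (hypothetical names), an explicit folder holding only tables under
+ * a source root could be modeled as:
+ * new ContainerEntity("folderA", Type.EXPLICIT_FOLDER, Contains.TABLES_ONLY,
+ *     Collections.singletonList("sourceRoot"))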
+ */ +public class ContainerEntity { + + private static final Joiner DOT_JOINER = Joiner.on('.'); + private static final Joiner QUOTE_DOT_QUOTE_JOINER = Joiner.on("\".\""); + public static final String tableAFirst = "tableAFirst"; + public static final String tableBSecond = "tableBSecond"; + public static final String viewCThird = "viewCThird"; + public static final String viewDFourth = "viewDFourth"; + + public enum Type { + SOURCE, + IMPLICIT_FOLDER, + EXPLICIT_FOLDER, + } + + public enum Contains { + FOLDERS_ONLY, + TABLES_ONLY, + FOLDERS_AND_VIEWS, + MAX_KEY_TABLE, + EMPTY, + + } + + private final String name; + private final Type type; + private final Contains contains; + private final List<String> parentPath; + + public ContainerEntity(String name, Type type, Contains contains, List<String> parentPath) { + this.name = name; + this.type = type; + this.contains = contains; + this.parentPath = parentPath; + } + + public String getName() { + return name; + } + + public Type getType() { + return type; + } + + public Contains getContains() { + return contains; + } + + public List<String> getParentPath() { + return parentPath; + } + + public List<String> getFullPath() { + return Streams.concat(getParentPath().stream(), Stream.of(getName())).collect(Collectors.toList()); + } + + public List<String> getPathWithoutRoot() { + if (parentPath.size() == 0) { + // If no parentPath, this IS the root so don't return it + return Collections.emptyList(); + } + return Streams.concat(getParentPath().stream().skip(1), Stream.of(getName())).collect(Collectors.toList()); + } + + public List<String> getChildPathWithoutRoot(String childName) { + return Streams.concat(getPathWithoutRoot().stream(), Stream.of(childName)).collect(Collectors.toList()); + } + + public String asSqlIdentifier() { + // Note: this won't work correctly when we add special characters + return DOT_JOINER.join(getFullPath()); + } + + public List<List<String>> getExpectedTablesIncludingViews() { + return Arrays.asList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableAFirst, + "TABLE"), + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableBSecond, + "TABLE"), + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewCThird, + "VIEW"), + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewDFourth, + "VIEW") + ); + } + + public List<List<String>> getExpectedTablesWithoutViews() { + return Arrays.asList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableAFirst, + "TABLE"), + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableBSecond, + "TABLE")); + } + + public List<List<String>> getTableAOnly() { + return Collections.singletonList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableAFirst, + "TABLE")); + } + + public List<List<String>> getTableBOnly() { + return Collections.singletonList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + tableBSecond, + "TABLE")); + } + + public List<List<String>> getExpectedViews() { + // Note: this won't work correctly when we add special characters + return Arrays.asList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewCThird, + String.format("SELECT * FROM %s.%s", DOT_JOINER.join(getFullPath()), tableAFirst)), + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewDFourth, + String.format("SELECT * FROM %s.%s", DOT_JOINER.join(getFullPath()), tableBSecond)) + ); + } + + public List<List<String>> getViewCOnly() { + // Note: this won't work correctly when we add special characters + return Collections.singletonList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewCThird, + String.format("SELECT * FROM %s.%s", DOT_JOINER.join(getFullPath()), tableAFirst))); + } + + public List<List<String>> getViewDOnly() { + // Note: this won't work correctly when we add special characters + return Collections.singletonList( + Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + viewDFourth, + String.format("SELECT * FROM %s.%s", DOT_JOINER.join(getFullPath()), tableBSecond))); + } + + public List<String> getExpectedSchemata() { + return Arrays.asList( + "DREMIO", + DOT_JOINER.join(getFullPath()), + "", + "SIMPLE", + "NO"); + } + + /** + * Overriding toString allows us to name parameterized tests more clearly. + */ + @Override + public String toString() { + return getName(); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CopyIntoTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CopyIntoTestCases.java new file mode 100644 index 0000000000..e315e1d81a --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CopyIntoTestCases.java @@ -0,0 +1,403 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.copyIntoTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableWithColDefsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateFolderPath; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateSourceFiles; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueFolderName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.mergeBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static 
com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergFilesExistAtSubPath; + +import java.io.File; +import java.util.Arrays; +import java.util.List; + +import org.junit.Assert; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class OSSITDataplanePlugin + * To run all tests run {@link com.dremio.exec.catalog.dataplane.ITDataplanePlugin.NestedCopyIntoTests} + * To run single test, see instructions at the top of {@link com.dremio.exec.catalog.dataplane.ITDataplanePlugin} + */ +public class CopyIntoTestCases { + private ITDataplanePluginTestSetup base; + private static File location; + private static String source; + private static String fileNameCsv = "file1.csv"; + private static String fileNameJson = "file1.json"; + private static File newSourceFileCsv; + private static File newSourceFileJson; + private static String storageLocation ; + + CopyIntoTestCases(ITDataplanePluginTestSetup base, File location, String source) { + this.base = base; + this.location = location; + this.source = source; + this.storageLocation = "\'@" + source + "/" + location.getName() + "\'"; + } + + @BeforeEach + public void createSourceFiles() throws Exception{ + newSourceFileCsv = generateSourceFiles(fileNameCsv , location); + newSourceFileJson = generateSourceFiles(fileNameJson , location); + } + + @AfterEach + public void cleanupSourceFiles() { + Assert.assertTrue(newSourceFileCsv.delete()); + Assert.assertTrue(newSourceFileJson.delete()); + } + + @Test + public void copyIntoEmptyTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 3); + + // Act + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameJson)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 6); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void copyIntoEmptyTableWithNoMatchingRows() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, Arrays.asList("c1 int", "c2 int", "c3 int"))); + + // Act and assert error is thrown + base.assertQueryThrowsExpectedError(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv),String.format("No column name matches target schema")); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + + @Test + public void copyIntoTableWithRows() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + //Insert into table + base.runSQL(insertTableWithValuesQuery(tablePath, Arrays.asList("(4,'str1',34.45)","(5,'str1',34.45)","(6,'str1',34.45)"))); + + //Assert + 
base.assertTableHasExpectedNumRows(tablePath, 3); + + // Act + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 6); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + + /** + * Verify insert creates underlying iceberg files in the right locations + */ + @Test + public void copyIntoAndVerifyFolders() throws Exception { + // Arrange + // Create a hierarchy of 2 folders to form key of TABLE + final List tablePath = Arrays.asList("if1", "if2", generateUniqueTableName()); + + // Create empty + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 0, 1, 1, 0); + + // Do 2 separate Inserts so there are multiple data files. + // Copy 1 + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + base.assertTableHasExpectedNumRows(tablePath, 3); + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 1, 2, 2, 1); + + // Copy 2 + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + // Verify number of rows with select + base.assertTableHasExpectedNumRows(tablePath, 6); + + // Assert + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 2, 3, 3, 2); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void copyIntoInDiffBranchesAndConflicts() throws Exception { + // Arrange + final List tablePath = Arrays.asList("if1", "if2", generateUniqueTableName()); + final String devBranchName = generateUniqueBranchName(); + + // Set context to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // copy into table on main branch + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + base.assertTableHasExpectedNumRows(tablePath, 3); + + // switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + + // copy into table on dev branch so there will be conflicts + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + base.assertTableHasExpectedNumRows(tablePath, 3); + + // Act and Assert + base.assertQueryThrowsExpectedError(mergeBranchQuery(devBranchName, DEFAULT_BRANCH_NAME), + String.format(("VALIDATION ERROR: Merge branch %s into branch %s failed due to commit conflict on source %s"), + devBranchName, DEFAULT_BRANCH_NAME, DATAPLANE_PLUGIN_NAME)); + + // Cleanup + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void copyInDiffBranchesAndMerge() throws Exception { + // Arrange + final List shareFolderPath = generateFolderPath(generateUniqueFolderName()); + final String mainTableName = generateUniqueTableName(); + final String devTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + final List devTablePath = tablePathWithFolders(devTableName); + final String devBranchName = generateUniqueBranchName(); + + // Creating an arbitrary commit to Nessie to make a common ancestor between two branches otherwise + // those are 
un-related branches + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(shareFolderPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Set context to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Copy into table main + base.runSQL(copyIntoTableQuery(mainTablePath, storageLocation, fileNameCsv)); + base.assertTableHasExpectedNumRows(mainTablePath, 3); + + // switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + // Check that table does not exist in Nessie in branch dev (since it was branched off before create table) + base.assertQueryThrowsExpectedError(selectCountQuery(mainTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + mainTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + createFolders(devTablePath, VersionContext.ofBranch(devBranchName)); + base.runSQL(createEmptyTableQuery(devTablePath)); + base.assertTableHasExpectedNumRows(devTablePath, 0); + + // Copy into table dev + base.runSQL(copyIntoTableQuery(devTablePath, storageLocation, fileNameCsv)); + base.assertTableHasExpectedNumRows(devTablePath, 3); + + // switch to branch main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + // Check that dev table cannot be seen in branch main + base.assertQueryThrowsExpectedError(selectCountQuery(devTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + devTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + + // Act + base.runSQL(mergeBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Assert and checking records in both tables + // Table must now be visible in main. + base.assertTableHasExpectedNumRows(devTablePath, 3); + base.assertTableHasExpectedNumRows(mainTablePath, 3); + + // Cleanup + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + base.runSQL(dropTableQuery(devTablePath)); + } + + /** + * Create in main branch + * Insert in dev branch + * Compare row counts in each branch + * Merge branch to main branch and compare row count again + */ + @Test + public void copyIntoAndCreateInDifferentBranches() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + final List tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Verify with select + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + + // Switch to dev + base.runSQL(useBranchQuery(devBranch)); + + // Insert rows using copy into + base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv)); + + // Verify number of rows. 
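+ // (Each COPY INTO of the sample CSV is assumed to add 3 rows; the fixture file
+ // presumably mirrors DEFAULT_VALUES_CLAUSE, which also inserts 3 records.)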
+ base.assertTableHasExpectedNumRows(tablePath, 3);
+
+ // Switch back to main
+ base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+ // Verify number of rows
+ base.assertTableHasExpectedNumRows(tablePath, 0);
+
+ // Act
+ // Merge dev to main
+ base.runSQL(mergeBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+
+ // Assert
+ base.assertTableHasExpectedNumRows(tablePath, 3);
+
+ // Cleanup
+ base.runSQL(dropTableQuery(tablePath));
+ }
+
+ /**
+ * The COPY INTO operations should write data files relative to the table base location, regardless of the source configuration.
+ * Create a table, insert some records using copy into
+ * Create a different source with a dummy bucket path as root location
+ * Make further inserts; the operation should succeed
+ * Verify the records
+ */
+ @Test
+ public void copyIntoAgnosticOfSourceBucket() throws Exception {
+ // Arrange
+ final String tableName = generateUniqueTableName();
+ final List<String> tablePath = tablePathWithFolders(tableName);
+ createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+ base.runSQL(createEmptyTableQuery(tablePath));
+
+ // Act
+ base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv));
+ base.runWithAlternateSourcePath(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv));
+
+ // Assert rows from both copy into commands
+ base.assertTableHasExpectedNumRows(tablePath, 6);
+ base.assertAllFilesAreInBaseBucket(tablePath);
+
+ // cleanup
+ base.runSQL(dropTableQuery(tablePath));
+ }
+
+ @Test
+ public void copyInDifferentTablesWithSameName() throws Exception {
+ // Arrange
+ final List<String> shareFolderPath = generateFolderPath(generateUniqueFolderName());
+ final String tableName = generateUniqueTableName();
+ final String devBranch = generateUniqueBranchName();
+ final List<String> tablePath = tablePathWithFolders(tableName);
+
+ // Creating an arbitrary commit to Nessie to make a common ancestor between two branches otherwise
+ // those are un-related branches
+ base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+ createFolders(shareFolderPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+
+ base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+
+ // Create table with this name in the main branch, insert records using copy into
+ createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+ base.runSQL(createEmptyTableQuery(tablePath));
+ base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv));
+
+ // Create table with this name in the dev branch, different source path, insert records using copy into
+ base.runSQL(useBranchQuery(devBranch));
+ createFolders(tablePath, VersionContext.ofBranch(devBranch));
+ base.runWithAlternateSourcePath(createEmptyTableQuery(tablePath));
+ base.runSQL(copyIntoTableQuery(tablePath, storageLocation, fileNameCsv));
+
+ // Act and Assert: verify the paths are correct in each branch
+ base.assertAllFilesInAlternativeBucket(tablePath); // dev branch
+ base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+ base.assertAllFilesAreInBaseBucket(tablePath);
+
+ // cleanup
+ base.runSQL(useBranchQuery(devBranch));
+ base.runSQL(dropTableQuery(tablePath));
+ base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+ base.runSQL(dropTableQuery(tablePath));
+ base.runSQL(dropBranchQuery(devBranch));
+ }
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CreateTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CreateTestCases.java
new file mode 100644
index 0000000000..8a88fd81fe
--- /dev/null
+++ 
b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CreateTestCases.java @@ -0,0 +1,378 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COLUMN_DEFINITION; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.convertFolderNameToList; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createFolderAtQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createFolderQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateFolderPath; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateNestedFolderPath; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueFolderName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.mergeBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.sqlFolderPathToNamespaceKey; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergTableExistsAtSubPath; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertLastCommitMadeBySpecifiedAuthor; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieDoesNotHaveTable; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasCommitForTable; +import static 
com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasNamespace; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasTable; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.projectnessie.model.Operation; + +import com.dremio.BaseTestQuery; +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedCreateTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class CreateTestCases { + private ITDataplanePluginTestSetup base; + + CreateTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + + @Test + public void checkCreateSourceWithWrongUrl() throws Exception { + // Arrange + Act + Assert + + assertThatThrownBy(() -> base.setUpDataplanePluginWithWrongUrl()) + .isInstanceOf(UserException.class) + .hasMessageContaining("must be a valid http or https address"); + } + + @Test + public void createEmptyTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // TODO Check for correct message + + // Assert + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + } + + @Test + public void createEmptyTableTwice() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + + // Act and Assert + base.assertQueryThrowsExpectedError( + createEmptyTableQuery(tablePath), + "A table with the given name already exists"); + } + + @Test + public void useNonExistentBranch() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String invalidBranch = "xyz"; + + base.runSQL(createBranchAtBranchQuery(invalidBranch, DEFAULT_BRANCH_NAME)); + base.runSQL(dropBranchQuery(invalidBranch)); + + // Act and Assert + base.assertQueryThrowsExpectedError(useBranchQuery(invalidBranch), + String.format("%s not found", invalidBranch)); + } + + @Test + public void createTableInNonBranchVersionContext() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + + // Act and Assert + base.assertQueryThrowsExpectedError(createEmptyTableQuery(tablePath), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. 
%s is not a branch.", + tag)); + } + + @Test + public void createInDiffBranchesAndMerge() throws Exception { + // Arrange + final List shareFolderPath = generateFolderPath(generateUniqueFolderName()); + final String mainTableName = generateUniqueTableName(); + final String devTableName = generateUniqueTableName(); + final List mainTablePath = tablePathWithFolders(mainTableName); + final List devTablePath = tablePathWithFolders(devTableName); + final String devBranchName = generateUniqueBranchName(); + + // Creating an arbitrary commit to Nessie to make a common ancestor between two branches otherwise + // those are un-related branches + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(shareFolderPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Set context to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + // Check that table does not exist in Nessie in branch dev (since it was branched off before create table) + base.assertQueryThrowsExpectedError(selectCountQuery(mainTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + mainTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + createFolders(devTablePath, VersionContext.ofBranch(devBranchName)); + base.runSQL(createEmptyTableQuery(devTablePath)); + base.assertTableHasExpectedNumRows(devTablePath, 0); + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + // Check that dev table cannot be seen in branch main + base.assertQueryThrowsExpectedError(selectCountQuery(devTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + devTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + + // Act + base.runSQL(mergeBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Assert + // Table must now be visible in main. 
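+ // (A successful row count doubles as a visibility check here: before the merge,
+ // selecting from this table on the other branch failed with "Object ... not found".)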
+ base.assertTableHasExpectedNumRows(devTablePath, 0); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + base.runSQL(dropTableQuery(devTablePath)); + } + + @Test + public void createAfterDrop() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + + base.runSQL(dropTableQuery(tablePath)); + assertNessieDoesNotHaveTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Assert + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void createEmptyTableInvalidPluginName() { + // Arrange + final String invalidDataplanePlugin = "invalid_plugin"; + final String tableName = generateUniqueTableName(); + final String createInvTableDirQuery = String.format( + "CREATE TABLE %s.%s %s", + invalidDataplanePlugin, + tableName, + DEFAULT_COLUMN_DEFINITION); + + // Act and Assert + base.assertQueryThrowsExpectedError(createInvTableDirQuery, + String.format("Invalid path. Given path, [%s.%s] is not valid", invalidDataplanePlugin, tableName)); + } + + @Test + public void checkTableVisibilityInDerivedBranch() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + final String devBranch = generateUniqueBranchName(); + + // Act + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + + // Assert + // Table must be visible in dev + base.runSQL(useBranchQuery(devBranch)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Cleanup + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void checkTableVisibilityInParentBranch() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String devBranch = generateUniqueBranchName(); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(devBranch)); + createFolders(tablePath, VersionContext.ofBranch(devBranch)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Act and Assert + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + // Check that table created in dev branch cannot be seen in branch main + base.assertQueryThrowsExpectedError(selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s'", + 
tablePath.get(0),
+ DATAPLANE_PLUGIN_NAME);
+ }
+
+ @Test
+ public void createFolder() throws Exception {
+ // Arrange
+ final String folderName = generateUniqueFolderName();
+ final List<String> sqlFolderPath = generateFolderPath(folderName);
+ final List<String> namespaceKey = sqlFolderPathToNamespaceKey(sqlFolderPath);
+
+ // Act
+ base.runSQL(createFolderQuery(sqlFolderPath));
+
+ // Assert
+ assertLastCommitMadeBySpecifiedAuthor(DEFAULT_BRANCH_NAME, base);
+ assertNessieHasNamespace(namespaceKey, DEFAULT_BRANCH_NAME, base);
+ }
+
+ @Test
+ public void createNestedFolder() throws Exception {
+ // Arrange
+ final String folderName = generateUniqueFolderName();
+ final List<String> sqlFolderPath = generateFolderPath(folderName);
+ base.runSQL(createFolderQuery(sqlFolderPath));
+
+ final String folderName2 = generateUniqueFolderName();
+ final List<String> sqlFolderPath2 = generateNestedFolderPath(folderName, folderName2);
+ final List<String> namespaceKey = sqlFolderPathToNamespaceKey(sqlFolderPath2);
+ base.runSQL(createFolderQuery(sqlFolderPath2));
+
+ // Assert
+ assertLastCommitMadeBySpecifiedAuthor(DEFAULT_BRANCH_NAME, base);
+ assertNessieHasNamespace(namespaceKey, DEFAULT_BRANCH_NAME, base);
+ }
+
+ @Test
+ public void createFolderWithSingleElementWithContext() throws Exception {
+ BaseTestQuery.test(String.format("USE %s", DATAPLANE_PLUGIN_NAME));
+ // Arrange
+ final String folderName = generateUniqueFolderName();
+ final List<String> sqlFolderPath = convertFolderNameToList(folderName);
+ // Since sqlFolderPath only has the name of the folder, its namespaceKey should be DATAPLANE_PLUGIN_NAME.folderName
+ final List<String> namespaceKey = sqlFolderPathToNamespaceKey(generateFolderPath(folderName));
+
+ // Act
+ base.runSQL(createFolderQuery(sqlFolderPath));
+
+ // Assert
+ assertLastCommitMadeBySpecifiedAuthor(DEFAULT_BRANCH_NAME, base);
+ assertNessieHasNamespace(namespaceKey, DEFAULT_BRANCH_NAME, base);
+ }
+
+ @Test
+ public void createFolderUsingAt() throws Exception {
+ // Arrange
+ final String folderName = generateUniqueFolderName();
+ final List<String> sqlFolderPath = generateFolderPath(folderName);
+ final List<String> namespaceKey = sqlFolderPathToNamespaceKey(sqlFolderPath);
+
+ // Act
+ base.runSQL(createFolderAtQuery(sqlFolderPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)));
+
+ // Assert
+ assertLastCommitMadeBySpecifiedAuthor(DEFAULT_BRANCH_NAME, base);
+ assertNessieHasNamespace(namespaceKey, DEFAULT_BRANCH_NAME, base);
+ }
+
+ @Test
+ public void createWithImplicitFolders() throws Exception {
+ // Arrange
+ final String folderName = generateUniqueFolderName();
+ final String folderName2 = generateUniqueFolderName();
+ final List<String> sqlFolderPath2 = generateNestedFolderPath(folderName, folderName2);
+ // Assert
+ base.assertQueryThrowsExpectedError(createFolderQuery(sqlFolderPath2),
+ String.format("VALIDATION ERROR: Namespace '%s' must exist.",
+ folderName));
+ }
+
+ @Test
+ public void createTableWithImplicitFolders() throws Exception {
+ final String tableName = generateUniqueTableName();
+ final List<String> tablePath = tablePathWithFolders(tableName);
+
+ base.assertQueryThrowsExpectedError(createEmptyTableQuery(tablePath),
+ String.format("VALIDATION ERROR: Namespace '%s' must exist.",
+ String.join(".", tablePath.subList(0, tablePath.size()-1))));
+ }
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CtasTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CtasTestCases.java
new file mode 100644
index 0000000000..574c52f34b
--- /dev/null
+++ 
b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/CtasTestCases.java @@ -0,0 +1,138 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergFilesExistAtSubPath; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergTableExistsAtSubPath; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasCommitForTable; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasTable; + +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.projectnessie.model.Operation; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedCtasTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class CtasTestCases { + private ITDataplanePluginTestSetup base; + + CtasTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + @Test + public void ctas() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + + // Assert + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + // Verify with select also + base.assertTableHasExpectedNumRows(tablePath, 5); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void ctasWithTagSet() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + 
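+ // Folders are created through the test helper against an explicit branch context,
+ // so this setup step presumably succeeds even though the session is pinned to a
+ // tag; only the CTAS below is expected to fail.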
createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Act and Assert + base.assertQueryThrowsExpectedError(createTableAsQuery(tablePath, 5), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } + + + // Verify ctas creates underlying iceberg files in the right locations + @Test + void ctasTestVerifyFolders() throws Exception { + // Arrange + // Create a hierarchy of 2 folders to form key for TABLE1 + final String table1 = generateUniqueTableName(); + final List table1Path = Arrays.asList("f1", "f2", table1); + + // Create a hierarchy of 3 folders to form key for TABLE2 + final String table2 = generateUniqueTableName(); + final List table2Path = Arrays.asList("f1", "f2", "f3", table2); + + // Act 1 + createFolders(table1Path, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(table1Path, 10)); + base.assertTableHasExpectedNumRows(table1Path, 10); + + // Assert1 + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(table1Path, 1, 1, 1, 1); + + // Act2 + createFolders(table2Path, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(table2Path, 10)); + base.assertTableHasExpectedNumRows(table2Path, 10); + + // Assert2 + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(table2Path, 1, 1, 1, 1); + // Verify that table1's files are isolated from creation of table2 + assertIcebergFilesExistAtSubPath(table1Path, 1, 1, 1, 1); + + // Cleanup + base.runSQL(dropTableQuery(table1Path)); + base.runSQL(dropTableQuery(table2Path)); + } + + @Test + public void ctasWithImplicitFolders() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Act + Assert + base.assertQueryThrowsExpectedError(createTableAsQuery(tablePath, 5), + String.format("VALIDATION ERROR: Namespace '%s' must exist.", + String.join(".", tablePath.subList(0, tablePath.size()-1)))); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestDefines.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestDefines.java new file mode 100644 index 0000000000..6e66c9b4a7 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestDefines.java @@ -0,0 +1,969 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.dremio.exec.catalog.dataplane;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.ThreadLocalRandom;
+
+import com.dremio.common.util.FileUtils;
+import com.dremio.exec.catalog.VersionContext;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.io.Files;
+
+/**
+ * All the constant declarations for OSS Dataplane Integration Tests
+ */
+public final class DataplaneTestDefines {
+
+ private DataplaneTestDefines() {
+ }
+ // Constants
+ public static final String S3_PREFIX = "s3://";
+ public static final String BUCKET_NAME = "test.dataplane.bucket";
+ public static final String ALTERNATIVE_BUCKET_NAME = "test.alternative.bucket";
+ public static final String DATAPLANE_PLUGIN_NAME = "dataPlane_Test";
+ public static final String DATAPLANE_PLUGIN_NAME_FOR_REFLECTION_TEST = "dataPlane_Test2";
+ public static final String METADATA_FOLDER = "metadata";
+ public static final String DEFAULT_BRANCH_NAME = "main";
+ private static final String DEFAULT_TABLE_NAME_PREFIX = "table";
+ private static final String DEFAULT_VIEW_NAME_PREFIX = "view";
+ private static final String DEFAULT_FOLDER_NAME_PREFIX = "folder";
+ private static final String DEFAULT_BRANCH_NAME_PREFIX = "branch";
+ private static final String DEFAULT_TAG_NAME_PREFIX = "tag";
+ private static final String DEFAULT_RAW_REF_NAME_PREFIX = "rawref";
+
+ public static final String NO_ANCESTOR =
+ "2e1cfa82b035c26cbbbdae632cea070514eb8b773f616aaeaf668e2f0be8f10d";
+
+
+ // Query components
+ public static final String DEFAULT_COLUMN_DEFINITION = "(id int, name varchar, distance Decimal(38, 3))";
+ public static final String DEFAULT_VALUES_CLAUSE =
+ " values (1, 'first row', 1000), (2,'second row', 2000), (3, 'third row', 3000)";
+ public static final String DEFAULT_COUNT_COLUMN = "C";
+ public static final String USER_NAME = "anonymous";
+ public static final String DEFAULT_RECORD_DELIMITER = "(RECORD_DELIMITER '\n')";
+ public static final String folderA = "folderA";
+ public static final String folderB = "folderB";
+ public static final String tableA = "tableA";
+
+ public enum OptimizeMode {
+ REWRITE_DATA,
+ REWRITE_MANIFESTS,
+ REWRITE_ALL
+ }
+
+ // Randomized name suffixes, to avoid collisions when tests run concurrently
+ static int randomInt() {
+ return ThreadLocalRandom.current().nextInt(1, 100000);
+ }
+
+ public static String generateUniqueTableName() {
+ return DEFAULT_TABLE_NAME_PREFIX + randomInt();
+ }
+
+ public static String generateUniqueViewName() {
+ return DEFAULT_VIEW_NAME_PREFIX + randomInt();
+ }
+
+ public static String generateUniqueFolderName() {
+ return DEFAULT_FOLDER_NAME_PREFIX + randomInt();
+ }
+
+ public static String generateUniqueBranchName() {
+ return DEFAULT_BRANCH_NAME_PREFIX + randomInt();
+ }
+
+ public static String generateUniqueTagName() {
+ return DEFAULT_TAG_NAME_PREFIX + randomInt();
+ }
+
+ public static String generateUniqueRawRefName() {
+ return DEFAULT_RAW_REF_NAME_PREFIX + randomInt();
+ }
+
+ public static List<String> tablePathWithFolders(final String tableName) {
+ Preconditions.checkNotNull(tableName);
+ return Arrays.asList(
+ generateUniqueFolderName(),
+ generateUniqueFolderName(),
+ tableName);
+ }
+
+ public static List<String> tablePathWithSource(
+ final String sourceName, final List<String> tablePathList) {
+ Preconditions.checkNotNull(tablePathList);
+ Preconditions.checkArgument(!tablePathList.isEmpty());
+
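+ // Builds [sourceName, tablePath...] below via an anonymous ArrayList subclass with an
+ // instance initializer (the "double-brace" idiom).
+ 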
return new ArrayList() { + { + add(sourceName); + addAll(tablePathList); + } + }; + } + + public static List generateSchemaPath() { + return Arrays.asList( + DATAPLANE_PLUGIN_NAME, + generateUniqueFolderName(), + generateUniqueFolderName()); + + } + + public static List generateFolderPath(final String folderName) { + Preconditions.checkNotNull(folderName); + return Arrays.asList( + DATAPLANE_PLUGIN_NAME, + folderName); + } + + public static List generateNestedFolderPath(final String parentFolderName, final String folderName) { + Preconditions.checkNotNull(folderName); + return Arrays.asList( + DATAPLANE_PLUGIN_NAME, + parentFolderName, + folderName); + } + + public static List convertFolderNameToList(final String folderName) { + Preconditions.checkNotNull(folderName); + return Arrays.asList( + folderName); + } + + public static List sqlFolderPathToNamespaceKey(final List sqlFolderPath) { + Preconditions.checkNotNull(sqlFolderPath); + return sqlFolderPath.subList(1, sqlFolderPath.size()); + } + + public static String joinedTableKey(final List tablePathList) { + return String.join(".", tablePathList); + } + + public static String fullyQualifiedTableName(String pluginName, List tablePath) { + return String.format("%s.%s", pluginName, joinedTableKey(tablePath));} + + // Query generators + public static String createEmptyTableQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format( + "CREATE TABLE %s.%s %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + DEFAULT_COLUMN_DEFINITION); + } + + public static String createViewQuery(final List viewPath, final List tablePath) { + Preconditions.checkNotNull(viewPath); + return String.format( + "CREATE VIEW %s.%s AS SELECT * FROM %s.%s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(viewPath), + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath) + ); + } + + public static String createFolderQuery(final List sqlFolderPath) { + Preconditions.checkNotNull(sqlFolderPath); + return String.format( + "CREATE FOLDER %s", + joinedTableKey(sqlFolderPath)); + } + + public static String createFolderAtQuery(final List folderPath, VersionContext versionContext) { + Preconditions.checkNotNull(folderPath); + return String.format( + "CREATE FOLDER %s AT %s %s", + joinedTableKey(folderPath), + versionContext.getType().name(), + versionContext.getValue()); + } + + public static String createFolderAtQueryWithIfNotExists(final List folderPath, VersionContext versionContext) { + Preconditions.checkNotNull(folderPath); + return String.format( + "CREATE FOLDER IF NOT EXISTS %s AT %s %s", + joinedTableKey(folderPath), + versionContext.getType().name(), + versionContext.getValue()); + } + + public static String createReplaceViewQuery(final List viewPath, final List tablePath) { + Preconditions.checkNotNull(viewPath); + return String.format( + "CREATE OR REPLACE VIEW %s.%s AS SELECT * FROM %s.%s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(viewPath), + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath) + ); + } + + public static String dropViewQuery(final List viewPath) { + Preconditions.checkNotNull(viewPath); + return String.format( + "DROP VIEW %s.%s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(viewPath) + ); + } + + public static String createViewSelectQuery(final List viewPath, final String sql) { + Preconditions.checkNotNull(viewPath); + return String.format( + "CREATE VIEW %s.%s AS %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(viewPath), + sql + ); + } + + public static String updateViewSelectQuery(final List viewPath, final String 
sql) {
+ Preconditions.checkNotNull(viewPath);
+ return String.format(
+ "CREATE OR REPLACE VIEW %s.%s AS %s",
+ DATAPLANE_PLUGIN_NAME,
+ joinedTableKey(viewPath),
+ sql
+ );
+ }
+
+ public static String createViewQueryWithEmptySql(final List<String> viewPath, final List<String> tablePath) {
+ Preconditions.checkNotNull(viewPath);
+ return String.format(
+ "CREATE VIEW %s.%s AS",
+ DATAPLANE_PLUGIN_NAME,
+ joinedTableKey(viewPath)
+ );
+ }
+
+ public static String createViewQueryWithIncompleteSql(final List<String> viewPath, final List<String> tablePath) {
+ Preconditions.checkNotNull(viewPath);
+ return String.format(
+ "CREATE VIEW %s.%s AS SELECT * FROM",
+ DATAPLANE_PLUGIN_NAME,
+ joinedTableKey(viewPath)
+ );
+ }
+
+ public static String alterViewPropertyQuery(final List<String> viewPath, final String attribute, final String value) {
+ Preconditions.checkNotNull(viewPath);
+ return String.format(
+ "ALTER VIEW %s.%s SET %s=%s",
+ DATAPLANE_PLUGIN_NAME,
+ joinedTableKey(viewPath),
+ attribute,
+ value
+ );
+ }
+
+ /**
+ * @param colDefs
+ * Example format "c1 int", "c2 int", "c3 varchar"
+ */
+ public static String createTableWithColDefsQuery(final List<String> tablePath, List<String> colDefs) {
+ Preconditions.checkNotNull(tablePath);
+ String columnDefsString = "(" + String.join(",", colDefs) + ")";
+ return String.format(
+ "CREATE TABLE %s.%s %s",
+ DATAPLANE_PLUGIN_NAME,
+ joinedTableKey(tablePath),
+ columnDefsString);
+ }
+
+ /**
+ * @param colDefs
+ * Example format "c1 int", "c2 int", "c3 varchar"
+ */
+ public static String alterTableAddColumnsQuery(final List<String> tablePath, List<String> colDefs) {
+ Preconditions.checkNotNull(tablePath);
+ String columnDefsString = "(" + String.join(",", colDefs) + ")";
+ return String.format("ALTER TABLE %s.%s add columns %s",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".", tablePath),
+ columnDefsString);
+ }
+
+ public static String alterTableDropColumnQuery(final List<String> tablePath, List<String> dropCols) {
+ Preconditions.checkNotNull(tablePath);
+ String dropColumnString = String.join(",", dropCols);
+ return String.format("ALTER TABLE %s.%s drop column %s",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".",tablePath),
+ dropColumnString);
+ }
+
+ public static String alterTableChangeColumnQuery(final List<String> tablePath, List<String> changeColumnList) {
+ Preconditions.checkNotNull(tablePath);
+ String changeColumns = String.join(",",changeColumnList);
+ return String.format("ALTER TABLE %s.%s change column %s",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".",tablePath),
+ changeColumns);
+ }
+
+ public static String alterTableAddPrimaryKeyQuery(final List<String> tablePath, List<String> primaryKey) {
+ Preconditions.checkNotNull(tablePath);
+ String primaryKeyStr = String.join(",",primaryKey);
+ return String.format("ALTER TABLE %s.%s ADD PRIMARY KEY (%s)",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".",tablePath),
+ primaryKeyStr);
+ }
+
+ public static String alterTableDropPrimaryKeyQuery(final List<String> tablePath) {
+ Preconditions.checkNotNull(tablePath);
+ return String.format("ALTER TABLE %s.%s DROP PRIMARY KEY",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".",tablePath));
+ }
+
+ public static String alterTableModifyColumnQuery(final List<String> tablePath, final String columnName,
+ final List<String> newColDef) {
+ Preconditions.checkNotNull(tablePath);
+ return String.format("ALTER TABLE %s.%s MODIFY COLUMN %s %s",
+ DATAPLANE_PLUGIN_NAME,
+ String.join(".",tablePath),
+ columnName,
+ String.join(" ", newColDef));
+ }
+
+ public static String alterBranchAssignBranchQuery(final String branchName, final String sourceBranchName) {
+ Preconditions.checkNotNull(branchName);
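+ // Generated SQL has the shape (names hypothetical):
+ //   ALTER BRANCH branch12345 ASSIGN BRANCH main in dataPlane_Test
+ 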
Preconditions.checkNotNull(sourceBranchName); + return String.format("ALTER BRANCH %s ASSIGN BRANCH %s in %s", + branchName, + sourceBranchName, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterBranchAssignTagQuery(final String branchName, final String tagName) { + Preconditions.checkNotNull(branchName); + Preconditions.checkNotNull(tagName); + return String.format("ALTER BRANCH %s ASSIGN TAG %s in %s", + branchName, + tagName, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterBranchAssignCommitQuery(final String branchName, final String commitHash) { + Preconditions.checkNotNull(branchName); + Preconditions.checkNotNull(commitHash); + return String.format("ALTER BRANCH %s ASSIGN COMMIT %s in %s", + branchName, + commitHash, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterTagAssignTagQuery(final String tagName, final String sourceTagName) { + Preconditions.checkNotNull(tagName); + Preconditions.checkNotNull(sourceTagName); + return String.format("ALTER TAG %s ASSIGN TAG %s in %s", + tagName, + sourceTagName, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterTagAssignBranchQuery(final String tagName, final String branchName) { + Preconditions.checkNotNull(tagName); + Preconditions.checkNotNull(branchName); + return String.format("ALTER TAG %s ASSIGN BRANCH %s in %s", + tagName, + branchName, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterTagAssignCommitQuery(final String tagName, final String commitHash) { + Preconditions.checkNotNull(tagName); + Preconditions.checkNotNull(commitHash); + return String.format("ALTER TAG %s ASSIGN COMMIT %s in %s", + tagName, + commitHash, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String alterViewPropertyQuery( + final String viewName, final String propertyName, final String propertyValue) { + Preconditions.checkNotNull(viewName); + Preconditions.checkNotNull(propertyName); + Preconditions.checkNotNull(propertyValue); + + return String.format("ALTER VIEW %s SET %s = %s", viewName, propertyName, propertyValue); + } + + public static String selectCountQuery(final List tablePath, String countColumn) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT count(*) %s from %s.%s", + countColumn, + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + } + + public static String selectCountSnapshotQuery(final List tablePath, String countColumn) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT count(*) as %s FROM table(table_snapshot('%s.%s'))", + countColumn, + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + } + + public static String selectCountDataFilesQuery(final List tablePath, String countColumn) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT count(*) as %s FROM table(table_files('%s.%s'))", + countColumn, + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + } + + public static String selectCountQueryWithSpecifier(List tablePath, String countColumn, String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT count(*) %s from %s.%s AT %s", + countColumn, + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath), + specifier); + } + + public static String selectStarQueryWithSpecifier(List tablePath, String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT * from %s.%s AT %s", + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath), + specifier); + } + + public static String truncateTableQuery(final List tablePath) { + 
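+ // Produces, e.g. (generated names hypothetical):
+ //   TRUNCATE TABLE dataPlane_Test.folder123.folder456.table789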
Preconditions.checkNotNull(tablePath); + return String.format("TRUNCATE TABLE %s.%s ", + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + } + + public static String selectStarQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT * from %s.%s", + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + } + + public static String dropTableQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format("DROP TABLE %s.%s ", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath)); + } + + public static String dropTableIfExistsQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format("DROP TABLE IF EXISTS %s.%s ", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath)); + } + + public static String insertTableQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format("INSERT INTO %s.%s %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + DEFAULT_VALUES_CLAUSE); + } + + public static String copyIntoTableQuery(final List tablePath, String filePath, String fileName) { + Preconditions.checkNotNull(tablePath); + Preconditions.checkNotNull(filePath); + return String.format("COPY INTO %s.%s FROM %s FILES(\'%s\') %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + filePath, + fileName, + DEFAULT_RECORD_DELIMITER); + } + + public static String optimizeTableQuery(final List tablePath, OptimizeMode mode) { + String modeContext = ""; + switch (mode) { + case REWRITE_DATA: + modeContext = "REWRITE DATA (MIN_INPUT_FILES=2)"; + break; + case REWRITE_MANIFESTS: + modeContext = "REWRITE MANIFESTS"; + break; + case REWRITE_ALL: + default: + modeContext = "(MIN_INPUT_FILES=2)"; // default mode + break; + } + Preconditions.checkNotNull(tablePath); + return String.format("OPTIMIZE TABLE %s.%s %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + modeContext); + } + + public static String rollbackTableQuery(final List tablePath, long timestampInMillis) { + Preconditions.checkNotNull(tablePath); + String timestamp = getTimestampFromMillis(timestampInMillis); + return String.format("ROLLBACK TABLE %s.%s TO TIMESTAMP '%s'", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + timestamp); + } + + public static String vacuumTableQuery(final List tablePath, long timestampInMillis) { + Preconditions.checkNotNull(tablePath); + String timestamp = getTimestampFromMillis(timestampInMillis); + return String.format("VACUUM TABLE %s.%s EXPIRE SNAPSHOTS OLDER_THAN = '%s'", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + timestamp); + } + + public static String selectFileLocationsQuery(final List tablePath) { + Preconditions.checkNotNull(tablePath); + return String.format("SELECT file_path FROM TABLE(TABLE_FILES('%s.%s'))", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath)); + } + + public static String joinTpcdsTablesQuery() { + return String.format("SELECT * FROM cp.tpch.\"customer.parquet\"" + + "JOIN cp.tpch.\"orders.parquet\" ON TRUE "); + } + + public static String joinTablesQuery(String table1, String table2, String condition) { + return String.format("Select * from %s.%s JOIN %s.%s ON %s" , + DATAPLANE_PLUGIN_NAME, table1, + DATAPLANE_PLUGIN_NAME, table2, + condition); + } + + /** + * @param valuesList + * Example format : "(1,1)", "(2,2)", "(3,3)" + */ + public static String insertTableWithValuesQuery(final List tablePath, List valuesList) { + Preconditions.checkNotNull(tablePath); + String valuesString = "values" + String.join(",", 
valuesList); + return String.format("INSERT INTO %s.%s %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + valuesString); + } + + public static String createTableAsQuery(final List tablePath, final int limit) { + Preconditions.checkNotNull(tablePath); + return String.format( + "CREATE TABLE %s.%s " + + " AS SELECT n_nationkey, n_regionkey from cp.\"tpch/nation.parquet\" limit %d", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + limit); + } + + public static String insertSelectQuery(final List tablePath, final int limit) { + Preconditions.checkNotNull(tablePath); + return String.format( + "INSERT INTO %s.%s " + + "SELECT n_nationkey, n_regionkey from cp.\"tpch/nation.parquet\" limit %d", + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath), limit); + } + + public static String deleteAllQuery(final String source, final List tablePath) { + return String.format("DELETE FROM %s", Strings.isNullOrEmpty(source) + ? joinedTableKey(tablePath) + : String.format("%s.%s", source, joinedTableKey(tablePath))); + } + + public static String deleteAllQuery(final List tablePath) { + return deleteAllQuery(DATAPLANE_PLUGIN_NAME, tablePath); + } + + public static String deleteAllQueryWithoutContext(final List tablePath) { + return deleteAllQuery(null, tablePath); + } + + public static String updateByIdQuery(final List tablePath) { + return String.format( + "UPDATE %s.%s" + + " SET distance = CAST(30000 AS DECIMAL(38,3)) WHERE id = 3", + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath)); + } + + public static String updateByIdFromAnotherBranchQuery(final List tablePath, String selectBranchName) { + return String.format( + "UPDATE %s.%s" + + " SET distance = (SELECT distance FROM %s.%s AT BRANCH %s WHERE id = 4 LIMIT 1) WHERE id = 3", + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath), + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath), selectBranchName); + } + + public static String mergeByIdQuery(final List targetTablePath, final List sourceTablePath) { + String target = String.format("%s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(targetTablePath)); + String source = String.format("%s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(sourceTablePath)); + return String.format( + "MERGE INTO %s USING %s ON (%s.id = %s.id)" + + " WHEN MATCHED THEN UPDATE SET distance = CAST(1 AS DECIMAL(38,3))" + + " WHEN NOT MATCHED THEN INSERT VALUES (4, CAST('fourth row' AS VARCHAR(65536)), CAST(0 AS DECIMAL(38,3)))", + target, source, target, source); + } + + public static String createBranchAtBranchQuery(final String branchName, final String parentBranchName) { + return createBranchAtSpecifierQuery(branchName, "BRANCH " + parentBranchName); + } + + public static String showBranchQuery(final String sourceName) { + return String.format("SHOW BRANCHES IN %s", sourceName); + } + + public static String showBranchQuery() { + return String.format("SHOW BRANCHES "); + } + + public static String createBranchAtSpecifierQuery(final String branchName, final String specifier) { + Preconditions.checkNotNull(branchName); + Preconditions.checkNotNull(specifier); + return String.format("CREATE BRANCH %s AT %s in %s", + branchName, + specifier, + DATAPLANE_PLUGIN_NAME); + } + + public static String mergeBranchQuery(final String branchName, final String targetBranchName) { + Preconditions.checkNotNull(branchName); + Preconditions.checkNotNull(targetBranchName); + + return String.format("MERGE BRANCH %s INTO %s in %s", + branchName, + targetBranchName, + DATAPLANE_PLUGIN_NAME); + } + + public static String createTagQuery(final String 
tagName, final String branchName) { + Preconditions.checkNotNull(tagName); + Preconditions.checkNotNull(branchName); + return String.format("CREATE TAG %s AT BRANCH %s in %s", + tagName, + branchName, + DATAPLANE_PLUGIN_NAME + ); + } + + public static String showTagQuery(final String sourceName) { + return String.format("SHOW TAGS IN %s", sourceName); + } + + public static String showTagQuery() { + return String.format("SHOW TAGS "); + } + + public static String useContextQuery() { + return String.format("USE %s", DATAPLANE_PLUGIN_NAME); + } + + public static String useContextQuery(List workspaceSchema) { + String workspaceSchemaPath = joinedTableKey(workspaceSchema); + return String.format("USE %s", DATAPLANE_PLUGIN_NAME); + } + + public static String useBranchQuery(final String branchName) { + Preconditions.checkNotNull(branchName); + return String.format("USE BRANCH %s IN %s", branchName, DATAPLANE_PLUGIN_NAME); + } + + public static String useTagQuery(final String tagName) { + Preconditions.checkNotNull(tagName); + return String.format("USE TAG %s IN %s", tagName, DATAPLANE_PLUGIN_NAME); + } + + public static String useCommitQuery(final String commitHash) { + Preconditions.checkNotNull(commitHash); + return String.format("USE COMMIT %s IN %s", quoted(commitHash), DATAPLANE_PLUGIN_NAME); + } + + // Nearly identical to useReferenceQuery, but we support both syntaxes + public static String useRefQuery(final String refName) { + Preconditions.checkNotNull(refName); + return String.format("USE REF %s IN %s", quoted(refName), DATAPLANE_PLUGIN_NAME); + } + + // Nearly identical to useRefQuery, but we support both syntaxes + public static String useReferenceQuery(final String referenceName) { + Preconditions.checkNotNull(referenceName); + return String.format("USE REFERENCE %s IN %s", quoted(referenceName), DATAPLANE_PLUGIN_NAME); + } + + public static String useSpecifierQuery(final String specifier) { + Preconditions.checkNotNull(specifier); + return String.format("USE %s IN %s", specifier, DATAPLANE_PLUGIN_NAME); + } + + public static String dropBranchQuery(final String branchName) { + Preconditions.checkNotNull(branchName); + return String.format("DROP BRANCH %s FORCE IN %s", branchName, DATAPLANE_PLUGIN_NAME); + } + + public static String dropTagQuery(final String tagName) { + Preconditions.checkNotNull(tagName); + return String.format("DROP TAG %s FORCE IN %s", tagName, DATAPLANE_PLUGIN_NAME); + } + + public static String showBranchesQuery() { + return String.format("SHOW BRANCHES IN %s", DATAPLANE_PLUGIN_NAME); + } + + public static String quoted(String string) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(string)); + return "\"" + string + "\""; + } + + public static File generateSourceFiles(String fileName, File location) throws Exception { + String relativePath = String.format("/copyinto/%s", fileName); + File newSourceFile = new File(location.toString(), fileName); + File oldSourceFile = FileUtils.getResourceAsFile(relativePath); + Files.copy(oldSourceFile, newSourceFile); + + return newSourceFile; + } + + public static String createRawReflection(final List tablePath, final String rawRefName, List colNames){ + Preconditions.checkNotNull(tablePath); + String commaSeparatedDisplayNames = String.join(",", colNames); + return String.format("ALTER TABLE %s.%s CREATE RAW REFLECTION %s using display (%s)", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + rawRefName, + commaSeparatedDisplayNames + + ); + } + + public static String createRawReflectionAtSpecifierQuery(final List 
tablePath, final String specifier, + final String reflectionName, final List colNames) { + Preconditions.checkNotNull(tablePath); + String commaSeparatedDisplayNames = String.join(",", colNames); + return String.format( + "ALTER DATASET %s.%s AT %s CREATE RAW REFLECTION %s USING DISPLAY (%s)", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier, + reflectionName, + commaSeparatedDisplayNames + ); + } + + public static String createAggReflectionAtSpecifierQuery(final List tablePath, final String specifier, final String reflectionName) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s CREATE AGGREGATE REFLECTION %s USING DIMENSIONS (n_nationkey) MEASURES (n_regionkey (COUNT, MAX, MIN))", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier, + reflectionName + ); + } + + public static String selectCountMinMaxByGroupAtSpecifierQuery(final List SourcePath, final String specifier) { + Preconditions.checkNotNull(SourcePath); + return String.format("SELECT COUNT(n_regionkey), MIN(n_regionkey), MAX(n_regionkey) from %s.%s AT %s GROUP BY n_nationkey", + DATAPLANE_PLUGIN_NAME, + String.join(".", SourcePath), + specifier + ); + } + + public static String selectColsQuery(final List SourcePath, final List colNames) { + Preconditions.checkNotNull(SourcePath); + String commaSeparatedDisplayNames = String.join(",", colNames); + return String.format("SELECT %s from %s.%s", + commaSeparatedDisplayNames, + DATAPLANE_PLUGIN_NAME, + String.join(".", SourcePath) + ); + } + + public static String selectColsAtSpecifierQuery(final List SourcePath, final String specifier, final List colNames) { + Preconditions.checkNotNull(SourcePath); + String commaSeparatedDisplayNames = String.join(",", colNames); + return String.format("SELECT %s from %s.%s AT %s", + commaSeparatedDisplayNames, + DATAPLANE_PLUGIN_NAME, + String.join(".", SourcePath), + specifier + ); + } + + public static String dropReflectionAtSpecifierQuery(final List tablePath, final String specifier, final String reflectionName) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s DROP REFLECTION %s", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier, + reflectionName + ); + } + + public static String disableRawReflectionAtSpecifierQuery(final List tablePath, final String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s DISABLE RAW ACCELERATION", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier + ); + } + + public static String disableAggReflectionAtSpecifierQuery(final List tablePath, final String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s DISABLE AGGREGATE ACCELERATION", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier + ); + } + + public static String enableRawReflectionAtSpecifierQuery(final List tablePath, final String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s ENABLE RAW ACCELERATION", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier + ); + } + + public static String enableAggReflectionAtSpecifierQuery(final List tablePath, final String specifier) { + Preconditions.checkNotNull(tablePath); + return String.format( + "ALTER DATASET %s.%s AT %s ENABLE AGGREGATE ACCELERATION", + DATAPLANE_PLUGIN_NAME, + joinedTableKey(tablePath), + specifier + ); + } + + public static String 
+  public static String createRawReflectionQuery(final List<String> tablePath, final String reflectionName, final List<String> colNames) {
+    Preconditions.checkNotNull(tablePath);
+    String commaSeparatedDisplayNames = String.join(",", colNames);
+    return String.format(
+      "ALTER DATASET %s.%s CREATE RAW REFLECTION %s USING DISPLAY (%s)",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      reflectionName,
+      commaSeparatedDisplayNames
+    );
+  }
+
+  public static String createAggReflectionQuery(final List<String> tablePath, final String reflectionName) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s CREATE AGGREGATE REFLECTION %s USING DIMENSIONS (n_nationkey) MEASURES (n_regionkey (COUNT, MAX, MIN))",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      reflectionName
+    );
+  }
+
+  public static String dropReflectionQuery(final List<String> tablePath, final String reflectionName) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s DROP REFLECTION %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      reflectionName
+    );
+  }
+
+  public static String disableRawReflectionQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s DISABLE RAW ACCELERATION",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)
+    );
+  }
+
+  public static String disableAggReflectionQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s DISABLE AGGREGATE ACCELERATION",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)
+    );
+  }
+
+  public static String enableRawReflectionQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s ENABLE RAW ACCELERATION",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)
+    );
+  }
+
+  public static String enableAggReflectionQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "ALTER DATASET %s.%s ENABLE AGGREGATE ACCELERATION",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)
+    );
+  }
+
+  public static String getSysReflectionsQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format(
+      "SELECT * FROM sys.reflections WHERE dataset_name = '%s.%s'",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)
+    );
+  }
+
+  public static String getSnapshotTableQuery(final List<String> tablePath) {
+    Preconditions.checkNotNull(tablePath);
+    return String.format("SELECT * FROM table(table_snapshot('%s.%s'))",
+      DATAPLANE_PLUGIN_NAME,
+      String.join(".", tablePath));
+  }
+
+  public static String createViewAtSpecifierQuery(final List<String> viewPath, final List<String> tablePath, final String specifier) {
+    Preconditions.checkNotNull(viewPath);
+    return String.format(
+      "CREATE VIEW %s.%s AS SELECT * FROM %s.%s AT %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(viewPath),
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      specifier
+    );
+  }
+
+  private static String getTimestampFromMillis(long timestampInMillis) {
+    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS000000");
+    return simpleDateFormat.format(new Date(timestampInMillis));
+  }
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestHelper.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestHelper.java
new file mode 100644
index 0000000000..c1bb6fbf2f
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DataplaneTestHelper.java
@@ -0,0 +1,276
@@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.VersionContext.NOT_SPECIFIED; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.fullyQualifiedTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountDataFilesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountSnapshotQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.showBranchesQuery; + +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocatorFactory; +import org.apache.arrow.vector.ValueVector; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInfo; + +import com.dremio.BaseTestQuery; +import com.dremio.common.AutoCloseables; +import com.dremio.common.utils.PathUtils; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.exec.record.RecordBatchLoader; +import com.dremio.exec.record.VectorWrapper; +import com.dremio.sabot.rpc.user.QueryDataBatch; +import com.dremio.service.namespace.NamespaceKey; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.base.Preconditions; + +/** + * Dataplane test common helper class + */ +public class DataplaneTestHelper extends BaseTestQuery { + + // Allocators for reading query results, compare to ExecTest (but it's still on JUnit4) + private BufferAllocator rootAllocator; + + /** + * Resets the properties and context set for a test session + */ + @BeforeEach + public void resetTestClient() throws Exception { + updateClient((Properties) null); + } + + @BeforeEach + public void setUpAllocators(TestInfo testInfo) { + // Same as initAllocators() (stuck on Junit4), but needs to get test name from injected TestInfo + rootAllocator = RootAllocatorFactory.newRoot(DEFAULT_SABOT_CONFIG); + allocator = rootAllocator.newChildAllocator(testInfo.getDisplayName(), 0, rootAllocator.getLimit()); + } + + @AfterEach + public void tearDownAllocators() { + AutoCloseables.closeNoChecked(allocator); + AutoCloseables.closeNoChecked(rootAllocator); + } + + public void assertTableHasExpectedNumRows(List tablePath, long expectedNumRows) throws 
Exception {
+    assertSQLReturnsExpectedNumRows(
+      selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      expectedNumRows);
+  }
+
+  public void assertTableHasExpectedNumOfSnapshots(List<String> tablePath, long expectedNumRows) throws Exception {
+    assertSQLReturnsExpectedNumRows(
+      selectCountSnapshotQuery(tablePath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      expectedNumRows);
+  }
+
+  public void assertTableHasExpectedNumOfDataFiles(List<String> tablePath, long expectedNumRows) throws Exception {
+    assertSQLReturnsExpectedNumRows(
+      selectCountDataFilesQuery(tablePath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      expectedNumRows);
+  }
+
+  public void assertViewHasExpectedNumRows(List<String> viewPath, long expectedNumRows) throws Exception {
+    assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      expectedNumRows);
+  }
+
+  public void assertSQLReturnsExpectedNumRows(String sqlQuery, String columnName, long expectedNumRows)
+      throws Exception {
+    testBuilder()
+      .sqlQuery(sqlQuery)
+      .unOrdered()
+      .baselineColumns(columnName)
+      .baselineValues(expectedNumRows)
+      .build()
+      .run();
+  }
+
+  public void assertQueryThrowsExpectedError(String query, String expectedError) {
+    errorMsgTestHelper(query, expectedError);
+  }
+
+  public List<List<String>> runSqlWithResults(String sql) throws Exception {
+    return getResultsFromBatches(testSqlWithResults(sql));
+  }
+
+  private List<List<String>> getResultsFromBatches(List<QueryDataBatch> batches) {
+    List<List<String>> output = new ArrayList<>();
+    RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
+    int last = 0;
+    for (QueryDataBatch batch : batches) {
+      int rows = batch.getHeader().getRowCount();
+      if (batch.getData() != null) {
+        loader.load(batch.getHeader().getDef(), batch.getData());
+        for (int i = 0; i < rows; ++i) {
+          output.add(new ArrayList<>());
+          for (VectorWrapper<?> vw : loader) {
+            ValueVector vv = vw.getValueVector();
+            Object o = vv.getObject(i);
+            output.get(last).add(o == null ? null : o.toString());
+          }
+          ++last;
+        }
+      }
+      loader.clear();
+      batch.release();
+    }
+    return output;
+  }
+
+  String getCommitHashForBranch(String branchName) throws Exception {
+    List<List<String>> rows = runSqlWithResults(showBranchesQuery());
+    for (List<String> row : rows) {
+      if (branchName.equals(row.get(1))) { // Column 1 is branchName
+        return row.get(2); // Column 2 is commitHash
+      }
+    }
+    return null;
+  }
+
+  public String getContentId(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    VersionedDatasetId versionedDatasetId = null;
+    try {
+      versionedDatasetId = VersionedDatasetId.fromString(getVersionedDatatsetId(tableKey, tableVersionContext, base));
+    } catch (JsonProcessingException e) {
+      return null;
+    }
+    return versionedDatasetId == null ? null : versionedDatasetId.getContentId();
+  }
+
+  public String getVersionedDatatsetId(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    Preconditions.checkState(tableVersionContext.asVersionContext() != NOT_SPECIFIED, "tableVersionContext must specify a BRANCH, TAG or COMMIT");
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    try {
+      return contextualizedCatalog
+        .getTableForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))))
+        .getDatasetConfig()
+        .getId().getId();
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  public String getVersionedDatatsetIdForTimeTravel(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    Preconditions.checkState((tableVersionContext.getType() == TableVersionType.TIMESTAMP
+      || tableVersionContext.getType() == TableVersionType.SNAPSHOT_ID), "tableVersionContext needs to be of type TIMESTAMP or SNAPSHOT_ID");
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return contextualizedCatalog
+      .getTableSnapshotForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))), tableVersionContext)
+      .getDatasetConfig()
+      .getId().getId();
+  }
+
+  public DremioTable getTableFromId(String id, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    return contextualizedCatalog.getTable(id);
+  }
+
+  public String getUniqueIdForTable(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return String.valueOf(contextualizedCatalog
+      .getTableForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))))
+      .getDatasetConfig()
+      .getPhysicalDataset()
+      .getIcebergMetadata()
+      .getTableUuid());
+  }
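+
+  // VersionedDatasetId appears to round-trip through JSON (fromString throws
+  // JsonProcessingException), bundling the table key, version context and content id;
+  // the helpers below parse it back to pull out individual fields for assertions.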
+  public String getContentIdForTableAtRef(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    VersionedDatasetId versionedDatasetId = null;
+    try {
+      versionedDatasetId = VersionedDatasetId.fromString(getVersionedDatasetIdForTableAtRef(tableKey, tableVersionContext, base));
+    } catch (JsonProcessingException e) {
+      return null;
+    }
+    return versionedDatasetId == null ? null : versionedDatasetId.getContentId();
+  }
+
+  public String getVersionedDatasetIdForTableAtRef(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return contextualizedCatalog
+      .getTableSnapshotForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))), tableVersionContext)
+      .getDatasetConfig()
+      .getId().getId();
+  }
+
+  public String getUniqueIdForView(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return String.valueOf(contextualizedCatalog
+      .getTableForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))))
+      .getDatasetConfig()
+      .getTag());
+  }
+
+  public String getUniqueIdForTableAtRef(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return String.valueOf(contextualizedCatalog
+      .getTableSnapshotForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))), tableVersionContext)
+      .getDatasetConfig()
+      .getPhysicalDataset()
+      .getIcebergMetadata()
+      .getTableUuid());
+  }
+
+  public long getMtimeForTable(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return contextualizedCatalog
+      .getTableForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))))
+      .getDatasetConfig()
+      .getLastModified();
+  }
+
+  public long getMtimeForTableAtRef(List<String> tableKey, TableVersionContext tableVersionContext, ITDataplanePluginTestSetup base) {
+    // Get a fresh instance of the Catalog and clear its cache (i.e. the source/version mapping)
+    Catalog contextualizedCatalog = base.getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext());
+    return contextualizedCatalog
+      .getTableSnapshotForQuery(new NamespaceKey(PathUtils.parseFullPath(fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey))), tableVersionContext)
+      .getDatasetConfig()
+      .getLastModified();
+  }
+
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteFolderTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteFolderTestCases.java
new file mode 100644
index 0000000000..6176d737e1
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteFolderTestCases.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueFolderName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieDoesNotHaveTable; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.projectnessie.model.ContentKey; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.service.namespace.NamespaceKey; +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedDeleteFolderTestCases} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class DeleteFolderTestCases { + private ITDataplanePluginTestSetup base; + + DeleteFolderTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void deleteEmptyFolder() throws Exception { + final String rootFolder = generateUniqueFolderName(); + final List folderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder); + final NamespaceKey namespaceKey = new NamespaceKey(folderPath); + final VersionContext version = VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + + base.getDataplanePlugin().createNamespace(namespaceKey, version); + base.getDataplanePlugin().deleteFolder(namespaceKey, version); + + // Assert + assertNessieDoesNotHaveTable(Arrays.asList(rootFolder), DEFAULT_BRANCH_NAME, base); + } + + @Test + public void deleteNonEmptyFolderWithTableThenThrowError() throws Exception { + final String rootFolder = generateUniqueFolderName(); + final String tableName = generateUniqueTableName(); + final List tablePath = Arrays.asList(rootFolder, tableName); + final List folderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder); + final NamespaceKey namespaceKey = new NamespaceKey(folderPath); + ContentKey contentKey = ContentKey.of(rootFolder); + String folderName = contentKey.toPathString(); + final VersionContext version = VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + + base.getDataplanePlugin().createNamespace(namespaceKey, version); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Assert + assertThatThrownBy( + () -> base.getDataplanePlugin().deleteFolder(namespaceKey, version)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Folder '%s' is not empty", folderName); + } + + @Test + public void 
deleteNonEmptyFolderWithViewThenThrowError() throws Exception { + final String rootFolder = generateUniqueFolderName(); + final String tableName = generateUniqueTableName(); + final List tablePath = Arrays.asList(rootFolder, tableName); + final List folderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder); + final NamespaceKey namespaceKey = new NamespaceKey(folderPath); + ContentKey contentKey = ContentKey.of(rootFolder); + String folderName = contentKey.toPathString(); + final VersionContext version = VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + + base.getDataplanePlugin().createNamespace(namespaceKey, version); + base.runSQL(createEmptyTableQuery(tablePath)); + + final String viewName = generateUniqueViewName(); + List viewKey = Arrays.asList(rootFolder, viewName); + base.runSQL(createViewQuery(viewKey, tablePath)); + + // Assert + assertThatThrownBy( + () -> base.getDataplanePlugin().deleteFolder(namespaceKey, version)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Folder '%s' is not empty", folderName); + } + + @Test + public void deleteNonEmptyFolderWithSubFolderThenThrowError() throws Exception { + final String rootFolder = generateUniqueFolderName(); + final List rootFolderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder); + final String leafFolder = generateUniqueFolderName(); + final List leafFolderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder, leafFolder); + final NamespaceKey rootNamespaceKey = new NamespaceKey(rootFolderPath); + final NamespaceKey leafNamespaceKey = new NamespaceKey(leafFolderPath); + ContentKey contentKey = ContentKey.of(rootFolder); + String folderName = contentKey.toPathString(); + final VersionContext version = VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + + base.getDataplanePlugin().createNamespace(rootNamespaceKey, version); + base.getDataplanePlugin().createNamespace(leafNamespaceKey, version); + + // Assert + assertThatThrownBy( + () -> base.getDataplanePlugin().deleteFolder(rootNamespaceKey, version)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Folder '%s' is not empty", folderName); + } + + @Test + public void deleteParentAndChildFolders() throws Exception { + final String rootFolder = generateUniqueFolderName(); + final List rootFolderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder); + final String leafFolder = generateUniqueFolderName(); + final List leafFolderPath = Arrays.asList(DATAPLANE_PLUGIN_NAME, rootFolder, leafFolder); + final NamespaceKey rootNamespaceKey = new NamespaceKey(rootFolderPath); + final NamespaceKey leafNamespaceKey = new NamespaceKey(leafFolderPath); + final VersionContext version = VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + + base.getDataplanePlugin().createNamespace(rootNamespaceKey, version); + base.getDataplanePlugin().createNamespace(leafNamespaceKey, version); + + // Delete child folder and then parent folder + base.getDataplanePlugin().deleteFolder(leafNamespaceKey, version); + base.getDataplanePlugin().deleteFolder(rootNamespaceKey, version); + + // Assert + assertNessieDoesNotHaveTable(Arrays.asList(rootFolder), DEFAULT_BRANCH_NAME, base); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteTestCases.java new file mode 100644 index 0000000000..63045e393b --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DeleteTestCases.java @@ -0,0 +1,184 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.deleteAllQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.deleteAllQueryWithoutContext; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useContextQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedDeleteTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class DeleteTestCases { + private ITDataplanePluginTestSetup base; + + DeleteTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void deleteAll() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String devBranch = generateUniqueBranchName(); + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + // Verify with select + base.assertTableHasExpectedNumRows(tablePath, 3); + // Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + // Switch to dev + base.runSQL(useBranchQuery(devBranch)); + + // Act + base.runSQL(deleteAllQuery(tablePath)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 0); + // Check that main context still has the table + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.assertTableHasExpectedNumRows(tablePath, 3); 
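+    // (Nessie branches are isolated: the commit produced by DELETE on dev is not
+    // visible from main unless it is merged.)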
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void deleteAllWithContext() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String devBranch = generateUniqueBranchName();
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Create dev branch
+    base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+    // Switch to dev
+    base.runSQL(useBranchQuery(devBranch));
+
+    // Act
+    base.runSQL(useContextQuery());
+    base.runSQL(deleteAllQueryWithoutContext(tablePath));
+
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 0);
+    // Check that main context still has the table
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void deleteAllInATag() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    String tag = generateUniqueTagName();
+    final String devBranch = generateUniqueBranchName();
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Create a tag to mark it
+    base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME));
+
+    // Switch to the tag
+    base.runSQL(useTagQuery(tag));
+
+    base.assertQueryThrowsExpectedError(deleteAllQuery(tablePath), "DDL and DML operations are only supported for branches - not on tags or commits");
+
+    // Check that main context still has the table
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void deleteAgnosticOfSourceBucket() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    // Act
+    base.runWithAlternateSourcePath(deleteAllQuery(tablePath));
+
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 0);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void deleteWithTagSet() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    final String tag = generateUniqueTagName();
+    // Act and Assert
+    base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME));
+    base.runSQL(useTagQuery(tag));
+    base.assertQueryThrowsExpectedError(deleteAllQuery(tablePath),
+      String.format("DDL and DML operations are only
supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DropTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DropTestCases.java new file mode 100644 index 0000000000..31a62f7580 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/DropTestCases.java @@ -0,0 +1,205 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableIfExistsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergTableExistsAtSubPath; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieDoesNotHaveTable; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasCommitForTable; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasTable; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.projectnessie.model.Operation; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedDropTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class DropTestCases { + private ITDataplanePluginTestSetup base; + + DropTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void dropTable() throws Exception 
{ + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(dropTableQuery(tablePath)); + // TODO Check for correct message + + // Assert + assertNessieHasCommitForTable(tablePath, Operation.Delete.class, DEFAULT_BRANCH_NAME, base); + assertNessieDoesNotHaveTable(tablePath, DEFAULT_BRANCH_NAME, base); + // TODO For now, we aren't doing filesystem cleanup, so this check is correct. Might change in the future. + assertIcebergTableExistsAtSubPath(tablePath); + } + + @Test + public void dropNonExistentTable() { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Act and Assert + base.assertQueryThrowsExpectedError(dropTableQuery(tablePath), + "does not exist"); + } + + @Test + public void dropIfExistsNonExistentTable() throws Exception { + // Arrange + // Expect no error for non existent table + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Act + base.runSQL(dropTableIfExistsQuery(tablePath)); + + // Assert + // No exception + } + + @Test + public void dropIfExistsExistingTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(dropTableIfExistsQuery(tablePath)); + + // Assert + // No exception + } + + @Test + public void dropExistingTableTwice() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // Ensure contents + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertIcebergTableExistsAtSubPath(tablePath); + + // First drop + base.runSQL(dropTableQuery(tablePath)); + // Assert removal of key from Nessie + assertNessieDoesNotHaveTable(tablePath, DEFAULT_BRANCH_NAME, base); + // Contents must still exist + assertIcebergTableExistsAtSubPath(tablePath); + + // Act + // Try second drop + base.assertQueryThrowsExpectedError(dropTableQuery(tablePath), + "does not exist"); + + // Assert + // Contents must still exist + assertIcebergTableExistsAtSubPath(tablePath); + } + + @Test + public void dropSameTableNameDifferentBranches() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String devBranchName = generateUniqueBranchName(); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + // Now try to create the same table in that branch - should succeed. 
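+    // (Each branch has its own namespace tree, so the identical table key on dev
+    // does not clash with the copy on main.)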
+ createFolders(tablePath, VersionContext.ofBranch(devBranchName)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Act + base.runSQL(dropTableQuery(tablePath)); + // ensure it's dropped in dev branch + base.assertQueryThrowsExpectedError(selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN), + String.format("Object '%s' not found within '%s'", tablePath.get(0), DATAPLANE_PLUGIN_NAME)); + // Switch back to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + // Assert + // Ensure table is still there + base.assertTableHasExpectedNumRows(tablePath, 0); + + base.runSQL(dropTableQuery(tablePath)); + // Now ensure it's gone in main too + base.assertQueryThrowsExpectedError(selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN), + String.format("Object '%s' not found within '%s'", tablePath.get(0), DATAPLANE_PLUGIN_NAME)); + } + + @Test + public void dropTableInNonBranchVersionContext() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String tag = generateUniqueTagName(); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + + // Act and Assert + base.assertQueryThrowsExpectedError(dropTableQuery(tablePath), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/FolderTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/FolderTestCases.java new file mode 100644 index 0000000000..2215608a18 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/FolderTestCases.java @@ -0,0 +1,515 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.BaseTestQuery.runSQL; +import static com.dremio.exec.catalog.dataplane.ContainerEntity.tableAFirst; +import static com.dremio.exec.catalog.dataplane.ContainerEntity.tableBSecond; +import static com.dremio.exec.catalog.dataplane.ContainerEntity.viewCThird; +import static com.dremio.exec.catalog.dataplane.ContainerEntity.viewDFourth; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.folderA; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.folderB; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tableA; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.projectnessie.model.Namespace; + +import com.dremio.exec.catalog.VersionContext; +import com.dremio.plugins.ExternalNamespaceEntry; +import com.google.common.collect.Streams; + +/** + * + * Add tests around Dataplane listEntries and listEntriesWithNested. In Nessie API V2, there is no implicit namespaces + * + * dataplane_test + * - tableAFirst + * - tableBSecond + * - viewCThird + * - viewDFourth + * - explicitFolder1 + * - tableA + * - tableBSecond + * - viewCThird + * - viewDFourth + * - explicitFolderInExplicitParent3 + * - tableAFirst + * - tableBSecond + * - viewCThird + * - viewDFourth + * - emptyExplicitFolder7 + * - maxDepthExplicitFolder8 + * - maxDepthExplicitFolder9 + * - maxDepthExplicitFolder10 + * - maxDepthExplicitFolder11 + * - maxDepthExplicitFolder12 + * - maxDepthExplicitFolder13 + * - maxDepthExplicitFolder14 + * - maxDepthExplicitFolder15 + * - maxDepthExplicitFolder16 + * - maxDepthExplicitFolder17 + * - maxDepthExplicitFolder18 + * - maxDepthExplicitFolder19 + * - maxDepthExplicitFolder20 + * - maxDepthExplicitFolder21 + * - maxDepthExplicitFolder22 + * - maxDepthExplicitFolder23 + * - maxDepthExplicitFolder24 + * - maxDepthExplicitFolder25 + * - maxDepthExplicitFolder26 + * - tableWithFortySixCharacterssssssssssssssssssss + * - folderA + * - folderB + * - tableA + * - folderB + */ +public class FolderTestCases { + private ITDataplanePluginTestSetup base; + + FolderTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + private static final VersionContext MAIN = VersionContext.ofBranch("main"); + + private static final String longTableNameForMaxDepthTest = "tableWithFortySixCharacterssssssssssssssssssss"; + + private static final ContainerEntity sourceRoot = new ContainerEntity( + DATAPLANE_PLUGIN_NAME, + ContainerEntity.Type.SOURCE, + ContainerEntity.Contains.FOLDERS_AND_VIEWS, + Collections.emptyList()); + private static final ContainerEntity explicitFolder1 = new ContainerEntity( + "explicitFolder1", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.FOLDERS_AND_VIEWS, + sourceRoot.getFullPath()); + private static final ContainerEntity explicitFolderInExplicitParent3 = new ContainerEntity( + "explicitFolderInExplicitParent3", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.FOLDERS_AND_VIEWS, + 
explicitFolder1.getFullPath()); + private static final ContainerEntity emptyExplicitFolder7 = new ContainerEntity( + "emptyExplicitFolder7", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + sourceRoot.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder8 = new ContainerEntity( + "maxDepthExplicitFolder8", //23 + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + sourceRoot.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder9 = new ContainerEntity( + "maxDepthExplicitFolder9", //23 + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder8.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder10 = new ContainerEntity( + "maxDepthExplicitFolder10", //24 + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder9.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder11 = new ContainerEntity( + "maxDepthExplicitFolder11", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder10.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder12 = new ContainerEntity( + "maxDepthExplicitFolder12", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder11.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder13 = new ContainerEntity( + "maxDepthExplicitFolder13", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder12.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder14 = new ContainerEntity( + "maxDepthExplicitFolder14", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder13.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder15 = new ContainerEntity( + "maxDepthExplicitFolder15", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder14.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder16 = new ContainerEntity( + "maxDepthExplicitFolder16", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder15.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder17 = new ContainerEntity( + "maxDepthExplicitFolder17", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder16.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder18 = new ContainerEntity( + "maxDepthExplicitFolder18", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder17.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder19 = new ContainerEntity( + "maxDepthExplicitFolder19", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder18.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder20 = new ContainerEntity( + "maxDepthExplicitFolder20", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder19.getFullPath()); + + private static final ContainerEntity maxDepthExplicitFolder21 = new ContainerEntity( + "maxDepthExplicitFolder21", + ContainerEntity.Type.EXPLICIT_FOLDER, + ContainerEntity.Contains.EMPTY, + maxDepthExplicitFolder20.getFullPath()); + + private 
static final ContainerEntity maxDepthExplicitFolder22 = new ContainerEntity(
+    "maxDepthExplicitFolder22",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    maxDepthExplicitFolder21.getFullPath());
+
+  private static final ContainerEntity maxDepthExplicitFolder23 = new ContainerEntity(
+    "maxDepthExplicitFolder23",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    maxDepthExplicitFolder22.getFullPath());
+
+  private static final ContainerEntity maxDepthExplicitFolder24 = new ContainerEntity(
+    "maxDepthExplicitFolder24",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    maxDepthExplicitFolder23.getFullPath());
+
+  private static final ContainerEntity maxDepthExplicitFolder25 = new ContainerEntity(
+    "maxDepthExplicitFolder25",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    maxDepthExplicitFolder24.getFullPath());
+
+  private static final ContainerEntity maxDepthExplicitFolder26 = new ContainerEntity(
+    "maxDepthExplicitFolder26",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.MAX_KEY_TABLE,
+    maxDepthExplicitFolder25.getFullPath());
+
+  private static final ContainerEntity explicitFolderA = new ContainerEntity(
+    "folderA",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.FOLDERS_ONLY,
+    sourceRoot.getFullPath());
+
+  private static final ContainerEntity explicitFolderB = new ContainerEntity(
+    "folderB",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.TABLES_ONLY,
+    explicitFolderA.getFullPath());
+
+  private static final ContainerEntity explicitFolderBUnderRoot = new ContainerEntity(
+    "folderB",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    sourceRoot.getFullPath());
+
+  private void createEntitiesForContainer(ContainerEntity container) throws Exception {
+    switch (container.getType()) {
+      case SOURCE:
+        // The source itself already exists; nothing to create
+        break;
+      case IMPLICIT_FOLDER:
+        break;
+      case EXPLICIT_FOLDER:
+        base.getNessieClient().createNamespace()
+          .namespace(Namespace.of(container.getPathWithoutRoot()))
+          .refName("main")
+          .create();
+        break;
+      default:
+        throw new IllegalStateException("Unexpected value: " + container.getType());
+    }
+
+    switch (container.getContains()) {
+      case FOLDERS_AND_VIEWS:
+        createTablesAndViewsInContainer(container);
+        break;
+      case MAX_KEY_TABLE:
+        createTablesAndViewsInContainerForMaxDepthTestCases(container);
+        break;
+      case TABLES_ONLY:
+        createSingleTable(container, tableA);
+        break;
+      case EMPTY:
+        // Intentional fallthrough
+      case FOLDERS_ONLY:
+        // Subfolders are created as containers of their own, so there is nothing to create here.
+        break;
+      default:
+        throw new IllegalStateException("Unexpected value: " + container.getContains());
+    }
+  }
+
+  private static void createTablesAndViewsInContainer(ContainerEntity container) throws Exception {
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot("tableAFirst")));
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot("tableBSecond")));
+    runSQL(createViewQuery(
+      container.getChildPathWithoutRoot("viewCThird"),
+      container.getChildPathWithoutRoot("tableAFirst")));
+    runSQL(createViewQuery(
+      container.getChildPathWithoutRoot("viewDFourth"),
+      container.getChildPathWithoutRoot("tableBSecond")));
+  }
+
+  private static void createTablesAndViewsInContainerForMaxDepthTestCases(ContainerEntity container) throws Exception {
+    // The key length before the last part is 454, so create a table whose 46-character name hits the max key length
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot(longTableNameForMaxDepthTest)));
+  }
+
+  private static void createSingleTable(ContainerEntity container, String tableName) throws Exception {
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot(tableName)));
+  }
+
+  @BeforeEach
+  public void createFoldersTablesViews() throws Exception {
+    createEntitiesForContainer(sourceRoot);
+    createEntitiesForContainer(explicitFolder1);
+    createEntitiesForContainer(explicitFolderInExplicitParent3);
+    createEntitiesForContainer(emptyExplicitFolder7);
+    createEntitiesForContainer(maxDepthExplicitFolder8);
+    createEntitiesForContainer(maxDepthExplicitFolder9);
+    createEntitiesForContainer(maxDepthExplicitFolder10);
+    createEntitiesForContainer(maxDepthExplicitFolder11);
+    createEntitiesForContainer(maxDepthExplicitFolder12);
+    createEntitiesForContainer(maxDepthExplicitFolder13);
+    createEntitiesForContainer(maxDepthExplicitFolder14);
+    createEntitiesForContainer(maxDepthExplicitFolder15);
+    createEntitiesForContainer(maxDepthExplicitFolder16);
+    createEntitiesForContainer(maxDepthExplicitFolder17);
+    createEntitiesForContainer(maxDepthExplicitFolder18);
+    createEntitiesForContainer(maxDepthExplicitFolder19);
+    createEntitiesForContainer(maxDepthExplicitFolder20);
+    createEntitiesForContainer(maxDepthExplicitFolder21);
+    createEntitiesForContainer(maxDepthExplicitFolder22);
+    createEntitiesForContainer(maxDepthExplicitFolder23);
+    createEntitiesForContainer(maxDepthExplicitFolder24);
+    createEntitiesForContainer(maxDepthExplicitFolder25);
+    createEntitiesForContainer(maxDepthExplicitFolder26);
+    createEntitiesForContainer(explicitFolderA);
+    createEntitiesForContainer(explicitFolderB);
+    createEntitiesForContainer(explicitFolderBUnderRoot);
+  }
+
+  private Stream<List<String>> listEntries(ContainerEntity container) {
+    return base.getDataplanePlugin()
+      .listEntries(container.getPathWithoutRoot(), MAIN)
+      .map(ExternalNamespaceEntry::getNameElements);
+  }
+
+  private Stream<List<String>> listEntriesIncludeNested(ContainerEntity container) {
+    return base.getDataplanePlugin().
+ listEntriesIncludeNested(container.getPathWithoutRoot(), MAIN) + .map(ExternalNamespaceEntry::getNameElements); + } + + @Test + public void listEntriesForExplicitFolder() { + assertThat(listEntries(explicitFolder1)) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + getFullPath(explicitFolderInExplicitParent3), + getFullPath(explicitFolder1, tableAFirst), + getFullPath(explicitFolder1, tableBSecond), + getFullPath(explicitFolder1, viewCThird), + getFullPath(explicitFolder1, viewDFourth) + ).collect(Collectors.toList())); + } + + @Test + public void listNestedEntriesForExplicitFolder() { + assertThat(listEntriesIncludeNested(explicitFolder1)) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + getFullPath(explicitFolderInExplicitParent3), + getFullPath(explicitFolder1, tableAFirst), + getFullPath(explicitFolder1, tableBSecond), + getFullPath(explicitFolder1, viewCThird), + getFullPath(explicitFolder1, viewDFourth), + getFullPath(explicitFolderInExplicitParent3, tableAFirst), + getFullPath(explicitFolderInExplicitParent3, tableBSecond), + getFullPath(explicitFolderInExplicitParent3, viewCThird), + getFullPath(explicitFolderInExplicitParent3, viewDFourth) + ).collect(Collectors.toList())); + } + + @Test + public void listEntriesInRoot() { + assertThat(listEntries(sourceRoot)) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + getFullPath(explicitFolder1), + getFullPath(emptyExplicitFolder7), + getFullPath(maxDepthExplicitFolder8), + getFullPath(sourceRoot, tableAFirst), + getFullPath(sourceRoot, tableBSecond), + getFullPath(sourceRoot, viewCThird), + getFullPath(sourceRoot, viewDFourth), + getFullPath(sourceRoot, folderA), + getFullPath(sourceRoot, folderB) + ).collect(Collectors.toList())); + } + + @Test + public void listNestedEntriesInRoot() { + assertThat(listEntriesIncludeNested(sourceRoot)) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + getFullPath(explicitFolder1), + getFullPath(explicitFolderInExplicitParent3), + getFullPath(emptyExplicitFolder7), + getFullPath(maxDepthExplicitFolder8), + getFullPath(maxDepthExplicitFolder9), + getFullPath(maxDepthExplicitFolder10), + getFullPath(maxDepthExplicitFolder11), + getFullPath(maxDepthExplicitFolder12), + getFullPath(maxDepthExplicitFolder13), + getFullPath(maxDepthExplicitFolder14), + getFullPath(maxDepthExplicitFolder15), + getFullPath(maxDepthExplicitFolder16), + getFullPath(maxDepthExplicitFolder17), + getFullPath(maxDepthExplicitFolder18), + getFullPath(maxDepthExplicitFolder19), + getFullPath(maxDepthExplicitFolder20), + getFullPath(maxDepthExplicitFolder21), + getFullPath(maxDepthExplicitFolder22), + getFullPath(maxDepthExplicitFolder23), + getFullPath(maxDepthExplicitFolder24), + getFullPath(maxDepthExplicitFolder25), + getFullPath(maxDepthExplicitFolder26), + getFullPath(sourceRoot, tableAFirst), + getFullPath(sourceRoot, tableBSecond), + getFullPath(sourceRoot, viewCThird), + getFullPath(sourceRoot, viewDFourth), + getFullPath(explicitFolder1, tableAFirst), + getFullPath(explicitFolder1, tableBSecond), + getFullPath(explicitFolder1, viewCThird), + getFullPath(explicitFolder1, viewDFourth), + getFullPath(explicitFolderInExplicitParent3, tableAFirst), + getFullPath(explicitFolderInExplicitParent3, tableBSecond), + getFullPath(explicitFolderInExplicitParent3, viewCThird), + getFullPath(explicitFolderInExplicitParent3, viewDFourth), + getFullPath(maxDepthExplicitFolder26, longTableNameForMaxDepthTest), + getFullPath(sourceRoot, folderA), + getFullPath(sourceRoot, folderB), + 
getFullPath(explicitFolderA, folderB),
+          getFullPath(explicitFolderB, tableA)
+        ).collect(Collectors.toList()));
+  }
+
+  @Test
+  public void listEntriesInExplicitFolderInExplicitParent() {
+    assertThat(listEntries(explicitFolderInExplicitParent3))
+      .containsExactlyInAnyOrderElementsOf(
+        Streams.concat(
+          getFullPath(explicitFolderInExplicitParent3, tableAFirst),
+          getFullPath(explicitFolderInExplicitParent3, tableBSecond),
+          getFullPath(explicitFolderInExplicitParent3, viewCThird),
+          getFullPath(explicitFolderInExplicitParent3, viewDFourth)
+        ).collect(Collectors.toList()));
+  }
+
+  @Test
+  public void listEntriesEmptyExplicitFolders() {
+    assertThat(listEntries(emptyExplicitFolder7))
+      .isEmpty();
+  }
+
+  @Test
+  public void testMaxDepthFolders() {
+    assertThat(listEntriesIncludeNested(maxDepthExplicitFolder8))
+      .containsExactlyInAnyOrderElementsOf(
+        Streams.concat(
+          getFullPath(maxDepthExplicitFolder8, maxDepthExplicitFolder9.getName()),
+          getFullPath(maxDepthExplicitFolder9, maxDepthExplicitFolder10.getName()),
+          getFullPath(maxDepthExplicitFolder10, maxDepthExplicitFolder11.getName()),
+          getFullPath(maxDepthExplicitFolder11, maxDepthExplicitFolder12.getName()),
+          getFullPath(maxDepthExplicitFolder12, maxDepthExplicitFolder13.getName()),
+          getFullPath(maxDepthExplicitFolder13, maxDepthExplicitFolder14.getName()),
+          getFullPath(maxDepthExplicitFolder14, maxDepthExplicitFolder15.getName()),
+          getFullPath(maxDepthExplicitFolder15, maxDepthExplicitFolder16.getName()),
+          getFullPath(maxDepthExplicitFolder16, maxDepthExplicitFolder17.getName()),
+          getFullPath(maxDepthExplicitFolder17, maxDepthExplicitFolder18.getName()),
+          getFullPath(maxDepthExplicitFolder18, maxDepthExplicitFolder19.getName()),
+          getFullPath(maxDepthExplicitFolder19, maxDepthExplicitFolder20.getName()),
+          getFullPath(maxDepthExplicitFolder20, maxDepthExplicitFolder21.getName()),
+          getFullPath(maxDepthExplicitFolder21, maxDepthExplicitFolder22.getName()),
+          getFullPath(maxDepthExplicitFolder22, maxDepthExplicitFolder23.getName()),
+          getFullPath(maxDepthExplicitFolder23, maxDepthExplicitFolder24.getName()),
+          getFullPath(maxDepthExplicitFolder24, maxDepthExplicitFolder25.getName()),
+          getFullPath(maxDepthExplicitFolder25, maxDepthExplicitFolder26.getName()),
+          getFullPath(maxDepthExplicitFolder26, longTableNameForMaxDepthTest)
+        ).collect(Collectors.toList()));
+  }
+
+  @Test
+  public void testNamespaceLinksToCorrectNamespace() {
+    // Assert that dataplane_test.folderA.folderB has an entry named tableA
+    assertThat(listEntries(explicitFolderB))
+      .containsExactlyInAnyOrderElementsOf(
+        Streams.concat(
+          getFullPath(explicitFolderB, tableA)
+        ).collect(Collectors.toList()));
+
+    // Assert that dataplane_test.folderB.folderB does not have an entry named tableA
+    List<String> incorrectFullPath = Arrays.asList(folderB, folderB);
+    assertThat(base.getDataplanePlugin().listEntries(incorrectFullPath, MAIN))
+      .map(ExternalNamespaceEntry::getNameElements)
+      .isEmpty();
+  }
+
+  public Stream<List<String>> getFullPath(ContainerEntity parent) {
+    return Collections.singletonList(parent.getPathWithoutRoot()).stream();
+  }
+
+  public Stream<List<String>> getFullPath(ContainerEntity parent, String nameElement) {
+    if (parent.getType() == ContainerEntity.Type.SOURCE) {
+      return Collections.singletonList(Arrays.asList(nameElement)).stream();
+    }
+    return Collections.singletonList(Streams.concat(parent.getPathWithoutRoot().stream(), Stream.of(nameElement))
+      .collect(Collectors.toList())).stream();
+  }
+}
diff --git
a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestReflection.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestReflection.java new file mode 100644 index 0000000000..aadbff38b2 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestReflection.java @@ -0,0 +1,683 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.dac.server.JobsServiceTestUtils.submitJobAndGetData; +import static com.dremio.options.OptionValue.OptionType.SYSTEM; +import static com.dremio.service.accelerator.proto.SubstitutionState.CHOSEN; +import static com.dremio.service.reflection.ReflectionOptions.MATERIALIZATION_CACHE_ENABLED; +import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_DELETION_GRACE_PERIOD; +import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_MANAGER_REFRESH_DELAY_MILLIS; +import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_PERIODIC_WAKEUP_ONLY; +import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; +import static com.google.common.collect.Iterables.isEmpty; +import static java.util.concurrent.TimeUnit.HOURS; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import javax.ws.rs.client.Entity; +import javax.ws.rs.core.GenericType; + +import org.apache.arrow.memory.BufferAllocator; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.DatasetUI; +import com.dremio.dac.model.job.JobDataFragment; +import com.dremio.dac.model.spaces.SpacePath; +import com.dremio.dac.server.BaseTestServerJunit5; +import com.dremio.dac.server.JobsServiceTestUtils; +import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.exec.ExecConstants; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUser; +import com.dremio.exec.catalog.MetadataRequestOptions; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.server.ContextService; +import com.dremio.exec.server.MaterializationDescriptorProvider; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.SchemaConfig; +import com.dremio.options.OptionValue; +import com.dremio.service.accelerator.AccelerationDetailsUtils; +import com.dremio.service.accelerator.proto.AccelerationDetails; +import com.dremio.service.accelerator.proto.ReflectionRelationship; +import com.dremio.service.job.JobDetailsRequest; +import com.dremio.service.job.proto.JobDetails; +import 
com.dremio.service.job.proto.JobId; +import com.dremio.service.job.proto.JobProtobuf; +import com.dremio.service.job.proto.QueryType; +import com.dremio.service.jobs.JobNotFoundException; +import com.dremio.service.jobs.JobRequest; +import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsService; +import com.dremio.service.jobs.LocalJobsService; +import com.dremio.service.jobs.LogicalPlanCaptureListener; +import com.dremio.service.jobs.SqlQuery; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.dataset.proto.AccelerationSettings; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.DatasetType; +import com.dremio.service.namespace.dataset.proto.PhysicalDataset; +import com.dremio.service.namespace.dataset.proto.RefreshMethod; +import com.dremio.service.namespace.file.proto.FileConfig; +import com.dremio.service.namespace.file.proto.FileType; +import com.dremio.service.namespace.proto.EntityId; +import com.dremio.service.namespace.space.proto.SpaceConfig; +import com.dremio.service.reflection.DependencyEntry; +import com.dremio.service.reflection.DependencyEntry.DatasetDependency; +import com.dremio.service.reflection.DependencyEntry.ReflectionDependency; +import com.dremio.service.reflection.DependencyUtils; +import com.dremio.service.reflection.ReflectionMonitor; +import com.dremio.service.reflection.ReflectionOptions; +import com.dremio.service.reflection.ReflectionService; +import com.dremio.service.reflection.ReflectionServiceImpl; +import com.dremio.service.reflection.ReflectionStatusService; +import com.dremio.service.reflection.proto.Materialization; +import com.dremio.service.reflection.proto.MaterializationId; +import com.dremio.service.reflection.proto.ReflectionDetails; +import com.dremio.service.reflection.proto.ReflectionDimensionField; +import com.dremio.service.reflection.proto.ReflectionField; +import com.dremio.service.reflection.proto.ReflectionGoal; +import com.dremio.service.reflection.proto.ReflectionId; +import com.dremio.service.reflection.proto.ReflectionMeasureField; +import com.dremio.service.reflection.proto.ReflectionType; +import com.dremio.service.reflection.store.MaterializationStore; +import com.dremio.service.reflection.store.ReflectionEntriesStore; +import com.dremio.service.users.SystemUser; +import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; + +public abstract class ITBaseTestReflection extends ITBaseTestVersioned { + + private static AtomicInteger queryNumber = new AtomicInteger(0); + + protected static final String TEST_SPACE = "refl_test"; + + private static MaterializationStore materializationStore; + private static ReflectionEntriesStore entriesStore; + + @BeforeAll + public static void reflectionSetup() throws Exception { + BaseTestServerJunit5.getPopulator().populateTestUsers(); + + final NamespaceService nsService = getNamespaceService(); + final SpaceConfig config = new SpaceConfig().setName(TEST_SPACE); + nsService.addOrUpdateSpace(new SpacePath(config.getName()).toNamespaceKey(), config); + + materializationStore = new MaterializationStore(p(LegacyKVStoreProvider.class)); + entriesStore = new ReflectionEntriesStore(p(LegacyKVStoreProvider.class)); + setSystemOption(PlannerSettings.QUERY_PLAN_CACHE_ENABLED.getOptionName(), 
"false"); + } + + @AfterAll + public static void reflectionCleanUp() { + // reset deletion grace period + setDeletionGracePeriod(HOURS.toSeconds(4)); + setManagerRefreshDelay(10); + } + + protected static MaterializationStore getMaterializationStore() { + return materializationStore; + } + + protected static ReflectionEntriesStore getReflectionEntriesStore() { + return entriesStore; + } + + protected Catalog cat() { + return l(CatalogService.class) + .getCatalog( + MetadataRequestOptions.of( + SchemaConfig.newBuilder(CatalogUser.from(SYSTEM_USERNAME)).build())); + } + + protected static NamespaceService getNamespaceService() { + return p(NamespaceService.class).get(); + } + + protected static ReflectionServiceImpl getReflectionService() { + return (ReflectionServiceImpl) p(ReflectionService.class).get(); + } + + protected static ReflectionStatusService getReflectionStatusService() { + return p(ReflectionStatusService.class).get(); + } + + protected static MaterializationDescriptorProvider getMaterializationDescriptorProvider() { + return p(MaterializationDescriptorProvider.class).get(); + } + + protected static long requestRefresh(NamespaceKey datasetKey) throws NamespaceException { + final long requestTime = System.currentTimeMillis(); + DatasetConfig dataset = getNamespaceService().getDataset(datasetKey); + getReflectionService().requestRefresh(dataset.getId().getId()); + return requestTime; + } + + protected static long requestRefreshWithRetry( + NamespaceKey datasetKey, ReflectionMonitor monitor, ReflectionId rawId, Materialization m) { + int retry = 3; + long requestTime; + while (true) { + try { + requestTime = requestRefresh(datasetKey); + monitor.waitUntilMaterialized(rawId, m); + break; + } catch (Throwable t) { + if (retry == 0) { + Throwables.propagate(t); + } else { + retry--; + } + } + } + return requestTime; + } + + protected static ReflectionMonitor newReflectionMonitor(long delay, long maxWait) { + final MaterializationStore materializationStore = + new MaterializationStore(p(LegacyKVStoreProvider.class)); + return new ReflectionMonitor( + getReflectionService(), + getReflectionStatusService(), + getMaterializationDescriptorProvider(), + getJobsService(), + materializationStore, + delay, + maxWait); + } + + protected static JobsService getJobsService() { + return p(JobsService.class).get(); + } + + protected static DatasetConfig addJson(DatasetPath path) throws Exception { + final DatasetConfig dataset = + new DatasetConfig() + .setId(new EntityId(UUID.randomUUID().toString())) + .setType(DatasetType.PHYSICAL_DATASET_SOURCE_FILE) + .setFullPathList(path.toPathList()) + .setName(path.getLeaf().getName()) + .setCreatedAt(System.currentTimeMillis()) + .setTag(null) + .setOwner(DEFAULT_USERNAME) + .setPhysicalDataset( + new PhysicalDataset().setFormatSettings(new FileConfig().setType(FileType.JSON))); + final NamespaceService nsService = getNamespaceService(); + nsService.addOrUpdateDataset(path.toNamespaceKey(), dataset); + return nsService.getDataset(path.toNamespaceKey()); + } + + protected String getQueryPlan(final String query) { + return getQueryPlan(query, false); + } + + protected String getQueryPlan(final String query, boolean asSystemUser) { + final LogicalPlanCaptureListener capturePlanListener = new LogicalPlanCaptureListener(); + JobRequest jobRequest = + JobRequest.newBuilder() + .setSqlQuery( + new SqlQuery(query, asSystemUser ? 
SystemUser.SYSTEM_USERNAME : DEFAULT_USERNAME))
+            .setQueryType(QueryType.UI_RUN)
+            .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
+            .build();
+    JobsServiceTestUtils.submitJobAndWaitUntilCompletion(
+        l(LocalJobsService.class), jobRequest, capturePlanListener);
+    return capturePlanListener.getPlan();
+  }
+
+  protected static List<ReflectionField> reflectionFields(String... fields) {
+    ImmutableList.Builder<ReflectionField> builder = new ImmutableList.Builder<>();
+    for (String field : fields) {
+      builder.add(new ReflectionField(field));
+    }
+    return builder.build();
+  }
+
+  protected static List<ReflectionDimensionField> reflectionDimensionFields(String... fields) {
+    ImmutableList.Builder<ReflectionDimensionField> builder = new ImmutableList.Builder<>();
+    for (String field : fields) {
+      builder.add(new ReflectionDimensionField(field));
+    }
+    return builder.build();
+  }
+
+  protected static List<ReflectionMeasureField> reflectionMeasureFields(String... fields) {
+    ImmutableList.Builder<ReflectionMeasureField> builder = new ImmutableList.Builder<>();
+    for (String field : fields) {
+      builder.add(new ReflectionMeasureField(field));
+    }
+    return builder.build();
+  }
+
+  protected static List<ReflectionRelationship> getChosen(
+      List<ReflectionRelationship> relationships) {
+    if (relationships == null) {
+      return Collections.emptyList();
+    }
+
+    return relationships.stream()
+        .filter((r) -> r.getState() == CHOSEN)
+        .collect(Collectors.toList());
+  }
+
+  protected Materialization getMaterializationFor(final ReflectionId rId) {
+    final Iterable<MaterializationId> mIds =
+        getMaterializationDescriptorProvider().get().stream()
+            .filter(input -> input.getLayoutId().equals(rId.getId()))
+            .map(descriptor -> new MaterializationId(descriptor.getMaterializationId()))
+            .collect(Collectors.toList());
+    assertEquals(
+        "only one materialization expected, but got " + mIds.toString(), 1, Iterables.size(mIds));
+
+    final MaterializationId mId = mIds.iterator().next();
+    final Optional<Materialization> m = getReflectionService().getMaterialization(mId);
+    assertTrue("materialization not found", m.isPresent());
+    return m.get();
+  }
+
+  protected DatasetUI createVdsFromQuery(String query, String space, String dataset) {
+    final DatasetPath datasetPath = new DatasetPath(ImmutableList.of(space, dataset));
+    return createDatasetFromSQLAndSave(datasetPath, query, Collections.emptyList());
+  }
+
+  protected DatasetUI createVdsFromQuery(String query, String testSpace) {
+    final String datasetName = "query" + queryNumber.getAndIncrement();
+    return createVdsFromQuery(query, testSpace, datasetName);
+  }
+
+  protected ReflectionId createRawOnVds(
+      String datasetId, String reflectionName, List<String> rawFields) throws Exception {
+    return getReflectionService()
+        .create(
+            new ReflectionGoal()
+                .setType(ReflectionType.RAW)
+                .setDatasetId(datasetId)
+                .setName(reflectionName)
+                .setDetails(
+                    new ReflectionDetails()
+                        .setDisplayFieldList(
+                            rawFields.stream()
+                                .map(ReflectionField::new)
+                                .collect(Collectors.toList()))));
+  }
+
+  protected void onlyAllowPeriodicWakeup(boolean periodicOnly) {
+    getSabotContext()
+        .getOptionManager()
+        .setOption(
+            OptionValue.createBoolean(
+                SYSTEM, REFLECTION_PERIODIC_WAKEUP_ONLY.getOptionName(), periodicOnly));
+  }
+
+  protected ReflectionId createRawFromQuery(
+      String query, String testSpace, List<String> rawFields, String reflectionName)
+      throws Exception {
+    final DatasetUI datasetUI = createVdsFromQuery(query, testSpace);
+    return createRawOnVds(datasetUI.getId(), reflectionName, rawFields);
+  }
+
+  protected static void setMaterializationCacheSettings(
+      boolean enabled, long refreshDelayInSeconds) {
+    l(ContextService.class)
+        .get()
+        .getOptionManager()
+        .setOption(
OptionValue.createBoolean( + SYSTEM, MATERIALIZATION_CACHE_ENABLED.getOptionName(), enabled)); + l(ContextService.class) + .get() + .getOptionManager() + .setOption( + OptionValue.createLong( + SYSTEM, + ReflectionOptions.MATERIALIZATION_CACHE_REFRESH_DELAY_MILLIS.getOptionName(), + refreshDelayInSeconds * 1000)); + } + + protected static void setEnableReAttempts(boolean enableReAttempts) { + l(ContextService.class) + .get() + .getOptionManager() + .setOption( + OptionValue.createBoolean( + SYSTEM, ExecConstants.ENABLE_REATTEMPTS.getOptionName(), enableReAttempts)); + } + + protected static void setManagerRefreshDelay(long delayInSeconds) { + setManagerRefreshDelayMs(delayInSeconds * 1000); + } + + protected static void setManagerRefreshDelayMs(long delayInMillis) { + l(ContextService.class) + .get() + .getOptionManager() + .setOption( + OptionValue.createLong( + SYSTEM, REFLECTION_MANAGER_REFRESH_DELAY_MILLIS.getOptionName(), delayInMillis)); + } + + protected static void setDeletionGracePeriod(long periodInSeconds) { + l(ContextService.class) + .get() + .getOptionManager() + .setOption( + OptionValue.createLong( + SYSTEM, REFLECTION_DELETION_GRACE_PERIOD.getOptionName(), periodInSeconds)); + } + + protected void setDatasetAccelerationSettings( + CatalogEntityKey key, long refreshPeriod, long gracePeriod) { + setDatasetAccelerationSettings(key, refreshPeriod, gracePeriod, false, null, false, false); + } + + protected void setDatasetAccelerationSettings( + CatalogEntityKey key, long refreshPeriod, long gracePeriod, boolean neverExpire) { + setDatasetAccelerationSettings( + key, refreshPeriod, gracePeriod, false, null, neverExpire, false); + } + + protected void setDatasetAccelerationSettings( + CatalogEntityKey key, + long refreshPeriod, + long gracePeriod, + boolean neverExpire, + boolean neverRefresh) { + setDatasetAccelerationSettings( + key, refreshPeriod, gracePeriod, false, null, neverExpire, neverRefresh); + } + + protected void setDatasetAccelerationSettings( + CatalogEntityKey key, + long refreshPeriod, + long gracePeriod, + boolean incremental, + String refreshField) { + setDatasetAccelerationSettings( + key, refreshPeriod, gracePeriod, incremental, refreshField, false, false); + } + + protected void setDatasetAccelerationSettings( + CatalogEntityKey key, + long refreshPeriod, + long gracePeriod, + boolean incremental, + String refreshField, + boolean neverExpire, + boolean neverRefresh) { + // update dataset refresh/grace period + getReflectionService() + .getReflectionSettings() + .setReflectionSettings( + key, + new AccelerationSettings() + .setMethod(incremental ? RefreshMethod.INCREMENTAL : RefreshMethod.FULL) + .setRefreshPeriod(refreshPeriod) + .setGracePeriod(gracePeriod) + .setRefreshField(refreshField) + .setNeverExpire(neverExpire) + .setNeverRefresh(neverRefresh)); + } + + protected DatasetDependency dependency(final String datasetId, final NamespaceKey datasetKey) { + return DependencyEntry.of(datasetId, datasetKey.getPathComponents()); + } + + protected ReflectionDependency dependency(final ReflectionId reflectionId) { + return DependencyEntry.of(reflectionId); + } + + protected boolean dependsOn(ReflectionId rId, final DependencyEntry... 
entries) { + final Iterable dependencies = getReflectionService().getDependencies(rId); + if (isEmpty(dependencies)) { + return false; + } + for (DependencyEntry entry : entries) { + if (!Iterables.contains(dependencies, entry)) { + return false; + } + } + return true; + } + + protected void assertDependsOn(ReflectionId rId, final DependencyEntry... entries) { + assertTrue( + String.format( + "Unexpected state %s", + DependencyUtils.describeDependencies(rId, getReflectionService().getDependencies(rId))), + dependsOn(rId, entries)); + } + + protected void assertNotDependsOn(ReflectionId rId, final DependencyEntry... entries) { + assertFalse( + String.format( + "Unexpected state %s", + DependencyUtils.describeDependencies(rId, getReflectionService().getDependencies(rId))), + dependsOn(rId, entries)); + } + + protected String dumpState(final Materialization m) { + return String.format( + "%s %s", + m, + DependencyUtils.describeDependencies( + m.getReflectionId(), getReflectionService().getDependencies(m.getReflectionId()))); + } + + protected void createSpace(String name) { + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(new com.dremio.dac.api.Space(null, name, null, null, null))), new GenericType() {}); + } + + protected void dropSpace(String name) { + com.dremio.dac.api.Space s = expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/").path("by-path").path(name)).buildGet(), new GenericType() {}); + expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/").path(s.getId())).buildDelete()); + } + + /** + * Get data record of a reflection + * + * @param reflectionId id of a reflection + * @return data record of the reflection + */ + protected JobDataFragment getReflectionsData( + JobsService jobsService, ReflectionId reflectionId, BufferAllocator allocator) + throws Exception { + return submitJobAndGetData( + jobsService, + JobRequest.newBuilder() + .setSqlQuery( + getQueryFromSQL( + "select * from sys.reflections where reflection_id = '" + + reflectionId.getId() + + "'")) + .build(), + 0, + 1, + allocator); + } + + /** + * Get materialization data of a reflection + * + * @param reflectionId id of a reflection + * @return materialization data of the reflection + */ + protected JobDataFragment getMaterializationsData( + JobsService jobsService, ReflectionId reflectionId, BufferAllocator allocator) + throws Exception { + return submitJobAndGetData( + jobsService, + JobRequest.newBuilder() + .setSqlQuery( + getQueryFromSQL( + "select * from sys.materializations where reflection_id = '" + + reflectionId.getId() + + "'")) + .build(), + 0, + 1, + allocator); + } + + /** + * Get refresh data of a reflection + * + * @param reflectionId id of a reflection + * @return refresh data of the reflection + */ + protected JobDataFragment getRefreshesData( + JobsService jobsService, ReflectionId reflectionId, BufferAllocator allocator) + throws Exception { + return submitJobAndGetData( + jobsService, + JobRequest.newBuilder() + .setSqlQuery( + getQueryFromSQL( + "select * from sys.materializations where reflection_id = '" + + reflectionId.getId() + + "'")) + .build(), + 0, + 100, + allocator); + } + + /** + * Get the number of written records of a materialization, which is output records shown in its + * refresh job details + * + * @param m materialization of a reflection + * @return the number of written records + */ + protected static long getNumWrittenRecords(Materialization m) throws JobNotFoundException { + final JobId refreshJobId = new 
JobId(m.getInitRefreshJobId()); + JobDetailsRequest request = + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(refreshJobId)).build(); + final com.dremio.service.job.JobDetails refreshJob = getJobsService().getJobDetails(request); + final JobDetails jobDetails = JobsProtoUtil.getLastAttempt(refreshJob).getDetails(); + return jobDetails.getOutputRecords(); + } + + /** + * Ensures child materialization depends properly on parent materialization: + * + *
1. child materialization started after parent materialization was done
+   * 2. child reflection depends on parent materialization
      + */ + protected void checkReflectionDependency(Materialization parent, Materialization child) + throws Exception { + // child reflection should depend on its parent + assertDependsOn(child.getReflectionId(), dependency(parent.getReflectionId())); + + JobDetailsRequest parentRefreshReflectionRequest = + JobDetailsRequest.newBuilder() + .setJobId(JobProtobuf.JobId.newBuilder().setId(parent.getInitRefreshJobId()).build()) + .build(); + JobDetailsRequest childRefreshReflectionRequest = + JobDetailsRequest.newBuilder() + .setJobId(JobProtobuf.JobId.newBuilder().setId(child.getInitRefreshJobId()).build()) + .build(); + + final com.dremio.service.job.JobDetails parentRefreshReflectionJobDetails = + getJobsService().getJobDetails(parentRefreshReflectionRequest); + final com.dremio.service.job.JobDetails childRefreshReflectionJobDetails = + getJobsService().getJobDetails(childRefreshReflectionRequest); + + // make sure child has been accelerated with parent's latest materialization + AccelerationDetails details = + AccelerationDetailsUtils.deserialize( + JobsProtoUtil.getLastAttempt(childRefreshReflectionJobDetails) + .getAccelerationDetails()); + List chosen = getChosen(details.getReflectionRelationshipsList()); + assertTrue( + "child refresh wasn't accelerated with parent's latest materialization", + chosen.stream() + .anyMatch(r -> r.getMaterialization().getId().equals(parent.getId().getId()))); + + assertTrue( + "child refresh started before parent load materialization job finished", + JobsProtoUtil.getLastAttempt(childRefreshReflectionJobDetails).getInfo().getStartTime() + >= JobsProtoUtil.getLastAttempt(parentRefreshReflectionJobDetails) + .getInfo() + .getFinishTime()); + } + + /** + * Refresh metadata of a dataset + * + * @param datasetPath dataset path + */ + protected void refreshMetadata(final String datasetPath) { + submitJobAndWaitUntilCompletion( + JobRequest.newBuilder() + .setSqlQuery(getQueryFromSQL("ALTER TABLE " + datasetPath + " REFRESH METADATA")) + .build()); + } + + /** + * Get query data with max rows + * + * @param query sql query + * @param maxRows max rows of the results + * @return results of the query + */ + protected JobDataFragment getQueryData( + JobsService jobsService, String query, int maxRows, BufferAllocator allocator) + throws JobNotFoundException { + return submitJobAndGetData( + jobsService, + JobRequest.newBuilder().setSqlQuery(getQueryFromSQL(query)).build(), + 0, + maxRows, + allocator); + } + + /** + * Get query data in a certain session with max rows + * + * @param query sql query + * @param sessionId sql query session id + * @param maxRows max rows of the results + * @return results of the query + */ + protected JobDataFragment getQueryDataInSession( + JobsService jobsService, String query, String sessionId, int maxRows, BufferAllocator allocator) + throws JobNotFoundException { + return submitJobAndGetData( + jobsService, + createJobRequestFromSqlAndSessionId(query, sessionId), + 0, + maxRows, + allocator); + } + + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestVersioned.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestVersioned.java new file mode 100644 index 0000000000..4dfa10473c --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITBaseTestVersioned.java @@ -0,0 +1,286 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_TIME_TRAVEL; +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static com.dremio.exec.ExecConstants.VERSIONED_VIEW_ENABLED; +import static com.dremio.exec.catalog.CatalogOptions.REFLECTION_ARCTIC_ENABLED; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.ALTERNATIVE_BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME_FOR_REFLECTION_TEST; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createFolderAtQueryWithIfNotExists; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.fullyQualifiedTableName; +import static com.dremio.exec.store.DataplanePluginOptions.DATAPLANE_PLUGIN_ENABLED; +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.options.OptionValue.OptionType.SYSTEM; + +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.io.TempDir; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.http.HttpClientBuilder; +import org.projectnessie.tools.compatibility.api.NessieBaseUri; +import org.projectnessie.tools.compatibility.api.NessieServerProperty; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; +import org.projectnessie.versioned.persist.adapter.DatabaseAdapter; +import org.projectnessie.versioned.persist.tests.extension.NessieDbAdapter; + +import com.dremio.common.AutoCloseables; +import com.dremio.common.utils.PathUtils; +import com.dremio.dac.server.BaseTestServerJunit5; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.options.OptionValue; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.plugins.dataplane.store.NessiePluginConfig; +import com.dremio.plugins.s3.store.S3FileSystem; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import 
com.dremio.service.namespace.source.proto.SourceConfig; +import com.dremio.service.users.SystemUser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; + +import io.findify.s3mock.S3Mock; + +@ExtendWith(OlderNessieServersExtension.class) +@NessieServerProperty(name = "nessie.test.storage.kind", value = "PERSIST") //PERSIST is the new model in Nessie Server +public class ITBaseTestVersioned extends BaseTestServerJunit5 { + private static S3Mock s3Mock; + private static int S3_PORT; + + @TempDir static File temporaryDirectory; + private static Path bucketPath; + + @NessieDbAdapter static DatabaseAdapter databaseAdapter; + + @NessieBaseUri private static URI nessieUri; + + private static NessieApiV1 nessieClient; + private static DataplanePlugin dataplanePlugin; + + private static Catalog catalog; + private static NamespaceService namespaceService; + + @BeforeAll + public static void arcticSetup() throws Exception { + setUpS3Mock(); + setUpNessie(); + setUpDataplanePlugin(); + } + + @AfterAll + public static void arcticCleanUp() throws Exception { + AutoCloseables.close( + dataplanePlugin, + nessieClient); + if (s3Mock != null) { + s3Mock.shutdown(); + s3Mock = null; + } + } + + protected static void setUpS3Mock() throws IOException { + bucketPath = Paths.get(temporaryDirectory.getAbsolutePath(), BUCKET_NAME); + Files.createDirectory(bucketPath); + Files.createDirectory(Paths.get(temporaryDirectory.getAbsolutePath(), ALTERNATIVE_BUCKET_NAME)); + + Preconditions.checkState(s3Mock == null); + s3Mock = + new S3Mock.Builder() + .withPort(0) + .withFileBackend(temporaryDirectory.getAbsolutePath()) + .build(); + S3_PORT = s3Mock.start().localAddress().getPort(); + } + + protected static void setUpNessie() { + nessieClient = + HttpClientBuilder.builder().withUri(createNessieURIString()).build(NessieApiV2.class); + } + + private static NessiePluginConfig prepareConnectionConf(String bucket) { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = createNessieURIString(); + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = bucket; + + // S3Mock settings + nessiePluginConfig.propertyList = + Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property("fs.s3a.connection.ssl.enabled", "false"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true")); + + return nessiePluginConfig; + } + + protected static void setUpDataplanePlugin() { + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, DATAPLANE_PLUGIN_ENABLED.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption(OptionValue.createBoolean(SYSTEM, VERSIONED_VIEW_ENABLED.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, ENABLE_USE_VERSION_SYNTAX.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, ENABLE_ICEBERG_TIME_TRAVEL.getOptionName(), true)); + getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, REFLECTION_ARCTIC_ENABLED.getOptionName(), true)); + 
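// Each flag here is an independent SYSTEM-scoped option, so the ordering of
+    // these setOption calls should not matter; NESSIE_PLUGIN_ENABLED below
+    // presumably needs to be on before the createSource(...) calls later in
+    // this method so the Nessie source type is available.
+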
getSabotContext() + .getOptionManager() + .setOption( + OptionValue.createBoolean(SYSTEM, NESSIE_PLUGIN_ENABLED.getOptionName(), true)); + + CatalogServiceImpl catalogImpl = (CatalogServiceImpl) getSabotContext().getCatalogService(); + + SourceConfig sourceConfig = + new SourceConfig() + .setConnectionConf(prepareConnectionConf(BUCKET_NAME)) + .setName(DATAPLANE_PLUGIN_NAME) + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + catalogImpl.getSystemUserCatalog().createSource(sourceConfig); + dataplanePlugin = catalogImpl.getSystemUserCatalog().getSource(DATAPLANE_PLUGIN_NAME); + catalog = catalogImpl.getSystemUserCatalog(); + + SourceConfig sourceConfigForReflectionTest = + new SourceConfig() + .setConnectionConf(prepareConnectionConf(BUCKET_NAME)) + .setName(DATAPLANE_PLUGIN_NAME_FOR_REFLECTION_TEST) + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + catalogImpl.getSystemUserCatalog().createSource(sourceConfigForReflectionTest); + + namespaceService = getSabotContext().getNamespaceService(SystemUser.SYSTEM_USERNAME); + } + + public static NessieApiV1 getNessieClient() { + return nessieClient; + } + + public static DataplanePlugin getDataplanePlugin() { + return dataplanePlugin; + } + + public static Catalog getCatalog() { + return catalog; + } + + public static Path getBucketPath() { + return bucketPath; + } + + public static void reinit() { + databaseAdapter.eraseRepo(); + s3Mock.stop(); + s3Mock.start(); + } + + public static Catalog getContextualizedCatalog(String pluginName, VersionContext versionContext) { + final Catalog resetCatalog = + getCatalog().resolveCatalogResetContext(DATAPLANE_PLUGIN_NAME, versionContext); + final Catalog newCatalog = + resetCatalog.resolveCatalog(ImmutableMap.of(DATAPLANE_PLUGIN_NAME, versionContext)); + return newCatalog; + } + + public String getContentId(List tableKey, TableVersionContext tableVersionContext) { + try { + final VersionedDatasetId versionedDatasetId = + VersionedDatasetId.fromString(getVersionedDatatsetId(tableKey, tableVersionContext)); + + return (versionedDatasetId == null ? null : versionedDatasetId.getContentId()); + } catch (JsonProcessingException e) { + return null; + } + } + + public String getVersionedDatatsetId( + List tableKey, TableVersionContext tableVersionContext) { + final Catalog contextualizedCatalog = + getContextualizedCatalog(DATAPLANE_PLUGIN_NAME, tableVersionContext.asVersionContext()); + + try { + return contextualizedCatalog + .getTableForQuery( + new NamespaceKey( + PathUtils.parseFullPath( + fullyQualifiedTableName(DATAPLANE_PLUGIN_NAME, tableKey)))) + .getDatasetConfig() + .getId() + .getId(); + } catch (ReferenceNotFoundException r) { + return null; + } + } + + /* Helper function used to set up the NessieURI as a string. */ + private static String createNessieURIString() { + return nessieUri.toString() + "v2"; + } + + /* Helper function to create a set of folders in a DATAPLANE_PLUGIN given a table path of format: + folder1/folder2/.../table1 + */ + protected void createFolders(List tablePath, VersionContext versionContext, String sessionId) { + //Iterate to get the parent folders where the table should be created (till tableName). 
Last element is tableName + StringBuilder folderName = new StringBuilder(); + folderName.append(DATAPLANE_PLUGIN_NAME); + for (int i = 0; i < tablePath.size() - 1; i++) { + folderName.append(".").append(tablePath.get(i)); + String query = createFolderAtQueryWithIfNotExists(Collections.singletonList(folderName.toString()), versionContext); + if (StringUtils.isEmpty(sessionId)) { + runQuery(query); + } else { + runQueryInSession(query, sessionId); + } + } + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePlugin.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePlugin.java new file mode 100644 index 0000000000..b2810c461e --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePlugin.java @@ -0,0 +1,259 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + + +import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_ADVANCED_DML; +import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_VACUUM; +import static com.dremio.exec.ExecConstants.VERSIONED_INFOSCHEMA_ENABLED; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.NO_ANCESTOR; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Branch; +import org.projectnessie.model.Detached; +import org.projectnessie.model.Tag; +import org.projectnessie.tools.compatibility.api.NessieServerProperty; + +import com.dremio.BaseTestQuery; + +/** + * Integration tests for Dataplane Plugin which runs for Nessie source in software against Nessie server (new model) + * which enforces namespace validation as default. + * Two models are used in Nessie servers: "PERSIST" and "DATABASE_ADAPTER". + * "PERSIST" is the new model and "DATABASE_ADAPTER" is the old model + * By default, it uses the old model if no model name specified in config property "nessie.test.storage.kind" of NessieServer + * + * Note: Requires complex setup, see base class ITDataplanePluginTestSetup for + * more details + * + * Adding new tests: + * - Check to see if your test(s) fit into an existing category, if so, add them there + * - If a new category of tests is needed, add the new set of tests in a + * separate file, and use the Nested pattern (see below) to define the new + * set of tests in this file. + * + * Instructions for running tests from Maven CLI: + * 1) Run the entire ITDataplanePlugin test suite + * - mvn failsafe:integration-test -pl :dremio-dac-backend -Dit.test=ITDataplanePlugin + * 2) Run a specific category, e.g. AlterTestCases + * - mvn failsafe:integration-test -pl :dremio-dac-backend -Dit.test="ITDataplanePlugin*NestedAlterTests" + * 3) Run a specific test, e.g. 
AlterTestCases#alterTableAddOneColumn + * - mvn failsafe:integration-test -pl :dremio-dac-backend -Dit.test="ITDataplanePlugin*NestedAlterTests#alterTableAddOneColumn" + * + * Note: The syntax of these commands is very particular, check it carefully + * - e.g. goal must be failsafe:integration-test, not just integration-test + * - Nested class must be quoted with wildcard (*), even though $ separator should have worked + * + * Instructions for running tests from IntelliJ: + * For either of: + * 1) Run the entire ITDataplanePlugin test suite + * 2) Run a specific category, e.g. AlterTestCases + * IntelliJ can automatically create a run configuration for you. + * + * Just click the green checkmark/arrow next to the top-level class for (1) or + * next to the Nested class for (2). + * + * For: + * 3) Run a specific test, e.g. AlterTestCases#alterTableAddOneColumn + * IntelliJ does not detect these automatically, so you'll have to manually + * modify one of the above auto-configurations. Change the configuration to + * the following values: + * - Dropdown set to "Method" (NOT "Class") + * - First text box set to: com.dremio.exec.catalog.dataplane.ITDataplanePlugin.NestedAlterTests + * - Second text box set to: alterTableAddOneColumn + */ +@NessieServerProperty(name = "nessie.test.storage.kind", value = "PERSIST") //PERSIST is the new model in Nessie Server +public class ITDataplanePlugin extends ITDataplanePluginTestSetup { + + @BeforeAll + public static void beforeAll() { + setSystemOption(ENABLE_ICEBERG_ADVANCED_DML, "true"); + setSystemOption(VERSIONED_INFOSCHEMA_ENABLED, "true"); + setSystemOption(ENABLE_ICEBERG_VACUUM, "true"); + } + + @AfterAll + public static void afterAll() { + resetSystemOption(ENABLE_ICEBERG_ADVANCED_DML.getOptionName()); + resetSystemOption(VERSIONED_INFOSCHEMA_ENABLED.getOptionName()); + resetSystemOption(ENABLE_ICEBERG_VACUUM.getOptionName()); + } + + @BeforeEach + // Since we can't set to an empty context, setting it to some known but *wrong* context on purpose. + public void before() throws Exception { + // Note: dfs_hadoop is immutable. 
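+    // Pointing the session at an immutable source means a test that forgets to
+    // fully qualify its table paths fails fast instead of silently reading from
+    // or writing to an unintended default context.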
+ BaseTestQuery.test("USE dfs_hadoop"); + Branch defaultBranch = getNessieClient().getDefaultBranch(); + getNessieClient().getAllReferences().stream() + .forEach( + ref -> { + try { + if (ref instanceof Branch && !ref.getName().equals(defaultBranch.getName())) { + getNessieClient().deleteBranch().branch((Branch) ref).delete(); + } else if (ref instanceof Tag) { + getNessieClient().deleteTag().tag((Tag) ref).delete(); + } + } catch (NessieConflictException | NessieNotFoundException e) { + throw new RuntimeException(e); + } + }); + + getNessieClient().assignBranch().branch(defaultBranch).assignTo(Detached.of(NO_ANCESTOR)).assign(); + } + + @Nested + class NestedInfoSchemaTests extends InfoSchemaTestCases { + NestedInfoSchemaTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedInfoSchemaCombinationTests extends InfoSchemaCombinationTestCases { + NestedInfoSchemaCombinationTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedFolderTests extends FolderTestCases { + NestedFolderTests() {super(ITDataplanePlugin.this);} + } + + @Nested + class NestedAlterTests extends AlterTestCases { + NestedAlterTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedBranchTagTests extends BranchTagTestCases { + NestedBranchTagTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedCreateTests extends CreateTestCases { + NestedCreateTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedCtasTests extends CtasTestCases { + NestedCtasTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedDeleteTests extends DeleteTestCases { + NestedDeleteTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedDropTests extends DropTestCases { + NestedDropTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedInsertTests extends InsertTestCases { + NestedInsertTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedSelectTests extends SelectTestCases { + NestedSelectTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedMergeTests extends MergeTestCases { + NestedMergeTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedUpdateTests extends UpdateTestCases { + NestedUpdateTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedOptimizeTests extends OptimizeTestCases { + NestedOptimizeTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedViewTests extends ViewTestCases { + NestedViewTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedDeleteFolderTestCases extends DeleteFolderTestCases { + NestedDeleteFolderTestCases() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedIDTests extends IdTestCases { + NestedIDTests() { + super(ITDataplanePlugin.this); + } + } + + @Nested + class NestedCopyIntoTests extends CopyIntoTestCases { + NestedCopyIntoTests() { + super(ITDataplanePlugin.this, createTempLocation(), TEMP_SCHEMA_HADOOP); + } + } + + @Nested + class NestedRollbackTests extends RollbackTestCases { + NestedRollbackTests() { + super(ITDataplanePlugin.this); + } + } + @Nested + class NestedVacuumTests extends VacuumTestCases { + NestedVacuumTests() { + super(ITDataplanePlugin.this); + } + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePluginTestSetup.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePluginTestSetup.java new file mode 100644 
index 0000000000..085243e879 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDataplanePluginTestSetup.java @@ -0,0 +1,384 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_TIME_TRAVEL; +import static com.dremio.exec.ExecConstants.VERSIONED_VIEW_ENABLED; +import static com.dremio.exec.catalog.CatalogOptions.REFLECTION_ARCTIC_ENABLED; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.ALTERNATIVE_BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.S3_PREFIX; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createFolderAtQueryWithIfNotExists; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectFileLocationsQuery; +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.options.OptionValue.OptionType.SYSTEM; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; + +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocatorFactory; +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.io.TempDir; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.http.HttpClientBuilder; +import org.projectnessie.tools.compatibility.api.NessieBaseUri; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; +import org.projectnessie.versioned.persist.adapter.DatabaseAdapter; +import org.projectnessie.versioned.persist.tests.extension.NessieDbAdapter; + +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.AnonymousAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.dremio.common.AutoCloseables; +import com.dremio.common.concurrent.CloseableThreadPool; +import com.dremio.common.concurrent.ContextMigratingExecutorService; +import com.dremio.config.DremioConfig; +import com.dremio.datastore.api.KVStoreProvider; +import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogServiceImpl; +import 
com.dremio.exec.catalog.ConnectionReader; +import com.dremio.exec.catalog.DatasetCatalogServiceImpl; +import com.dremio.exec.catalog.InformationSchemaServiceImpl; +import com.dremio.exec.catalog.MetadataRefreshInfoBroadcaster; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.sys.SystemTablePluginConfigProvider; +import com.dremio.options.OptionManager; +import com.dremio.options.OptionValue; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.plugins.dataplane.store.NessiePluginConfig; +import com.dremio.plugins.s3.store.S3FileSystem; +import com.dremio.plugins.sysflight.SysFlightPluginConfigProvider; +import com.dremio.service.conduit.server.ConduitServiceRegistry; +import com.dremio.service.conduit.server.ConduitServiceRegistryImpl; +import com.dremio.service.coordinator.ClusterCoordinator; +import com.dremio.service.listing.DatasetListingService; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.source.proto.SourceConfig; +import com.dremio.service.nessie.NessieService; +import com.dremio.service.scheduler.ModifiableSchedulerService; +import com.dremio.service.scheduler.SchedulerService; +import com.dremio.service.users.SystemUser; +import com.dremio.services.fabric.api.FabricService; +import com.dremio.telemetry.utils.TracerFacade; +import com.dremio.test.DremioTest; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import com.google.inject.AbstractModule; + +import io.findify.s3mock.S3Mock; +import software.amazon.awssdk.regions.Region; + +/** + * Set up for ITDataplane Integration tests + */ +@ExtendWith(OlderNessieServersExtension.class) +public class ITDataplanePluginTestSetup extends DataplaneTestHelper { + + private static AmazonS3 s3client; + private static S3Mock s3Mock; + @SuppressWarnings("checkstyle:VisibilityModifier") // read by subclasses + protected static int S3_PORT; + @TempDir + static File temporaryDirectory; + private static Path bucketPath; + + // Nessie + + @NessieDbAdapter + static DatabaseAdapter databaseAdapter; + + @NessieBaseUri + private static URI nessieUri; + + private static NessieApiV2 nessieClient; + private static DataplanePlugin dataplanePlugin; + + private static Catalog catalog; + private static NamespaceService namespaceService; + + @BeforeAll + protected static void setUp() throws Exception { + setUpS3Mock(); + setUpNessie(); + setUpSabotNodeRule(); + setUpDataplanePlugin(); + enableUseSyntax(); + setSystemOption(ENABLE_ICEBERG_TIME_TRAVEL, "true"); + setSystemOption(REFLECTION_ARCTIC_ENABLED, "true"); + } + + @AfterAll + public static void tearDown() throws Exception { + AutoCloseables.close( + dataplanePlugin, + nessieClient); + if (s3Mock != null) { + s3Mock.shutdown(); + s3Mock = null; + } + resetSystemOption(ENABLE_ICEBERG_TIME_TRAVEL.getOptionName()); + } + + protected static void setUpS3Mock() throws IOException { + Preconditions.checkState(s3Mock == null); + + // We use S3Mock's in-memory backend implementation to avoid incompatibility issues between Hadoop's S3's implementation + // and S3Mock's filesystem backend. 
When doing file deletions, Hadoop executes a "maybeCreateFakeParentDirectory" + // operation that tries to write a 0 byte object to S3. S3Mock's filesystem backend throws an AmazonS3Exception + // with a "Is a directory" message. The in-memory backend does not have the same issue. + // We encountered this problem (in tests only, not AWS S3) when cleaning up Iceberg metadata files after a failed Nessie commit. + s3Mock = new S3Mock.Builder() + .withPort(0) + .withInMemoryBackend() + .build(); + + S3_PORT = s3Mock.start().localAddress().getPort(); + + EndpointConfiguration endpoint = new EndpointConfiguration(String.format("http://localhost:%d", S3_PORT), Region.US_EAST_1.toString()); + + s3client = AmazonS3ClientBuilder + .standard() + .withPathStyleAccessEnabled(true) + .withEndpointConfiguration(endpoint) + .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials())) + .build(); + + s3client.createBucket(BUCKET_NAME); + s3client.createBucket(ALTERNATIVE_BUCKET_NAME); + } + + protected static void setUpNessie() { + nessieClient = HttpClientBuilder.builder().withUri(createNessieURIString()).build(NessieApiV2.class); + } + + private static void setUpSabotNodeRule() throws Exception { + SABOT_NODE_RULE.register(new AbstractModule() { + @Override + protected void configure() { + bind(SysFlightPluginConfigProvider.class).toInstance(new SysFlightPluginConfigProvider()); + + // For System Table set + final ConduitServiceRegistry conduitServiceRegistry = new ConduitServiceRegistryImpl(); + BufferAllocator rootAllocator = RootAllocatorFactory.newRoot(DremioTest.DEFAULT_SABOT_CONFIG); + BufferAllocator testAllocator = rootAllocator.newChildAllocator("test-sysflight-Plugin", 0, Long.MAX_VALUE); + + final boolean isMaster = true; + NessieService nessieService = new NessieService( + getProvider(KVStoreProvider.class), + getProvider(OptionManager.class), + getProvider(SchedulerService.class), + false, + () -> isMaster + ); + nessieService.getGrpcServices().forEach(conduitServiceRegistry::registerService); + + final DatasetCatalogServiceImpl datasetCatalogServiceImpl = new DatasetCatalogServiceImpl( + getProvider(CatalogService.class), + getProvider(NamespaceService.Factory.class) + ); + bind(DatasetCatalogServiceImpl.class).toInstance(datasetCatalogServiceImpl); + conduitServiceRegistry.registerService(datasetCatalogServiceImpl); + + conduitServiceRegistry.registerService(new InformationSchemaServiceImpl(getProvider(CatalogService.class), + () -> new ContextMigratingExecutorService.ContextMigratingCloseableExecutorService<>(new CloseableThreadPool("DataplaneEnterpriseTestSetup-"), TracerFacade.INSTANCE))); + + bind(ConduitServiceRegistry.class).toInstance(conduitServiceRegistry); + // End System Table set + + bind(ModifiableSchedulerService.class).toInstance(mock(ModifiableSchedulerService.class)); + bind(CatalogService.class).toInstance( + new CatalogServiceImpl( + getProvider(SabotContext.class), + getProvider(SchedulerService.class), + getProvider(SystemTablePluginConfigProvider.class), + getProvider(SysFlightPluginConfigProvider.class), + getProvider(FabricService.class), + getProvider(ConnectionReader.class), + getProvider(BufferAllocator.class), + getProvider(LegacyKVStoreProvider.class), + getProvider(DatasetListingService.class), + getProvider(OptionManager.class), + () -> mock(MetadataRefreshInfoBroadcaster.class), + DremioConfig.create(), + EnumSet.allOf(ClusterCoordinator.Role.class), + getProvider(ModifiableSchedulerService.class) + ) + ); + } + }); + 
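// The bindings above are registered before the cluster is built so that the
+    // test cluster picks up the overridden CatalogService and the embedded
+    // Nessie/conduit services wired in this module.
+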
setupDefaultTestCluster(); + } + + public static void setUpDataplanePluginWithWrongUrl(){ + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "wrong"; + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = BUCKET_NAME; + + // S3Mock settings + nessiePluginConfig.propertyList = Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property("fs.s3a.connection.ssl.enabled", "false"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true")); + + SourceConfig sourceConfig = new SourceConfig() + .setConnectionConf(nessiePluginConfig) + .setName(DATAPLANE_PLUGIN_NAME+"_wrongURL") + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + + getSabotContext().getOptionManager().setOption(OptionValue.createBoolean(SYSTEM, NESSIE_PLUGIN_ENABLED.getOptionName(), true)); + getSabotContext().getOptionManager().setOption(OptionValue.createBoolean(SYSTEM, VERSIONED_VIEW_ENABLED.getOptionName(), true)); + + CatalogServiceImpl catalogImpl = (CatalogServiceImpl) getSabotContext().getCatalogService(); + catalogImpl.getSystemUserCatalog().createSource(sourceConfig); + } + + private static NessiePluginConfig prepareConnectionConf(String bucket) { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = createNessieURIString(); + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = bucket; + + // S3Mock settings + nessiePluginConfig.propertyList = Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property("fs.s3a.connection.ssl.enabled", "false"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true")); + + return nessiePluginConfig; + } + + protected static void setUpDataplanePlugin() { + getSabotContext().getOptionManager().setOption(OptionValue.createBoolean(SYSTEM, NESSIE_PLUGIN_ENABLED.getOptionName(), true)); + getSabotContext().getOptionManager().setOption(OptionValue.createBoolean(SYSTEM, VERSIONED_VIEW_ENABLED.getOptionName(), true)); + + CatalogServiceImpl catalogImpl = (CatalogServiceImpl) getSabotContext().getCatalogService(); + + SourceConfig sourceConfig = new SourceConfig() + .setConnectionConf(prepareConnectionConf(BUCKET_NAME)) + .setName(DATAPLANE_PLUGIN_NAME) + .setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + catalogImpl.getSystemUserCatalog().createSource(sourceConfig); + dataplanePlugin = catalogImpl.getSystemUserCatalog().getSource(DATAPLANE_PLUGIN_NAME); + catalog = catalogImpl.getSystemUserCatalog(); + + namespaceService = getSabotContext().getNamespaceService(SystemUser.SYSTEM_USERNAME); + } + + public void runWithAlternateSourcePath(String sql) throws Exception { + try (AutoCloseable resetPath = () -> changeSourceRootPath(BUCKET_NAME)) { + changeSourceRootPath(ALTERNATIVE_BUCKET_NAME); + runSQL(sql); + } + } + + private static void changeSourceRootPath(String bucket) throws NamespaceException { + SourceConfig sourceConfig = 
namespaceService.getSource(new NamespaceKey(DATAPLANE_PLUGIN_NAME)); + sourceConfig.setConnectionConf(prepareConnectionConf(bucket)); + CatalogServiceImpl catalogImpl = (CatalogServiceImpl) getSabotContext().getCatalogService(); + catalogImpl.getSystemUserCatalog().updateSource(sourceConfig); + } + + public NessieApiV1 getNessieClient() { + return nessieClient; + } + + public DataplanePlugin getDataplanePlugin() { + return dataplanePlugin; + } + + public Catalog getContextualizedCatalog(String pluginName, VersionContext versionContext) { + Catalog resetCatalog = getCatalog().resolveCatalogResetContext(DATAPLANE_PLUGIN_NAME, versionContext); + Catalog newCatalog = resetCatalog.resolveCatalog(ImmutableMap.of(DATAPLANE_PLUGIN_NAME, versionContext)); + return newCatalog; + } + + public Catalog getCatalog() { + return catalog; + } + + public static AmazonS3 getS3Client() { + return s3client; + } + + public static void reinit() { + databaseAdapter.eraseRepo(); + s3Mock.stop(); + s3Mock.start(); + } + + public void assertAllFilesAreInBaseBucket(List<String> tablePath) throws Exception { + List<List<String>> fileLocations = runSqlWithResults(selectFileLocationsQuery(tablePath)); + fileLocations.stream().flatMap(List::stream).forEach(loc -> assertTrue(loc.startsWith(S3_PREFIX + BUCKET_NAME))); + } + + public void assertAllFilesInAlternativeBucket(List<String> tablePath) throws Exception { + List<List<String>> fileLocations = runSqlWithResults(selectFileLocationsQuery(tablePath)); + fileLocations.stream().flatMap(List::stream).forEach(loc -> assertTrue(loc.startsWith(S3_PREFIX + ALTERNATIVE_BUCKET_NAME))); + } + + protected static File createTempLocation() { + String locationName = RandomStringUtils.randomAlphanumeric(8); + File location = new File(getDfsTestTmpSchemaLocation(), locationName); + location.mkdirs(); + return location; + } + + protected static void createFolders(List<String> tablePath, VersionContext versionContext) throws Exception { + // Iterate over the parent folders in which the table should be created; the last element of tablePath is the table name itself + StringBuilder folderName = new StringBuilder(); + folderName.append(DATAPLANE_PLUGIN_NAME); + for (int i = 0; i < tablePath.size() - 1; i++) { + folderName.append(".").append(tablePath.get(i)); + runSQL(createFolderAtQueryWithIfNotExists(Collections.singletonList(folderName.toString()), versionContext)); + } + } + + /* Helper function used to set up the Nessie URI as a string. */ + private static String createNessieURIString() { + return nessieUri.toString() + "v2"; + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDatasetVersionContextTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDatasetVersionContextTestCases.java new file mode 100644 index 0000000000..53bd43caf2 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITDatasetVersionContextTestCases.java @@ -0,0 +1,201 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewAtSpecifierQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinTpcdsTablesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQueryWithSpecifier; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithSource; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.dac.util.JobUtil; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.service.job.JobDetails; +import com.dremio.service.job.JobDetailsRequest; +import com.dremio.service.job.RequestType; +import com.dremio.service.job.proto.DataSet; +import com.dremio.service.job.proto.JobId; +import com.dremio.service.job.proto.JobProtobuf; +import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.jobs.JobsService; + +public class ITDatasetVersionContextTestCases extends ITBaseTestVersioned { + @Test + public void testArcticTableVersionContext() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final List<String> tableFullPath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, tablePath); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + + runQuery(createTableAsQuery(tablePath, 1000)); + + final JobId jobId = runQuery(selectStarQuery(tablePath), null); + final JobDetails jobDetails = + l(JobsService.class) + .getJobDetails( + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(jobId)).build()); + final JobProtobuf.JobInfo jobInfo = jobDetails.getAttempts(0).getInfo(); + final List<DataSet> queriedDatasets = + JobUtil.getQueriedDatasets( + JobsProtoUtil.toStuff(jobInfo), + RequestType.valueOf(jobInfo.getRequestType().toString())); + + assertThat(queriedDatasets.size()).isEqualTo(1); + assertThat(queriedDatasets.get(0).getDatasetPath()).isEqualTo(String.join(".", tableFullPath)); + assertThat(queriedDatasets.get(0).getVersionContext()) + .isEqualTo( + TableVersionContext.of(VersionContext.ofBranch(DEFAULT_BRANCH_NAME)).serialize()); + } + + @Test + public void testArcticTableOnTestBranchVersionContext() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final List<String> tableFullPath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, tablePath); + final String testBranch = "test"; + final String 
sessionId = + runQueryAndGetSessionId(createBranchAtBranchQuery(testBranch, DEFAULT_BRANCH_NAME)); + + runQuery(useBranchQuery(testBranch), sessionId); + createFolders(tablePath, VersionContext.ofBranch(testBranch), sessionId); + runQuery(createTableAsQuery(tablePath, 1000), sessionId); + + final JobId jobId = + runQuery(selectStarQueryWithSpecifier(tablePath, "BRANCH " + testBranch), sessionId); + final JobDetails jobDetails = + l(JobsService.class) + .getJobDetails( + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(jobId)).build()); + final JobProtobuf.JobInfo jobInfo = jobDetails.getAttempts(0).getInfo(); + final List<DataSet> queriedDatasets = + JobUtil.getQueriedDatasets( + JobsProtoUtil.toStuff(jobInfo), + RequestType.valueOf(jobInfo.getRequestType().toString())); + + assertThat(queriedDatasets.size()).isEqualTo(1); + assertThat(queriedDatasets.get(0).getDatasetPath()).isEqualTo(String.join(".", tableFullPath)); + assertThat(queriedDatasets.get(0).getVersionContext()) + .isEqualTo(TableVersionContext.of(VersionContext.ofBranch(testBranch)).serialize()); + } + + @Test + public void testArcticViewVersionContext() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final List<String> tableFullPath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, tablePath); + final String viewName = generateUniqueViewName(); + final List<String> viewPath = tablePathWithFolders(viewName); + final List<String> viewFullPath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, viewPath); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), null); + + runQuery(createTableAsQuery(tablePath, 1000)); + runQuery(createViewQuery(viewPath, tablePath)); + + final JobId jobId = runQuery(selectStarQuery(viewPath), null); + final JobDetails jobDetails = + l(JobsService.class) + .getJobDetails( + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(jobId)).build()); + final JobProtobuf.JobInfo jobInfo = jobDetails.getAttempts(0).getInfo(); + final List<DataSet> queriedDatasets = + JobUtil.getQueriedDatasets( + JobsProtoUtil.toStuff(jobInfo), + RequestType.valueOf(jobInfo.getRequestType().toString())); + + assertThat(queriedDatasets.size()).isEqualTo(1); + assertThat(queriedDatasets.get(0).getDatasetPath()).isEqualTo(String.join(".", viewFullPath)); + assertThat(queriedDatasets.get(0).getVersionContext()) + .isEqualTo( + TableVersionContext.of(VersionContext.ofBranch(DEFAULT_BRANCH_NAME)).serialize()); + } + + @Test + public void testArcticViewOnDifferentBranchVersionContext() throws Exception { + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String devBranch = "dev"; + final String sessionId = + runQueryAndGetSessionId(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + + runQuery(useBranchQuery(devBranch), sessionId); + createFolders(tablePath, VersionContext.ofBranch(devBranch), sessionId); + runQuery(createTableAsQuery(tablePath, 1000), sessionId); + + final String viewName = generateUniqueViewName(); + final List<String> viewPath = tablePathWithFolders(viewName); + final List<String> viewFullPath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, viewPath); + + runQuery(useBranchQuery(DEFAULT_BRANCH_NAME), sessionId); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME), sessionId); + + runQuery(createViewAtSpecifierQuery(viewPath, tablePath, "BRANCH " + devBranch), sessionId); + + final JobId 
jobId = + runQuery( + selectStarQueryWithSpecifier(viewPath, "BRANCH " + DEFAULT_BRANCH_NAME), sessionId); + final JobDetails jobDetails = + l(JobsService.class) + .getJobDetails( + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(jobId)).build()); + final JobProtobuf.JobInfo jobInfo = jobDetails.getAttempts(0).getInfo(); + final List<DataSet> queriedDatasets = + JobUtil.getQueriedDatasets( + JobsProtoUtil.toStuff(jobInfo), + RequestType.valueOf(jobInfo.getRequestType().toString())); + + assertThat(queriedDatasets.size()).isEqualTo(1); + assertThat(queriedDatasets.get(0).getDatasetPath()).isEqualTo(String.join(".", viewFullPath)); + assertThat(queriedDatasets.get(0).getVersionContext()) + .isEqualTo( + TableVersionContext.of(VersionContext.ofBranch(DEFAULT_BRANCH_NAME)).serialize()); + } + + @Test + public void testNonArcticTableVersionContext() throws Exception { + final JobId jobId = runQuery(joinTpcdsTablesQuery()); + final JobDetails jobDetails = + l(JobsService.class) + .getJobDetails( + JobDetailsRequest.newBuilder().setJobId(JobsProtoUtil.toBuf(jobId)).build()); + final JobProtobuf.JobInfo jobInfo = jobDetails.getAttempts(0).getInfo(); + final List<DataSet> queriedDatasets = + JobUtil.getQueriedDatasets( + JobsProtoUtil.toStuff(jobInfo), + RequestType.valueOf(jobInfo.getRequestType().toString())); + + assertThat(queriedDatasets.size()).isEqualTo(2); + assertThat(queriedDatasets.get(0).getVersionContext()).isNull(); + assertThat(queriedDatasets.get(1).getVersionContext()).isNull(); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestExternalReflections.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestExternalReflections.java new file mode 100644 index 0000000000..3af860fadf --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestExternalReflections.java @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinedTableKey; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithSource; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.Collections; +import java.util.List; + +import org.apache.arrow.memory.BufferAllocator; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.dremio.common.exceptions.UserRemoteException; +import com.dremio.dac.server.JobsServiceTestUtils; +import com.dremio.service.jobs.JobStatusListener; +import com.dremio.service.jobs.JobsService; + +/** + * External reflection tests on versioned datasets. Currently, external reflections on versioned datasets are not supported, + * so these tests just verify that the DDL query fails. In the future, when the feature is supported, add tests here. + */ +public class ITTestExternalReflections extends ITBaseTestReflection { + private BufferAllocator allocator; + private static final String tableName = generateUniqueTableName(); + private static final List<String> tablePath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, Collections.singletonList(tableName)); + + @BeforeAll + public static void testSetup() { + JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), createNewJobRequestFromSql(createTableAsQuery(Collections.singletonList(tableName), 1000)), JobStatusListener.NO_OP); + } + + @BeforeEach + public void setUp() { + allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); + } + + @AfterEach + public void cleanUp() throws Exception { + allocator.close(); + } + + /** + * Verifies that DDL for an external reflection on an Arctic dataset errors out + */ + @Test + public void testExternalReflectionFails() { + assertThatThrownBy(() -> JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), createNewJobRequestFromSql(createExternalReflection(tablePath, tablePath, "ext_ref")), JobStatusListener.NO_OP)) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("External reflections are not supported on versioned source dataPlane_Test"); + } + + public static String createExternalReflection(final List<String> query, final List<String> target, final String name) { + return String.format( + "ALTER DATASET %s CREATE EXTERNAL REFLECTION %s USING %s", + joinedTableKey(query), + name, + joinedTableKey(target)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestReflectionSuggester.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestReflectionSuggester.java new file mode 100644 index 0000000000..216e7bef02 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ITTestReflectionSuggester.java @@ -0,0 +1,166 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.ExecConstants.LAYOUT_REFRESH_MAX_ATTEMPTS; +import static com.dremio.exec.ExecConstants.PARQUET_MAXIMUM_PARTITIONS_VALIDATOR; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithSource; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Collections; +import java.util.List; + +import org.apache.arrow.memory.BufferAllocator; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.dremio.dac.server.JobsServiceTestUtils; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.service.jobs.JobStatusListener; +import com.dremio.service.jobs.JobsService; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.reflection.analysis.ReflectionAnalyzer; +import com.dremio.service.reflection.analysis.ReflectionSuggester; +import com.dremio.service.reflection.proto.ReflectionGoal; +import com.dremio.service.reflection.proto.ReflectionType; + +public class ITTestReflectionSuggester extends ITBaseTestReflection { + + private BufferAllocator allocator; + private static final String tableName = generateUniqueTableName(); + private static final List<String> tablePath = tablePathWithSource(DATAPLANE_PLUGIN_NAME, Collections.singletonList(tableName)); + + @BeforeAll + public static void testSetup() { + JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), createNewJobRequestFromSql(createTableAsQuery(Collections.singletonList(tableName), 1000)), JobStatusListener.NO_OP); + + JobsServiceTestUtils.submitJobAndWaitUntilCompletion( + l(JobsService.class), createNewJobRequestFromSql(createTagQuery("dev", DEFAULT_BRANCH_NAME)), JobStatusListener.NO_OP); + } + + @BeforeEach + public void setUp() { + allocator = getSabotContext().getAllocator().newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); + } + + @AfterEach + public void cleanUp() throws Exception { + resetSystemOption(LAYOUT_REFRESH_MAX_ATTEMPTS.getOptionName()); + resetSystemOption(PARQUET_MAXIMUM_PARTITIONS_VALIDATOR.getOptionName()); + allocator.close(); + } + + /** + * Verifies reflection suggestions on a versioned table at branch + */ + @Test + public void testBranchSuggestions() { + // Analyze the table and collect stats + 
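+ // Note (added for clarity): the analyzer runs a stats job against the versioned table, and the suggester below derives reflection goals from those stats.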
ReflectionAnalyzer analyzer = new ReflectionAnalyzer(getJobsService(), getSabotContext().getCatalogService(), allocator); + DremioTable table = CatalogUtil.getTable(CatalogEntityKey.newBuilder() + .keyComponents(tablePath) + .tableVersionContext(new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME)) + .build(), getCatalog()); + ReflectionAnalyzer.TableStats tableStats = analyzer.analyze(table.getDatasetConfig().getId().getId()); + assertEquals(25, tableStats.getCount().longValue()); + assertEquals(2, tableStats.getColumns().size()); + verifyNationKeyColumnStat(tableStats.getColumns().get(0)); + + // Verify the agg reflection suggestion + ReflectionSuggester suggester = new ReflectionSuggester(table.getDatasetConfig(), tableStats); + List<ReflectionGoal> goals = suggester.getReflectionGoals(); + assertEquals(2, goals.size()); + verifyAggGoal(goals.get(1)); + } + + /** + * Verifies reflection suggestions on a versioned table at snapshot + */ + @Test + public void testSnapshotSuggestions() { + // Get the latest snapshot id from the dataset config + DatasetConfig config = CatalogUtil.getDatasetConfig(getCatalog(), new NamespaceKey(tablePath)); + String snapshotId = String.valueOf(config.getPhysicalDataset().getIcebergMetadata().getSnapshotId()); + + // Analyze the table and collect stats + ReflectionAnalyzer analyzer = new ReflectionAnalyzer(getJobsService(), getSabotContext().getCatalogService(), allocator); + DremioTable table = CatalogUtil.getTable(CatalogEntityKey.newBuilder() + .keyComponents(tablePath) + .tableVersionContext(new TableVersionContext(TableVersionType.SNAPSHOT_ID, snapshotId)) + .build(), getCatalog()); + ReflectionAnalyzer.TableStats tableStats = analyzer.analyze(table.getDatasetConfig().getId().getId()); + assertEquals(25, tableStats.getCount().longValue()); + assertEquals(2, tableStats.getColumns().size()); + verifyNationKeyColumnStat(tableStats.getColumns().get(0)); + + // Verify the agg reflection suggestion + ReflectionSuggester suggester = new ReflectionSuggester(table.getDatasetConfig(), tableStats); + List<ReflectionGoal> goals = suggester.getReflectionGoals(); + assertEquals(2, goals.size()); + verifyAggGoal(goals.get(1)); + } + + /** + * Verifies reflection suggestions on a versioned table at tag + */ + @Test + public void testTagSuggestions() { + // Analyze the table and collect stats + ReflectionAnalyzer analyzer = new ReflectionAnalyzer(getJobsService(), getSabotContext().getCatalogService(), allocator); + DremioTable table = CatalogUtil.getTable(CatalogEntityKey.newBuilder() + .keyComponents(tablePath) + .tableVersionContext(new TableVersionContext(TableVersionType.TAG, "dev")) + .build(), getCatalog()); + ReflectionAnalyzer.TableStats tableStats = analyzer.analyze(table.getDatasetConfig().getId().getId()); + assertEquals(25, tableStats.getCount().longValue()); + assertEquals(2, tableStats.getColumns().size()); + verifyNationKeyColumnStat(tableStats.getColumns().get(0)); + + // Verify the agg reflection suggestion + ReflectionSuggester suggester = new ReflectionSuggester(table.getDatasetConfig(), tableStats); + List<ReflectionGoal> goals = suggester.getReflectionGoals(); + assertEquals(2, goals.size()); + verifyAggGoal(goals.get(1)); + } + + private void verifyNationKeyColumnStat(ReflectionAnalyzer.ColumnStats stat) { + assertEquals("n_nationkey", stat.getField().getName()); + assertEquals(25, stat.getCardinality()); + assertEquals(25, stat.getCount()); + assertEquals(0.0, stat.getMinValue()); + assertEquals(24.0, stat.getMaxValue()); + assertEquals(12.0, stat.getAverageValue()); + } + 
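+ // For reference, the flow exercised by the three tests above is, in outline (a sketch added for + // clarity; it assumes a running test cluster and is not executable on its own): + // + // ReflectionAnalyzer analyzer = new ReflectionAnalyzer(getJobsService(), getSabotContext().getCatalogService(), allocator); + // DremioTable table = CatalogUtil.getTable(key, getCatalog()); // key pins a branch, tag, or snapshot + // ReflectionAnalyzer.TableStats stats = analyzer.analyze(table.getDatasetConfig().getId().getId()); + // List<ReflectionGoal> goals = new ReflectionSuggester(table.getDatasetConfig(), stats).getReflectionGoals(); + 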
private void verifyAggGoal(ReflectionGoal goal) { + assertEquals(ReflectionType.AGGREGATION, goal.getType()); + assertEquals("n_regionkey", goal.getDetails().getDimensionFieldList().get(0).getName()); + assertEquals("n_nationkey", goal.getDetails().getMeasureFieldList().get(0).getName()); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/IdTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/IdTestCases.java new file mode 100644 index 0000000000..b716dbcc2f --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/IdTestCases.java @@ -0,0 +1,758 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterBranchAssignBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableAddColumnsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createReplaceViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.deleteAllQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasCommitForTable; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasTable; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import java.util.Collections; +import java.util.List; + 
+import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.parallel.Isolated; +import org.projectnessie.model.Operation; + +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; +import com.dremio.service.namespace.proto.EntityId; + +@Isolated +public class IdTestCases { + private ITDataplanePluginTestSetup base; + + IdTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + /** + * Tests content id after create - should be unique for each create + */ + @Test + public void checkContentIdAfterReCreate() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String contentIdBeforeDrop = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + base.runSQL(dropTableQuery(tablePath)); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String contentIdAfterReCreate = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Assert + assertThat(contentIdBeforeDrop.equals(contentIdAfterReCreate)).isFalse(); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void checkUniqueIdAfterCreate() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String uniqueIdBeforeDrop = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + base.runSQL(dropTableQuery(tablePath)); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String uniqueIdAfterReCreate = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Assert + assertThat(uniqueIdBeforeDrop.equals(uniqueIdAfterReCreate)).isFalse(); + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests content id of altered objects - should remain the same + */ + @Test + public void checkContentIdAfterAlters() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String contentIdBeforeAlter = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + final List<String> addedColDef = Collections.singletonList("col2 int"); + // Add single column + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef)); + String contentIdAfterAlter = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Assert + assertThat(contentIdBeforeAlter.equals(contentIdAfterAlter)).isTrue(); + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests unique id of altered objects - should 
change + */ + @Test + public void checkUniqueIdAfterAlters() throws Exception { +// Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String uniqueIdBeforeAlter = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + final List addedColDef = Collections.singletonList("col2 int"); + //Add single column + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef)); + String uniqueIdAfterAlter = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Assert + assertThat(uniqueIdBeforeAlter.equals(uniqueIdAfterAlter)).isFalse(); + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests content id after insert- should stay the same + */ + @Test + public void checkContentIdAfterInsert() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + String contentIdBeforeInsert = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(insertTableQuery(tablePath)); + String contentIdAfterInsert = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(contentIdBeforeInsert.equals(contentIdAfterInsert)).isTrue(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests unique id of objects after insert - should change; + */ + @Test + public void checkUniqueIdAfterInsert() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + String uniqueIdBeforeInsert = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(insertTableQuery(tablePath)); + String uniqueIdAfterInsert = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(uniqueIdBeforeInsert.equals(uniqueIdAfterInsert)).isFalse(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests content id after insert- should stay the same + */ + @Test + public void checkContentIdAfterDeletes() throws Exception { + //Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + String contentIdAfterCreate = 
base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + base.runSQL(insertTableQuery(tablePath)); + String contentIdAfterInsert = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(deleteAllQuery(tablePath)); + String contentIdAfterDelete = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(contentIdAfterCreate.equals(contentIdAfterInsert)).isTrue(); + assertThat(contentIdAfterDelete.equals(contentIdAfterInsert)).isTrue(); + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests unique id of objects after deletes - should change + */ + @Test + public void checkUniqueIdAfterDeletes() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base); + assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base); + String uniqueIdAfterCreate = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + base.runSQL(insertTableQuery(tablePath)); + String uniqueIdAfterInsert = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(deleteAllQuery(tablePath)); + String uniqueIdAfterDelete = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(uniqueIdAfterCreate.equals(uniqueIdAfterInsert)).isFalse(); + assertThat(uniqueIdAfterDelete.equals(uniqueIdAfterInsert)).isFalse(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Tests content id after branching - should stay the same + */ + @Test + public void checkContentIdInNewBranch() throws Exception { + // Arrange + final String newBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + // Create table in main branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(createBranchAtBranchQuery(newBranchName, DEFAULT_BRANCH_NAME)); + String contentIdInMainBranch = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + base.runSQL(useBranchQuery(newBranchName)); + String contentIdInNewBranch = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, newBranchName), base); + + // Assert + assertThat(contentIdInNewBranch.equals(contentIdInMainBranch)).isTrue(); + } + + /** + * Tests unique id of objects after DML in a branch - should change + */ + @Test + public void checkUniqueIdAfterDMLInBranch() throws Exception { + // Arrange + final String newBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + // Create table in main branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + 
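+ // Note (added for clarity): the branch is created before the insert below, so the new branch still points at the empty table while main advances.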
base.runSQL(createBranchAtBranchQuery(newBranchName, DEFAULT_BRANCH_NAME)); + // Insert in main branch + base.runSQL(insertTableQuery(tablePath)); + String uniqueIdBeforeBranching = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + base.runSQL(useBranchQuery(newBranchName)); + String uniqueIdAfterBranching = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, newBranchName), base); + + // Assert + assertThat(uniqueIdAfterBranching.equals(uniqueIdBeforeBranching)).isFalse(); + } + + /** + * Tests content id after assign - should stay the same + */ + @Test + public void checkContentIdAfterAssignBranch() throws Exception { + // Arrange + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List<String> mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.runSQL(insertTableQuery(mainTablePath)); + String contentIdBeforeAssign = base.getContentId(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(alterBranchAssignBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + String contentIdAfterAssign = base.getContentId(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(contentIdBeforeAssign.equals(contentIdAfterAssign)).isTrue(); + // Drop tables + base.runSQL(dropTableQuery(mainTablePath)); + } + + /** + * Tests unique id of objects after assign - should stay the same + */ + @Test + public void checkUniqueIdAfterAssignBranch() throws Exception { + // Arrange + final String branchName = generateUniqueBranchName(); + final String mainTableName = generateUniqueTableName(); + final List<String> mainTablePath = tablePathWithFolders(mainTableName); + + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.runSQL(insertTableQuery(mainTablePath)); + String uniqueIdBeforeAssign = base.getUniqueIdForTable(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Act + base.runSQL(alterBranchAssignBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + String uniqueIdAfterAssign = base.getUniqueIdForTable(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + assertThat(uniqueIdBeforeAssign.equals(uniqueIdAfterAssign)).isTrue(); + // Drop tables + base.runSQL(dropTableQuery(mainTablePath)); + } + + @Test + public void checkContentIdAfterAlterView() throws Exception { + // Arrange + String tableName1 = generateUniqueTableName(); + final List<String> tablePath1 = tablePathWithFolders(tableName1); + String tableName2 = generateUniqueTableName(); + final List<String> tablePath2 = tablePathWithFolders(tableName2); + + // Create table1 with 10 rows + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath1, 10)); + final String viewName = generateUniqueViewName(); + List<String> viewKey = tablePathWithFolders(viewName); + createFolders(viewKey, 
VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createViewQuery(viewKey, tablePath1)); + String contentIdAfterCreate = base.getContentId(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + base.assertViewHasExpectedNumRows(viewKey, 10); + // Create table2 with 20 rows. + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath2, 20)); + + // Act + base.runSQL(createReplaceViewQuery(viewKey, tablePath2)); + String contentIdAfterUpdate = base.getContentId(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + assertThat(contentIdAfterCreate.equals(contentIdAfterUpdate)).isTrue(); + base.runSQL(dropTableQuery(tablePath1)); + base.runSQL(dropTableQuery(tablePath2)); + } + + @Test + public void checkUniqueIdAfterAlterView() throws Exception { + // Arrange + String tableName1 = generateUniqueTableName(); + final List<String> tablePath1 = tablePathWithFolders(tableName1); + String tableName2 = generateUniqueTableName(); + final List<String> tablePath2 = tablePathWithFolders(tableName2); + + // Create table1 with 10 rows + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath1, 10)); + final String viewName = generateUniqueViewName(); + List<String> viewKey = tablePathWithFolders(viewName); + createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createViewQuery(viewKey, tablePath1)); + String uniqueIdAfterCreate = base.getUniqueIdForView(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + base.assertViewHasExpectedNumRows(viewKey, 10); + // Create table2 with 20 rows. + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath2, 20)); + + // Act + base.runSQL(createReplaceViewQuery(viewKey, tablePath2)); + String uniqueIdAfterUpdate = base.getUniqueIdForView(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + assertThat(uniqueIdAfterCreate.equals(uniqueIdAfterUpdate)).isFalse(); + base.runSQL(dropTableQuery(tablePath1)); + base.runSQL(dropTableQuery(tablePath2)); + } + + /** + * Tests dataset id after branching - should change + */ + @Test + public void checkDatasetIdInNewBranch() throws Exception { + // Arrange + final String newBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + // Create table in main branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(createBranchAtBranchQuery(newBranchName, DEFAULT_BRANCH_NAME)); + String datasetIdInMainBranch = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + base.runSQL(useBranchQuery(newBranchName)); + String datasetIdInNewBranch = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, newBranchName), base); + + // Assert + assertThat(datasetIdInMainBranch.equals(datasetIdInNewBranch)).isFalse(); + } + + /** + * Tests dataset id of objects after branching - should change + */ + @Test + public void checkDatasetIdAfterDMLInBranch() throws Exception { + // Arrange + final String newBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + 
final List<String> tablePath = tablePathWithFolders(tableName); + // Create table in main branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(createBranchAtBranchQuery(newBranchName, DEFAULT_BRANCH_NAME)); + // Insert in main branch + base.runSQL(insertTableQuery(tablePath)); + String datasetIdBeforeBranching = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + base.runSQL(useBranchQuery(newBranchName)); + String datasetIdAfterBranching = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, newBranchName), base); + + // Assert + assertThat(datasetIdBeforeBranching.equals(datasetIdAfterBranching)).isFalse(); + } + + @Test + public void getTableWithId() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String datasetIdAfterCreate = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + DremioTable dremioTableAfterCreate = base.getTableFromId(datasetIdAfterCreate, base); + String schema = dremioTableAfterCreate.getSchema().toJSONString(); + assertThat(schema.contains("id")).isTrue(); + assertThat(schema.contains("name")).isTrue(); + assertThat(schema.contains("distance")).isTrue(); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdSnapshot() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + String getSnapshotString = String.format("SELECT snapshot_id FROM table(table_snapshot('%s.%s'))", + DATAPLANE_PLUGIN_NAME, + String.join(".", tablePath)); + List<String> results = base.runSqlWithResults(getSnapshotString).get(0); + + String datasetIdFromSnapshot = base.getVersionedDatatsetIdForTimeTravel(tablePath, new TableVersionContext(TableVersionType.SNAPSHOT_ID, results.get(0)), base); + DremioTable dremioTable = base.getTableFromId(datasetIdFromSnapshot, base); + String schema = dremioTable.getSchema().toJSONString(); + assertThat(schema.contains("id")).isTrue(); + assertThat(schema.contains("name")).isTrue(); + assertThat(schema.contains("distance")).isTrue(); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdTimestamp() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + long ts1 = System.currentTimeMillis(); + String datasetIdWithTS = base.getVersionedDatatsetIdForTimeTravel(tablePath, new TableVersionContext(TableVersionType.TIMESTAMP, ts1), base); + DremioTable dremioTableWithTS = base.getTableFromId(datasetIdWithTS, base); + String schema = dremioTableWithTS.getSchema().toJSONString(); + assertThat(schema.contains("id")).isTrue(); + assertThat(schema.contains("name")).isTrue(); + assertThat(schema.contains("distance")).isTrue(); + 
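+ // Note (added for clarity): ts1 was captured after the insert above, so the TIMESTAMP lookup resolves to the latest snapshot.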
base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdAfterAlter() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String datasetIdAfterCreate = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + final List<String> addedColDef = Collections.singletonList("col2 int"); + // Add single column + base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef)); + String datasetIdAfterAlter = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + DremioTable dremioTableAfterAlter = base.getTableFromId(datasetIdAfterAlter, base); + + // Assert + assertThat(datasetIdAfterCreate.equals(datasetIdAfterAlter)).isTrue(); + String schemaAfterAlter = dremioTableAfterAlter.getSchema().toJSONString(); + assertThat(schemaAfterAlter.contains("col2")).isTrue(); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdAfterReCreate() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String datasetIdAfterFirstCreate = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + base.runSQL(dropTableQuery(tablePath)); + base.runSQL(createTableAsQuery(tablePath, 5)); + String datasetIdAfterRecreate = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + // Look up with the new id. 
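+ // Note (added for clarity): the pre-drop id should no longer resolve; only the id from the re-created table is expected to return a table (asserted below).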
+ DremioTable dremioTableAfterRecreate = base.getTableFromId(datasetIdAfterRecreate, base); + DremioTable dremioTableAfterFirstCreate = base.getTableFromId(datasetIdAfterFirstCreate, base); + + // Assert + assertThat(datasetIdAfterFirstCreate.equals(datasetIdAfterRecreate)).isFalse(); + assertThat(dremioTableAfterRecreate.getSchema().toJSONString().contains("n_nationkey")).isTrue(); + assertThat(dremioTableAfterFirstCreate).isNull(); + + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithInvalidId() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String datasetIdAfterCreate = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + String invalidDatasetId = StringUtils.replace(datasetIdAfterCreate, "contentId", "invalidContentIdToken"); + DremioTable dremioTableFromWrongId = base.getTableFromId(invalidDatasetId, base); + + assertThat(dremioTableFromWrongId).isNull(); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdFromNewBranch() throws Exception { + // Arrange + final String newBranchName = generateUniqueBranchName(); + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + // Create table in main branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + // Insert in main branch + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(createBranchAtBranchQuery(newBranchName, DEFAULT_BRANCH_NAME)); + + String datasetIdFromMain = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + + // Act + base.runSQL(useBranchQuery(newBranchName)); + String datasetIdFromBranch = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, newBranchName), base); + + DremioTable tableFromMain = base.getTableFromId(datasetIdFromMain, base); + DremioTable tableFromBranch = base.getTableFromId(datasetIdFromBranch, base); + VersionedDatasetId versionedDatasetIdFromMain = VersionedDatasetId.fromString(tableFromMain.getDatasetConfig().getId().getId()); + VersionedDatasetId versionedDatasetIdFromBranch = VersionedDatasetId.fromString(tableFromBranch.getDatasetConfig().getId().getId()); + String contentIdFromMain = versionedDatasetIdFromMain.getContentId(); + String contentIdFromBranch = versionedDatasetIdFromBranch.getContentId(); + + String uniqueIdFromMain = tableFromMain.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid(); + String uniqueIdFromBranch = tableFromBranch.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid(); + + // Assert + // The uniqueId, contentId should be the same for the table in both branches. 
+ // The datasetId would be different (version context part) + assertThat(contentIdFromBranch.equals(contentIdFromMain)).isTrue(); + assertThat(uniqueIdFromBranch.equals(uniqueIdFromMain)).isTrue(); + assertThat(datasetIdFromMain.equals(datasetIdFromBranch)).isFalse(); + } + + @Test + public void getTableWithIdFromTag() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String firstTag = generateUniqueTagName(); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + + String versionAtTag = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.TAG, firstTag), base); + String versionAtBranchTip = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + DremioTable tableAtTag = base.getTableFromId(versionAtTag, base); + DremioTable tableAtBranchTip = base.getTableFromId(versionAtBranchTip, base); + + String uniqueIdAtTag = tableAtTag.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid(); + String uniqueIdAtBranchTip = tableAtBranchTip.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid(); + + EntityId versionedDatasetIdAtTag = tableAtTag.getDatasetConfig().getId(); + EntityId versionedDatasetIdAtBranchTip = tableAtBranchTip.getDatasetConfig().getId(); + // Assert + assertThat(uniqueIdAtTag.equals(uniqueIdAtBranchTip)).isFalse(); + assertThat(versionedDatasetIdAtTag.equals(versionedDatasetIdAtBranchTip)).isFalse(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdFromInvalidTag() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + final String invalidTag = generateUniqueTagName(); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + String versionAtTag = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.TAG, invalidTag), base); + + // Assert + assertThat(versionAtTag).isNull(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void getTableWithIdFromCommit() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String commitHashBeforeInsert = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + + // Act + base.runSQL(insertTableQuery(tablePath)); + + String versionAtCommitHash = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.COMMIT_HASH_ONLY, commitHashBeforeInsert), base); + String versionAtBranchTip = base.getVersionedDatatsetId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + DremioTable tableAtCommitHash = base.getTableFromId(versionAtCommitHash, base); + DremioTable tableAtBranchTip = base.getTableFromId(versionAtBranchTip, base); + + String uniqueIdAtCommit = tableAtCommitHash.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid(); + 
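+ // Note (added for clarity): the commit hash was captured before the insert, so it pins the table to its pre-insert state and the ids below are expected to differ from the branch tip.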
+    String uniqueIdAtBranchTip = tableAtBranchTip.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getTableUuid();
+
+    EntityId versionedDatasetIdAtCommitHash = tableAtCommitHash.getDatasetConfig().getId();
+    EntityId versionedDatasetIdAtBranchTip = tableAtBranchTip.getDatasetConfig().getId();
+
+    // Assert
+    assertThat(uniqueIdAtCommitHash).isNotEqualTo(uniqueIdAtBranchTip);
+    assertThat(versionedDatasetIdAtCommitHash).isNotEqualTo(versionedDatasetIdAtBranchTip);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  /**
+   * Tests that the content id stays the same across inserts: the id resolved at a tag matches
+   * the id at the branch tip.
+   */
+  @Test
+  public void checkContentIdWithAt() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final String tag1 = generateUniqueTagName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base);
+    assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base);
+
+    // Act
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    base.runSQL(createTagQuery(tag1, DEFAULT_BRANCH_NAME));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+
+    String contentIdWithSelectAtTag = base.getContentIdForTableAtRef(tablePath, new TableVersionContext(TableVersionType.TAG, tag1), base);
+    String contentIdWithSelectAtMain = base.getContentId(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+
+    // Assert
+    assertThat(contentIdWithSelectAtMain).isEqualTo(contentIdWithSelectAtTag);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  /**
+   * Tests that the unique id changes after an insert: the id resolved at a tag differs from
+   * the id at the branch tip.
+   */
+  @Test
+  public void checkUniqueIdWithAt() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final String tag1 = generateUniqueTagName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base);
+    assertNessieHasCommitForTable(tablePath, Operation.Put.class, DEFAULT_BRANCH_NAME, base);
+
+    // Act
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    base.runSQL(createTagQuery(tag1, DEFAULT_BRANCH_NAME));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+
+    String uniqueIdWithSelectAtTag = base.getUniqueIdForTableAtRef(tablePath, new TableVersionContext(TableVersionType.TAG, tag1), base);
+    String uniqueIdWithSelectAtMain = base.getUniqueIdForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+
+    // Assert
+    assertThat(uniqueIdWithSelectAtMain).isNotEqualTo(uniqueIdWithSelectAtTag);
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaCombinationTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaCombinationTestCases.java
new file mode 100644
index 0000000000..54ee7fa1e5
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaCombinationTestCases.java
@@ -0,0 +1,416 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.BaseTestQuery.runSQL; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.projectnessie.model.Namespace; + +import com.google.common.collect.Streams; + +/** + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedInfoSchemaCombinationTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + * + * For this combined test, we construct a series of tables and views. It is + * important to test these together to make sure that things don't leak + * outside where they are expected. Isolated tests cannot capture this + * easily. We have combinations of tables and views at various levels: at + * the root level of the source, inside a folder at the root level, and also + * in sub-folders. Combinations of explicit and implicit folders are also + * used. + * + * Structure of source, folders, tables, and views: + * + * Note: Table and view names are longer (e.g. tableAFirst vs. just tableA) than + * you might initially expect. This is so that we can do tests with "names + * contains" tests like "... WHERE table_name LIKE '%ablA%'". Table and view + * names are also intentionally camelCased for case-sensitivity testing. + * + * TODO: DX-58674 other folder/table/view names incl special characters \ * ? . 
/ and more
+ * TODO: DX-58674 also add tests that explicitly account for case-sensitivity
+ *
+ * dataplane_test
+ *   - tableAFirst
+ *   - tableBSecond
+ *   - viewCThird
+ *   - viewDFourth
+ *   - explicitFolder1
+ *     - tableAFirst
+ *     - tableBSecond
+ *     - viewCThird
+ *     - viewDFourth
+ *     - explicitFolderInExplicitParent3
+ *       - tableAFirst
+ *       - tableBSecond
+ *       - viewCThird
+ *       - viewDFourth
+ *   - emptyExplicitFolder7
+ */
+@SuppressWarnings("NewClassNamingConvention") // Parameterized test as part of ITDataplanePlugin
+public class InfoSchemaCombinationTestCases {
+
+  private static final ContainerEntity sourceRoot = new ContainerEntity(
+    DATAPLANE_PLUGIN_NAME,
+    ContainerEntity.Type.SOURCE,
+    ContainerEntity.Contains.FOLDERS_AND_VIEWS,
+    Collections.emptyList());
+  private static final ContainerEntity explicitFolder1 = new ContainerEntity(
+    "explicitFolder1",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.FOLDERS_AND_VIEWS,
+    sourceRoot.getFullPath());
+  private static final ContainerEntity explicitFolderInExplicitParent3 = new ContainerEntity(
+    "explicitFolderInExplicitParent3",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.FOLDERS_AND_VIEWS,
+    explicitFolder1.getFullPath());
+  private static final ContainerEntity emptyExplicitFolder7 = new ContainerEntity(
+    "emptyExplicitFolder7",
+    ContainerEntity.Type.EXPLICIT_FOLDER,
+    ContainerEntity.Contains.EMPTY,
+    sourceRoot.getFullPath());
+
+  private final ITDataplanePluginTestSetup base;
+
+  InfoSchemaCombinationTestCases(ITDataplanePluginTestSetup base) {
+    this.base = base;
+  }
+
+  @BeforeEach
+  public void createFoldersTablesViews() throws Exception {
+    createEntitiesForContainer(sourceRoot);
+    createEntitiesForContainer(explicitFolder1);
+    createEntitiesForContainer(explicitFolderInExplicitParent3);
+    createEntitiesForContainer(emptyExplicitFolder7);
+  }
+
+  private void createEntitiesForContainer(ContainerEntity container) throws Exception {
+    switch (container.getType()) {
+      case SOURCE:
+        // Intentional fallthrough
+      case IMPLICIT_FOLDER:
+        break;
+      case EXPLICIT_FOLDER:
+        base.getNessieClient().createNamespace()
+          .namespace(Namespace.of(container.getPathWithoutRoot()))
+          .refName("main")
+          .create();
+        break;
+      default:
+        throw new IllegalStateException("Unexpected value: " + container.getType());
+    }
+
+    switch (container.getContains()) {
+      case FOLDERS_AND_VIEWS:
+        createTablesAndViewsInContainer(container);
+        break;
+      case EMPTY:
+        break;
+      default:
+        throw new IllegalStateException("Unexpected value: " + container.getContains());
+    }
+  }
+
+  private static void createTablesAndViewsInContainer(ContainerEntity container) throws Exception {
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot("tableAFirst")));
+    runSQL(createEmptyTableQuery(container.getChildPathWithoutRoot("tableBSecond")));
+    runSQL(createViewQuery(
+      container.getChildPathWithoutRoot("viewCThird"),
+      container.getChildPathWithoutRoot("tableAFirst")));
+    runSQL(createViewQuery(
+      container.getChildPathWithoutRoot("viewDFourth"),
+      container.getChildPathWithoutRoot("tableBSecond")));
+  }
+
+  @Test
+  public void tablesAll() throws Exception {
+    assertThat(base.runSqlWithResults(String.format(
+      "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%s%%'",
+      sourceRoot.asSqlIdentifier())))
+      .containsExactlyInAnyOrderElementsOf(
+        Streams.concat(
+          sourceRoot.getExpectedTablesIncludingViews().stream(),
+          explicitFolder1.getExpectedTablesIncludingViews().stream(),
+          
explicitFolderInExplicitParent3.getExpectedTablesIncludingViews().stream() + // No tables expected for emptyExplicitFolder7 + ).collect(Collectors.toList())); + } + + private static Stream tablesWhereSchemaLikeTopLevelFolderArguments() { + return Stream.of( + Arguments.of( + explicitFolder1, + Streams.concat( + explicitFolder1.getExpectedTablesIncludingViews().stream(), + explicitFolderInExplicitParent3.getExpectedTablesIncludingViews().stream()) + .collect(Collectors.toList())), + Arguments.of( + emptyExplicitFolder7, + Collections.emptyList())); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("tablesWhereSchemaLikeTopLevelFolderArguments") + public void tablesWhereSchemaLikeTopLevelFolder(ContainerEntity container, List> expectedTables) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%s%%'", + container.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf(expectedTables); + } + + private static Stream tablesWhereSchemaLikeSubfolderArguments() { + return Stream.of( + Arguments.of(explicitFolderInExplicitParent3)); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("tablesWhereSchemaLikeSubfolderArguments") + public void tablesWhereSchemaLikeSubfolder(ContainerEntity container) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%%%s%%'", + container.getName()))) + .containsExactlyInAnyOrderElementsOf(container.getExpectedTablesIncludingViews()); + } + + @Test + public void tablesWhereSchemaContains() throws Exception { + assertThat(base.runSqlWithResults( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%FolderIn%'")) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + explicitFolderInExplicitParent3.getExpectedTablesIncludingViews().stream()) + .collect(Collectors.toList())); + } + + @Test + public void tablesWhereNameStartsWith() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%s%%' AND table_name LIKE 'tab%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getExpectedTablesWithoutViews().stream(), + explicitFolder1.getExpectedTablesWithoutViews().stream(), + explicitFolderInExplicitParent3.getExpectedTablesWithoutViews().stream() + ).collect(Collectors.toList())); + } + + @Test + public void tablesWhereNameStartsWithTableA() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_schema LIKE '%s%%' AND table_name LIKE 'tableA%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getTableAOnly().stream(), + explicitFolder1.getTableAOnly().stream(), + explicitFolderInExplicitParent3.getTableAOnly().stream() + ).collect(Collectors.toList())); + } + + @Test + public void tablesWhereNameContains() throws Exception { + assertThat(base.runSqlWithResults( + "select * from INFORMATION_SCHEMA.\"TABLES\" WHERE table_name LIKE '%bleB%'")) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getTableBOnly().stream(), + explicitFolder1.getTableBOnly().stream(), + explicitFolderInExplicitParent3.getTableBOnly().stream() + ).collect(Collectors.toList())); + } + + @Test + public void viewsAll() throws Exception { + 
assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%s%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getExpectedViews().stream(), + explicitFolder1.getExpectedViews().stream(), + explicitFolderInExplicitParent3.getExpectedViews().stream() + // No views expected for emptyExplicitFolder7 + ).collect(Collectors.toList())); + } + + private static Stream viewsWhereSchemaLikeTopLevelFolderArguments() { + return Stream.of( + Arguments.of( + explicitFolder1, + Streams.concat( + explicitFolder1.getExpectedViews().stream(), + explicitFolderInExplicitParent3.getExpectedViews().stream()) + .collect(Collectors.toList())), + Arguments.of( + emptyExplicitFolder7, + Collections.emptyList())); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("viewsWhereSchemaLikeTopLevelFolderArguments") + public void viewsWhereSchemaLikeTopLevelFolder(ContainerEntity container, List> expectedViews) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%s%%'", + container.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf(expectedViews); + } + + private static Stream viewsWhereSchemaLikeSubfolderArguments() { + return Stream.of( + Arguments.of(explicitFolderInExplicitParent3)); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("viewsWhereSchemaLikeSubfolderArguments") + public void viewsWhereSchemaLikeSubfolder(ContainerEntity container) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%%%s%%'", + container.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf(container.getExpectedViews()); + } + + @Test + public void viewsWhereSchemaContains() throws Exception { + assertThat(base.runSqlWithResults( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%FolderIn%'")) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + explicitFolderInExplicitParent3.getExpectedViews().stream()) + .collect(Collectors.toList())); + } + + @Test + public void viewsWhereNameStartsWith() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%s%%' AND table_name LIKE 'vie%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getExpectedViews().stream(), + explicitFolder1.getExpectedViews().stream(), + explicitFolderInExplicitParent3.getExpectedViews().stream() + ).collect(Collectors.toList())); + } + + @Test + public void viewsWhereNameStartsWithViewC() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema LIKE '%s%%' AND table_name LIKE 'viewC%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getViewCOnly().stream(), + explicitFolder1.getViewCOnly().stream(), + explicitFolderInExplicitParent3.getViewCOnly().stream() + ).collect(Collectors.toList())); + } + + @Test + public void viewsWhereNameContains() throws Exception { + assertThat(base.runSqlWithResults( + "select * from INFORMATION_SCHEMA.VIEWS WHERE table_name LIKE '%ewD%'")) + .containsExactlyInAnyOrderElementsOf( + Streams.concat( + sourceRoot.getViewDOnly().stream(), + explicitFolder1.getViewDOnly().stream(), + 
explicitFolderInExplicitParent3.getViewDOnly().stream() + ).collect(Collectors.toList())); + } + + @Test + public void schemataAll() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.SCHEMATA WHERE schema_name LIKE '%s%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrder( + sourceRoot.getExpectedSchemata(), + explicitFolder1.getExpectedSchemata(), + explicitFolderInExplicitParent3.getExpectedSchemata(), + emptyExplicitFolder7.getExpectedSchemata()); + } + + private static Stream schemataWhereSchemaLikeTopLevelFolderArguments() { + return Stream.of( + Arguments.of( + explicitFolder1, + Arrays.asList( + explicitFolder1.getExpectedSchemata(), + explicitFolderInExplicitParent3.getExpectedSchemata() + )), + Arguments.of( + emptyExplicitFolder7, + Collections.singletonList( + emptyExplicitFolder7.getExpectedSchemata()))); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("schemataWhereSchemaLikeTopLevelFolderArguments") + public void schemataWhereSchemaLikeTopLevelFolder(ContainerEntity container, List> expectedSchemata) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.SCHEMATA WHERE schema_name LIKE '%s%%'", + container.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf(expectedSchemata); + } + + private static Stream schemataWhereSchemaLikeSubfolderArguments() { + return Stream.of( + Arguments.of( + explicitFolderInExplicitParent3, + Collections.singletonList( + explicitFolderInExplicitParent3.getExpectedSchemata()))); + } + + @ParameterizedTest(name = "{index} {0}") + @MethodSource("schemataWhereSchemaLikeSubfolderArguments") + public void schemataWhereSchemaLikeSubfolder(ContainerEntity container, List> expectedSchemata) throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.SCHEMATA WHERE schema_name LIKE '%%%s%%'", + container.asSqlIdentifier()))) + .containsExactlyInAnyOrderElementsOf(expectedSchemata); + } + + @Test + public void schemataWhereSchemaContains() throws Exception { + assertThat(base.runSqlWithResults(String.format( + "select * from INFORMATION_SCHEMA.SCHEMATA WHERE schema_name LIKE '%s%%' AND schema_name LIKE '%%Fold%%'", + sourceRoot.asSqlIdentifier()))) + .containsExactlyInAnyOrder( + // sourceRoot doesn't match the filter + explicitFolder1.getExpectedSchemata(), + explicitFolderInExplicitParent3.getExpectedSchemata(), + emptyExplicitFolder7.getExpectedSchemata()); + } + + // TODO columns tests here too? Covered by InfoSchemaTestCases? +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaTestCases.java new file mode 100644 index 0000000000..4ec90d06e4 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InfoSchemaTestCases.java @@ -0,0 +1,837 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createFolderQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableWithColDefsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.folderA; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.folderB; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tableA; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.projectnessie.model.Namespace; + +import com.dremio.exec.catalog.VersionContext; +import com.google.common.base.Joiner; + +/** + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedInfoSchemaTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class InfoSchemaTestCases { + private ITDataplanePluginTestSetup base; + private static final Joiner DOT_JOINER = Joiner.on('.'); + + InfoSchemaTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void selectInformationSchemaTable() throws Exception { + final String tableName1 = generateUniqueTableName(); + final List tablePath1 = tablePathWithFolders(tableName1); + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath1)); + + final String viewName = generateUniqueViewName(); + List viewKey = tablePathWithFolders(viewName); + + createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createViewQuery(viewKey, tablePath1)); + + List tableRow = Arrays.asList( + "DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)), + tableName1, + "TABLE"); + + List viewRow = Arrays.asList( + "DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, viewKey.get(0), viewKey.get(1)), + viewName, + "VIEW"); + + // INFORMATION_SCHEMA."TABLES" should return correct value. 
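+    // Each returned row has the shape [TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE],
+    // e.g. ["DREMIO", "<plugin>.<folder1>.<folder2>", "<table name>", "TABLE"], matching the
+    // tableRow and viewRow literals built above.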
+ // Views should appear + assertThat(base.runSqlWithResults("select * from INFORMATION_SCHEMA.\"TABLES\"")) + .contains(tableRow) + .contains(viewRow) + // Column at index 3 is "TABLE_TYPE" + .allMatch(row -> (row.get(3).equals("TABLE") || row.get(3).equals("SYSTEM_TABLE") || row.get(3).equals("VIEW"))); + } + + @Test + public void selectInformationSchemaView() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + final String viewName = generateUniqueViewName(); + List viewKey = tablePathWithFolders(viewName); + + createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createViewQuery(viewKey, tablePath)); + + List viewRow = Arrays.asList( + "DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, viewKey.get(0), viewKey.get(1)), + viewName, + String.format( + "SELECT * FROM %s.%s.%s.%s", + DATAPLANE_PLUGIN_NAME, + tablePath.get(0), + tablePath.get(1), + tableName + )); + + assertThat(base.runSqlWithResults("select * from INFORMATION_SCHEMA.views")) + .contains(viewRow) + // Column at index 2 is "TABLE_NAME" + .allMatch(row -> !row.get(2).contains(tableName)); + } + + @Test + public void selectInformationSchemaSchemataForExplicitFolder() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + //Create folders explicitly + base.getNessieClient().createNamespace() + .namespace(tablePath.get(0)) + .refName("main") + .create(); + base.getNessieClient().createNamespace() + .namespace(Namespace.of(tablePath.subList(0, 2))) + .refName("main") + .create(); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List sourceRow = Arrays.asList( + "DREMIO", + DATAPLANE_PLUGIN_NAME, + "", + "SIMPLE", + "NO"); + + List row1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, + tablePath.get(0), tablePath.get(1)), + "", + "SIMPLE", + "NO"); + List row2 = Arrays.asList("DREMIO", + String.format("%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0)), + "", + "SIMPLE", + "NO"); + + //Result comes out without a where statement. 
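+    // As used throughout these tests, a SCHEMATA row lays out as
+    // [catalog, schema_name, owner, type, is_mutable]: "DREMIO", the dotted schema path,
+    // "", "SIMPLE", "NO".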
+    assertThat(base.runSqlWithResults("select * from INFORMATION_SCHEMA.SCHEMATA"))
+      // Source should appear
+      .contains(sourceRow)
+      .contains(row1)
+      .contains(row2);
+  }
+
+  @Test
+  public void selectInformationSchemaColumns() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    List<String> colDef = new ArrayList<>();
+    colDef.add("col1 Varchar(255)");
+    colDef.add("col2 Varchar(255)");
+
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, colDef));
+
+    List<List<String>> expected = new ArrayList<>();
+    List<String> row1 = Arrays.asList("DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)),
+      tableName,
+      "col1",
+      "1");
+    List<String> row2 = Arrays.asList("DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)),
+      tableName,
+      "col2",
+      "2");
+
+    expected.add(row1);
+    expected.add(row2);
+
+    // row 1 and row 2 should appear
+    assertThat(base.runSqlWithResults("select TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS"))
+      .containsAll(expected);
+    // row 3 should not appear since we didn't create col3.
+    // Column at index 3 is "COLUMN_NAME"
+    assertThat(base.runSqlWithResults("select TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS"))
+      // Column at index 2 is "TABLE_NAME"
+      .filteredOn(row -> row.get(2).contains(tableName))
+      .containsExactlyInAnyOrderElementsOf(expected);
+
+    assertThat(base.runSqlWithResults(String.format("select TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS where table_schema = '%s.%s.%s'", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1))))
+      .containsAll(expected);
+  }
+
+  @Test
+  public void selectInformationSchemaTableWithSchemaFilter() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath1));
+
+    final String tableName2 = generateUniqueTableName();
+    final List<String> tablePath2 = tablePathWithFolders(tableName2);
+    createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath2));
+
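+    // The expected row for table 1. With illustrative folder names the filter below has the
+    // shape: ... WHERE TABLE_SCHEMA = '<plugin>.folderX.folderY', and it must match table 1
+    // only, never table 2 (which lives under different generated folders).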
+    List<String> tableRow = Arrays.asList(
+      "DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)),
+      tableName1,
+      "TABLE");
+
+    // Test INFORMATION_SCHEMA."TABLES" with TABLE_SCHEMA
+    assertThat(base.runSqlWithResults(String.format("select * from INFORMATION_SCHEMA.\"TABLES\" WHERE TABLE_SCHEMA = '%s.%s.%s'", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1))))
+      .contains(tableRow)
+      .allMatch(row -> !row.get(2).contains(tableName2));
+  }
+
+  @Test
+  public void selectInformationSchemaTableWithNameFilter() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath1));
+
+    final String tableName2 = generateUniqueTableName();
+    final List<String> tablePath2 = tablePathWithFolders(tableName2);
+    createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath2));
+
+    List<String> tableRow = Arrays.asList(
+      "DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)),
+      tableName1,
+      "TABLE");
+
+    // Test INFORMATION_SCHEMA."TABLES" with TABLE_NAME
+    assertThat(base.runSqlWithResults(String.format("select * from INFORMATION_SCHEMA.\"TABLES\" WHERE TABLE_NAME = '%s'", tableName1)))
+      .contains(tableRow)
+      .allMatch(row -> !row.get(2).contains(tableName2));
+  }
+
+  @Test
+  public void selectInformationSchemaViewWithSchemaFilter() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+
+    List<String> viewRow = Arrays.asList(
+      "DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, viewKey.get(0), viewKey.get(1)),
+      viewName,
+      String.format(
+        "SELECT * FROM %s.%s.%s.%s",
+        DATAPLANE_PLUGIN_NAME,
+        tablePath.get(0),
+        tablePath.get(1),
+        tableName
+      ));
+
+    // Test with schema
+    assertThat(base.runSqlWithResults(String.format("select * from INFORMATION_SCHEMA.views where table_schema = '%s.%s.%s'", DATAPLANE_PLUGIN_NAME, viewKey.get(0), viewKey.get(1))))
+      .contains(viewRow)
+      .allMatch(row -> !row.get(2).contains(tableName));
+  }
+
+  @Test
+  public void selectInformationSchemaViewWithNameFilter() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+
+    List<String> viewRow = Arrays.asList(
+      "DREMIO",
+      String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, viewKey.get(0), viewKey.get(1)),
+      viewName,
+      String.format(
+        "SELECT * FROM %s.%s.%s.%s",
+        DATAPLANE_PLUGIN_NAME,
+        tablePath.get(0),
+        tablePath.get(1),
+        tableName
+      ));
+
+    // Test with name
+    assertThat(base.runSqlWithResults(String.format("select * from INFORMATION_SCHEMA.views where table_name = '%s'", viewName)))
+      .contains(viewRow)
+      .allMatch(row -> !row.get(2).contains(tableName));
+  }
+
+  @Test public void 
selectInformationSchemaSchemataWithFilter() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + //Create folders explicitly + base.getNessieClient().createNamespace() + .namespace(tablePath.get(0)) + .refName("main") + .create(); + base.getNessieClient().createNamespace() + .namespace(Namespace.of(tablePath.subList(0, 2))) + .refName("main") + .create(); + base.runSQL(createEmptyTableQuery(tablePath)); + + List row1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, + tablePath.get(0), tablePath.get(1)), + "", + "SIMPLE", + "NO"); + + assertThat(base.runSqlWithResults(String.format("select * from INFORMATION_SCHEMA.SCHEMATA where schema_name = \'%s.%s.%s\'",DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)))) + .contains(row1) + .allMatch(row -> !row.get(1).equals(DATAPLANE_PLUGIN_NAME)) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0)))); + } + + @Test public void selectInformationSchemaColumnsWithSchemaFilter() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + List colDef = new ArrayList<>(); + colDef.add("col1_table1 Varchar(255)"); + colDef.add("col2_table1 Varchar(255)"); + + final String tableName2 = generateUniqueTableName(); + final List tablePath2 = tablePathWithFolders(tableName2); + + List colDef2 = new ArrayList<>(); + colDef2.add("col1_table2 Varchar(255)"); + colDef2.add("col2_table2 Varchar(255)"); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, colDef)); + base.runSQL(createTableWithColDefsQuery(tablePath2, colDef2)); + + List> expected = new ArrayList<>(); + List row1Table1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "col1_table1", + "1"); + List row2Table1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "col2_table1", + "2"); + + expected.add(row1Table1); + expected.add(row2Table1); + //Test with schema + assertThat(base.runSqlWithResults(String.format("select TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS where table_schema = \'%s.%s.%s\'", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)))) + .containsAll(expected) + .allMatch(row -> !(row.get(1).contains("col1_table2"))) + .allMatch(row -> !(row.get(1).contains("col2_table2"))); + } + + @Test public void selectInformationSchemaColumnsWithNameFilter() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + List colDef = new ArrayList<>(); + colDef.add("col1 Varchar(255)"); + colDef.add("col2 Varchar(255)"); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableWithColDefsQuery(tablePath, colDef)); + + List> expected = new ArrayList<>(); + List row1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "col1", + "1"); + List row2 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "col2", + "2"); + + expected.add(row1); + 
expected.add(row2); + //Test with name + assertThat(base.runSqlWithResults(String.format("select TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION from INFORMATION_SCHEMA.COLUMNS where table_name = \'%s\'", tableName))) + .containsAll(expected); + } + + @Test public void testLike() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + final String tableName2 = generateUniqueTableName(); + final List tablePath2 = tablePathWithFolders(tableName2); + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath2)); + + List expected = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_name like \'%s\'", tableName); + assertThat(base.runSqlWithResults(query)) + .contains(expected) + .allMatch(row -> !(row.get(2).contains(tableName2))) + .allMatch(row -> !(row.get(1).contains(DOT_JOINER.join(tablePath2)))); + } + + @Test public void testLikeWithNoWildCard() throws Exception { + final String tableInSourceRoot = generateUniqueTableName(); + List tablePath = new ArrayList<>(); + tablePath.add(tableInSourceRoot); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + final String tableName1 = generateUniqueTableName(); + final List tablePath1 = tablePathWithFolders(tableName1); + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath1)); + + final String tableName2 = generateUniqueTableName(); + final List tablePath2 = tablePathWithFolders(tableName2); + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath2)); + + List expectedRowForTableInSourceRoot = Arrays.asList("DREMIO", + DATAPLANE_PLUGIN_NAME, + tableInSourceRoot, + "TABLE"); + + List expectedRowForTable1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)), + tableName1, + "TABLE"); + + String searchTableInSourceRoot = String.format("select * from information_schema.\"tables\" where table_name like \'%s\'", tableInSourceRoot); + assertThat(base.runSqlWithResults(searchTableInSourceRoot)) + .contains(expectedRowForTableInSourceRoot) + .allMatch(row -> !(row.get(2).contains(tableName1)) && !(row.get(2).contains(tableName2))) + .allMatch(row -> !(row.get(1).contains(DOT_JOINER.join(tablePath1))) && !(row.get(1).contains(DOT_JOINER.join(tablePath2)))); + + String query = String.format("select * from information_schema.\"tables\" where table_schema like \'%s.%s.%s\'",DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)); + assertThat(base.runSqlWithResults(query)) + .contains(expectedRowForTable1) + .allMatch(row -> !(row.get(2).contains(tableInSourceRoot)) && !(row.get(2).contains(tableName2))) + .allMatch(row -> !(row.get(1).equals(DATAPLANE_PLUGIN_NAME)) && !(row.get(1).contains(DOT_JOINER.join(tablePath2)))); + } + + @Test public void testStartWith() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, 
VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List expected = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_name like \'%s%%\'", tableName.substring(0, tableName.length() - 2)); + assertThat(base.runSqlWithResults(query)) + .contains(expected); + } + + @Test public void testContains() throws Exception { + final String tableInSourceRoot = generateUniqueTableName(); + List tablePath = new ArrayList<>(); + tablePath.add(tableInSourceRoot); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List expected = Arrays.asList("DREMIO", + DATAPLANE_PLUGIN_NAME, + tableInSourceRoot, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_name like \'%%%s%%\'", tableInSourceRoot.substring(1, tableInSourceRoot.length() - 1)); + assertThat(base.runSqlWithResults(query)) + .contains(expected); + } + + @Test public void testNoEntriesFound() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_schema like \'%s\' and table_name like \'hello_world\'",DATAPLANE_PLUGIN_NAME); + assertThat(base.runSqlWithResults(query)) + .doesNotContain(table_row) + .allMatch(row -> !row.get(2).contains(tableName)); + } + + @Test public void testWithEscape() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + tableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_schema like \'%s\' and table_name like 'hello\\\\_world'",DATAPLANE_PLUGIN_NAME); + assertThat(base.runSqlWithResults(query)) + .doesNotContain(table_row) + .allMatch(row -> !row.get(2).contains(tableName)); + } + + @Test + public void testMultipleBackSlash() throws Exception { + //we need to add quotation at the front since it contains special character + final String escapedTableName = "\"\\\\\\table\""; + final String resultEscapedTableName = "\\\\\\table"; + final List escapedTablePath = tablePathWithFolders(escapedTableName); + createFolders(escapedTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(escapedTablePath)); + + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, escapedTablePath.get(0), escapedTablePath.get(1)), + resultEscapedTableName, + "TABLE"); + + String 
query = String.format("select * from information_schema.\"tables\" where table_name = \'%s\'", resultEscapedTableName); + assertThat(base.runSqlWithResults(query)) + .contains(table_row) + .allMatch(row -> !row.get(2).equals(tableName)); + + } + + @Test + public void testStarTable() throws Exception { + //we need to add quotation at the front since it contains special character + final String starTableName = "\"*table\""; + final String inputStarTableName = "*table"; + final List starTablePath = tablePathWithFolders(starTableName); + createFolders(starTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(starTablePath)); + + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, starTablePath.get(0), starTablePath.get(1)), + inputStarTableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_name like \'%s\'", inputStarTableName); + assertThat(base.runSqlWithResults(query)) + .contains(table_row) + .allMatch(row -> !row.get(2).equals(tableName)); + } + + @Test public void testQuestionMarkTable() throws Exception { + //we need to add quotation at the front since it contains special character + final String questionTableName = "\"?table\""; + final String inputQuestionTableName = "?table"; + final List questionTablePath = tablePathWithFolders(questionTableName); + createFolders(questionTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(questionTablePath)); + + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, questionTablePath.get(0), questionTablePath.get(1)), + inputQuestionTableName, + "TABLE"); + + String query = String.format("select * from information_schema.\"tables\" where table_name = \'%s\'", inputQuestionTableName); + assertThat(base.runSqlWithResults(query)) + .contains(table_row) + .allMatch(row -> !row.get(2).equals(tableName)); + } + + @Test public void testMultipleStatements() throws Exception { + final String tableName1 = generateUniqueTableName(); + final List tablePath1 = tablePathWithFolders(tableName1); + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath1)); + + final String tableName2 = generateUniqueTableName(); + final List tablePath2 = tablePathWithFolders(tableName2); + createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath2)); + + final String tableName3 = generateUniqueTableName(); + final List tablePath3 = tablePathWithFolders(tableName3); + createFolders(tablePath3, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath3)); + + final String tableName4 = generateUniqueTableName(); + final List tablePath4 = tablePathWithFolders(tableName4); + createFolders(tablePath4, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath4)); + + final String tableName5 = generateUniqueTableName(); 
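+    // (Tables 4 and 5 act as negative controls: the compound predicate asserted below must
+    // not match either of them.)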
+ final List tablePath5 = tablePathWithFolders(tableName5); + createFolders(tablePath5, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath5)); + + List table_row1 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1)), + tableName1, + "TABLE"); + + List table_row2 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath2.get(0), tablePath2.get(1)), + tableName2, + "TABLE"); + + List table_row3 = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath3.get(0), tablePath3.get(1)), + tableName3, + "TABLE"); + + String query = String.format( + "select * from information_schema.\"tables\" where table_schema like \'%s.%s.%s\' or (table_name = \'%s\' or table_name like \'%s\' or table_schema like \'%%%s%%\')", + DATAPLANE_PLUGIN_NAME, + tablePath1.get(0), + tablePath1.get(1), + tableName2, + tableName3, + tablePath3.get(1)); + assertThat(base.runSqlWithResults(query)) + .contains(table_row1) + .contains(table_row2) + .contains(table_row3) + .allMatch(row -> !(row.get(2).contains(tableName4)) && !(row.get(2).contains(tableName5))) + .allMatch(row -> !(row.get(1).contains(DOT_JOINER.join(tablePath4))) && !(row.get(1).contains(DOT_JOINER.join(tablePath5)))); + } + + @Test public void testNestedFolders() throws Exception { + /* + * select * from information_schema."SCHEMATA" where schema_name = 'nessie_t.folder1' + * select * from information_schema."SCHEMATA" where schema_name like 'nessie_t.folder1%' + */ + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + //Create folders explicitly + base.getNessieClient().createNamespace() + .namespace(tablePath.get(0)) + .refName("main") + .create(); + base.getNessieClient().createNamespace() + .namespace(Namespace.of(tablePath.subList(0, 2))) + .refName("main") + .create(); + base.runSQL(createEmptyTableQuery(tablePath)); + String nestedFolderEqual = String.format("select * from information_schema.SCHEMATA where schema_name = '%s.%s'", DATAPLANE_PLUGIN_NAME, tablePath.get(0)); + String nestedFolderLikeStartWith = String.format("select * from information_schema.SCHEMATA where schema_name like '%s.%s%%'", DATAPLANE_PLUGIN_NAME, tablePath.get(0)); + + List folder1Row = Arrays.asList( + "DREMIO", + String.format("%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0)), + "", + "SIMPLE", + "NO"); + + List folder2Row = Arrays.asList( + "DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)), + "", + "SIMPLE", + "NO"); + + assertThat(base.runSqlWithResults(nestedFolderEqual)) + .contains(folder1Row) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)))); + assertThat(base.runSqlWithResults(nestedFolderLikeStartWith)) + .contains(folder1Row) + .contains(folder2Row); + } + + @Test public void testSelectFromNonArcticSources() throws Exception { + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + String selectFromNonArcticSource = "SELECT * FROM INFORMATION_SCHEMA.\"TABLES\" WHERE TABLE_SCHEMA = 'cp'"; + String tablePathIncludeSource = String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)); + + 
assertThat(base.runSqlWithResults(selectFromNonArcticSource)) + .allMatch(row -> !row.get(1).equals(tablePathIncludeSource)); + } + + @Test public void testSelectEscapeQuoteWithDots() throws Exception { + List schemaPath = new ArrayList<>(); + schemaPath.add("folder1"); + schemaPath.add("folder2"); + schemaPath.add("\"dot.dot.dot.dot\""); + //Create folders explicitly + base.getNessieClient().createNamespace() + .namespace(schemaPath.get(0)) + .refName("main") + .create(); + base.getNessieClient().createNamespace() + .namespace(Namespace.of(schemaPath.subList(0, 2))) + .refName("main") + .create(); + base.getNessieClient().createNamespace() + .namespace(Namespace.of(schemaPath.subList(0, 3))) + .refName("main") + .create(); + String selectTableWithDots = String.format("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = \'%s.%s.%s.%s\'",DATAPLANE_PLUGIN_NAME, schemaPath.get(0), schemaPath.get(1),schemaPath.get(2)); + String tablePathIncludeSource = String.format("%s.%s.%s.%s", DATAPLANE_PLUGIN_NAME, schemaPath.get(0), schemaPath.get(1), schemaPath.get(2)); + assertThat(base.runSqlWithResults(selectTableWithDots)) + .allMatch(row -> row.get(1).equals(tablePathIncludeSource)); + } + + @Test public void testTableUnderOneFolder() throws Exception { + final String tableUnderOneFolder = generateUniqueTableName(); + List pathUnderOneFolder = new ArrayList<>(); + pathUnderOneFolder.add("folder1"); + pathUnderOneFolder.add(tableUnderOneFolder); + createFolders(pathUnderOneFolder, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(pathUnderOneFolder)); + + final String tableName = generateUniqueTableName(); + List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s.%s", DATAPLANE_PLUGIN_NAME, pathUnderOneFolder.get(0)), + tableUnderOneFolder, + "TABLE"); + + String selectTableUnderOneFolder = String.format("SELECT * FROM INFORMATION_SCHEMA.\"TABLES\" WHERE TABLE_SCHEMA like \'%s.%s\'", DATAPLANE_PLUGIN_NAME, pathUnderOneFolder.get(0)); + assertThat(base.runSqlWithResults(selectTableUnderOneFolder)) + .contains(table_row) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)))); + } + + @Test public void testTableUnderSource() throws Exception { + final String tableUnderSource = generateUniqueTableName(); + List pathUnderSource = new ArrayList<>(); + pathUnderSource.add(tableUnderSource); + createFolders(pathUnderSource, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(pathUnderSource)); + + final String tableName = generateUniqueTableName(); + List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + List table_row = Arrays.asList("DREMIO", + String.format("%s", DATAPLANE_PLUGIN_NAME), + tableUnderSource, + "TABLE"); + + String selectTableUnderSource = String.format("SELECT * FROM INFORMATION_SCHEMA.\"TABLES\" WHERE TABLE_SCHEMA like \'%s\'", DATAPLANE_PLUGIN_NAME); + assertThat(base.runSqlWithResults(selectTableUnderSource)) + .contains(table_row) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, tablePath.get(0), tablePath.get(1)))); + } + + @Test + public void testNamespaceLinksToCorrectNamespace() throws Exception { + 
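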
//create folder A + List folderPath1 = Arrays.asList(DATAPLANE_PLUGIN_NAME, folderA); + base.runSqlWithResults(createFolderQuery(folderPath1)); + + //create folder B + List folderPath2 = Arrays.asList(DATAPLANE_PLUGIN_NAME, folderA, folderB); + base.runSqlWithResults(createFolderQuery(folderPath2)); + + //create table A + List tablePath = Arrays.asList(folderA, folderB, tableA); + base.runSqlWithResults(createTableAsQuery(tablePath, 10)); + + //create folder B directly under folder A + List folderPath3 = Arrays.asList(DATAPLANE_PLUGIN_NAME, folderB); + base.runSqlWithResults(createFolderQuery(folderPath3)); + + List expectedTableRow = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, folderA, folderB), + tableA, + "TABLE"); + + List expectedSchemataRow = Arrays.asList("DREMIO", + String.format("%s.%s.%s", DATAPLANE_PLUGIN_NAME, folderA, folderB), + "", + "SIMPLE", + "NO"); + + String selectInfoSchemaTables = String.format("SELECT * FROM INFORMATION_SCHEMA.\"TABLES\""); + assertThat(base.runSqlWithResults(selectInfoSchemaTables)) + .contains(expectedTableRow) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, folderB, folderB))) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, folderA, folderA))) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, folderB, folderA))) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s",DATAPLANE_PLUGIN_NAME, folderA))) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s",DATAPLANE_PLUGIN_NAME, folderB))) + .allMatch(row -> !row.get(1).equals(DATAPLANE_PLUGIN_NAME)); + + String selectInfoSchemaSchemata = String.format("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA"); + assertThat(base.runSqlWithResults(selectInfoSchemaSchemata)) + .contains(expectedSchemataRow) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, folderB, folderB))) + .allMatch(row -> !row.get(1).equals(String.format("%s.%s.%s",DATAPLANE_PLUGIN_NAME, folderA, folderA))); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InsertTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InsertTestCases.java new file mode 100644 index 0000000000..739f315f9a --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/InsertTestCases.java @@ -0,0 +1,367 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateFolderPath; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueFolderName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinedTableKey; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.mergeBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useCommitQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertIcebergFilesExistAtSubPath; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedInsertTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class InsertTestCases { + private ITDataplanePluginTestSetup base; + + InsertTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void insertIntoEmpty() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 3); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void insertSelect() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, 
VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + base.assertTableHasExpectedNumRows(tablePath, 5); + + // Act + base.runSQL(insertSelectQuery(tablePath, 3)); + + // Assert + // Verify number of rows with select + base.assertTableHasExpectedNumRows(tablePath, 8); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void insertWithCommitSet() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + String commitHashBranch = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + base.runSQL(useCommitQuery(commitHashBranch)); + + // Act and Assert + base.assertQueryThrowsExpectedError(insertTableQuery(tablePath), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + ResolvedVersionContext.DETACHED_REF_NAME)); + } + + // Verify insert creates underlying iceberg files in the right locations + @Test + public void insertSelectVerifyFolders() throws Exception { + // Arrange + // Create a hierarchy of 2 folders to form key of TABLE + final List tablePath = Arrays.asList("if1", "if2", generateUniqueTableName()); + final String tableKey = joinedTableKey(tablePath); + final String createTableQuery = String.format( + "CREATE TABLE %s.%s %s", + DATAPLANE_PLUGIN_NAME, + tableKey, + "(nation_key int, region_key int)"); + + // Create empty + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableQuery); + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 0, 1, 1, 0); + + // Do 2 separate Inserts so there are multiple data files. 
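+ // Note on the expected counts below (assuming the standard Iceberg file layout): every commit + // writes a new metadata.json and a new snapshot manifest list (snap-*.avro), and each insert + // should also add one manifest (*.avro) and one data file (*.parquet), hence the progression + // (0, 1, 1, 0) -> (1, 2, 2, 1) -> (2, 3, 3, 2) asserted via assertIcebergFilesExistAtSubPath.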
+ // Insert 1 + base.runSQL(insertSelectQuery(tablePath, 2)); + base.assertTableHasExpectedNumRows(tablePath, 2); + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 1, 2, 2, 1); + + // Insert 2 + base.runSQL(insertSelectQuery(tablePath, 3)); + // Verify number of rows with select + base.assertTableHasExpectedNumRows(tablePath, 5); + + // Assert + // Verify iceberg manifest/avro/metadata.json files on FS + assertIcebergFilesExistAtSubPath(tablePath, 2, 3, 3, 2); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void insertInDiffBranchesAndConflicts() throws Exception { + // Arrange + final String mainTableName = generateUniqueTableName(); + final List<String> mainTablePath = tablePathWithFolders(mainTableName); + final String devBranchName = generateUniqueBranchName(); + + // Set context to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Insert into the table on the main branch + base.runSQL(insertTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 3); + long mtime1 = base.getMtimeForTable(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + + // Insert into the table on the dev branch as well, so the merge below will conflict + base.runSQL(insertTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 3); + long mtime2 = base.getMtimeForTable(mainTablePath, new TableVersionContext(TableVersionType.BRANCH, devBranchName), base); + + // Act and Assert + base.assertQueryThrowsExpectedError(mergeBranchQuery(devBranchName, DEFAULT_BRANCH_NAME), + String.format("VALIDATION ERROR: Merge branch %s into branch %s failed due to commit conflict on source %s", + devBranchName, DEFAULT_BRANCH_NAME, DATAPLANE_PLUGIN_NAME)); + assertThat(mtime2 > mtime1).isTrue(); + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + } + + @Test + public void insertInDiffBranchesAndMerge() throws Exception { + // Arrange + final List<String> shareFolderPath = generateFolderPath(generateUniqueFolderName()); + final String mainTableName = generateUniqueTableName(); + final String devTableName = generateUniqueTableName(); + final List<String> mainTablePath = tablePathWithFolders(mainTableName); + final List<String> devTablePath = tablePathWithFolders(devTableName); + final String devBranchName = generateUniqueBranchName(); + + // Create an arbitrary commit in Nessie to give the two branches a common ancestor; + // otherwise they would be unrelated branches + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(shareFolderPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + + // Create a dev branch from main + base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Set context to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(mainTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(mainTablePath)); + base.assertTableHasExpectedNumRows(mainTablePath, 0); + + // Insert into the table on main + base.runSQL(insertTableQuery(mainTablePath)); +
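// These rows are committed to the main branch only; dev was branched off before the table was created, + // so the table is not yet visible there, as asserted below. +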
base.assertTableHasExpectedNumRows(mainTablePath, 3); + + + // switch to branch dev + base.runSQL(useBranchQuery(devBranchName)); + // Check that table does not exist in Nessie in branch dev (since it was branched off before create table) + base.assertQueryThrowsExpectedError(selectCountQuery(mainTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + mainTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + createFolders(devTablePath, VersionContext.ofBranch(devBranchName)); + base.runSQL(createEmptyTableQuery(devTablePath)); + base.assertTableHasExpectedNumRows(devTablePath, 0); + + // Insert into table dev + base.runSQL(insertTableQuery(devTablePath)); + base.assertTableHasExpectedNumRows(devTablePath, 3); + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + // Check that dev table cannot be seen in branch main + base.assertQueryThrowsExpectedError(selectCountQuery(devTablePath, DEFAULT_COUNT_COLUMN), + String.format("VALIDATION ERROR: Object '%s' not found within '%s", + devTablePath.get(0), + DATAPLANE_PLUGIN_NAME)); + + // Act + base.runSQL(mergeBranchQuery(devBranchName, DEFAULT_BRANCH_NAME)); + + // Assert and checking records in both tables + // Table must now be visible in main. + base.assertTableHasExpectedNumRows(devTablePath, 3); + base.assertTableHasExpectedNumRows(mainTablePath, 3); + + // Drop tables + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(mainTablePath)); + base.runSQL(dropTableQuery(devTablePath)); + } + + /** + * Ctas in main branch + * Insert in dev branch + * Compare row counts in each branch + * Merge branch to main branch and compare row count again + */ + @Test + public void insertAndCtasInDifferentBranches() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + final List tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + // Verify with select + base.assertTableHasExpectedNumRows(tablePath, 5); + long mtime1 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + // Switch to dev + base.runSQL(useBranchQuery(devBranch)); + // Insert rows + base.runSQL(insertSelectQuery(tablePath, 2)); + // Verify number of rows. + base.assertTableHasExpectedNumRows(tablePath, 7); + // Switch back to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + // Verify number of rows + base.assertTableHasExpectedNumRows(tablePath, 5); + + // Act + // Merge dev to main + base.runSQL(mergeBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + long mtime2 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + base.assertTableHasExpectedNumRows(tablePath, 7); + assertThat(mtime2 > mtime1).isTrue(); + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * The inserts should write data files relative to the table base location, and agnostic of the source configuration. 
+ * Create a table, insert some records + * Create a different source with a dummy bucket path as root location + * Make further inserts, operation should succeed + * Verify the records + */ + @Test + public void insertAgnosticOfSourceBucket() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + base.runWithAlternateSourcePath(insertTableQuery(tablePath)); + + // Assert rows from both inserts + base.assertTableHasExpectedNumRows(tablePath, 6); + base.assertAllFilesAreInBaseBucket(tablePath); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void insertInDifferentTablesWithSameName() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + final List tablePath = tablePathWithFolders(tableName); + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + + // Create table with this name in the main branch, insert records + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + base.runSQL(insertSelectQuery(tablePath,5)); + + // Create table with this name in the dev branch, different source path, insert records + base.runSQL(useBranchQuery(devBranch)); + createFolders(tablePath, VersionContext.ofBranch(devBranch)); + base.runWithAlternateSourcePath(createTableAsQuery(tablePath, 5)); + base.runSQL(insertSelectQuery(tablePath, 5)); + + // Act: Assert the paths are correct in each branch + base.assertAllFilesInAlternativeBucket(tablePath); // dev branch + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.assertAllFilesAreInBaseBucket(tablePath); + + // cleanup + base.runSQL(useBranchQuery(devBranch)); + base.runSQL(dropTableQuery(tablePath)); + + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.runSQL(dropTableQuery(tablePath)); + base.runSQL(dropBranchQuery(devBranch)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/MergeTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/MergeTestCases.java new file mode 100644 index 0000000000..d9a08ce1de --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/MergeTestCases.java @@ -0,0 +1,152 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.mergeByIdQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import java.math.BigDecimal; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedMergeTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class MergeTestCases { + private ITDataplanePluginTestSetup base; + + MergeTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void mergeAll() throws Exception { + // Arrange + final String sourceTableName = generateUniqueTableName(); + final List sourceTablePath = tablePathWithFolders(sourceTableName); + final String devBranch = generateUniqueBranchName(); + + final String targetTableName = generateUniqueTableName(); + final List targetTablePath = tablePathWithFolders(targetTableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(targetTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(targetTablePath)); + base.runSQL(insertTableQuery(targetTablePath)); + createFolders(sourceTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(sourceTablePath)); + base.runSQL(insertTableQuery(sourceTablePath)); + long mtime1 = base.getMtimeForTable(targetTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Add an extra row in the source + base.runSQL(insertTableWithValuesQuery(sourceTablePath, + Collections.singletonList("(4, CAST('fourth row' AS VARCHAR(65536)), CAST(4000 AS DECIMAL(38,3)))"))); + + // Verify with select + base.assertTableHasExpectedNumRows(targetTablePath, 3); + base.assertTableHasExpectedNumRows(sourceTablePath, 4); + // 
Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + // Switch to dev + base.runSQL(useBranchQuery(devBranch)); + + // Act + base.runSQL(mergeByIdQuery(targetTablePath, sourceTablePath)); + long mtime2 = base.getMtimeForTable(targetTablePath, new TableVersionContext(TableVersionType.BRANCH, devBranch), base); + // Assert + base.assertTableHasExpectedNumRows(targetTablePath, 4); + base.assertTableHasExpectedNumRows(sourceTablePath, 4); + assertThat(mtime2 > mtime1).isTrue(); + // Select + base.testBuilder() + .sqlQuery(selectStarQuery(targetTablePath)) + .unOrdered() + .baselineColumns("id", "name", "distance") + .baselineValues(1, "first row", new BigDecimal("1.000")) + .baselineValues(2, "second row", new BigDecimal("1.000")) + .baselineValues(3, "third row", new BigDecimal("1.000")) + .baselineValues(4, "fourth row", new BigDecimal("0.000")) + .go(); + + //Check that main context still has the table + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + long mtime3 = base.getMtimeForTable(targetTablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Assert + base.assertTableHasExpectedNumRows(targetTablePath, 3); + base.assertTableHasExpectedNumRows(sourceTablePath, 4); + assertThat(mtime3 == mtime1).isTrue(); + + // Select + base.testBuilder() + .sqlQuery(selectStarQuery(targetTablePath)) + .unOrdered() + .baselineColumns("id", "name", "distance") + .baselineValues(1, "first row", new BigDecimal("1000.000")) + .baselineValues(2, "second row", new BigDecimal("2000.000")) + .baselineValues(3, "third row", new BigDecimal("3000.000")) + .go(); + + //cleanup + base.runSQL(dropTableQuery(targetTablePath)); + base.runSQL(dropTableQuery(sourceTablePath)); + } + @Test + public void mergeWithTagSet() throws Exception { + // Arrange + String sourceTableName = generateUniqueTableName(); + final List sourceTablePath = tablePathWithFolders(sourceTableName); + final String targetTableName = generateUniqueTableName(); + final List targetTablePath = tablePathWithFolders(targetTableName); + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(targetTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(targetTablePath)); + base.runSQL(insertTableQuery(targetTablePath)); + createFolders(sourceTablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(sourceTablePath)); + base.runSQL(insertTableQuery(sourceTablePath)); + final String tag = generateUniqueTagName(); + // Act and Assert + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + base.assertQueryThrowsExpectedError(mergeByIdQuery(targetTablePath, sourceTablePath), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/OptimizeTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/OptimizeTestCases.java new file mode 100644 index 0000000000..f0c2100831 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/OptimizeTestCases.java @@ -0,0 +1,187 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.OptimizeMode.REWRITE_ALL; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.OptimizeMode.REWRITE_DATA; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.OptimizeMode.REWRITE_MANIFESTS; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.optimizeTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static com.dremio.exec.planner.OptimizeOutputSchema.NEW_DATA_FILES_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.OPTIMIZE_OUTPUT_SUMMARY; +import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DATA_FILE_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DELETE_FILE_COUNT; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; + + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedOptimizeTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class OptimizeTestCases { + private ITDataplanePluginTestSetup base; + + OptimizeTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void optimizeNewTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify + base.testBuilder() + .sqlQuery(optimizeTableQuery(tablePath, REWRITE_ALL)) + .unOrdered() + .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT) + .baselineValues(2L, 0L, 1L).go(); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void 
optimizeDataFilesOnly() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify + base.testBuilder() + .sqlQuery(optimizeTableQuery(tablePath, REWRITE_DATA)) + .unOrdered() + .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT) + .baselineValues(2L, 0L, 1L).go(); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void optimizeManifestsOnly() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify + base.testBuilder() + .sqlQuery(optimizeTableQuery(tablePath, REWRITE_MANIFESTS)) + .unOrdered() + .baselineColumns(OPTIMIZE_OUTPUT_SUMMARY) + .baselineValues("Optimize table successful").go(); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void optimizeAgnosticOfSourceBucket() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act + base.runSQL(insertTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify + base.runWithAlternateSourcePath(optimizeTableQuery(tablePath, REWRITE_ALL)); + base.assertAllFilesAreInBaseBucket(tablePath); + base.assertTableHasExpectedNumOfDataFiles(tablePath, 1); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void optimizeInDifferentBranches() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + final List tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + // Prepare data + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + base.runSQL(insertSelectQuery(tablePath, 2)); + + // Create dev branch and switch to that + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(devBranch)); + + // Insert and optimize in dev branch + base.runSQL(insertSelectQuery(tablePath, 2)); + base.testBuilder() + .sqlQuery(optimizeTableQuery(tablePath, REWRITE_ALL)) + .unOrdered() + .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT) + .baselineValues(3L, 0L, 1L).go(); + + // Optimize in main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + base.testBuilder() + .sqlQuery(optimizeTableQuery(tablePath, REWRITE_ALL)) + .unOrdered() + .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT) + .baselineValues(2L, 0L, 1L).go(); + + // cleanup + base.runSQL(dropTableQuery(tablePath)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/RollbackTestCases.java 
b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/RollbackTestCases.java new file mode 100644 index 0000000000..f7eda41c6a --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/RollbackTestCases.java @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinedTableKey; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.rollbackTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; + +import java.math.BigDecimal; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link ITDataplanePlugin.NestedRollbackTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ +public class RollbackTestCases { + private ITDataplanePluginTestSetup base; + + RollbackTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void rollbackToTimestamp() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify select results + base.assertTableHasExpectedNumRows(tablePath, 3); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 2); + + // Timestamp to rollback + final long timestampMillis = System.currentTimeMillis(); + + // Add an extra 
row, creating one more snapshot + base.runSQL(insertTableWithValuesQuery(tablePath, + Collections.singletonList("(4, CAST('fourth row' AS VARCHAR(65536)), CAST(4000 AS DECIMAL(38,3)))"))); + + // Verify select results again + base.assertTableHasExpectedNumRows(tablePath, 4); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 3); + + // Run rollback query + base.testBuilder() + .sqlQuery(rollbackTableQuery(tablePath, timestampMillis)) + .unOrdered() + .baselineColumns("ok", "summary") + .baselineValues(true, String.format("Table [%s.%s] rollbacked", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath))) + .go(); + + // Rollback changes the number of rows, but does not affect the number of snapshots. + base.assertTableHasExpectedNumRows(tablePath, 3); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 3); + + // Verify select results after rollback + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("id", "name", "distance") + .baselineValues(1, "first row", new BigDecimal("1000.000")) + .baselineValues(2, "second row", new BigDecimal("2000.000")) + .baselineValues(3, "third row", new BigDecimal("3000.000")) + .go(); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void rollbackInDifferentBranches() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(insertTableQuery(tablePath)); + + // Verify select results in main branch + base.assertTableHasExpectedNumRows(tablePath, 3); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 2); + + // Timestamp to roll back to + final long timestampMillis = System.currentTimeMillis(); + + // Create dev branch and switch to it + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(devBranch)); + + // Add a row in the dev branch, creating one more snapshot + base.runSQL(insertTableWithValuesQuery(tablePath, + Collections.singletonList("(4, CAST('fourth row' AS VARCHAR(65536)), CAST(4000 AS DECIMAL(38,3)))"))); + + // Verify select results in dev branch + base.assertTableHasExpectedNumRows(tablePath, 4); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 3); + + // Run rollback query in the dev branch + base.testBuilder() + .sqlQuery(rollbackTableQuery(tablePath, timestampMillis)) + .unOrdered() + .baselineColumns("ok", "summary") + .baselineValues(true, String.format("Table [%s.%s] rollbacked", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath))) + .go(); + + // Verify select results after rollback + base.assertTableHasExpectedNumRows(tablePath, 3); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 3); + + base.testBuilder() + .sqlQuery(selectStarQuery(tablePath)) + .unOrdered() + .baselineColumns("id", "name", "distance") + .baselineValues(1, "first row", new BigDecimal("1000.000")) + .baselineValues(2, "second row", new BigDecimal("2000.000")) + .baselineValues(3, "third row", new BigDecimal("3000.000")) + .go(); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/SelectTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/SelectTestCases.java new file mode 100644 
index 0000000000..a24edfcb3f --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/SelectTestCases.java @@ -0,0 +1,370 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.BaseTestQuery.test; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinedTableKey; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.mergeBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.quoted; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQueryWithSpecifier; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQueryWithSpecifier; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useCommitQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import java.util.List; + +import org.apache.calcite.util.TimestampString; +import org.junit.jupiter.api.Test; + +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; + +/** + * + * To run these tests, run through container class ITDataplanePlugin + * To run all tests run {@link 
ITDataplanePlugin.NestedSelectTests} + * To run single test, see instructions at the top of {@link ITDataplanePlugin} + */ + +public class SelectTestCases { + private ITDataplanePluginTestSetup base; + + SelectTestCases(ITDataplanePluginTestSetup base) { + this.base = base; + } + + @Test + public void selectFromEmptyTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + + // Act and Assert + base.assertTableHasExpectedNumRows(tablePath, 0); + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + void selectAfterDropTable() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.runSQL(dropTableQuery(tablePath)); + + // Act and Assert + // Expect validation error. validateAndConvert calls VersionedDatasetAdapter#build. That returns null if unable to + // get an IcebergDatasetHandle (VersionedDatasetAdapter#tryGetHandleToIcebergFormatPlugin). + // The top-level resolution then returns this error. + base.assertQueryThrowsExpectedError(selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN), + String.format("Object '%s' not found within '%s'", + tablePath.get(0), DATAPLANE_PLUGIN_NAME)); + } + + @Test + public void selectTableInNonExistentBranch() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + final String invalidBranch = "xyz"; + + // Act and Assert + base.assertQueryThrowsExpectedError(selectStarQueryWithSpecifier(tablePath, "BRANCH " + invalidBranch), + String.format("Branch %s is not found", + invalidBranch)); + } + + @Test + public void selectWithSpecifiers() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final String devBranch = generateUniqueBranchName(); + String firstTag = generateUniqueTagName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + // Set context to main branch + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + long mtime1 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + // Verify with select + base.assertTableHasExpectedNumRows(tablePath, 5); + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 5); + // Create tag + base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME)); + + // Create dev branch + base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + final TimestampString ts1 = TimestampString.fromMillisSinceEpoch(System.currentTimeMillis()); + + // Switch to dev + test("USE dfs_test"); + base.runSQL(useBranchQuery(devBranch)); + // Insert rows + base.runSQL(insertSelectQuery(tablePath, 2)); + // Verify number of rows. 
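+ // The insert above commits only to the dev branch, so the dev table's mtime should now be ahead + // of the mtime captured on main (mtime1); the assertions below check exactly that.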
+ long mtime2 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, devBranch), base); + final TimestampString ts2 = TimestampString.fromMillisSinceEpoch(System.currentTimeMillis()); + + base.assertTableHasExpectedNumRows(tablePath, 7); + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 5); + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "BRANCH " + devBranch), DEFAULT_COUNT_COLUMN, 7); + assertThat(mtime2 > mtime1).isTrue(); + + // on devBranch branch, at this timestamp + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "TIMESTAMP '" + ts1 + "'"), DEFAULT_COUNT_COLUMN, 5); + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "TIMESTAMP '" + ts2 + "'"), DEFAULT_COUNT_COLUMN, 7); + + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "CURRENT_TIMESTAMP()" ), DEFAULT_COUNT_COLUMN, 7); + + // Switch back to main + base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + // Verify number of rows + base.assertTableHasExpectedNumRows(tablePath, 5); + + // Act + // Merge dev to main + base.runSQL(mergeBranchQuery(devBranch, DEFAULT_BRANCH_NAME)); + + // Assert + base.assertTableHasExpectedNumRows(tablePath, 7); + long mtime3 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base); + assertThat(mtime3 > mtime1).isTrue(); + assertThat(mtime3 == mtime2).isTrue(); + + base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, + "TAG " + firstTag), DEFAULT_COUNT_COLUMN, 5); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + CatalogEntityKey ckey = CatalogEntityKey.newBuilder() + .keyComponents(tablePath) + .tableVersionContext(new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME)) + .build(); + DremioTable droppedTable = CatalogUtil.getTable(ckey, base.getCatalog()); + + base.assertQueryThrowsExpectedError(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, "BRANCH " + DEFAULT_BRANCH_NAME), + String.format("Table '%s.%s' not found", + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath))); + + assertThat(droppedTable).isNull(); + + } + + @Test + void selectAfterDropWithOlderTag() throws Exception { + // Arrange + String firstTag = generateUniqueTagName(); + String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + + // Create table1 on default branch + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Create a tag to mark it + base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME)); + + // Drop table table1 on default branch + base.runSQL(dropTableQuery(tablePath)); + + // Ensure it cannot be selected from the tip of the branch + base.assertQueryThrowsExpectedError(selectCountQuery(tablePath, DEFAULT_COUNT_COLUMN), + String.format("Object '%s' not found within '%s'", tablePath.get(0), DATAPLANE_PLUGIN_NAME)); + + // Act + // Go back to tag1 + base.runSQL(useTagQuery(firstTag)); + + // Assert + // Try to select from table1 - should succeed + base.assertTableHasExpectedNumRows(tablePath, 0); + + // Go back to branch reference + 
base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME)); + + base.assertQueryThrowsExpectedError(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN, "BRANCH " + DEFAULT_BRANCH_NAME), + String.format("Table '%s.%s' not found", + DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath))); + } + + @Test + void selectUseCommit() throws Exception { + // Arrange + final String tableOnMainAndBranchName = generateUniqueTableName(); + final List tableOnMainAndBranchPath = tablePathWithFolders(tableOnMainAndBranchName); + final int tableOnMainAndBranchNumRows = 10; + final String tableOnBranchOnlyName = generateUniqueTableName(); + final List tableOnBranchOnlyPath = tablePathWithFolders(tableOnBranchOnlyName); + final int tableOnBranchOnlyNumRows = 15; + final String branchName = generateUniqueBranchName(); + + // "tableOnMainAndBranch" in both main and new branch + // "tableOnBranchOnly" only in new branch + String commitHashMainAtBeginning = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + createFolders(tableOnMainAndBranchPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tableOnMainAndBranchPath, tableOnMainAndBranchNumRows)); + String commitHashMainAfterTable = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + createFolders(tableOnBranchOnlyPath, VersionContext.ofBranch(branchName)); + base.runSQL(createTableAsQuery(tableOnBranchOnlyPath, tableOnBranchOnlyNumRows)); + String commitHashBranchAfterTable = base.getCommitHashForBranch(branchName); + + // Act + Assert + base.runSQL(useCommitQuery(commitHashBranchAfterTable)); + base.assertSQLReturnsExpectedNumRows( + selectCountQuery(tableOnBranchOnlyPath, DEFAULT_COUNT_COLUMN), + DEFAULT_COUNT_COLUMN, + tableOnBranchOnlyNumRows); + base.runSQL(useCommitQuery(commitHashMainAfterTable)); + base.assertSQLReturnsExpectedNumRows( + selectCountQuery(tableOnMainAndBranchPath, DEFAULT_COUNT_COLUMN), + DEFAULT_COUNT_COLUMN, + tableOnMainAndBranchNumRows); + base.runSQL(useCommitQuery(commitHashMainAtBeginning)); + base.assertQueryThrowsExpectedError( + selectCountQuery( + tableOnMainAndBranchPath, + DEFAULT_COUNT_COLUMN), + "not found"); + } + + @Test + void selectCommitAt() throws Exception { + // Arrange + final String tableOnMainAndBranchName = generateUniqueTableName(); + final List tableOnMainAndBranchPath = tablePathWithFolders(tableOnMainAndBranchName); + final int tableOnMainAndBranchNumRows = 10; + final String tableOnBranchOnlyName = generateUniqueTableName(); + final List tableOnBranchOnlyPath = tablePathWithFolders(tableOnBranchOnlyName); + final int tableOnBranchOnlyNumRows = 15; + final String branchName = generateUniqueBranchName(); + + // "tableOnMainAndBranch" in both main and new branch + // "tableOnBranchOnly" only in new branch + String commitHashMainAtBeginning = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + createFolders(tableOnMainAndBranchPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tableOnMainAndBranchPath, tableOnMainAndBranchNumRows)); + String commitHashMainAfterTable = base.getCommitHashForBranch(DEFAULT_BRANCH_NAME); + base.runSQL(createBranchAtBranchQuery(branchName, DEFAULT_BRANCH_NAME)); + base.runSQL(useBranchQuery(branchName)); + createFolders(tableOnBranchOnlyPath, VersionContext.ofBranch(branchName)); + base.runSQL(createTableAsQuery(tableOnBranchOnlyPath, tableOnBranchOnlyNumRows)); + String commitHashBranchAfterTable = 
base.getCommitHashForBranch(branchName); + + // Act + Assert + base.assertSQLReturnsExpectedNumRows( + selectCountQueryWithSpecifier( + tableOnBranchOnlyPath, + DEFAULT_COUNT_COLUMN, + "COMMIT " + quoted(commitHashBranchAfterTable)), + DEFAULT_COUNT_COLUMN, + tableOnBranchOnlyNumRows); + base.assertSQLReturnsExpectedNumRows( + selectCountQueryWithSpecifier( + tableOnMainAndBranchPath, + DEFAULT_COUNT_COLUMN, + "COMMIT " + quoted(commitHashMainAfterTable)), + DEFAULT_COUNT_COLUMN, + tableOnMainAndBranchNumRows); + base.assertQueryThrowsExpectedError( + selectCountQueryWithSpecifier( + tableOnMainAndBranchPath, + DEFAULT_COUNT_COLUMN, + "COMMIT " + quoted(commitHashMainAtBeginning)), + "not found"); + } + + /** + * Test case for select * from table(table_snapshot('icebergTable')). + */ + @Test + public void icebergSnapshotMFunctionSQL() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + base.assertTableHasExpectedNumOfSnapshots(tablePath, 1); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + /** + * Test case for select * from table(table_files('icebergTable')). + */ + @Test + public void icebergTableFilesMFunctionSQL() throws Exception { + // Arrange + String tableName = generateUniqueTableName(); + final List<String> tablePath = tablePathWithFolders(tableName); + + // Act + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createTableAsQuery(tablePath, 5)); + base.assertTableHasExpectedNumOfDataFiles(tablePath, 1); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } + + @Test + public void selectStarVersionedTableWithQuotedPath() throws Exception { + final String tableName1 = generateUniqueTableName(); + final List<String> tablePath1 = tablePathWithFolders(tableName1); + createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath1)); + base.runSqlWithResults(String.format("select * from \"%s.%s.%s\".\"%s\"", DATAPLANE_PLUGIN_NAME, tablePath1.get(0), tablePath1.get(1), tablePath1.get(2))); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/TestDataplaneAssertions.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/TestDataplaneAssertions.java new file mode 100644 index 0000000000..26aeb175a0 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/TestDataplaneAssertions.java @@ -0,0 +1,237 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.BUCKET_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.METADATA_FOLDER; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.S3_PREFIX; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.USER_NAME; +import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.getS3Client; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Content; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.FetchOption; +import org.projectnessie.model.IcebergTable; +import org.projectnessie.model.IcebergView; +import org.projectnessie.model.LogResponse; +import org.projectnessie.model.Namespace; +import org.projectnessie.model.Operation; +import org.projectnessie.model.Reference; + +import com.amazonaws.services.s3.model.S3ObjectSummary; + + +/** + * Dataplane test assertion helpers. + */ +public final class TestDataplaneAssertions { + + private TestDataplaneAssertions() { + } + + public static void assertNessieHasCommitForTable(List<String> tableSchemaComponents, + Class<? extends Operation> operationType, + String branchName, + ITDataplanePluginTestSetup base + ) throws NessieNotFoundException { + final List<LogResponse.LogEntry> logEntries = base.getNessieClient() + .getCommitLog() + .refName(branchName) + .fetch(FetchOption.ALL) // Get extended data, including operations + .get() + .getLogEntries(); + assertThat(logEntries).hasSizeGreaterThanOrEqualTo(1); + final LogResponse.LogEntry mostRecentLogEntry = logEntries.get(0); // Commits are ordered most recent to earliest + + final List<Operation> operations = mostRecentLogEntry.getOperations(); + assertThat(operations).hasSize(1); + final Operation operation = operations.get(0); + assertThat(operationType).isAssignableFrom(operation.getClass()); + + final ContentKey actualContentKey = operation.getKey(); + final ContentKey expectedContentKey = ContentKey.of(tableSchemaComponents); + assertThat(actualContentKey).isEqualTo(expectedContentKey); + } + + public static void assertNessieHasTable(List<String> tableSchemaComponents, + String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + Map<ContentKey, Content> contentsMap = base.getNessieClient() + .getContent() + .refName(branchName) + .key(ContentKey.of(tableSchemaComponents)) + .get(); + + ContentKey expectedContentsKey = ContentKey.of(tableSchemaComponents); + assertThat(contentsMap).containsKey(expectedContentsKey); + + String expectedMetadataLocationPrefix = S3_PREFIX + BUCKET_NAME + "/" + String.join("/", tableSchemaComponents) + "/" + METADATA_FOLDER; + Optional<IcebergTable> maybeIcebergTable = contentsMap + .get(expectedContentsKey) + .unwrap(IcebergTable.class); + assertThat(maybeIcebergTable).isPresent(); + assertThat(maybeIcebergTable.get().getMetadataLocation()).startsWith(expectedMetadataLocationPrefix); + } + + public static void assertNessieHasView(List<String> viewSchemaComponents, + String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + Reference branch = base.getNessieClient().getReference() + .refName(branchName) + .get(); + Map<ContentKey, Content> contentsMap = base.getNessieClient() + .getContent() + .reference(branch) + .key(ContentKey.of(viewSchemaComponents)) + .get(); + + ContentKey 
expectedContentsKey = ContentKey.of(viewSchemaComponents); + assertThat(contentsMap).containsKey(expectedContentsKey); + + String expectedMetadataLocationPrefix = S3_PREFIX + BUCKET_NAME + "/" + String.join("/", viewSchemaComponents) + "/" + METADATA_FOLDER; + Optional<IcebergView> icebergView = contentsMap + .get(expectedContentsKey) + .unwrap(IcebergView.class); + assertThat(icebergView).isPresent(); + assertThat(icebergView.get().getMetadataLocation()).startsWith(expectedMetadataLocationPrefix); + } + + public static void assertNessieHasNamespace(List<String> namespaceComponents, + String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + + Reference branch = base.getNessieClient().getReference() + .refName(branchName) + .get(); + Map<ContentKey, Content> contentsMap = base.getNessieClient() + .getContent() + .reference(branch) + .key(ContentKey.of(namespaceComponents)) + .get(); + + ContentKey expectedContentsKey = ContentKey.of(namespaceComponents); + assertThat(contentsMap).containsKey(expectedContentsKey); + + Optional<Namespace> namespace = contentsMap + .get(expectedContentsKey) + .unwrap(Namespace.class); + assertThat(namespace).isPresent(); + } + + public static void assertLastCommitMadeBySpecifiedAuthor(String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + final List<LogResponse.LogEntry> logEntries = base.getNessieClient() + .getCommitLog() + .refName(branchName) + .fetch(FetchOption.ALL) // Get extended data, including operations + .get() + .getLogEntries(); + assertThat(logEntries).hasSizeGreaterThanOrEqualTo(1); + final LogResponse.LogEntry mostRecentLogEntry = logEntries.get(0); // Commits are ordered most recent to earliest + + final List<Operation> operations = mostRecentLogEntry.getOperations(); + assertThat(operations).hasSize(1); + assertThat(mostRecentLogEntry.getCommitMeta().getAuthor()).isEqualTo(USER_NAME); + } + + public static void assertNessieDoesNotHaveView(List<String> viewKey, + String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + Map<ContentKey, Content> contentsMap = base.getNessieClient() + .getContent() + .refName(branchName) + .key(ContentKey.of(viewKey)) + .get(); + assertThat(contentsMap).isEmpty(); + } + + public static void assertNessieDoesNotHaveTable(List<String> tableSchemaComponents, + String branchName, + ITDataplanePluginTestSetup base) throws NessieNotFoundException { + Map<ContentKey, Content> contentsMap = base.getNessieClient() + .getContent() + .refName(branchName) + .key(ContentKey.of(tableSchemaComponents)) + .get(); + assertThat(contentsMap).isEmpty(); + } + + public static void assertIcebergTableExistsAtSubPath(List<String> subPath) { + // Iceberg tables on disk have a "metadata" folder in their root; check for the "metadata" folder too + List<String> pathToMetadataFolder = new ArrayList<>(subPath); + pathToMetadataFolder.add(METADATA_FOLDER); + + List<String> keysInMetadataSubPath = getS3Client().listObjects(BUCKET_NAME, String.join("/", pathToMetadataFolder)).getObjectSummaries().stream() + .map(S3ObjectSummary::getKey) + .collect(Collectors.toList()); + + assertThat(keysInMetadataSubPath.size()).isGreaterThanOrEqualTo(1); + } + + public static void assertIcebergFilesExistAtSubPath(List<String> subPath, + int expectedNumAvroFilesExcludingSnapshot, + int expectedNumMetadataJsonFiles, + int expectedNumSnapshotFiles, + int expectedNumParquetFiles) { + List<String> pathToMetadataFolder = new ArrayList<>(subPath); + pathToMetadataFolder.add(METADATA_FOLDER); + + List<String> keysInMetadataSubPath = getS3Client().listObjects(BUCKET_NAME, String.join("/", pathToMetadataFolder)).getObjectSummaries().stream() + 
+      .map(S3ObjectSummary::getKey)
+      .collect(Collectors.toList());
+
+    // check for avro files
+    assertThat(keysInMetadataSubPath.stream()
+      .filter(key -> key.endsWith(".avro") && !(key.contains("snap")))
+      .collect(Collectors.toList())
+      .size()
+    ).isEqualTo(expectedNumAvroFilesExcludingSnapshot);
+
+    // check for snapshot files
+    assertThat(keysInMetadataSubPath.stream()
+      .filter(key -> key.contains("snap"))
+      .collect(Collectors.toList())
+      .size()
+    ).isEqualTo(expectedNumSnapshotFiles);
+
+    // Check for metadata.json file
+    assertThat(keysInMetadataSubPath.stream()
+      .filter(key -> key.endsWith(".metadata.json"))
+      .collect(Collectors.toList())
+      .size()
+    ).isEqualTo(expectedNumMetadataJsonFiles);
+
+    List<String> keysInSubPath = getS3Client().listObjects(BUCKET_NAME, String.join("/", subPath)).getObjectSummaries().stream()
+      .map(S3ObjectSummary::getKey)
+      .collect(Collectors.toList());
+
+    // Check for .parquet files
+    assertThat(keysInSubPath.stream()
+      .filter(key -> key.endsWith(".parquet"))
+      .collect(Collectors.toList())
+      .size()
+    ).isEqualTo(expectedNumParquetFiles);
+  }
+}
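For orientation when reading the test files below, a minimal sketch of how these helpers are meant to be called from a test. The table path and setup object are illustrative, not taken from this diff; Operation.Put is the Nessie operation type carried by a commit that adds or replaces content:

    // Illustrative usage only - "base" is the ITDataplanePluginTestSetup the container test provides
    List<String> tablePath = Arrays.asList("folderA", "folderB", "myTable"); // hypothetical key
    base.runSQL(createEmptyTableQuery(tablePath));
    // CREATE TABLE should land exactly one Put operation at the table's ContentKey on main:
    assertNessieHasCommitForTable(tablePath, Operation.Put.class, "main", base);
    assertNessieHasTable(tablePath, "main", base);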
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/UpdateTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/UpdateTestCases.java
new file mode 100644
index 0000000000..2417548ce0
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/UpdateTestCases.java
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.catalog.dataplane;
+
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.updateByIdFromAnotherBranchQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.updateByIdQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery;
+import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
+
+import java.math.BigDecimal;
+import java.util.Collections;
+import java.util.List;
+
+import org.junit.jupiter.api.Test;
+
+import com.dremio.exec.catalog.TableVersionContext;
+import com.dremio.exec.catalog.TableVersionType;
+import com.dremio.exec.catalog.VersionContext;
+
+/**
+ *
+ * To run these tests, run through container class ITDataplanePlugin
+ * To run all tests run {@link ITDataplanePlugin.NestedUpdateTests}
+ * To run single test, see instructions at the top of {@link ITDataplanePlugin}
+ */
+public class UpdateTestCases {
+  private ITDataplanePluginTestSetup base;
+
+  UpdateTestCases(ITDataplanePluginTestSetup base) {
+    this.base = base;
+  }
+
+  @Test
+  public void updateOne() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String devBranch = generateUniqueBranchName();
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Create dev branch
+    base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+    // Switch to dev
+    base.runSQL(useBranchQuery(devBranch));
+
+    // Act
+    base.runSQL(updateByIdQuery(tablePath));
+
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Select
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("id", "name", "distance")
+      .baselineValues(1, "first row", new BigDecimal("1000.000"))
+      .baselineValues(2, "second row", new BigDecimal("2000.000"))
+      .baselineValues(3, "third row", new BigDecimal("30000.000"))
+      .go();
+
+    // Check that main context still has the table
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Select
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("id", "name", "distance")
+      .baselineValues(1, "first row", new BigDecimal("1000.000"))
+      .baselineValues(2, "second row", new BigDecimal("2000.000"))
+      .baselineValues(3, "third row", new BigDecimal("3000.000"))
+      .go();
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
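Judging by the baselines above (row 3 reads 30000.000 on the dev branch after the update, but still 3000.000 on main), updateByIdQuery presumably renders SQL along these lines. The real builder lives in DataplaneTestDefines, which is not part of this excerpt, so this is a hedged reconstruction only:

    // Hedged sketch, not the actual DataplaneTestDefines implementation:
    static String updateByIdQuery(List<String> tablePath) {
      return String.format(
          "UPDATE %s.%s SET distance = CAST(30000.0 AS DECIMAL(38,3)) WHERE id = 3",
          DATAPLANE_PLUGIN_NAME, String.join(".", tablePath));
    }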
+
+  @Test
+  public void updateOneFromAnotherBranch() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String devBranch = generateUniqueBranchName();
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    long mtime1 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+    // Create dev branch
+    base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+    // Switch to dev
+    base.runSQL(useBranchQuery(devBranch));
+    // Insert more data
+    base.runSQL(insertTableWithValuesQuery(tablePath,
+      Collections.singletonList("(4, CAST('fourth row' AS VARCHAR(65536)), CAST(4000 AS DECIMAL(38,3)))")));
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 4);
+    // Switch to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(updateByIdFromAnotherBranchQuery(tablePath, devBranch));
+    long mtime2 = base.getMtimeForTable(tablePath, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    assertThat(mtime2 > mtime1).isTrue();
+    // Select
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("id", "name", "distance")
+      .baselineValues(1, "first row", new BigDecimal("1000.000"))
+      .baselineValues(2, "second row", new BigDecimal("2000.000"))
+      .baselineValues(3, "third row", new BigDecimal("4000.000"))
+      .go();
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
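One small AssertJ note on the mtime checks in this file: comparing the longs directly gives a far more useful failure message than boxing the comparison into a boolean:

    // Equivalent intent, but reports both values on failure instead of "expected true":
    assertThat(mtime2).isGreaterThan(mtime1);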
+
+  @Test
+  public void updateAgnosticOfSourceBucket() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    // Act
+    base.runWithAlternateSourcePath(updateByIdQuery(tablePath));
+    base.assertAllFilesAreInBaseBucket(tablePath);
+
+    // Assert
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    // Select
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("id", "name", "distance")
+      .baselineValues(1, "first row", new BigDecimal("1000.000"))
+      .baselineValues(2, "second row", new BigDecimal("2000.000"))
+      .baselineValues(3, "third row", new BigDecimal("30000.000"))
+      .go();
+  }
+
+  @Test
+  public void updateWithTagSet() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    final String tag = generateUniqueTagName();
+    // Act and Assert
+    base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME));
+    base.runSQL(useTagQuery(tag));
+    base.assertQueryThrowsExpectedError(updateByIdQuery(tablePath),
+      String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.",
+        tag));
+  }
+}
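The guard exercised by updateWithTagSet is not specific to UPDATE; any DML against a non-branch reference should fail the same way. An illustrative variant built from the same helpers (a hypothetical extra test, not part of this change):

    base.runSQL(useTagQuery(tag));
    base.assertQueryThrowsExpectedError(insertTableQuery(tablePath),
        String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", tag));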
supported for this source"); + + // Cleanup + base.runSQL(dropTableQuery(tablePath)); + } +} diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java new file mode 100644 index 0000000000..4349708911 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java @@ -0,0 +1,1773 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.dataplane; + +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableAddColumnsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableDropColumnQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterViewPropertyQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createReplaceViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableWithColDefsQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQueryWithEmptySql; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQueryWithIncompleteSql; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropViewQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateSchemaPath; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery; +import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery; +import static 
diff --git a/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java
new file mode 100644
index 0000000000..4349708911
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/catalog/dataplane/ViewTestCases.java
@@ -0,0 +1,1773 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.catalog.dataplane;
+
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DATAPLANE_PLUGIN_NAME;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_BRANCH_NAME;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.DEFAULT_COUNT_COLUMN;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableAddColumnsQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterTableDropColumnQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.alterViewPropertyQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createBranchAtBranchQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createEmptyTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createReplaceViewQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableAsQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTableWithColDefsQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createTagQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQueryWithEmptySql;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewQueryWithIncompleteSql;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.createViewSelectQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.dropViewQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateSchemaPath;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueBranchName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTableName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueTagName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.generateUniqueViewName;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertSelectQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.insertTableWithValuesQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinTablesQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinTpcdsTablesQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.joinedTableKey;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.quoted;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectCountQueryWithSpecifier;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.selectStarQueryWithSpecifier;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.tablePathWithFolders;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.updateViewSelectQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useBranchQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useContextQuery;
+import static com.dremio.exec.catalog.dataplane.DataplaneTestDefines.useTagQuery;
+import static com.dremio.exec.catalog.dataplane.ITDataplanePluginTestSetup.createFolders;
+import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieDoesNotHaveView;
+import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasTable;
+import static com.dremio.exec.catalog.dataplane.TestDataplaneAssertions.assertNessieHasView;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.calcite.util.TimestampString;
+import org.assertj.core.api.AssertionsForClassTypes;
+import org.junit.jupiter.api.Test;
+
+import com.dremio.exec.catalog.DremioTable;
+import com.dremio.exec.catalog.TableVersionContext;
+import com.dremio.exec.catalog.TableVersionType;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.proto.UserBitShared;
+import com.dremio.test.UserExceptionAssert;
+
+/**
+ *
+ * To run these tests, run through container class ITDataplanePlugin
+ * To run all tests run {@link ITDataplanePlugin.NestedViewTests}
+ * To run single test, see instructions at the top of {@link ITDataplanePlugin}
+ */
+public class ViewTestCases {
+  private ITDataplanePluginTestSetup base;
+
+  ViewTestCases(ITDataplanePluginTestSetup base) {
+    this.base = base;
+  }
+
+  @Test
+  public void createView() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+
+    // Assert
+    assertNessieHasView(viewKey, DEFAULT_BRANCH_NAME, base);
+  }
+
+  @Test
+  public void createViewTwice() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+
+    // Assert
+    assertThatThrownBy(() -> base.runSQL(createViewQuery(viewKey, tablePath)))
+      .hasMessageContaining("already exists");
+  }
+
+  @Test
+  public void createViewOnNonExistentTable() {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    assertThatThrownBy(() -> base.runSQL(createViewQuery(viewKey, tablePath)))
+      .hasMessageContaining("Object '" + tablePath.get(0) + "' not found within");
+  }
+
+  @Test
+  public void createViewWithIncompleteSql() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    List<String> viewKey = tablePathWithFolders(tableName);
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+
+    assertThatThrownBy(() -> base.runSQL(createViewQueryWithIncompleteSql(viewKey, tablePath)))
+      .hasMessageContaining("PARSE ERROR:");
+  }
+
+  @Test
+  public void dropView() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+    assertNessieHasView(viewKey, DEFAULT_BRANCH_NAME, base);
+    base.runSQL(dropViewQuery(viewKey));
+
+    // Assert
+    assertNessieDoesNotHaveView(viewKey, DEFAULT_BRANCH_NAME, base);
+  }
+
+  @Test
+  public void dropViewTwice() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+    assertNessieHasView(viewKey, DEFAULT_BRANCH_NAME, base);
+    base.runSQL(dropViewQuery(viewKey));
+    assertNessieDoesNotHaveView(viewKey, DEFAULT_BRANCH_NAME, base);
+
+    // Assert
+    assertThatThrownBy(() -> base.runSQL(dropViewQuery(viewKey)))
+      .hasMessageContaining("Unknown view");
+  }
+
+  @Test
+  public void dropViewNonExist() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+
+    // Assert
+    assertThatThrownBy(() -> base.runSQL(dropViewQuery(viewKey)))
+      .hasMessageContaining("Unknown view");
+  }
+
+  @Test
+  public void dropViewAsTable() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+    assertNessieHasView(viewKey, DEFAULT_BRANCH_NAME, base);
+
+    // Assert
+    assertThatThrownBy(() -> base.runSQL(dropTableQuery(viewKey)))
+      .hasMessageContaining("is not a TABLE");
+  }
+
+  @Test
+  public void createViewWithTagSet() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    String firstTag = generateUniqueTagName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 10);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 10);
+    // Create tag
+    base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME));
+
+    // Insert 5 more rows into the table
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    // AT query
+    String selectATQuery = String.format("select * from %s.%s AT TAG %s ",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      firstTag);
+
+    // Act
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, selectATQuery));
+
+    // Assert
+    base.assertViewHasExpectedNumRows(viewPath, 10);
+    base.assertTableHasExpectedNumRows(tablePath, 15);
+  }
+
+  @Test
+  public void createViewClashWithTable() throws Exception {
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Create table with 10 rows
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 10));
+
+    // Act and Assert
+    assertThatThrownBy(() -> base.runSQL(createViewQuery(tablePath, tablePath)))
+      .hasMessageContaining("A non-view table with given name ")
+      .hasMessageContaining("already exists in schema");
+  }
+
+  // View will be created with fully qualified name represented by viewCreationPath
+  // Table tableName1 will be resolved to the current schema path (workspaceSchemaPath) set in the context
+  // viewCreationPath != workspaceSchemaPath
+  @Test
+  public void createViewWithTableOnDifferentPathContext() throws Exception {
+    List<String> workspaceSchemaPath = generateSchemaPath();
+    // Set the current schema context
+    base.runSQL(useContextQuery(workspaceSchemaPath));
+    // Create table1 with 10 rows
+    String tableName1 = generateUniqueTableName();
+    base.runSQL(createTableAsQuery(Collections.singletonList(tableName1), 10));
+    final String viewName = generateUniqueViewName();
+    List<String> viewCreationPath = tablePathWithFolders(viewName);
+    createFolders(viewCreationPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewCreationPath, Collections.singletonList(tableName1)));
+    assertThat(viewCreationPath != workspaceSchemaPath).isTrue();
+    base.assertViewHasExpectedNumRows(viewCreationPath, 10);
+  }
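A note on the assertion at the end of createViewWithTableOnDifferentPathContext: `viewCreationPath != workspaceSchemaPath` compares list references, which is trivially true for two freshly built lists. If the intent is that the two paths differ in content, the content comparison is the stronger check:

    assertThat(viewCreationPath).isNotEqualTo(workspaceSchemaPath);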
+
+  @Test
+  public void updateView() throws Exception {
+    // Arrange
+    String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+    String tableName2 = generateUniqueTableName();
+    final List<String> tablePath2 = tablePathWithFolders(tableName2);
+
+    // Create table1 with 10 rows
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath1, 10));
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath1));
+    base.assertViewHasExpectedNumRows(viewKey, 10);
+    long mtime1 = base.getMtimeForTable(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+    // Create table2 with 20 rows.
+    createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath2, 20));
+
+    // Act
+    base.runSQL(createReplaceViewQuery(viewKey, tablePath2));
+    long mtime2 = base.getMtimeForTable(viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+
+    // Assert
+    base.assertViewHasExpectedNumRows(viewKey, 20);
+    AssertionsForClassTypes.assertThat(mtime2 > mtime1).isTrue();
+  }
+
+  @Test
+  public void alterViewProperty() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+
+    // Create a table with 10 rows.
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath1, 10));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Create a view.
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath1));
+
+    final String attribute = "enable_default_reflection";
+    final String value = "true";
+    final List<String> expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options updated", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+  }
+
+  @Test
+  public void alterViewPropertyTwice() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+
+    // Create a table with 10 rows.
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath1, 10));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Create a view.
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath1));
+
+    final String attribute = "enable_default_reflection";
+    final String value = "true";
+    List<String> expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options updated", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+
+    expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options did not change",
+          DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+  }
+
+  @Test
+  public void alterViewPropertyWithDifferentValue() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+
+    // Create a table with 10 rows.
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath1, 10));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Create a view.
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath1));
+
+    final String attribute = "enable_default_reflection";
+    String value = "true";
+    List<String> expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options updated", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+
+    value = "false";
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+  }
+
+  @Test
+  public void updateViewKeepProperties() throws Exception {
+    final String tableName1 = generateUniqueTableName();
+    final List<String> tablePath1 = tablePathWithFolders(tableName1);
+    final String tableName2 = generateUniqueTableName();
+    final List<String> tablePath2 = tablePathWithFolders(tableName2);
+
+    // Create a table with 10 rows.
+    createFolders(tablePath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath1, 10));
+
+    // Create a table with 20 rows.
+    createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath2, 20));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Create a view.
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath1));
+
+    final String attribute = "enable_default_reflection";
+    final String value = "true";
+
+    List<String> expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options updated", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+
+    base.runSQL(createReplaceViewQuery(viewKey, tablePath2));
+
+    expectResult =
+      Arrays.asList(
+        "true",
+        String.format(
+          "Table [%s.%s] options did not change",
+          DATAPLANE_PLUGIN_NAME, joinedTableKey(viewKey)));
+
+    assertThat(base.runSqlWithResults(alterViewPropertyQuery(viewKey, attribute, value)))
+      .contains(expectResult);
+  }
+
+  @Test
+  public void createViewWithNoSql() throws Exception {
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    List<String> viewKey = tablePathWithFolders(tableName);
+
+    UserExceptionAssert
+      .assertThatThrownBy(() -> base.runSQL(createViewQueryWithEmptySql(viewKey, tablePath)))
+      .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE);
+  }
+
+  @Test
+  public void selectFromView() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewPath = tablePathWithFolders(viewName);
+    base.runSQL(insertTableQuery(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewPath, tablePath));
+
+    // Act and Assert
+    base.assertTableHasExpectedNumRows(tablePath, 3);
+    base.assertViewHasExpectedNumRows(viewPath, 3);
+
+    // cleanup
+    base.runSQL(dropViewQuery(viewPath));
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void selectFromViewWithJoin() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueViewName();
+    List<String> viewPath = tablePathWithFolders(viewName);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, joinTpcdsTablesQuery()));
+
+    // Act and Assert
+    base.assertViewHasExpectedNumRows(viewPath, 22500000);
+    // cleanup
+    base.runSQL(dropViewQuery(viewPath));
+  }
+
+  @Test
+  public void selectFromViewDifferentTags() throws Exception {
+    // Arrange
+    String firstTag = generateUniqueTagName();
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Create table1 on default branch with 10 rows
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 10));
+    base.assertTableHasExpectedNumRows(tablePath, 10);
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewPath = tablePathWithFolders(viewName);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewPath, tablePath));
+    // Create a tag to mark it
+    base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME));
+    // Insert 10 more rows
+    base.runSQL(insertSelectQuery(tablePath, 10));
+    base.assertViewHasExpectedNumRows(viewPath, 20);
+
+    // Act
+    // Go back to tag1
+    base.runSQL(useTagQuery(firstTag));
+
+    // Assert
+    // Select from view should return 10
+    base.assertViewHasExpectedNumRows(tablePath, 10);
+
+    // cleanup
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.runSQL(dropViewQuery(viewPath));
+    base.runSQL(dropTableQuery(tablePath));
+  }
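Besides switching the whole session with USE TAG, the tag-pinned read in selectFromViewDifferentTags could equally be expressed per query with the specifier helpers this file already imports; an illustrative equivalent of the assertion above:

    base.assertSQLReturnsExpectedNumRows(
        selectCountQueryWithSpecifier(viewPath, DEFAULT_COUNT_COLUMN, "TAG " + firstTag),
        DEFAULT_COUNT_COLUMN, 10);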
+
+  // This tests a view with column specified in select list
+  // This will not pick up underlying table schema changes for columns that are not in its select list
+  @Test
+  public void selectFromViewOnDiffBranchesWithAddColumn() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> columnDefinition = Collections.singletonList("col1 int");
+    final List<String> addedColDef = Arrays.asList("col2 int", "col3 int", "col4 varchar");
+    final List<String> columnValuesBeforeAdd = Arrays.asList("(1)", "(2)");
+    final List<String> columnValuesAfterAdd = Arrays.asList("(3,3,3,'three')", "(4,4,4,'four')");
+
+    // Setup
+    // Set context to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition));
+    // Insert 2 rows into table
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    String sqlQuery = String.format("select col1 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, sqlQuery));
+    final String devBranchName = generateUniqueBranchName();
+    // Create a dev branch from main
+    base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(useBranchQuery(devBranchName));
+    // Alter underlying table
+    base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef));
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd));
+
+    // Assert
+    // Execute view in context of main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    // Select the rows from view
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .go();
+
+    // Execute view in context of dev
+    base.runSQL(useBranchQuery(devBranchName));
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("col1", "col2", "col3", "col4")
+      .baselineValues(1, null, null, null)
+      .baselineValues(2, null, null, null)
+      .baselineValues(3, 3, 3, "three")
+      .baselineValues(4, 4, 4, "four")
+      .go();
+    // Select the new rows from new columns
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .baselineValues(3)
+      .baselineValues(4)
+      .go();
+
+    // cleanup
+    base.runSQL(dropViewQuery(viewPath));
+    base.runSQL(dropTableQuery(tablePath));
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  // This tests a view with a '*' specification - no columns specified in select list
+  // This should pick up the underlying table schema changes
+  @Test
+  public void selectFromStarViewOnDiffBranchesWithAddColumn() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> columnDefinition = Collections.singletonList("col1 int");
+    final List<String> addedColDef = Arrays.asList("col2 int", "col3 int", "col4 varchar");
+    final List<String> columnValuesBeforeAdd = Arrays.asList("(1)", "(2)");
+    final List<String> columnValuesAfterAdd = Arrays.asList("(3,3,3,'three')", "(4,4,4,'four')");
+
+    // Set context to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition));
+    // Insert 2 rows into table
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    String sqlQuery = String.format("select * from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, sqlQuery));
+    final String devBranchName = generateUniqueBranchName();
+    // Create a dev branch from main
+    base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(useBranchQuery(devBranchName));
+    // Alter underlying table
+    base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef));
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd));
+
+    // Assert
+
+    // Execute view in context of main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    // Select the rows from view
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .go();
+
+    // Execute select from table in context of dev to reflect the added columns
+    base.runSQL(useBranchQuery(devBranchName));
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("col1", "col2", "col3", "col4")
+      .baselineValues(1, null, null, null)
+      .baselineValues(2, null, null, null)
+      .baselineValues(3, 3, 3, "three")
+      .baselineValues(4, 4, 4, "four")
+      .go();
+    // Execute view in context of dev
+    // First attempt should show the exception raised (retryable)
+    base.assertQueryThrowsExpectedError(
+      selectStarQuery(viewPath),
+      "SCHEMA_CHANGE ERROR: Some virtual datasets were out of date and have been corrected");
+
+    // Second attempt should show updates to get all the underlying table changes
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1", "col2", "col3", "col4")
+      .baselineValues(1, null, null, null)
+      .baselineValues(2, null, null, null)
+      .baselineValues(3, 3, 3, "three")
+      .baselineValues(4, 4, 4, "four")
+      .go();
+
+    // cleanup TODO : Cleanup view after dropView support
+    base.runSQL(dropTableQuery(tablePath));
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.runSQL(dropTableQuery(tablePath));
+  }
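The fails-once-then-succeeds sequence around the automatic view fix-up recurs below (see selectFromViewWithStarQueryAndDropUnderlyingColumn); it could be factored into a small helper, sketched here under the assumption that the second attempt always sees the corrected schema:

    // Hypothetical helper, illustrative only:
    private void selectExpectingOneTimeSchemaFixup(List<String> viewPath) throws Exception {
      base.assertQueryThrowsExpectedError(selectStarQuery(viewPath),
          "SCHEMA_CHANGE ERROR: Some virtual datasets were out of date and have been corrected");
      base.runSQL(selectStarQuery(viewPath)); // retry succeeds against the corrected definition
    }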
+
+  // Negative tests a view with a '*' specification. Select with a tag will fail auto fixup
+  @Test
+  public void selectFromStarViewWithTagWithAddColumn() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    String tagName = generateUniqueTagName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> columnDefinition = Collections.singletonList("col1 int");
+    final List<String> addedColDef = Arrays.asList("col2 int", "col3 int", "col4 varchar");
+    final List<String> columnValuesBeforeAdd = Arrays.asList("(1)", "(2)");
+    final List<String> columnValuesAfterAdd = Arrays.asList("(3,3,3,'three')", "(4,4,4,'four')");
+
+    // Set context to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition));
+    // Insert 2 rows into table
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    String sqlQuery = String.format("select * from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, sqlQuery));
+    final String devBranchName = generateUniqueBranchName();
+    // Create a dev branch from main
+    base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(useBranchQuery(devBranchName));
+    // Alter underlying table
+    base.runSQL(alterTableAddColumnsQuery(tablePath, addedColDef));
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterAdd));
+
+    // Create a tag
+    base.runSQL(createTagQuery(tagName, devBranchName));
+
+    // Assert
+    // Execute view in context of tag
+    base.runSQL(useTagQuery(tagName));
+    // Execute select from table in context of dev to reflect the added columns
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("col1", "col2", "col3", "col4")
+      .baselineValues(1, null, null, null)
+      .baselineValues(2, null, null, null)
+      .baselineValues(3, 3, 3, "three")
+      .baselineValues(4, 4, 4, "four")
+      .go();
+
+    // Execute view in context of dev - should fail - cannot fixup
+    base.assertQueryThrowsExpectedError(
+      selectStarQuery(viewPath),
+      "VALIDATION ERROR: Some virtual datasets are out of date and need to be manually updated.");
+
+    // cleanup
+    base.runSQL(useBranchQuery(devBranchName));
+    base.runSQL(dropViewQuery(viewPath));
+    base.runSQL(dropTableQuery(tablePath));
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void selectFromViewOnDiffBranchesWithDropColumn() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> columnDefinition = Arrays.asList("col1 int", "col2 varchar");
+    final List<String> dropCols = Arrays.asList("col2");
+    final List<String> columnValuesBeforeAdd = Arrays.asList("(1,'one')", "(2,'two')");
+    final List<String> columnValuesAfterDrop = Arrays.asList("(3)", "(4)");
+
+    // Setup
+    // Set context to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition));
+    // Insert 2 rows into table
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    String sqlQuery = String.format("select col2 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, sqlQuery));
+    final String devBranchName = generateUniqueBranchName();
+    // Create a dev branch from main
+    base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(useBranchQuery(devBranchName));
+    // Alter underlying table
+    base.runSQL(alterTableDropColumnQuery(tablePath, dropCols));
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterDrop));
+
+    // Assert
+    // Execute view in context of main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    // Select the rows from view
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col2")
+      .baselineValues("one")
+      .baselineValues("two")
+      .go();
+
+    // Select in context of dev to show one column
+    base.runSQL(useBranchQuery(devBranchName));
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .baselineValues(3)
+      .baselineValues(4)
+      .go();
+    // Execute view in context of dev - should error out
+    base.assertQueryThrowsExpectedError(
+      selectStarQuery(viewPath),
+      "Error while expanding view "
+        + String.format("%s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewPath))
+        + ". Column 'col2' not found in any table. Verify the view’s SQL definition.");
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void selectFromViewWithStarQueryAndDropUnderlyingColumn() throws Exception {
+    // Arrange
+    String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> columnDefinition = Arrays.asList("col1 int", "col2 varchar");
+    final List<String> dropCols = Arrays.asList("col2");
+    final List<String> columnValuesBeforeAdd = Arrays.asList("(1,'one')", "(2,'two')");
+    final List<String> columnValuesAfterDrop = Arrays.asList("(3)", "(4)");
+
+    // Setup
+    // Set context to main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableWithColDefsQuery(tablePath, columnDefinition));
+    // Insert 2 rows into table
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesBeforeAdd));
+
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    String sqlQuery = String.format("select * from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, sqlQuery));
+    final String devBranchName = generateUniqueBranchName();
+    // Create a dev branch from main
+    base.runSQL(createBranchAtBranchQuery(devBranchName, DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(useBranchQuery(devBranchName));
+    // Alter underlying table
+    base.runSQL(alterTableDropColumnQuery(tablePath, dropCols));
+    base.runSQL(insertTableWithValuesQuery(tablePath, columnValuesAfterDrop));
+
+    // Assert
+    // Execute view in context of main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    // Select the rows from view
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1", "col2")
+      .baselineValues(1, "one")
+      .baselineValues(2, "two")
+      .go();
+
+    // Select in context of dev to show one column
+    base.runSQL(useBranchQuery(devBranchName));
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(tablePath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .baselineValues(3)
+      .baselineValues(4)
+      .go();
+    // Execute view in context of dev - should work since it's a star query
+
+    // First attempt should show the exception raised (retryable)
+    base.assertQueryThrowsExpectedError(
+      selectStarQuery(viewPath),
+      "SCHEMA_CHANGE ERROR: Some virtual datasets were out of date and have been corrected");
+    // Second attempt should show results
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .baselineValues(2)
+      .baselineValues(3)
+      .baselineValues(4)
+      .go();
+
+    // cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  @Test
+  public void selectFromViewVirtualInt() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    // Act
+    String viewSQL = String.format("select 1, 2, 3 ");
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    // Assert
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      1);
+  }
+
+  @Test
+  public void selectFromViewVirtualVarcharWithCast() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    String viewSQL = String.format("select CAST('abc' AS VARCHAR(65536)) as varcharcol");
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    // Assert
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      1);
+  }
+
+  @Test
+  public void selectFromViewVirtualVarchar() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    String viewSQL = String.format("select 0 , 1 , 2 , 'abc' ");
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    // Assert
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      1);
+  }
+
+  @Test
+  public void selectFromViewConcat() throws Exception {
+    // Arrange
+    final String viewName1 = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final List<String> viewPath1 = tablePathWithFolders(viewName1);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    String viewSQL1 = String.format("select CONCAT(name, ' of view ') from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath1, viewSQL1));
+
+    // Select
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath1, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      3);
+  }
+
+  @Test
+  public void selectFromViewCaseInt() throws Exception {
+    // Arrange
+    final String viewName1 = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final List<String> viewPath1 = tablePathWithFolders(viewName1);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    base.runSQL(String.format("insert into %s.%s values (-1, 'invalid id', 10.0)",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)));
+
+    String viewSQL1 = String.format("select case when id > 0 THEN 1 ELSE 0 END AS C5 from %s.%s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath));
+    // Act
+    createFolders(viewPath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath1, viewSQL1));
+
+    // Assert
+    base.assertViewHasExpectedNumRows(viewPath1, 4);
+    String viewWhereQuery = String.format("select count(*) as c1 from %s.%s where C5 = 1", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewPath1));
+    base.assertSQLReturnsExpectedNumRows(viewWhereQuery, "c1", 3);
+  }
+
+  @Test
+  public void selectFromViewCaseVarchar() throws Exception {
+    // Arrange
+    final String viewName1 = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final List<String> viewPath1 = tablePathWithFolders(viewName1);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+    base.runSQL(String.format("insert into %s.%s values (-1, 'invalid id', 10.0)",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)));
+
+    String viewSQL1 = String.format("select case when id > 0 THEN 'positive' ELSE 'invalid' END AS C5 from %s.%s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath));
+
+    createFolders(viewPath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath1, viewSQL1));
+
+    // Select
+    base.assertViewHasExpectedNumRows(viewPath1, 4);
+    String viewWhereQuery = String.format("select count(*) as c1 from %s.%s where C5 = 'invalid'", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewPath1));
+    base.assertSQLReturnsExpectedNumRows(viewWhereQuery, "c1", 1);
+  }
+
+  @Test
+  public void selectViewWithSpecifierTag() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    String firstTag = generateUniqueTagName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 10);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 10);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewPath, tablePath));
+    // Create tag
+    base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME));
+
+    // Insert 10 more rows into table
+    base.runSQL(insertSelectQuery(tablePath, 10));
+    // Verify view can see the new rows.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      20);
+
+    // Act and Assert
+    // Now run the query with AT syntax on tag to verify only 10 rows are returned.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQueryWithSpecifier(viewPath,
+        DEFAULT_COUNT_COLUMN,
+        "TAG " + firstTag),
+      DEFAULT_COUNT_COLUMN,
+      10);
+  }
+
+  @Test
+  public void selectViewWithSpecifierCommitAndBranch() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final String devBranch = generateUniqueBranchName();
+
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 10);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 10);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewPath, tablePath));
+
+    // Create dev branch
+    base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+    base.runSQL(useBranchQuery(devBranch));
+    String commitHashBranchBeforeInsert = base.getCommitHashForBranch(devBranch);
+    // Insert 10 more rows into table
+    base.runSQL(insertSelectQuery(tablePath, 10));
+    String commitHashBranchAfterInsert = base.getCommitHashForBranch(devBranch);
+
+    // Act and Assert
+    // Verify view can see the new rows at each commit
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQueryWithSpecifier(viewPath, DEFAULT_COUNT_COLUMN,
+        "COMMIT " + quoted(commitHashBranchBeforeInsert)),
+      DEFAULT_COUNT_COLUMN,
+      10);
+
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQueryWithSpecifier(viewPath, DEFAULT_COUNT_COLUMN,
+        "COMMIT " + quoted(commitHashBranchAfterInsert)),
+      DEFAULT_COUNT_COLUMN,
+      20);
+  }
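AT COMMIT pins a read to an immutable commit hash, so both counts in selectViewWithSpecifierCommitAndBranch stay stable even as devBranch advances; note the hash has to be quoted in SQL, which is what the quoted(...) helper is for. An illustrative follow-up assertion against the branch head:

    String atCommit = "COMMIT " + quoted(base.getCommitHashForBranch(devBranch));
    base.assertSQLReturnsExpectedNumRows(
        selectCountQueryWithSpecifier(viewPath, DEFAULT_COUNT_COLUMN, atCommit),
        DEFAULT_COUNT_COLUMN, 20);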
+  @Test
+  public void selectViewWithSpecifierAndJoin() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final String tableName2 = generateUniqueTableName();
+
+    String firstTag = generateUniqueTagName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final List<String> tablePath2 = tablePathWithFolders(tableName2);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    createFolders(tablePath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath2, 10));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 5);
+    base.assertTableHasExpectedNumRows(tablePath2, 10);
+
+    String table1 = joinedTableKey(tablePath);
+    String table2 = joinedTableKey(tablePath2);
+    String condition = " TRUE ";
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, joinTablesQuery(table1, table2, condition)));
+
+    // Create tag
+    base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME));
+
+    //Verify the initial row count of the join view (5 x 10 rows).
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      50);
+
+    base.runSQL(insertSelectQuery(tablePath, 10));
+
+    //Verify view can see the new rows (now 15 x 10 rows).
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      150);
+
+    //Act and Assert
+    //Now run the query with AT syntax on tag to verify only 50 rows are returned.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQueryWithSpecifier(viewPath,
+        DEFAULT_COUNT_COLUMN,
+        "TAG " + firstTag),
+      DEFAULT_COUNT_COLUMN,
+      50);
+  }
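+  // Editor's note: with the join condition TRUE the view behaves as a cross
+  // join, so the expected counts above are simple products (5 x 10 = 50 at the
+  // tag, 15 x 10 = 150 after the insert). A hypothetical sketch of the helper
+  // the test relies on (the real builder sits in the test base; this shape is
+  // an assumption):
+  //
+  //   static String joinTablesQuery(String left, String right, String condition) {
+  //     return String.format("SELECT * FROM %s JOIN %s ON %s", left, right, condition);
+  //   }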
+  @Test
+  public void selectFromViewOnViewWithAt() throws Exception {
+    // Arrange
+    /*
+      view2
+       ---> view1
+         ---> table1
+      - Test query with view2 AT TAG <tag>
+      - Both view1 and table1 should resolve with the tagged version
+    */
+    final String viewName1 = generateUniqueViewName();
+    final String viewName2 = generateUniqueViewName();
+    final String tableName = generateUniqueTableName();
+    String firstTag = generateUniqueTagName();
+    final List<String> viewPath1 = tablePathWithFolders(viewName1);
+    final List<String> viewPath2 = tablePathWithFolders(viewName2);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    //Act
+    String viewSQL1 = String.format("select id+10 AS idv1, id+20 AS idv2 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    createFolders(viewPath1, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath1, viewSQL1));
+
+    String viewSQL2 = String.format("select idv1/10 AS idv10, idv2/10 AS idv20 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(viewPath1));
+    createFolders(viewPath2, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath2, viewSQL2));
+    base.runSQL(createTagQuery(firstTag, DEFAULT_BRANCH_NAME));
+    // Insert an extra row in table
+    base.runSQL(String.format("insert into %s.%s values (4, 'fourth row', 40.0)",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath)));
+
+    // Modify the first view (after the tag) - view1 now contains only 1 column.
+    String viewSQL11 = String.format("select id+10 AS idv1 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath));
+    base.runSQL(updateViewSelectQuery(viewPath1, viewSQL11));
+
+    // Assert
+    //Ensure that a select from view2 at the previous tag still expands to 2 columns and 3 rows.
+    base.testBuilder()
+      .sqlQuery(selectStarQueryWithSpecifier(viewPath2, "TAG " + firstTag))
+      .unOrdered()
+      .baselineColumns("idv10", "idv20")
+      .baselineValues(1, 2)
+      .baselineValues(1, 2)
+      .baselineValues(1, 2)
+      .go();
+
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(viewPath2, "c1", "TAG " + firstTag), "c1", 3);
+  }
+
+  @Test
+  public void selectFromViewFromSessionVersionContext() throws Exception {
+    // Arrange
+    /*
+      view - only exists in main
+       ---> table with AT BRANCH <dev> (table only exists in the dev branch)
+
+      - Lookup of the view should succeed in the context of main
+      - Expansion of the view should pick up the table only at the dev branch
+    */
+    final String viewName = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String devBranch = generateUniqueBranchName();
+    // Create dev branch
+    base.runSQL(createBranchAtBranchQuery(devBranch, DEFAULT_BRANCH_NAME));
+    //create table in dev
+    base.runSQL(useBranchQuery(devBranch));
+    createFolders(tablePath, VersionContext.ofBranch(devBranch));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    //create view in main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    String viewSQL = String.format("select * from %s.%s AT BRANCH %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      devBranch);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    //Act and Assert
+
+    //In the context of main, select from the view. The underlying table should resolve to dev (it does not exist in main).
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    //Verify the view resolves the table at the dev branch.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(viewPath, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      3);
+  }
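+  // Editor's note: the AT BRANCH clause is stored as part of the view
+  // definition, so the pin travels with every expansion regardless of the
+  // session context. Illustratively (the expansion shape is an assumption):
+  //
+  //   SELECT * FROM main.view          -- session context: main
+  //     -> SELECT * FROM table AT BRANCH dev   -- view body pins the table to dev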
+  @Test
+  public void selectFromNestedViewsWithAT() throws Exception {
+    // Arrange
+    /*
+      view3 - only exists in branch main
+       ---> view2 with AT BRANCH dev2 (view2 only exists in dev2)
+         ---> view1 with AT BRANCH dev1 (view1 only exists in dev1)
+           ---> table1 (only exists in dev0)
+      - Test query with view3 with branch set to main
+      - Lookup of view2 should resolve to dev2
+      - Lookup of view1 should resolve to dev1, and table1 to dev0.
+    */
+    final String view1 = generateUniqueTableName();
+    final String view2 = generateUniqueTableName();
+    final String view3 = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+
+    final List<String> view1Path = tablePathWithFolders(view1);
+    final List<String> view2Path = tablePathWithFolders(view2);
+    final List<String> view3Path = tablePathWithFolders(view3);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String dev0 = generateUniqueBranchName();
+    final String dev1 = generateUniqueBranchName();
+    final String dev2 = generateUniqueBranchName();
+
+    // Create dev branches
+    base.runSQL(createBranchAtBranchQuery(dev0, DEFAULT_BRANCH_NAME));
+    base.runSQL(createBranchAtBranchQuery(dev1, DEFAULT_BRANCH_NAME));
+    base.runSQL(createBranchAtBranchQuery(dev2, DEFAULT_BRANCH_NAME));
+
+    //create table in dev0
+    base.runSQL(useBranchQuery(dev0));
+    createFolders(tablePath, VersionContext.ofBranch(dev0));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(insertTableQuery(tablePath));
+
+    //create view1 in dev1
+    base.runSQL(useBranchQuery(dev1));
+    String view1SQL = String.format("select * from %s.%s AT BRANCH %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      dev0);
+    createFolders(view1Path, VersionContext.ofBranch(dev1));
+    base.runSQL(createViewSelectQuery(view1Path, view1SQL));
+
+    //create view2 in dev2
+    base.runSQL(useBranchQuery(dev2));
+    String view2SQL = String.format("select * from %s.%s AT BRANCH %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(view1Path),
+      dev1);
+    createFolders(view2Path, VersionContext.ofBranch(dev2));
+    base.runSQL(createViewSelectQuery(view2Path, view2SQL));
+
+    //create view3 in main
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    String view3SQL = String.format("select * from %s.%s AT BRANCH %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(view2Path),
+      dev2);
+    createFolders(view3Path, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(view3Path, view3SQL));
+
+    //Act and Assert
+
+    //In the context of main, select from view3. Each nested view and the table should resolve at its pinned branch.
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    //Verify the nested view resolves through all pinned branches.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(view3Path, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      3);
+  }
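+  // Editor's note: each nested view carries its own AT BRANCH pin, so the
+  // expansion walks main -> dev2 -> dev1 -> dev0 no matter which branch the
+  // session is on:
+  //
+  //   view3 (main) -> view2 AT BRANCH dev2 -> view1 AT BRANCH dev1 -> table1 AT BRANCH dev0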
+  @Test
+  public void selectFromNestedViewInnerTaggedVersion() throws Exception {
+    /*
+      view2 - Defined with AT TAG atag on view1
+       ---> view1 - Defined on table1
+
+      - Test query with view2 accessed with AT BRANCH main.
+      - Lookup of view1 should still resolve to TAG atag
+    */
+    // Arrange
+    final String view1 = generateUniqueTableName();
+    final String view2 = generateUniqueTableName();
+    final String tableName = generateUniqueTableName();
+    final String atag = generateUniqueTagName();
+
+    final List<String> view1Path = tablePathWithFolders(view1);
+    final List<String> view2Path = tablePathWithFolders(view2);
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+    String view1SQL = String.format("select * from %s.%s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath));
+    createFolders(view1Path, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(view1Path, view1SQL));
+    //Verify view1 sees all 10 rows.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(view1Path, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      10);
+    base.runSQL(createTagQuery(atag, DEFAULT_BRANCH_NAME));
+    String view2SQL = String.format("select * from %s.%s AT TAG %s",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(view1Path),
+      atag);
+    createFolders(view2Path, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(view2Path, view2SQL));
+    base.runSQL(insertSelectQuery(tablePath, 5));
+
+    //Verify view2 still sees only the 10 rows as of the tag; the newly inserted rows are not visible.
+    base.assertSQLReturnsExpectedNumRows(
+      selectCountQuery(view2Path, DEFAULT_COUNT_COLUMN),
+      DEFAULT_COUNT_COLUMN,
+      10);
+  }
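+  // Editor's note: the negative tests below pin down the boundary between
+  // version-context travel (AT BRANCH/TAG/COMMIT, resolved through the Nessie
+  // catalog) and Iceberg time travel (AT TIMESTAMP/SNAPSHOT): versioned view
+  // definitions may embed the former but reject the latter, as the expected
+  // error messages show.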
+  //Negative test
+  @Test
+  public void createViewWithTimeTravelQuery() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final String devBranch = generateUniqueBranchName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 5);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 5);
+
+    final TimestampString ts1 = TimestampString.fromMillisSinceEpoch(System.currentTimeMillis());
+
+    // Insert rows
+    base.runSQL(insertSelectQuery(tablePath, 2));
+    // Verify number of rows:
+    // current count on the main branch
+    base.assertTableHasExpectedNumRows(tablePath, 7);
+    // count on the main branch as of timestamp ts1
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "TIMESTAMP '" + ts1 + "'"), DEFAULT_COUNT_COLUMN, 5);
+
+    //AT query
+    String selectATQuery = String.format("select * from %s.%s AT TIMESTAMP '%s' ",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      ts1);
+
+    //Act and Assert
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    assertThatThrownBy(() -> base.runSQL(createViewSelectQuery(viewPath, selectATQuery)))
+      .hasMessageContaining("Versioned views not supported for time travel queries");
+
+    // Cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  //Negative test
+  @Test
+  public void selectFromArcticViewWithTimeTravel() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final String devBranch = generateUniqueBranchName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    String countCol = "countCol";
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 5);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 5);
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewPath, tablePath));
+
+    final TimestampString ts1 = TimestampString.fromMillisSinceEpoch(System.currentTimeMillis());
+
+    // Insert rows
+    base.runSQL(insertSelectQuery(tablePath, 2));
+    // Verify number of rows:
+    // current count on the main branch
+    base.assertTableHasExpectedNumRows(tablePath, 7);
+    // count on the main branch as of timestamp ts1
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "TIMESTAMP '" + ts1 + "'"), DEFAULT_COUNT_COLUMN, 5);
+
+    //Act and Assert
+    assertThatThrownBy(() -> base.runSQL(selectCountQueryWithSpecifier(viewPath, countCol, "TIMESTAMP '" + ts1 + "'")))
+      .hasMessageContaining("Time travel is not supported on views");
+
+    // Cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  //Negative test
+  @Test
+  public void createViewWithSnapshotQuery() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final String viewName = generateUniqueViewName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final String devBranch = generateUniqueBranchName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createTableAsQuery(tablePath, 5));
+    // Verify with select
+    base.assertTableHasExpectedNumRows(tablePath, 5);
+    base.assertSQLReturnsExpectedNumRows(selectCountQueryWithSpecifier(tablePath, DEFAULT_COUNT_COLUMN,
+      "BRANCH " + DEFAULT_BRANCH_NAME), DEFAULT_COUNT_COLUMN, 5);
+
+    final long snapshotId = 1000;
+
+    // Insert rows
+    base.runSQL(insertSelectQuery(tablePath, 2));
+    // Verify the current number of rows on the main branch.
+    base.assertTableHasExpectedNumRows(tablePath, 7);
+
+    //AT query
+    String selectATQuery = String.format("select * from %s.%s AT SNAPSHOT '%d' ",
+      DATAPLANE_PLUGIN_NAME,
+      joinedTableKey(tablePath),
+      snapshotId);
+
+    //Act and Assert
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    assertThatThrownBy(() -> base.runSQL(createViewSelectQuery(viewPath, selectATQuery)))
+      .hasMessageContaining("Versioned views not supported for time travel queries");
+
+    // Cleanup
+    base.runSQL(dropTableQuery(tablePath));
+  }
+
+  /**
+   * Verify CAST from the Calcite schema's INTEGER NOT NULL to the Iceberg view schema's nullable INTEGER.
+   * @throws Exception
+   */
+  @Test
+  public void selectStarFromViewVirtualInt() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    //Act
+    String viewSQL = "select 1 as col1";
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    //Assert
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues(1)
+      .go();
+  }
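+  // Editor's note: Calcite types a bare literal as NOT NULL (INTEGER NOT NULL
+  // above, VARCHAR(3) NOT NULL below), while the persisted Iceberg view schema
+  // stores the relaxed nullable type; the select-star round trips exercise the
+  // implicit CAST between the two schemas.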
+  /**
+   * Verify CAST from the Calcite schema's VARCHAR(3) NOT NULL to the Iceberg view schema's nullable VARCHAR(65536).
+   * @throws Exception
+   */
+  @Test
+  public void selectStarFromViewVirtualVarchar() throws Exception {
+    // Arrange
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+
+    // Set context to main branch
+    base.runSQL(useBranchQuery(DEFAULT_BRANCH_NAME));
+
+    //Act
+    String viewSQL = "select 'xyz' as col1";
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewSelectQuery(viewPath, viewSQL));
+
+    //Assert
+    base.testBuilder()
+      .sqlQuery(selectStarQuery(viewPath))
+      .unOrdered()
+      .baselineColumns("col1")
+      .baselineValues("xyz")
+      .go();
+  }
+
+  /**
+   * Verify a view property can be set and retrieved correctly.
+   * @throws Exception
+   */
+  @Test
+  public void setViewProperty() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act
+    createFolders(viewKey, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createViewQuery(viewKey, tablePath));
+
+    // Assert
+    assertNessieHasView(viewKey, DEFAULT_BRANCH_NAME, base);
+
+    // Disable the default reflection
+    base.runSQL(alterViewPropertyQuery(viewKey, "enable_default_reflection", "False"));
+
+    final String versionedDatasetId =
+      base.getVersionedDatatsetId(
+        viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+    final DremioTable dremioTable = base.getTableFromId(versionedDatasetId, base);
+
+    // Assert
+    assertThat(dremioTable.getDatasetConfig().getVirtualDataset().getDefaultReflectionEnabled())
+      .isFalse();
+
+    // Enable the default reflection
+    base.runSQL(alterViewPropertyQuery(viewKey, "enable_default_reflection", "True"));
+
+    final String newVersionedDatasetId =
+      base.getVersionedDatatsetId(
+        viewKey, new TableVersionContext(TableVersionType.BRANCH, DEFAULT_BRANCH_NAME), base);
+    final DremioTable newDremioTable = base.getTableFromId(newVersionedDatasetId, base);
+
+    // Assert
+    assertThat(newDremioTable.getDatasetConfig().getVirtualDataset().getDefaultReflectionEnabled())
+      .isTrue();
+  }
+
+  @Test
+  public void createViewWithImplicitFolders() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+
+    // Act
+    base.runSQL(createEmptyTableQuery(tablePath));
+
+    // Assert
+    assertNessieHasTable(tablePath, DEFAULT_BRANCH_NAME, base);
+
+    // Arrange
+    final String viewName = generateUniqueViewName();
+    List<String> viewKey = tablePathWithFolders(viewName);
+
+    // Act + Assert
+    base.assertQueryThrowsExpectedError(createViewQuery(viewKey, tablePath),
+      String.format("VALIDATION ERROR: Namespace '%s' must exist.",
+        String.join(".", viewKey.subList(0, viewKey.size() - 1))));
+  }
+
+  @Test
+  public void createViewInNonBranchVersionContext() throws Exception {
+    // Arrange
+    final String tableName = generateUniqueTableName();
+    final List<String> tablePath = tablePathWithFolders(tableName);
+    final String viewName = generateUniqueTableName();
+    final List<String> viewPath = tablePathWithFolders(viewName);
+    final String tag = generateUniqueTagName();
+    createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.runSQL(createEmptyTableQuery(tablePath));
+    base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME));
+    base.runSQL(useTagQuery(tag));
+
+    // Act and Assert
+    createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME));
+    base.assertQueryThrowsExpectedError(createViewQuery(viewPath, tablePath),
+      String.format("DDL and DML operations are only supported for branches - not on tags or commits. 
%s is not a branch.", + tag)); + } + + @Test + public void updateViewInNonBranchVersionContext() throws Exception { + // Arrange + final String tableName = generateUniqueTableName(); + final List tablePath = tablePathWithFolders(tableName); + final String viewName = generateUniqueTableName(); + final List viewPath = tablePathWithFolders(viewName); + final String tag = generateUniqueTagName(); + createFolders(tablePath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createEmptyTableQuery(tablePath)); + createFolders(viewPath, VersionContext.ofBranch(DEFAULT_BRANCH_NAME)); + base.runSQL(createViewQuery(viewPath, tablePath)); + base.runSQL(createTagQuery(tag, DEFAULT_BRANCH_NAME)); + base.runSQL(useTagQuery(tag)); + + // Act and Assert + String viewSQLupdate = String.format("select id+10 AS idv1 from %s.%s", DATAPLANE_PLUGIN_NAME, joinedTableKey(tablePath)); + base.assertQueryThrowsExpectedError(updateViewSelectQuery( viewPath, viewSQLupdate), + String.format("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.", + tag)); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignBranchHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignBranchHandler.java new file mode 100644 index 0000000000..8063de6610 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignBranchHandler.java @@ -0,0 +1,267 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Before; +import org.junit.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlAssignBranch; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.test.DremioTest; + +/** + * Tests for ALTER BRANCH ASSIGN. + */ +public class TestAssignBranchHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "localnessie"; + private static final String TARGET_BRANCH = "branch"; + private static final String DEFAULT_REFERENCE = "reference"; + private static final VersionContext DEFAULT_VERSION = + VersionContext.ofBranch(TARGET_BRANCH); + + private OptionManager optionManager; + private Catalog catalog; + private AssignBranchHandler handler; + private SqlAssignBranch assignBranch; + private SqlAssignBranch assignBranchWithDefaultSource; + private SqlAssignBranch assignBranchWithTag; + private SqlAssignBranch assignBranchToItself; + + private DataplanePlugin dataplanePlugin; + + @Before + public void setup() throws Exception { + QueryContext context = mock(QueryContext.class); + optionManager = mock(OptionManager.class); + catalog = mock(Catalog.class); + UserSession userSession = mock(UserSession.class); + dataplanePlugin = mock(DataplanePlugin.class); + + when(context.getCatalog()).thenReturn(catalog); + when(context.getOptions()).thenReturn(optionManager); + when(context.getSession()).thenReturn(userSession); + when(userSession.getDefaultSchemaPath()).thenReturn(new NamespaceKey(Arrays.asList(DEFAULT_SOURCE_NAME, "unusedFolder"))); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(DEFAULT_VERSION); + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(true); + when(catalog.getSource(anyString())).thenReturn(dataplanePlugin); + + handler = new AssignBranchHandler(context); + + assignBranch = + new SqlAssignBranch( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + assignBranchWithDefaultSource = + new 
SqlAssignBranch( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + null); + + assignBranchWithTag = + new SqlAssignBranch( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO), + ReferenceType.TAG, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + null); + + assignBranchToItself = + new SqlAssignBranch( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + } + + @Test + public void assignBranchSucceed() + throws ForemanSetupException, ReferenceConflictException, ReferenceNotFoundException { + // Arrange + doNothing().when(dataplanePlugin).assignBranch(anyString(), any()); + + // Assert + List result = handler.toResult("", assignBranch); + assertFalse(result.isEmpty()); + assertTrue(result.get(0).ok); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains(TARGET_BRANCH) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignBranchThrowUnsupport() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(false); + + // Assert + assertThatThrownBy(() -> handler.toResult("", assignBranch)) + .isInstanceOf(UserException.class) + .hasMessageContaining("ALTER BRANCH") + .hasMessageContaining("not supported"); + } + + @Test + public void assignBranchThrowWrongPlugin() { + // Arrange + when(catalog.getSource(anyString())).thenReturn(null); + + // Assert + assertThatThrownBy(() -> handler.toResult("", assignBranch)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignBranchThrowWrongSource() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + assertThatThrownBy(() -> handler.toResult("", assignBranch)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + @Test + public void assignBranchThrowNotFound() + throws ReferenceConflictException, ReferenceNotFoundException { + + // Arrange + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .assignBranch(anyString(), any()); + + // Act+Assert + assertThatThrownBy(() -> handler.toResult("", assignBranch)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignBranchThrowConflict() + throws ReferenceConflictException, ReferenceNotFoundException { + // Arrange + doThrow(ReferenceConflictException.class) + .when(dataplanePlugin) + .assignBranch(anyString(), any()); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignBranch)) + .isInstanceOf(UserException.class) + .hasMessageContaining("hash change") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignBranchWithDefaultSource() throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignBranch(anyString(), any()); + + // Act + List result = handler.toResult("", assignBranchWithDefaultSource); + + // 
Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains(DEFAULT_REFERENCE) + .contains(DEFAULT_SOURCE_NAME); + + } + @Test + public void assignBranchWithTag() throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignBranch(anyString(), any()); + + // Act + List result = handler.toResult("", assignBranchWithTag); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains("tag") + .contains(DEFAULT_REFERENCE) + .contains(DEFAULT_SOURCE_NAME); + + } + + @Test + public void assignBranchToItself () throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignBranch(anyString(), any()); + + // Act + List result = handler.toResult("", assignBranchToItself); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains("branch") + .contains(TARGET_BRANCH) + .contains(DEFAULT_SOURCE_NAME); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignTagHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignTagHandler.java new file mode 100644 index 0000000000..65a03a64c2 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestAssignTagHandler.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Before; +import org.junit.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlAssignTag; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.test.DremioTest; + +/** + * Tests for ALTER TAG ASSIGN. + */ +public class TestAssignTagHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "localnessie"; + private static final String TARGET_TAG = "tag"; + private static final String DEFAULT_REFERENCE = "reference"; + private static final VersionContext DEFAULT_VERSION = + VersionContext.ofTag(TARGET_TAG); + + private OptionManager optionManager; + private Catalog catalog; + private AssignTagHandler handler; + private SqlAssignTag assignTag; + private SqlAssignTag assignTagToItself; + private SqlAssignTag assignTagWithDefaultSource; + private SqlAssignTag assignTagWithBranch; + + private DataplanePlugin dataplanePlugin; + + @Before + public void setup() throws Exception { + QueryContext context = mock(QueryContext.class); + optionManager = mock(OptionManager.class); + catalog = mock(Catalog.class); + UserSession userSession = mock(UserSession.class); + dataplanePlugin = mock(DataplanePlugin.class); + + when(context.getCatalog()).thenReturn(catalog); + when(context.getOptions()).thenReturn(optionManager); + when(context.getSession()).thenReturn(userSession); + when(userSession.getDefaultSchemaPath()).thenReturn(new NamespaceKey(Arrays.asList(DEFAULT_SOURCE_NAME, "unusedFolder"))); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(DEFAULT_VERSION); + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(true); + when(catalog.getSource(anyString())).thenReturn(dataplanePlugin); + + handler = new AssignTagHandler(context); + + assignTag = + new SqlAssignTag( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_TAG, SqlParserPos.ZERO), + ReferenceType.TAG, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + assignTagToItself = + new SqlAssignTag( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_TAG, SqlParserPos.ZERO), + 
ReferenceType.TAG, + new SqlIdentifier(TARGET_TAG, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + assignTagWithBranch = + new SqlAssignTag( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_TAG, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + assignTagWithDefaultSource = + new SqlAssignTag( + SqlParserPos.ZERO, + new SqlIdentifier(TARGET_TAG, SqlParserPos.ZERO), + ReferenceType.TAG, + new SqlIdentifier(DEFAULT_REFERENCE, SqlParserPos.ZERO), + null); + } + + @Test + public void assignTagSucceed() + throws ForemanSetupException, ReferenceConflictException, ReferenceNotFoundException { + // Arrange + doNothing().when(dataplanePlugin).assignTag(anyString(), any()); + + // Assert + List result = handler.toResult("", assignTag); + assertFalse(result.isEmpty()); + assertTrue(result.get(0).ok); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains(TARGET_TAG) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignTagThrowUnsupport() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(false); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignTag)) + .isInstanceOf(UserException.class); + } + + @Test + public void assignTagThrowWrongPlugin() { + // Arrange + when(catalog.getSource(anyString())).thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignTag)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignTagThrowWrongSource() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignTag)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignTagThrowNotFound() + throws ReferenceConflictException, ReferenceNotFoundException { + // Arrange + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .assignTag(anyString(), any()); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignTag)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignTagThrowConflict() + throws ReferenceConflictException, ReferenceNotFoundException { + // Arrange + doThrow(ReferenceConflictException.class) + .when(dataplanePlugin) + .assignTag(anyString(), any()); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", assignTag)) + .isInstanceOf(UserException.class) + .hasMessageContaining("hash change") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void assignTagWithDefaultSource() throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignTag(anyString(), any()); + + // Act + List result = handler.toResult("", assignTagWithDefaultSource); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + 
.contains(DEFAULT_REFERENCE) + .contains(DEFAULT_SOURCE_NAME); + + } + @Test + public void assignTagWithBranch() throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignTag(anyString(), any()); + + // Act + List result = handler.toResult("", assignTagWithBranch); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains("branch") + .contains(DEFAULT_REFERENCE) + .contains(DEFAULT_SOURCE_NAME); + + } + + @Test + public void assignTagToItself () throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException { + // Arrange + doNothing().when(dataplanePlugin).assignTag(anyString(), any()); + + // Act + List result = handler.toResult("", assignTagToItself); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("Assigned") + .contains("tag") + .contains(TARGET_TAG) + .contains(DEFAULT_SOURCE_NAME); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateBranchHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateBranchHandler.java new file mode 100644 index 0000000000..f0251bdcc9 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateBranchHandler.java @@ -0,0 +1,495 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlCreateBranch; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for CREATE BRANCH SQL. 
+ */ +public class TestCreateBranchHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1"; + private static final String NON_EXISTENT_SOURCE_NAME = "non_exist"; + private static final String SESSION_SOURCE_NAME = "session_source"; + private static final String DEFAULT_NEW_BRANCH_NAME = "new_branch"; + private static final String DEFAULT_BRANCH_NAME = "branchName"; + private static final VersionContext DEFAULT_VERSION = + VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + private static final VersionContext SESSION_VERSION = + VersionContext.ofBranch("session"); + private static final SqlCreateBranch DEFAULT_INPUT = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateBranch NO_SOURCE_INPUT = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + null); + private static final SqlCreateBranch NON_EXISTENT_SOURCE_INPUT = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateBranch NO_VERSION_INPUT = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + null, + null, + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateBranch IF_NOT_EXISTS_INPUT = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock private OptionManager optionManager; + @Mock private Catalog catalog; + @Mock private UserSession userSession; + @Mock private DataplanePlugin dataplanePlugin; + + @InjectMocks private CreateBranchHandler handler; + + @Test + public void createBranchSupportKeyDisabledThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(false); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("CREATE BRANCH") + .hasMessageContaining("not supported"); + } + + @Test + public void createBranchOnNonExistentSource() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access"); + UserException nonExistException = UserException.validationError(notFoundException) + .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build(); + 
when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED); + when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Tried to access non-existent source"); + } + + @Test + public void createBranchEmptyReferenceUsesSessionVersion() throws ForemanSetupException { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, SESSION_VERSION); + + // Act + List result = handler.toResult("", NO_VERSION_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(SESSION_VERSION.toString()) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchEmptyReferenceUnspecifiedSessionUsesDefaultVersion() throws ForemanSetupException { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)) + .thenReturn(VersionContext.NOT_SPECIFIED); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, VersionContext.NOT_SPECIFIED); + + // Act + List result = handler.toResult("", NO_VERSION_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains("the default branch") + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchEmptySourceUsesSessionContext() throws ForemanSetupException { + // Arrange + setUpSupportKeyAndSessionVersionAndPluginAndSessionContext(); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + // Act + List result = handler.toResult("", NO_SOURCE_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(DEFAULT_VERSION.toString()) + .contains(SESSION_SOURCE_NAME); + } + + @Test + public void createBranchAtBranchSucceeds() throws ForemanSetupException { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + // Act + List result = handler.toResult("", DEFAULT_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(DEFAULT_VERSION.toString()) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchAtTagSucceeds() throws ForemanSetupException { + // Constants + final String tagName = "tagName"; + final SqlCreateBranch input = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.TAG, + new SqlIdentifier(tagName, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + // 
Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, VersionContext.ofTag(tagName)); + + // Act + List result = handler.toResult("", input); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(tagName) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchAtCommitSucceeds() throws ForemanSetupException { + // Constants + final String commitHash = "0123456789abcdeff"; + final SqlCreateBranch input = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.COMMIT, + new SqlIdentifier(commitHash, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, VersionContext.ofBareCommit(commitHash)); + + // Act + List result = handler.toResult("", input); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(commitHash) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchAtReferenceSucceeds() throws ForemanSetupException { + // Constants + final String referenceName = "refName"; + final SqlCreateBranch input = new SqlCreateBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_BRANCH_NAME, SqlParserPos.ZERO), + ReferenceType.REFERENCE, + new SqlIdentifier(referenceName, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, VersionContext.ofRef(referenceName)); + + // Act + List result = handler.toResult("", input); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(referenceName) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchWrongSourceThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchWrongSourceFromContextThrows() { + // Arrange + setUpSupportKeyAndSessionVersionAndPluginAndSessionContext(); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(SESSION_SOURCE_NAME); + } + + @Test + public void createBranchNullSourceFromContextThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + 
.thenReturn(true); + when(userSession.getDefaultSchemaPath()) + .thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("was not specified"); + } + + @Test + public void createBranchIfNotExistsDoesNotExistSucceeds() throws ForemanSetupException { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doNothing() + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + // Act + List result = handler.toResult("", IF_NOT_EXISTS_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("created") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(DEFAULT_VERSION.toString()) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchIfNotExistsDoesExistNoOp() throws ForemanSetupException { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceAlreadyExistsException.class) + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + // Act + List result = handler.toResult("", IF_NOT_EXISTS_INPUT); + + // Assert + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary) + .contains("already exists") + .contains(DEFAULT_NEW_BRANCH_NAME) + .contains(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchAlreadyExistsThrows() { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceAlreadyExistsException.class) + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("already exists") + .hasMessageContaining(DEFAULT_NEW_BRANCH_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchNotFoundThrows() { + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchNoDefaultBranchThrows() { + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(NoDefaultBranchException.class) + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not have a default branch") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void createBranchTypeConflictThrows() { + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceTypeConflictException.class) + .when(dataplanePlugin) + .createBranch(DEFAULT_NEW_BRANCH_NAME, DEFAULT_VERSION); + + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("is not the requested type") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + private void setUpSupportKeyAndSessionVersionAndPlugin() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + 
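+    // (The remaining stubs below supply the session version and the
+    // DataplanePlugin; together with the support key above they form the
+    // shared happy-path arrangement that individual tests override one stub
+    // at a time.)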
when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndSessionVersionAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateFolderHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateFolderHandler.java new file mode 100644 index 0000000000..2cb38d4037 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateFolderHandler.java @@ -0,0 +1,305 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static java.util.Objects.requireNonNull; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.projectnessie.model.ContentKey; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlCreateFolder; +import com.dremio.exec.planner.sql.parser.SqlGrant; +import com.dremio.exec.store.NessieNamespaceAlreadyExistsException; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.plugins.s3.store.S3StoragePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +@ExtendWith(MockitoExtension.class) +public class TestCreateFolderHandler extends DremioTest { + private static final String DEFAULT_CONTEXT = "@dremio"; + private static final String NON_EXISTENT_SOURCE_NAME = "non_exist"; + 
private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1";
+ private static final String NON_VERSIONED_SOURCE_NAME = "s3Source";
+ private static final String DEFAULT_FOLDER_NAME = "myFolder";
+ private static final List<String> SINGLE_FOLDER_PATH = Arrays.asList(DEFAULT_FOLDER_NAME);
+ private static final List<String> NON_VERSIONED_SOURCE_PATH = Arrays.asList(NON_VERSIONED_SOURCE_NAME, DEFAULT_FOLDER_NAME);
+ private static final List<String> DEFAULT_FOLDER_PATH = Arrays.asList(DEFAULT_SOURCE_NAME, DEFAULT_FOLDER_NAME);
+ private static final List<String> NON_EXISTENT_FOLDER_PATH = Arrays.asList(NON_EXISTENT_SOURCE_NAME, DEFAULT_FOLDER_NAME);
+ private static final String DEV_BRANCH_NAME = "dev";
+
+ private static final VersionContext DEV_VERSION =
+ VersionContext.ofBranch(DEV_BRANCH_NAME);
+
+ @Mock private Catalog catalog;
+ @Mock private UserSession userSession;
+ @Mock private DataplanePlugin dataplanePlugin;
+ @Mock private S3StoragePlugin s3StoragePlugin;
+ @InjectMocks private CreateFolderHandler handler;
+
+ private static final SqlCreateFolder NON_EXISTENT_SOURCE_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(NON_EXISTENT_FOLDER_PATH, SqlParserPos.ZERO),
+ null,
+ null);
+
+ private static final SqlCreateFolder DEFAULT_SOURCE_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_FOLDER_PATH, SqlParserPos.ZERO),
+ null,
+ null);
+
+ private static final SqlCreateFolder SINGLE_FOLDER_NAME_NO_USER_SESSION_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(SINGLE_FOLDER_PATH, SqlParserPos.ZERO),
+ null,
+ null);
+
+ private static final SqlCreateFolder SINGLE_FOLDER_NAME_WITH_USER_SESSION_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(SINGLE_FOLDER_PATH, SqlParserPos.ZERO),
+ null,
+ null);
+
+ private static final SqlCreateFolder NON_VERSIONED_SOURCE_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(NON_VERSIONED_SOURCE_PATH, SqlParserPos.ZERO),
+ null,
+ null);
+
+ private static final SqlCreateFolder WITH_REFERENCE_INPUT = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_FOLDER_PATH, SqlParserPos.ZERO),
+ ReferenceType.BRANCH,
+ new SqlIdentifier(DEV_BRANCH_NAME, SqlParserPos.ZERO));
+
+ private static final SqlCreateFolder WITH_IF_NOT_EXISTS = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_FOLDER_PATH, SqlParserPos.ZERO),
+ ReferenceType.BRANCH,
+ new SqlIdentifier(DEV_BRANCH_NAME, SqlParserPos.ZERO));
+
+ private static final SqlCreateFolder WITHOUT_IF_NOT_EXISTS = new SqlCreateFolder(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(false, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_FOLDER_PATH, SqlParserPos.ZERO),
+ ReferenceType.BRANCH,
+ new SqlIdentifier(DEV_BRANCH_NAME, SqlParserPos.ZERO));
+
+ /**
+ * CREATE FOLDER SQL SYNTAX
+ * CREATE FOLDER [ IF NOT EXISTS ] [source.]parentFolderName[.childFolder]
+ * [ AT ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue ]
+ */
+ @Test
+ public void createFolderOnNonExistentSource() throws Exception {
+ NamespaceNotFoundException namespaceNotFoundException = new NamespaceNotFoundException("Cannot access");
+ UserException nonExistUserException =
UserException.validationError(namespaceNotFoundException)
+ .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+ when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistUserException);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(NON_EXISTENT_SOURCE_INPUT))).thenReturn(extractNamespaceKeyFromSqlNode(NON_EXISTENT_SOURCE_INPUT));
+
+ assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("Tried to access non-existent source");
+ NamespaceKey path = SqlNodeUtil.unwrap(NON_EXISTENT_SOURCE_INPUT, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderInExistentSource() throws Exception {
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME)).thenReturn(dataplanePlugin);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(DEFAULT_SOURCE_INPUT))).thenReturn(extractNamespaceKeyFromSqlNode(DEFAULT_SOURCE_INPUT));
+
+ List<SimpleCommandResult> result = handler.toResult("", DEFAULT_SOURCE_INPUT);
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("Folder")
+ .contains("has been created");
+ NamespaceKey path = SqlNodeUtil.unwrap(DEFAULT_SOURCE_INPUT, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderInExistentSourceWithSingleFolderNameWithoutUserSession() throws Exception {
+ NamespaceNotFoundException namespaceNotFoundException = new NamespaceNotFoundException("Cannot access");
+ UserException nonExistUserException = UserException.validationError(namespaceNotFoundException)
+ .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+ when(userSession.getSessionVersionForSource(DEFAULT_CONTEXT)).thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(DEFAULT_CONTEXT)).thenThrow(nonExistUserException);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(SINGLE_FOLDER_NAME_NO_USER_SESSION_INPUT))).thenReturn(new NamespaceKey(Arrays.asList(DEFAULT_CONTEXT, DEFAULT_FOLDER_NAME)));
+
+ assertThatThrownBy(() -> handler.toResult("", SINGLE_FOLDER_NAME_NO_USER_SESSION_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("Tried to access non-existent source");
+ NamespaceKey path = new NamespaceKey(Arrays.asList(DEFAULT_CONTEXT, DEFAULT_FOLDER_NAME));
+ verify(catalog).resolveSingle(extractNamespaceKeyFromSqlNode(SINGLE_FOLDER_NAME_NO_USER_SESSION_INPUT));
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderInExistentSourceWithSingleFolderNameWithUserSession() throws Exception {
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME)).thenReturn(dataplanePlugin);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(SINGLE_FOLDER_NAME_WITH_USER_SESSION_INPUT))).thenReturn(new NamespaceKey(Arrays.asList(DEFAULT_SOURCE_NAME, DEFAULT_FOLDER_NAME)));
+
+ List<SimpleCommandResult> result = handler.toResult("", SINGLE_FOLDER_NAME_WITH_USER_SESSION_INPUT);
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("Folder")
+ .contains("has been created");
+ NamespaceKey path = new NamespaceKey(Arrays.asList(DEFAULT_SOURCE_NAME, DEFAULT_FOLDER_NAME));
+ verify(catalog).resolveSingle(extractNamespaceKeyFromSqlNode(SINGLE_FOLDER_NAME_WITH_USER_SESSION_INPUT));
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderInNonVersionedSource() throws Exception {
+ when(userSession.getSessionVersionForSource(NON_VERSIONED_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(NON_VERSIONED_SOURCE_NAME)).thenReturn(s3StoragePlugin);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(NON_VERSIONED_SOURCE_INPUT))).thenReturn(extractNamespaceKeyFromSqlNode(NON_VERSIONED_SOURCE_INPUT));
+
+ assertThatThrownBy(() -> handler.toResult("", NON_VERSIONED_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not support versioning");
+ NamespaceKey path = SqlNodeUtil.unwrap(NON_VERSIONED_SOURCE_INPUT, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderWithReference() throws Exception {
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(DEV_VERSION);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME)).thenReturn(dataplanePlugin);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(WITH_REFERENCE_INPUT))).thenReturn(extractNamespaceKeyFromSqlNode(WITH_REFERENCE_INPUT));
+
+ List<SimpleCommandResult> result = handler.toResult("", WITH_REFERENCE_INPUT);
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("Folder")
+ .contains("has been created")
+ .contains(String.format("created at branch %s", DEV_BRANCH_NAME));
+ NamespaceKey path = SqlNodeUtil.unwrap(WITH_REFERENCE_INPUT, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderWithIfNotExists() throws Exception {
+ ContentKey contentKey = ContentKey.of(WITHOUT_IF_NOT_EXISTS.getPath().getPathComponents());
+ NessieNamespaceAlreadyExistsException nessieNamespaceAlreadyExistsException = new NessieNamespaceAlreadyExistsException(String.format("Folder %s already exists", contentKey.toPathString()));
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(WITHOUT_IF_NOT_EXISTS))).thenReturn(extractNamespaceKeyFromSqlNode(WITH_IF_NOT_EXISTS));
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(DEV_VERSION);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME)).thenReturn(dataplanePlugin);
+ doThrow(nessieNamespaceAlreadyExistsException)
+ .when(dataplanePlugin)
+ .createNamespace(new NamespaceKey(DEFAULT_FOLDER_PATH), DEV_VERSION);
+
+ List<SimpleCommandResult> result = handler.toResult("", WITH_IF_NOT_EXISTS);
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("Folder")
+ .contains("already exists");
+ NamespaceKey path = SqlNodeUtil.unwrap(WITH_IF_NOT_EXISTS, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderWithoutIfNotExists() throws Exception {
+ ContentKey contentKey = ContentKey.of(WITHOUT_IF_NOT_EXISTS.getPath().getPathComponents());
+ NessieNamespaceAlreadyExistsException nessieNamespaceAlreadyExistsException = new NessieNamespaceAlreadyExistsException(String.format("Folder %s already exists", contentKey.toPathString()));
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(WITHOUT_IF_NOT_EXISTS))).thenReturn(extractNamespaceKeyFromSqlNode(WITHOUT_IF_NOT_EXISTS));
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(DEV_VERSION);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME)).thenReturn(dataplanePlugin);
+ doThrow(nessieNamespaceAlreadyExistsException)
+ .when(dataplanePlugin)
+ .createNamespace(new NamespaceKey(DEFAULT_FOLDER_PATH), DEV_VERSION);
+
+ assertThatThrownBy(() -> handler.toResult("", WITHOUT_IF_NOT_EXISTS))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("Folder")
+ .hasMessageContaining("already exists");
+ NamespaceKey path = SqlNodeUtil.unwrap(WITHOUT_IF_NOT_EXISTS, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ @Test
+ public void createFolderWithoutALTERPrivilege() throws Exception {
+ doThrow(UserException.validationError().message("permission denied").buildSilently())
+ .when(catalog)
+ .validatePrivilege(DEFAULT_SOURCE_INPUT.getPath(), SqlGrant.Privilege.ALTER);
+ when(catalog.resolveSingle(extractNamespaceKeyFromSqlNode(DEFAULT_SOURCE_INPUT))).thenReturn(extractNamespaceKeyFromSqlNode(DEFAULT_SOURCE_INPUT));
+
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessage("permission denied");
+ NamespaceKey path = SqlNodeUtil.unwrap(DEFAULT_SOURCE_INPUT, SqlCreateFolder.class).getPath();
+ verify(catalog).resolveSingle(path);
+ verify(catalog).validatePrivilege(path, SqlGrant.Privilege.ALTER);
+ }
+
+ private NamespaceKey extractNamespaceKeyFromSqlNode(SqlNode sqlNode) throws Exception {
+ return requireNonNull(SqlNodeUtil.unwrap(sqlNode, SqlCreateFolder.class)).getPath();
+ }
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateTagHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateTagHandler.java
new file mode 100644
index 0000000000..c38e142f35
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestCreateTagHandler.java
@@ -0,0 +1,503 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlCreateTag; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for CREATE TAG SQL. 
+ */ +public class TestCreateTagHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1"; + private static final String NON_EXISTENT_SOURCE_NAME = "non_exist"; + private static final String SESSION_SOURCE_NAME = "session_source"; + private static final String DEFAULT_NEW_TAG_NAME = "new_tag"; + private static final String DEFAULT_BRANCH_NAME = "branchName"; + private static final VersionContext DEFAULT_VERSION = + VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + private static final VersionContext SESSION_VERSION = + VersionContext.ofBranch("session"); + private static final SqlCreateTag DEFAULT_INPUT = new SqlCreateTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateTag NO_SOURCE_INPUT = new SqlCreateTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + null); + private static final SqlCreateTag NON_EXISTENT_SOURCE_INPUT = new SqlCreateTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateTag NO_VERSION_INPUT = new SqlCreateTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO), + null, + null, + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlCreateTag IF_NOT_EXISTS_INPUT = new SqlCreateTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO), + ReferenceType.BRANCH, + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock private OptionManager optionManager; + @Mock private Catalog catalog; + @Mock private UserSession userSession; + @Mock private DataplanePlugin dataplanePlugin; + + @InjectMocks private CreateTagHandler handler; + + @Test + public void createTagSupportKeyDisabledThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(false); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("CREATE TAG") + .hasMessageContaining("not supported"); + } + + @Test + public void createTagNonExistentSource() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access"); + UserException nonExistException = UserException.validationError(notFoundException) + .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build(); + when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED); + 
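Aside: these JUnit 4 handler tests opt into `Strictness.STRICT_STUBS` via the `MockitoRule` above, so any stubbing a test never exercises fails it with `UnnecessaryStubbingException`. The same machinery shown standalone through `MockitoSession`; `Catalog` here is a stand-in interface, not Dremio's:

```java
import static org.mockito.Mockito.when;

import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoSession;
import org.mockito.quality.Strictness;

public class StrictStubsSketch {
  interface Catalog { String getSource(String name); }

  @Mock Catalog catalog;

  public static void main(String[] args) {
    StrictStubsSketch test = new StrictStubsSketch();
    MockitoSession session = Mockito.mockitoSession()
        .initMocks(test)
        .strictness(Strictness.STRICT_STUBS)
        .startMocking();
    when(test.catalog.getSource("dataplane_source_1")).thenReturn("plugin");
    // Consume the stubbing; if this call were missing, finishMocking()
    // would throw UnnecessaryStubbingException under STRICT_STUBS.
    System.out.println(test.catalog.getSource("dataplane_source_1"));
    session.finishMocking();
  }
}
```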
when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("Tried to access non-existent source");
+ }
+
+ @Test
+ public void createTagEmptyReferenceUsesSessionVersion() throws ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, SESSION_VERSION);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", NO_VERSION_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(SESSION_VERSION.toString())
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagEmptyReferenceUnspecifiedSessionUsesDefaultVersion() throws ForemanSetupException {
+ // Arrange
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(VersionContext.NOT_SPECIFIED);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(dataplanePlugin);
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, VersionContext.NOT_SPECIFIED);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", NO_VERSION_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains("the default branch")
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagEmptySourceUsesSessionContext() throws ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPluginAndSessionContext();
+ when(catalog.getSource(SESSION_SOURCE_NAME))
+ .thenReturn(dataplanePlugin);
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", NO_SOURCE_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(DEFAULT_VERSION.toString())
+ .contains(SESSION_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagAtBranchSucceeds() throws ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", DEFAULT_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(DEFAULT_VERSION.toString())
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagAtTagSucceeds() throws ForemanSetupException {
+ // Constants
+ final String tagName = "tagName";
+ final SqlCreateTag input = new SqlCreateTag(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO),
+ ReferenceType.TAG,
+ new SqlIdentifier(tagName, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, VersionContext.ofTag(tagName));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", input);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(tagName)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagAtCommitSucceeds() throws ForemanSetupException {
+ // Constants
+ final String commitHash = "0123456789abcdeff";
+ final SqlCreateTag input = new SqlCreateTag(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO),
+ ReferenceType.COMMIT,
+ new SqlIdentifier(commitHash, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, VersionContext.ofBareCommit(commitHash));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", input);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(commitHash)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagAtReferenceSucceeds() throws ForemanSetupException {
+ // Constants
+ final String referenceName = "refName";
+ final SqlCreateTag input = new SqlCreateTag(
+ SqlParserPos.ZERO,
+ SqlLiteral.createBoolean(true, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_NEW_TAG_NAME, SqlParserPos.ZERO),
+ ReferenceType.REFERENCE,
+ new SqlIdentifier(referenceName, SqlParserPos.ZERO),
+ new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, VersionContext.ofRef(referenceName));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", input);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(referenceName)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagWrongSourceThrows() {
+ // Arrange
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(SESSION_VERSION);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(mock(StoragePlugin.class));
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not support")
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagWrongSourceFromContextThrows() {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPluginAndSessionContext();
+ when(catalog.getSource(SESSION_SOURCE_NAME))
+ .thenReturn(mock(StoragePlugin.class));
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not support")
+ .hasMessageContaining(SESSION_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagNullSourceFromContextThrows() {
+ // Arrange
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(userSession.getDefaultSchemaPath())
+ .thenReturn(null);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("was not specified");
+ }
+
+ @Test
+ public void createTagIfNotExistsDoesNotExistSucceeds() throws ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_NOT_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("created")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(DEFAULT_VERSION.toString())
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagIfNotExistsDoesExistNoOp() throws ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doThrow(ReferenceAlreadyExistsException.class)
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_NOT_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("already exists")
+ .contains(DEFAULT_NEW_TAG_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagAlreadyExistsThrows() {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doThrow(ReferenceAlreadyExistsException.class)
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("already exists")
+ .hasMessageContaining(DEFAULT_NEW_TAG_NAME)
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagNotFoundThrows() {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doThrow(ReferenceNotFoundException.class)
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("not found")
+ .hasMessageContaining(DEFAULT_VERSION.toString())
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagNoDefaultBranchThrows() {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doThrow(NoDefaultBranchException.class)
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not have a default branch")
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void createTagTypeConflictThrows()
+ throws ReferenceAlreadyExistsException, ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException {
+ // Arrange
+ setUpSupportKeyAndSessionVersionAndPlugin();
+ doThrow(ReferenceTypeConflictException.class)
+ .when(dataplanePlugin)
+ .createTag(DEFAULT_NEW_TAG_NAME, DEFAULT_VERSION);
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("is not the requested type")
+ .hasMessageContaining(DEFAULT_VERSION.toString())
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ }
+
+ private void setUpSupportKeyAndSessionVersionAndPlugin() {
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(SESSION_VERSION);
when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndSessionVersionAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropBranchHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropBranchHandler.java new file mode 100644 index 0000000000..c717c536bc --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropBranchHandler.java @@ -0,0 +1,350 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.SqlDropBranch; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for DROP BRANCH SQL. 
+ */ +public class TestDropBranchHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1"; + private static final String NON_EXISTENT_SOURCE_NAME = "non_exist"; + private static final String SESSION_SOURCE_NAME = "session_source"; + private static final String DEFAULT_BRANCH_NAME = "branchName"; + private static final String DEFAULT_COMMIT_HASH = "0123456789abcdeff"; + private static final SqlDropBranch DEFAULT_INPUT = new SqlDropBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlDropBranch NO_SOURCE_INPUT = new SqlDropBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + null); + private static final SqlDropBranch NON_EXISTENT_SOURCE_INPUT = new SqlDropBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlDropBranch IF_EXISTS_INPUT = new SqlDropBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock private OptionManager optionManager; + @Mock private Catalog catalog; + @Mock private UserSession userSession; + @Mock private DataplanePlugin dataplanePlugin; + + @InjectMocks private DropBranchHandler handler; + + @Test + public void dropBranchSupportKeyDisabledThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(false); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("DROP BRANCH") + .hasMessageContaining("not supported"); + } + + @Test + public void dropBranchNonExistentSource() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access"); + UserException nonExistException = UserException.validationError(notFoundException) + .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build(); + when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException); + when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Tried to access non-existent source"); + } + + @Test + public void dropBranchSucceed() + throws ForemanSetupException, ReferenceConflictException, ReferenceNotFoundException { + // 
Arrange
+ setUpSupportKeyAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", DEFAULT_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertTrue(result.get(0).ok);
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_BRANCH_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropBranchEmptySourceUsesSessionContext()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPluginAndSessionContext();
+ doNothing()
+ .when(dataplanePlugin)
+ .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", NO_SOURCE_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_BRANCH_NAME)
+ .contains(SESSION_SOURCE_NAME);
+ }
+
+ // test force drop
+
+ @Test
+ public void dropBranchIfExistsDoesExistSucceeds()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ReferenceAlreadyExistsException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_BRANCH_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropBranchIfExistsDoesNotExistNoOp()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ReferenceAlreadyExistsException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPlugin();
+ doThrow(ReferenceNotFoundException.class)
+ .when(dataplanePlugin)
+ .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("not found")
+ .contains(DEFAULT_BRANCH_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropBranchWrongSourceThrows()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException {
+ // Arrange
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(mock(StoragePlugin.class));
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not support")
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ verify(userSession, never()).setSessionVersionForSource(any(), any());
+ }
+
+ @Test
+ public void dropBranchWrongSourceFromContextThrows() {
+ // Arrange
+ setUpSupportKeyAndPluginAndSessionContext();
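Aside: the `*EmptySourceUsesSessionContext` tests depend on the handler falling back to the root of `UserSession.getDefaultSchemaPath()` when the statement names no source, which is why the second component of the stubbed path is literally "unusedFolder". A sketch of that resolution, assuming Dremio's `NamespaceKey.getRoot()` accessor behaves as described in the comments:

```java
import java.util.Arrays;

import com.dremio.service.namespace.NamespaceKey;

public class SessionContextFallbackSketch {
  public static void main(String[] args) {
    // Mirrors the stubbing in setUpSupportKeyAndPluginAndSessionContext().
    NamespaceKey defaultSchema =
        new NamespaceKey(Arrays.asList("session_source", "unusedFolder"));
    // Only the first path component matters for picking the source
    // (assumption: getRoot() returns it; everything past it is ignored).
    System.out.println(defaultSchema.getRoot()); // session_source
  }
}
```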
when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(SESSION_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropBranchNullSourceFromContextThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getDefaultSchemaPath()) + .thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("was not specified"); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropBranchNoCommitHashNoForceThrows() + throws ReferenceConflictException, ReferenceNotFoundException { + // Constants + final SqlDropBranch dropBranchWithoutForceOrCommitHash = + new SqlDropBranch( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO), + null, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", dropBranchWithoutForceOrCommitHash)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Need commit hash") + .hasMessageContaining(DEFAULT_BRANCH_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void dropBranchNotFoundThrows() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + // Arrange + setUpSupportKeyAndPlugin(); + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_BRANCH_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropBranchConflictThrows() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + // Arrange + setUpSupportKeyAndPlugin(); + doThrow(ReferenceConflictException.class) + .when(dataplanePlugin) + .dropBranch(DEFAULT_BRANCH_NAME, DEFAULT_COMMIT_HASH); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("has conflict") + .hasMessageContaining(DEFAULT_BRANCH_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + private void setUpSupportKeyAndPlugin() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + 
when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropTagHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropTagHandler.java new file mode 100644 index 0000000000..9295e39dcd --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestDropTagHandler.java @@ -0,0 +1,350 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.SqlDropTag; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for DROP TAG SQL. 
+ */ +public class TestDropTagHandler extends DremioTest { + + private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1"; + private static final String NON_EXISTENT_SOURCE_NAME = "non_exist"; + private static final String SESSION_SOURCE_NAME = "session_source"; + private static final String DEFAULT_TAG_NAME = "tagName"; + private static final String DEFAULT_COMMIT_HASH = "0123456789abcdeff"; + private static final SqlDropTag DEFAULT_INPUT = new SqlDropTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_TAG_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlDropTag NO_SOURCE_INPUT = new SqlDropTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_TAG_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + null); + private static final SqlDropTag NON_EXISTENT_SOURCE_INPUT = new SqlDropTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(true, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_TAG_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO)); + private static final SqlDropTag IF_EXISTS_INPUT = new SqlDropTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_TAG_NAME, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO), + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock private OptionManager optionManager; + @Mock private Catalog catalog; + @Mock private UserSession userSession; + @Mock private DataplanePlugin dataplanePlugin; + + @InjectMocks private DropTagHandler handler; + + @Test + public void dropTagSupportKeyDisabledThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(false); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("DROP TAG") + .hasMessageContaining("not supported"); + } + + @Test + public void dropTagNonExistentSource() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access"); + UserException nonExistException = UserException.validationError(notFoundException) + .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build(); + when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException); + when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Tried to access non-existent source"); + } + + @Test + public void dropTagSucceed() + throws ForemanSetupException, ReferenceConflictException, ReferenceNotFoundException { + // Arrange + setUpSupportKeyAndPlugin(); + doNothing() + 
.when(dataplanePlugin)
+ .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", DEFAULT_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertTrue(result.get(0).ok);
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_TAG_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropTagEmptySourceUsesSessionContext()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPluginAndSessionContext();
+ doNothing()
+ .when(dataplanePlugin)
+ .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH);
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", NO_SOURCE_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_TAG_NAME)
+ .contains(SESSION_SOURCE_NAME);
+ }
+
+ // test force drop
+
+ @Test
+ public void dropTagIfExistsDoesExistSucceeds()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ReferenceAlreadyExistsException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPlugin();
+ doNothing()
+ .when(dataplanePlugin)
+ .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("dropped")
+ .contains(DEFAULT_TAG_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropTagIfExistsDoesNotExistNoOp()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ReferenceAlreadyExistsException, ForemanSetupException {
+ // Arrange
+ setUpSupportKeyAndPlugin();
+ doThrow(ReferenceNotFoundException.class)
+ .when(dataplanePlugin)
+ .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH);
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act
+ List<SimpleCommandResult> result = handler.toResult("", IF_EXISTS_INPUT);
+
+ // Assert
+ assertThat(result).isNotEmpty();
+ assertThat(result.get(0).ok).isTrue();
+ assertThat(result.get(0).summary)
+ .contains("not found")
+ .contains(DEFAULT_TAG_NAME)
+ .contains(DEFAULT_SOURCE_NAME);
+ }
+
+ @Test
+ public void dropTagWrongSourceThrows()
+ throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException {
+ // Arrange
+ when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+ .thenReturn(true);
+ when(catalog.getSource(DEFAULT_SOURCE_NAME))
+ .thenReturn(mock(StoragePlugin.class));
+ when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class));
+
+ // Act + Assert
+ assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+ .isInstanceOf(UserException.class)
+ .hasMessageContaining("does not support")
+ .hasMessageContaining(DEFAULT_SOURCE_NAME);
+ verify(userSession, never()).setSessionVersionForSource(any(), any());
+ }
+
+ @Test
+ public void dropTagWrongSourceFromContextThrows() {
+ // Arrange
+ setUpSupportKeyAndPluginAndSessionContext();
+ when(catalog.getSource(SESSION_SOURCE_NAME))
+ .thenReturn(mock(StoragePlugin.class));
+
+ // Act + Assert
+
assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(SESSION_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropTagNullSourceFromContextThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getDefaultSchemaPath()) + .thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("was not specified"); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropTagNoCommitHashNoForceThrows() + throws ReferenceConflictException, ReferenceNotFoundException { + // Constants + final SqlDropTag dropTagWithoutForceOrCommitHash = + new SqlDropTag( + SqlParserPos.ZERO, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_TAG_NAME, SqlParserPos.ZERO), + null, + SqlLiteral.createBoolean(false, SqlParserPos.ZERO), + new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO)); + + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", dropTagWithoutForceOrCommitHash)) + .isInstanceOf(UserException.class) + .hasMessageContaining("Need commit hash") + .hasMessageContaining(DEFAULT_TAG_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void dropTagNotFoundThrows() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + // Arrange + setUpSupportKeyAndPlugin(); + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_TAG_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + @Test + public void dropTagConflictThrows() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + // Arrange + setUpSupportKeyAndPlugin(); + doThrow(ReferenceConflictException.class) + .when(dataplanePlugin) + .dropTag(DEFAULT_TAG_NAME, DEFAULT_COMMIT_HASH); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("has conflict") + .hasMessageContaining(DEFAULT_TAG_NAME) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + verify(userSession, never()).setSessionVersionForSource(any(), any()); + } + + private void setUpSupportKeyAndPlugin() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new 
NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME)).thenReturn(mock(VersionContext.class)); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestMergeBranchHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestMergeBranchHandler.java new file mode 100644 index 0000000000..5cb47e09e8 --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestMergeBranchHandler.java @@ -0,0 +1,290 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Before; +import org.junit.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.SqlMergeBranch; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.test.DremioTest; + +/** + * Tests for ALTER BRANCH MERGE. 
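+ * Exercises MERGE BRANCH with explicit and session-defaulted target branches and
+ * sources; the exact SQL shape is assumed from the SqlMergeBranch operands below.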
+ */
+public class TestMergeBranchHandler extends DremioTest {
+
+  private static final String DEFAULT_SOURCE_NAME = "localnessie";
+  private static final String TARGET_BRANCH = "targetbranch";
+  private static final String SOURCE_BRANCH = "sourcebranch";
+  private static final String DEFAULT_BRANCH_NAME = "branchName";
+  private static final VersionContext DEFAULT_VERSION =
+    VersionContext.ofBranch(DEFAULT_BRANCH_NAME);
+
+  private QueryContext context;
+  private OptionManager optionManager;
+  private Catalog catalog;
+  private UserSession userSession;
+  private MergeBranchHandler handler;
+  private DataplanePlugin dataplanePlugin;
+
+  private MergeBranchHandler handlerVersionContext;
+  private SqlMergeBranch mergeBranch;
+  private SqlMergeBranch mergeBranchVersionContext;
+  private SqlMergeBranch mergeBranchWithNoSource;
+  private SqlMergeBranch mergeBranchWithNoTargetBranch;
+  private SqlMergeBranch mergeBranchWithNoSourceAndTargetBranch;
+  private DataplanePlugin dataplanePluginVersionContext;
+
+  @Before
+  public void setup() throws Exception {
+    context = mock(QueryContext.class);
+    optionManager = mock(OptionManager.class);
+    catalog = mock(Catalog.class);
+    userSession = mock(UserSession.class);
+    dataplanePlugin = mock(DataplanePlugin.class);
+
+    when(context.getCatalog()).thenReturn(catalog);
+    when(context.getOptions()).thenReturn(optionManager);
+    when(context.getSession()).thenReturn(userSession);
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(true);
+    when(catalog.getSource(anyString())).thenReturn(dataplanePlugin);
+    when(userSession.getDefaultSchemaPath()).thenReturn(new NamespaceKey(Arrays.asList(DEFAULT_SOURCE_NAME, "unusedFolder")));
+    when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(DEFAULT_VERSION);
+
+    handler = new MergeBranchHandler(context);
+
+    mergeBranch =
+      new SqlMergeBranch(
+        SqlParserPos.ZERO,
+        new SqlIdentifier(SOURCE_BRANCH, SqlParserPos.ZERO),
+        new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO),
+        new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    mergeBranchWithNoSource =
+      new SqlMergeBranch(
+        SqlParserPos.ZERO,
+        new SqlIdentifier(SOURCE_BRANCH, SqlParserPos.ZERO),
+        new SqlIdentifier(TARGET_BRANCH, SqlParserPos.ZERO),
+        null);
+
+    mergeBranchWithNoTargetBranch =
+      new SqlMergeBranch(
+        SqlParserPos.ZERO,
+        new SqlIdentifier(SOURCE_BRANCH, SqlParserPos.ZERO),
+        null,
+        new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    mergeBranchWithNoSourceAndTargetBranch =
+      new SqlMergeBranch(
+        SqlParserPos.ZERO,
+        new SqlIdentifier(SOURCE_BRANCH, SqlParserPos.ZERO),
+        null,
+        null);
+
+    setUpVersionContext();
+  }
+
+  private void setUpVersionContext() {
+    QueryContext queryVersionContext = mock(QueryContext.class);
+    OptionManager optionManagerVersionContext = mock(OptionManager.class);
+    Catalog catalogVersionContext = mock(Catalog.class);
+    UserSession userSessionVersionContext = mock(UserSession.class);
+    dataplanePluginVersionContext = mock(DataplanePlugin.class);
+    when(queryVersionContext.getCatalog()).thenReturn(catalogVersionContext);
+    when(queryVersionContext.getOptions()).thenReturn(optionManagerVersionContext);
+    when(queryVersionContext.getSession()).thenReturn(userSessionVersionContext);
+    when(optionManagerVersionContext.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(true);
+    when(catalogVersionContext.getSource(anyString())).thenReturn(dataplanePluginVersionContext);
+    when(userSessionVersionContext.getSessionVersionForSource(DEFAULT_SOURCE_NAME)).thenReturn(VersionContext.ofBranch("mainVersionContext"));
+    handlerVersionContext = new MergeBranchHandler(queryVersionContext);
+    mergeBranchVersionContext =
+      new SqlMergeBranch(
+        SqlParserPos.ZERO,
+        new SqlIdentifier(SOURCE_BRANCH, SqlParserPos.ZERO),
+        null,
+        new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  }
+
+  @Test
+  public void mergeBranchSucceeds()
+      throws ForemanSetupException, ReferenceConflictException, ReferenceNotFoundException {
+    doNothing().when(dataplanePlugin).mergeBranch(anyString(), anyString());
+
+    List<SimpleCommandResult> result = handler.toResult("", mergeBranch);
+    assertFalse(result.isEmpty());
+    assertTrue(result.get(0).ok);
+  }
+
+  @Test
+  public void mergeBranchSucceedsForVersionContext()
+      throws ForemanSetupException, ReferenceNotFoundException, ReferenceConflictException {
+    doNothing().when(dataplanePluginVersionContext).mergeBranch(anyString(), any());
+    List<SimpleCommandResult> result = handlerVersionContext.toResult("", mergeBranchVersionContext);
+    assertFalse(result.isEmpty());
+    assertTrue(result.get(0).ok);
+  }
+
+  @Test
+  public void mergeBranchThrowsUnsupported() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)).thenReturn(false);
+
+    assertThatThrownBy(() -> handler.toResult("", mergeBranch))
+      .isInstanceOf(UserException.class);
+  }
+
+  @Test
+  public void mergeBranchThrowsNotFoundForVersionContext()
+      throws ReferenceNotFoundException, ReferenceConflictException {
+    doThrow(ReferenceNotFoundException.class)
+      .when(dataplanePluginVersionContext)
+      .mergeBranch(anyString(), any());
+
+    assertThatThrownBy(() -> handlerVersionContext.toResult("", mergeBranchVersionContext)).hasMessageContaining("mainVersionContext");
+  }
+
+  @Test
+  public void mergeBranchThrowsWrongPlugin() {
+    when(catalog.getSource(anyString())).thenReturn(null);
+
+    assertThatThrownBy(() -> handler.toResult("", mergeBranch))
+      .isInstanceOf(UserException.class);
+  }
+
+  @Test
+  public void mergeBranchThrowsNotFound()
+      throws ReferenceConflictException, ReferenceNotFoundException {
+    doThrow(ReferenceNotFoundException.class)
+      .when(dataplanePlugin)
+      .mergeBranch(anyString(), anyString());
+
+    assertThatThrownBy(() -> handler.toResult("", mergeBranch))
+      .isInstanceOf(UserException.class);
+  }
+
+  @Test
+  public void mergeBranchThrowsConflict()
+      throws ReferenceConflictException, ReferenceNotFoundException {
+    doThrow(ReferenceConflictException.class)
+      .when(dataplanePlugin)
+      .mergeBranch(anyString(), anyString());
+
+    assertThatThrownBy(() -> handler.toResult("", mergeBranch))
+      .isInstanceOf(UserException.class);
+  }
+
+  @Test
+  public void mergeBranchNoDefaultBranch()
+      throws ReferenceConflictException, ReferenceNotFoundException {
+    // Reuses the shared no-target-branch statement rather than a duplicate local copy.
+    doThrow(ReferenceNotFoundException.class)
+      .when(dataplanePlugin)
+      .mergeBranch(anyString(), anyString());
+
+    assertThatThrownBy(() -> handler.toResult("", mergeBranchWithNoTargetBranch))
+      .isInstanceOf(UserException.class);
+  }
+
+  @Test
+  public void mergeBranchNoTargetBranch()
+      throws ReferenceConflictException, ReferenceNotFoundException, ForemanSetupException {
+    // Arrange
+    doNothing().when(dataplanePlugin).mergeBranch(anyString(), anyString());
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", mergeBranchWithNoTargetBranch);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("merged")
+      .contains(DEFAULT_SOURCE_NAME);
+  }
+
+  @Test
+  public void mergeBranchEmptySourceUsesSessionContext()
+      throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException {
+    // Arrange
+    doNothing().when(dataplanePlugin).mergeBranch(anyString(), anyString());
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", mergeBranchWithNoSource);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("merged")
+      .contains(DEFAULT_SOURCE_NAME);
+  }
+
+  @Test
+  public void mergeBranchNoSourceAndNoTargetBranchUsesSessionContext()
+      throws ReferenceNotFoundException, ReferenceConflictException, ForemanSetupException {
+    // Arrange
+    doNothing().when(dataplanePlugin).mergeBranch(anyString(), anyString());
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", mergeBranchWithNoSourceAndTargetBranch);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("merged")
+      .contains(DEFAULT_SOURCE_NAME);
+  }
+
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowBranchesHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowBranchesHandler.java
new file mode 100644
index 0000000000..bd3924170c
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowBranchesHandler.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+import org.mockito.quality.Strictness;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.planner.sql.parser.SqlShowBranches;
+import com.dremio.exec.store.NoDefaultBranchException;
+import com.dremio.exec.store.ReferenceAlreadyExistsException;
+import com.dremio.exec.store.ReferenceConflictException;
+import com.dremio.exec.store.ReferenceInfo;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.StoragePlugin;
+import com.dremio.exec.work.foreman.ForemanSetupException;
+import com.dremio.options.OptionManager;
+import com.dremio.plugins.dataplane.store.DataplanePlugin;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.dremio.service.namespace.NamespaceKey;
+import com.dremio.service.namespace.NamespaceNotFoundException;
+import com.dremio.test.DremioTest;
+
+/**
+ * Tests for SHOW BRANCHES SQL.
+ */
+public class TestShowBranchesHandler extends DremioTest {
+
+  private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1";
+  private static final String NON_EXISTENT_SOURCE_NAME = "non_exist";
+  private static final String SESSION_SOURCE_NAME = "session_source";
+  private static final SqlShowBranches DEFAULT_INPUT =
+    new SqlShowBranches(SqlParserPos.ZERO, new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final SqlShowBranches NO_SOURCE_INPUT =
+    new SqlShowBranches(SqlParserPos.ZERO, null);
+  private static final SqlShowBranches NON_EXISTENT_SOURCE_INPUT =
+    new SqlShowBranches(SqlParserPos.ZERO, new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final List<ReferenceInfo> EXPECTED = Arrays.asList(
+    new ReferenceInfo("Branch", "branch_1", null),
+    new ReferenceInfo("Branch", "branch_2", null));
+
+  @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock private OptionManager optionManager;
+  @Mock private Catalog catalog;
+  @Mock private UserSession userSession;
+  @Mock private DataplanePlugin dataplanePlugin;
+
+  @InjectMocks private ShowBranchesHandler handler;
+
+  @Test
+  public void showBranchesSupportKeyDisabledThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(false);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("SHOW BRANCH")
+      .hasMessageContaining("not supported");
+  }
+
+  @Test
+  public void showBranchesNonExistentSource() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access");
+    UserException nonExistException = UserException.validationError(notFoundException)
+      .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+    when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("Tried to access non-existent source");
+  }
+
+  @Test
+  public void showBranchesSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.listBranches())
+      .thenReturn(EXPECTED.stream());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(result).isEqualTo(EXPECTED);
+  }
+
+  @Test
+  public void showBranchesNoBranchesSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.listBranches())
+      .thenReturn(Stream.empty());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(result).isEmpty();
+  }
+
+  @Test
+  public void showBranchesEmptySourceUsesSessionContext()
+      throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ForemanSetupException, ReferenceAlreadyExistsException {
+    // Arrange
+    setUpSupportKeyAndPluginAndSessionContext();
+    when(dataplanePlugin.listBranches())
+      .thenReturn(EXPECTED.stream());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", NO_SOURCE_INPUT);
+
+    // Assert
+    assertThat(result).isEqualTo(EXPECTED);
+  }
+
+  @Test
+  public void showBranchesWrongSourceThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support")
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+  }
+
+  @Test
+  public void showBranchesWrongSourceFromContextThrows() {
+    // Arrange
+    setUpSupportKeyAndPluginAndSessionContext();
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support")
+      .hasMessageContaining(SESSION_SOURCE_NAME);
+  }
+
+  @Test
+  public void showBranchesNullSourceFromContextThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(null);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("was not specified");
+  }
+
+  private void setUpSupportKeyAndPlugin() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+  }
+
+  private void setUpSupportKeyAndPluginAndSessionContext() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder")));
+  }
+
+}
diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowLogsHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowLogsHandler.java
new file mode 100644
index 0000000000..bc33de8bac
--- /dev/null
+++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowLogsHandler.java @@ -0,0 +1,357 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlShowLogs; +import com.dremio.exec.store.ChangeInfo; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for SHOW LOGS SQL. 
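+ * Covers the branch, tag, commit, and bare-reference variants, plus the
+ * session-context fallbacks assumed when the AT reference or IN source is omitted.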
+ */
+public class TestShowLogsHandler extends DremioTest {
+
+  private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1";
+  private static final String NON_EXISTENT_SOURCE_NAME = "non_exist";
+  private static final String SESSION_SOURCE_NAME = "session_source";
+  private static final String DEFAULT_BRANCH_NAME = "branchName";
+  private static final VersionContext DEFAULT_VERSION =
+    VersionContext.ofBranch(DEFAULT_BRANCH_NAME);
+  private static final VersionContext SESSION_VERSION =
+    VersionContext.ofBranch("session");
+  private static final SqlShowLogs DEFAULT_INPUT = new SqlShowLogs(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final SqlShowLogs NO_SOURCE_INPUT = new SqlShowLogs(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    null);
+  private static final SqlShowLogs NON_EXISTENT_SOURCE_INPUT = new SqlShowLogs(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final List<ChangeInfo> EXPECTED_LOG_RESULT = Arrays.asList(
+    new ChangeInfo(null, null, null, "message_1"),
+    new ChangeInfo(null, null, null, "message_2"));
+
+  @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock private Catalog catalog;
+  @Mock private OptionManager optionManager;
+  @Mock private UserSession userSession;
+  @Mock private DataplanePlugin dataplanePlugin;
+
+  @InjectMocks private ShowLogsHandler handler;
+
+  @Test
+  public void showLogsSupportKeyDisabledThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(false);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("SHOW LOGS")
+      .hasMessageContaining("not supported");
+  }
+
+  @Test
+  public void showLogsNonExistentSource() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access");
+    UserException nonExistException = UserException.validationError(notFoundException)
+      .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+    when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException);
+    when(userSession.getSessionVersionForSource(NON_EXISTENT_SOURCE_NAME)).thenReturn(VersionContext.NOT_SPECIFIED);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("Tried to access non-existent source");
+  }
+
+  @Test
+  public void showLogsEmptyReferenceUsesSessionVersion() throws ForemanSetupException {
+    // Constants
+    final SqlShowLogs input = new SqlShowLogs(
+      SqlParserPos.ZERO,
+      null,
+      null,
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPlugin();
+    when(dataplanePlugin.listChanges(SESSION_VERSION))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", input);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsEmptySourceUsesSessionContext() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPluginAndSessionContext();
+    when(dataplanePlugin.listChanges(DEFAULT_VERSION))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", NO_SOURCE_INPUT);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsBranchSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPlugin();
+    when(dataplanePlugin.listChanges(DEFAULT_VERSION))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsTagSucceeds() throws ForemanSetupException {
+    // Constants
+    final String tagName = "tagName";
+    final VersionContext version = VersionContext.ofTag(tagName);
+    final SqlShowLogs input = new SqlShowLogs(
+      SqlParserPos.ZERO,
+      ReferenceType.TAG,
+      new SqlIdentifier(tagName, SqlParserPos.ZERO),
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPlugin();
+    when(dataplanePlugin.listChanges(version))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", input);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsCommitSucceeds() throws ForemanSetupException {
+    // Constants
+    final String commitHash = "0123456789abcdeff";
+    final VersionContext version = VersionContext.ofBareCommit(commitHash);
+    final SqlShowLogs input = new SqlShowLogs(
+      SqlParserPos.ZERO,
+      ReferenceType.COMMIT,
+      new SqlIdentifier(commitHash, SqlParserPos.ZERO),
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPlugin();
+    when(dataplanePlugin.listChanges(version))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", input);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsReferenceSucceeds() throws ForemanSetupException {
+    // Constants
+    final String referenceName = "refName";
+    final VersionContext version = VersionContext.ofRef(referenceName);
+    final SqlShowLogs input = new SqlShowLogs(
+      SqlParserPos.ZERO,
+      ReferenceType.REFERENCE,
+      new SqlIdentifier(referenceName, SqlParserPos.ZERO),
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPlugin();
+    when(dataplanePlugin.listChanges(version))
+      .thenReturn(EXPECTED_LOG_RESULT.stream());
+
+    // Act
+    List<ChangeInfo> actualResult = handler.toResult("", input);
+
+    // Assert
+    assertThat(actualResult).isEqualTo(EXPECTED_LOG_RESULT);
+  }
+
+  @Test
+  public void showLogsWrongSourceThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(SESSION_VERSION);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support")
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+  }
+
+  @Test
+  public void showLogsWrongSourceFromContextThrows() {
+    // Arrange
+    setUpSupportKeyAndSessionVersionAndPluginAndSessionContext();
+    when(catalog.getSource(SESSION_SOURCE_NAME))
.thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(SESSION_SOURCE_NAME); + } + + @Test + public void showLogsNullSourceFromContextThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getDefaultSchemaPath()) + .thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("was not specified"); + } + + @Test + public void showLogsNotFoundThrows() { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceNotFoundException.class) + .when(dataplanePlugin) + .listChanges(DEFAULT_VERSION); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("not found") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void showLogsNoDefaultBranchThrows() { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(NoDefaultBranchException.class) + .when(dataplanePlugin) + .listChanges(DEFAULT_VERSION); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not have a default branch") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void showLogsTypeConflictThrows() { + // Arrange + setUpSupportKeyAndSessionVersionAndPlugin(); + doThrow(ReferenceTypeConflictException.class) + .when(dataplanePlugin) + .listChanges(DEFAULT_VERSION); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("is not the requested type") + .hasMessageContaining(DEFAULT_VERSION.toString()) + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + private void setUpSupportKeyAndSessionVersionAndPlugin() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(DEFAULT_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndSessionVersionAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME)) + .thenReturn(SESSION_VERSION); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowTagsHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowTagsHandler.java new file mode 100644 index 0000000000..f02503b94e --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowTagsHandler.java @@ -0,0 +1,214 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.planner.sql.parser.SqlShowTags; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for SHOW TAGS SQL. 
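+ * The IN source operand is optional; these tests assume the session's default
+ * schema supplies the source when it is omitted.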
+ */
+public class TestShowTagsHandler extends DremioTest {
+
+  private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1";
+  private static final String NON_EXISTENT_SOURCE_NAME = "non_exist";
+  private static final String SESSION_SOURCE_NAME = "session_source";
+  private static final SqlShowTags DEFAULT_INPUT =
+    new SqlShowTags(SqlParserPos.ZERO, new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final SqlShowTags NO_SOURCE_INPUT =
+    new SqlShowTags(SqlParserPos.ZERO, null);
+  private static final SqlShowTags NON_EXISTENT_SOURCE_INPUT =
+    new SqlShowTags(SqlParserPos.ZERO, new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final List<ReferenceInfo> EXPECTED = Arrays.asList(
+    new ReferenceInfo("Tag", "tag_1", null),
+    new ReferenceInfo("Tag", "tag_2", null));
+
+  @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock private OptionManager optionManager;
+  @Mock private Catalog catalog;
+  @Mock private UserSession userSession;
+  @Mock private DataplanePlugin dataplanePlugin;
+
+  @InjectMocks private ShowTagsHandler handler;
+
+  @Test
+  public void showTagsSupportKeyDisabledThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(false);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("SHOW TAG")
+      .hasMessageContaining("not supported");
+  }
+
+  @Test
+  public void showTagsSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.listTags())
+      .thenReturn(EXPECTED.stream());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(result).isEqualTo(EXPECTED);
+  }
+
+  @Test
+  public void showTagsNonExistentSource() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access");
+    UserException nonExistException = UserException.validationError(notFoundException)
+      .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+    when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("Tried to access non-existent source");
+  }
+
+  @Test
+  public void showTagsNoTagsSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.listTags())
+      .thenReturn(Stream.empty());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(result).isEmpty();
+  }
+
+  @Test
+  public void showTagsEmptySourceUsesSessionContext()
+      throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException, ForemanSetupException, ReferenceAlreadyExistsException {
+    // Arrange
+    setUpSupportKeyAndPluginAndSessionContext();
+    when(dataplanePlugin.listTags())
+      .thenReturn(EXPECTED.stream());
+
+    // Act
+    List<ReferenceInfo> result = handler.toResult("", NO_SOURCE_INPUT);
+
+    // Assert
+    assertThat(result).isEqualTo(EXPECTED);
+  }
+
+  @Test
+  public void showTagsWrongSourceThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
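+    // (A plain StoragePlugin mock stands in for a non-versioned source here; the
+    // handler is expected to reject it before any tag listing happens.)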
assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(DEFAULT_SOURCE_NAME); + } + + @Test + public void showTagsWrongSourceFromContextThrows() { + // Arrange + setUpSupportKeyAndPluginAndSessionContext(); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(mock(StoragePlugin.class)); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support") + .hasMessageContaining(SESSION_SOURCE_NAME); + } + + @Test + public void showTagsNullSourceFromContextThrows() { + // Arrange + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(userSession.getDefaultSchemaPath()) + .thenReturn(null); + + // Act + Assert + assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT)) + .isInstanceOf(UserException.class) + .hasMessageContaining("was not specified"); + } + + private void setUpSupportKeyAndPlugin() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(DEFAULT_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + } + + private void setUpSupportKeyAndPluginAndSessionContext() { + when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX)) + .thenReturn(true); + when(catalog.getSource(SESSION_SOURCE_NAME)) + .thenReturn(dataplanePlugin); + when(userSession.getDefaultSchemaPath()) + .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder"))); + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowViewsHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowViewsHandler.java new file mode 100644 index 0000000000..22db0bd9ad --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestShowViewsHandler.java @@ -0,0 +1,513 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.dremio.exec.planner.sql.handlers;
+
+import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Stream;
+
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlLiteral;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+import org.mockito.quality.Strictness;
+
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.planner.sql.handlers.direct.ShowViewsHandler;
+import com.dremio.exec.planner.sql.parser.ReferenceType;
+import com.dremio.exec.planner.sql.parser.SqlShowViews;
+import com.dremio.exec.store.ReferenceConflictException;
+import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.StoragePlugin;
+import com.dremio.options.OptionManager;
+import com.dremio.plugins.ExternalNamespaceEntry;
+import com.dremio.plugins.dataplane.store.DataplanePlugin;
+import com.dremio.sabot.rpc.user.UserSession;
+import com.dremio.service.namespace.NamespaceKey;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Tests for SHOW VIEWS SQL.
+ */
+public class TestShowViewsHandler {
+  private static final String STATEMENT_SOURCE_NAME = "source_name";
+  private static final String SESSION_SOURCE_NAME = "session_source_name";
+  private static final String NON_EXISTENT_SOURCE_NAME = "non_exist";
+
+  private static final String STATEMENT_REF_NAME = "statement_ref_name";
+  private static final String STATEMENT_BRANCH_NAME = "statement_branch_name";
+  private static final String STATEMENT_TAG_NAME = "statement_tag_name";
+  private static final String STATEMENT_COMMIT_HASH = "DEADBEEFDEADBEEF";
+  private static final String SESSION_BRANCH_NAME = "session_branch_name";
+
+  @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock private OptionManager optionManager;
+  @Mock private Catalog catalog;
+  @Mock private UserSession userSession;
+  @Mock private DataplanePlugin dataplanePlugin;
+
+  @InjectMocks private ShowViewsHandler showViewsHandler;
+
+  @Test // SHOW VIEWS
+  public void showViewsSupportKeyDisabledThrows() {
+    // Arrange
+    // Note that it gets session source first to determine whether source is versioned or not.
+    setUpSessionSource();
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(false);
+
+    SqlShowViews input = SqlShowViewsBuilder.builder().build();
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("SHOW VIEWS")
+      .hasMessageContaining("not supported");
+  }
+
+  @Test
+  public void showViewsNonExistentSource() {
+    // Arrange
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.REFERENCE, STATEMENT_REF_NAME)
+      .withSource(NON_EXISTENT_SOURCE_NAME)
+      .build();
+
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder")));
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not exist");
+  }
+
+  @Test // SHOW VIEWS
+  public void showViewsReturnsViewResult() throws Exception {
+    // Arrange
+    setUpSessionSource();
+    setUpSessionSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry1 = createRandomViewEntry();
+    ExternalNamespaceEntry viewEntry2 = createRandomViewEntry();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofBranch(SESSION_BRANCH_NAME)))
+      .thenReturn(Stream.of(viewEntry1, viewEntry2));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder().build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    List<ShowViewsHandler.ShowViewResult> expectedViewInfoList = createExpectedShowViewResultList(SESSION_SOURCE_NAME, viewEntry1, viewEntry2);
+    assertThat(result).hasSize(2);
+    assertThat(result).isEqualTo(expectedViewInfoList);
+  }
+
+  @Test // SHOW VIEWS (Nested)
+  public void showViewsNestedSessionSourceReturnsViewsInfo() throws Exception {
+    // Arrange
+    final String nestedFolder = "NestedFolder";
+    setUpSessionSource(nestedFolder);
+    setUpSessionSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry = createRandomViewEntry();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        ImmutableList.of(nestedFolder),
+        VersionContext.ofBranch(SESSION_BRANCH_NAME)))
+      .thenReturn(Stream.of(viewEntry));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder().build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    ShowViewsHandler.ShowViewResult expectedViewInfo = createExpectedShowViewResult(SESSION_SOURCE_NAME, viewEntry);
+    assertThat(result).hasSize(1);
+    assertThat(result.get(0)).isEqualTo(expectedViewInfo);
+  }
+
+  @Test // SHOW VIEWS IN <source> (Nested)
+  public void showViewsNestedSourceReturnsViewsInfo() throws Exception {
+    // Arrange
+    final String nestedFolder = "NestedFolder";
+    setUpStatementSource(nestedFolder);
+    setUpStatementSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry = createRandomViewEntry();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        ImmutableList.of(nestedFolder),
+        VersionContext.ofBranch(SESSION_BRANCH_NAME)))
+      .thenReturn(Stream.of(viewEntry));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder().build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    ShowViewsHandler.ShowViewResult expectedViewInfo = createExpectedShowViewResult(STATEMENT_SOURCE_NAME, viewEntry);
+    assertThat(result).hasSize(1);
+    assertThat(result.get(0)).isEqualTo(expectedViewInfo);
+  }
+
+  @Test // SHOW VIEWS LIKE <pattern>
+  public void showViewsReturnsMatchingResult() throws Exception {
+    // Arrange
+    setUpSessionSource();
+    setUpSessionSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry1 = createRandomViewEntryWithSpecificPrefix("abc");
+    ExternalNamespaceEntry viewEntry2 = createRandomViewEntryWithSpecificPrefix("xyz");
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofBranch(SESSION_BRANCH_NAME)))
+      .thenReturn(Stream.of(viewEntry1, viewEntry2));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withLike("%ab%")
+      .build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    List<ShowViewsHandler.ShowViewResult> expectedViewInfoList = createExpectedShowViewResultList(SESSION_SOURCE_NAME, viewEntry1);
+    assertThat(result).hasSize(1);
+    assertThat(result).isEqualTo(expectedViewInfoList);
+  }
+
+  @Test // SHOW VIEWS AT BRANCH <branch> IN <source>
+  public void showViewsBranchReturnsViewInfo() throws Exception {
+    // Arrange
+    setUpStatementSource();
+    setUpStatementSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry = createRandomViewEntry();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofBranch(STATEMENT_BRANCH_NAME)))
+      .thenReturn(Stream.of(viewEntry));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.BRANCH, STATEMENT_BRANCH_NAME)
+      .withSource(STATEMENT_SOURCE_NAME)
+      .build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    ShowViewsHandler.ShowViewResult expectedViewInfo = createExpectedShowViewResult(STATEMENT_SOURCE_NAME, viewEntry);
+    assertThat(result).hasSize(1);
+    assertThat(result.get(0)).isEqualTo(expectedViewInfo);
+  }
+
+  @Test // SHOW VIEWS AT TAG <tag> IN <source>
+  public void showViewsTagReturnsViewInfo() throws Exception {
+    // Arrange
+    setUpStatementSource();
+    setUpStatementSourceVersion();
+    setUpSupportKeys();
+
+    ExternalNamespaceEntry viewEntry = createRandomViewEntry();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofTag(STATEMENT_TAG_NAME)))
+      .thenReturn(Stream.of(viewEntry));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.TAG, STATEMENT_TAG_NAME)
+      .withSource(STATEMENT_SOURCE_NAME)
+      .build();
+
+    // Act
+    List<ShowViewsHandler.ShowViewResult> result = showViewsHandler.toResult("", input);
+
+    // Assert
+    ShowViewsHandler.ShowViewResult expectedViewInfo = createExpectedShowViewResult(STATEMENT_SOURCE_NAME, viewEntry);
+    assertThat(result).hasSize(1);
+    assertThat(result.get(0)).isEqualTo(expectedViewInfo);
+  }
+
+  @Test // SHOW VIEWS AT REF <ref>
+  public void showViewsRefSessionSourceNonexistentThrows() {
+    // Arrange
+    when(catalog.containerExists(new NamespaceKey(SESSION_SOURCE_NAME)))
+      .thenReturn(false);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(ImmutableList.of(SESSION_SOURCE_NAME)));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.REFERENCE, STATEMENT_REF_NAME)
+      .build();
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining(SESSION_SOURCE_NAME)
+      .hasMessageContaining("does not exist");
+  }
+
+  @Test // SHOW VIEWS AT REF <ref> (using non-Nessie source)
+  public void showViewsRefSessionSourceNonNessieThrows() {
+    // Arrange
+    when(catalog.containerExists(new NamespaceKey(SESSION_SOURCE_NAME)))
+      .thenReturn(true);
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class)); // Non-Nessie source
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(ImmutableList.of(SESSION_SOURCE_NAME, "unusedFolder")));
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.REFERENCE, STATEMENT_REF_NAME)
+      .build();
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support show views");
+  }
+
+  @Test // SHOW VIEWS AT REF <ref> IN <source>
+  public void showViewsReferenceNotFoundThrows() {
+    // Arrange
+    setUpStatementSource();
+    setUpStatementSourceVersion();
+    setUpSupportKeys();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofRef(STATEMENT_REF_NAME)))
+      .thenThrow(ReferenceNotFoundException.class);
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.REFERENCE, STATEMENT_REF_NAME)
+      .withSource(STATEMENT_SOURCE_NAME)
+      .build();
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("not found")
+      .hasMessageContaining(STATEMENT_SOURCE_NAME)
+      .hasMessageContaining(STATEMENT_REF_NAME);
+  }
+
+  @Test // SHOW VIEWS AT REF <ref> IN <source>
+  public void showViewsReferenceConflictThrows() {
+    // Arrange
+    setUpStatementSource();
+    setUpStatementSourceVersion();
+    setUpSupportKeys();
+
+    when(dataplanePlugin.listViewsIncludeNested(
+        Collections.emptyList(),
+        VersionContext.ofRef(STATEMENT_REF_NAME)))
+      .thenThrow(ReferenceConflictException.class);
+
+    SqlShowViews input = SqlShowViewsBuilder.builder()
+      .withReference(ReferenceType.REFERENCE, STATEMENT_REF_NAME)
+      .withSource(STATEMENT_SOURCE_NAME)
+      .build();
+
+    // Act and Assert
+    assertThatThrownBy(() -> showViewsHandler.toResult("", input))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("has conflict")
+      .hasMessageContaining(STATEMENT_SOURCE_NAME)
+      .hasMessageContaining(STATEMENT_REF_NAME);
+  }
+
+  // Sets up to return the Session Source Version in user session (which shouldn't get
+  // used if the statement has a source).
+  private void setUpStatementSourceVersion() {
+    VersionContext sessionVersionContext = VersionContext.ofBranch(SESSION_BRANCH_NAME);
+    when(userSession.getSessionVersionForSource(STATEMENT_SOURCE_NAME))
+      .thenReturn(sessionVersionContext);
+  }
+
+  // Sets up to return the Session Source Version in user session (which gets used if
+  // "AT <ref>" is not specified in the query).
+  private void setUpSessionSourceVersion() {
+    VersionContext sessionVersionContext = VersionContext.ofBranch(SESSION_BRANCH_NAME);
+    when(userSession.getSessionVersionForSource(SESSION_SOURCE_NAME))
+      .thenReturn(sessionVersionContext);
+  }
+
+  // Sets up to return the Statement Source in catalog (which gets used if
+  // "IN <source>" is specified in the query).
+  private void setUpStatementSource() {
+    setUpStatementSource(null);
+  }
+
+  private void setUpStatementSource(String nestedFolder) {
+    when(catalog.containerExists(new NamespaceKey(STATEMENT_SOURCE_NAME)))
+      .thenReturn(true);
+    when(catalog.getSource(STATEMENT_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+    List<String> path = nestedFolder != null ?
+      ImmutableList.of(STATEMENT_SOURCE_NAME, nestedFolder) :
+      ImmutableList.of(STATEMENT_SOURCE_NAME);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(path));
+  }
+
+  // Sets up to enable the versioned SQL syntax support key in the option manager.
+  private void setUpSupportKeys() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+  }
+
+  // Sets up to return the Session Source in catalog (which gets used if
+  // "IN <source>" is not specified in the query).
+  private void setUpSessionSource() {
+    setUpSessionSource(null);
+  }
+
+  private void setUpSessionSource(String nestedFolder) {
+    when(catalog.containerExists(new NamespaceKey(SESSION_SOURCE_NAME)))
+      .thenReturn(true);
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+    List<String> path = nestedFolder != null ?
+      ImmutableList.of(SESSION_SOURCE_NAME, nestedFolder) :
+      ImmutableList.of(SESSION_SOURCE_NAME);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(path));
+  }
+
+  private ExternalNamespaceEntry createRandomViewEntry() {
+    String randomName = UUID.randomUUID().toString();
+    return ExternalNamespaceEntry.of("ICEBERG_VIEW", ImmutableList.of(randomName));
+  }
+
+  private ExternalNamespaceEntry createRandomViewEntryWithSpecificPrefix(String prefix) {
+    String randomName = prefix + UUID.randomUUID();
+    return ExternalNamespaceEntry.of("ICEBERG_VIEW", ImmutableList.of(randomName));
+  }
+
+  private ShowViewsHandler.ShowViewResult createExpectedShowViewResult(String sourceName, ExternalNamespaceEntry viewEntry) {
+    String sourceNameAndNamespace = sourceName;
+    if (!viewEntry.getNamespace().isEmpty()) {
+      sourceNameAndNamespace = String.join(".", sourceName, String.join(".", viewEntry.getNamespace()));
+    }
+
+    return new ShowViewsHandler.ShowViewResult(
+      sourceNameAndNamespace,
+      viewEntry.getName());
+  }
+
+  private List<ShowViewsHandler.ShowViewResult> createExpectedShowViewResultList(String sourceName, ExternalNamespaceEntry... viewEntries) {
+    List<ShowViewsHandler.ShowViewResult> returnList = new ArrayList<>();
+    for (ExternalNamespaceEntry viewEntry : viewEntries) {
+      // Compute the qualified name per entry so one entry's namespace cannot leak into the next.
+      String sourceNameAndNamespace = sourceName;
+      if (!viewEntry.getNamespace().isEmpty()) {
+        sourceNameAndNamespace = String.join(".", sourceName, String.join(".", viewEntry.getNamespace()));
+      }
+      returnList.add(new ShowViewsHandler.ShowViewResult(sourceNameAndNamespace, viewEntry.getName()));
+    }
+    return returnList;
+  }
+
+  static class SqlShowViewsBuilder {
+    private String sourceName;
+    private ReferenceType referenceType;
+    private String reference;
+    private String likePattern;
+
+    static SqlShowViewsBuilder builder() {
+      return new SqlShowViewsBuilder();
+    }
+
+    SqlShowViewsBuilder withSource(String sourceName) {
+      this.sourceName = sourceName;
+      return this;
+    }
+
+    SqlShowViewsBuilder withReference(ReferenceType referenceType, String reference) {
+      this.referenceType = referenceType;
+      this.reference = reference;
+      return this;
+    }
+
+    SqlShowViewsBuilder withLike(String likePattern) {
+      this.likePattern = likePattern;
+      return this;
+    }
+
+    // SHOW VIEWS [AT <ref>] [IN <source>]
+    SqlShowViews build() {
+      return new SqlShowViews(
+        SqlParserPos.ZERO,
+        referenceType,
+        reference != null ? new SqlIdentifier(reference, SqlParserPos.ZERO) : null,
+        sourceName != null ? new SqlIdentifier(sourceName, SqlParserPos.ZERO) : null,
+        likePattern != null ?
SqlLiteral.createCharString(likePattern, SqlParserPos.ZERO) : null); + } + } + +} diff --git a/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestUseVersionHandler.java b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestUseVersionHandler.java new file mode 100644 index 0000000000..90f5ff0fbb --- /dev/null +++ b/dac/backend/src/test/java/com/dremio/exec/planner/sql/handlers/TestUseVersionHandler.java @@ -0,0 +1,371 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import static com.dremio.exec.ExecConstants.ENABLE_USE_VERSION_SYNTAX; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Rule; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.planner.sql.handlers.direct.SimpleCommandResult; +import com.dremio.exec.planner.sql.parser.ReferenceType; +import com.dremio.exec.planner.sql.parser.SqlUseVersion; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.ReferenceTypeConflictException; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.work.foreman.ForemanSetupException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.dataplane.store.DataplanePlugin; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.test.DremioTest; + +/** + * Tests for USE VERSION SQL. 
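+ * Covers USE BRANCH / TAG / COMMIT / REF resolution against a DataplanePlugin,
+ * plus the failure paths (missing source, unknown reference, wrong reference type).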
+ */
+public class TestUseVersionHandler extends DremioTest {
+
+  private static final String DEFAULT_SOURCE_NAME = "dataplane_source_1";
+  private static final String NON_EXISTENT_SOURCE_NAME = "non_exist";
+  private static final String SESSION_SOURCE_NAME = "session_source";
+  private static final String DEFAULT_BRANCH_NAME = "branchName";
+  private static final String DEFAULT_COMMIT_HASH = "0123456789abcdeff";
+  private static final VersionContext DEFAULT_VERSION =
+    VersionContext.ofBranch(DEFAULT_BRANCH_NAME);
+  private static final VersionContext COMMIT_VERSION =
+    VersionContext.ofBareCommit(DEFAULT_COMMIT_HASH);
+  private static final SqlUseVersion DEFAULT_INPUT = new SqlUseVersion(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final SqlUseVersion NO_SOURCE_INPUT = new SqlUseVersion(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    null);
+  private static final SqlUseVersion NON_EXISTENT_SOURCE_INPUT = new SqlUseVersion(
+    SqlParserPos.ZERO,
+    ReferenceType.BRANCH,
+    new SqlIdentifier(DEFAULT_BRANCH_NAME, SqlParserPos.ZERO),
+    new SqlIdentifier(NON_EXISTENT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final SqlUseVersion COMMIT_INPUT = new SqlUseVersion(
+    SqlParserPos.ZERO,
+    ReferenceType.COMMIT,
+    new SqlIdentifier(DEFAULT_COMMIT_HASH, SqlParserPos.ZERO),
+    new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+  private static final ResolvedVersionContext DEFAULT_RESOLVED_VERSION =
+    ResolvedVersionContext.ofBranch("expected", "ffedcba9876543210");
+  private static final ResolvedVersionContext RESOLVED_COMMIT =
+    ResolvedVersionContext.ofBareCommit(DEFAULT_COMMIT_HASH);
+
+  @Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock private OptionManager optionManager;
+  @Mock private Catalog catalog;
+  @Mock private UserSession userSession;
+  @Mock private DataplanePlugin dataplanePlugin;
+
+  @InjectMocks private UseVersionHandler handler;
+
+  @Test
+  public void useBranchSupportKeyDisabledThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(false);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("USE BRANCH")
+      .hasMessageContaining("not supported");
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
+
+  @Test
+  public void useBranchNonExistentSource() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    NamespaceNotFoundException notFoundException = new NamespaceNotFoundException("Cannot access");
+    UserException nonExistException = UserException.validationError(notFoundException)
+      .message("Tried to access non-existent source [%s].", NON_EXISTENT_SOURCE_NAME).build();
+    when(catalog.getSource(NON_EXISTENT_SOURCE_NAME)).thenThrow(nonExistException);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NON_EXISTENT_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("Tried to access non-existent source");
+  }
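+
+  // When the statement omits a source, the handler is expected to fall back to the
+  // first element of the session's default schema path (stubbed below as "session_source").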
+
+  @Test
+  public void useBranchEmptySourceUsesSessionContext() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPluginAndSessionContext();
+    when(dataplanePlugin.resolveVersionContext(DEFAULT_VERSION))
+      .thenReturn(DEFAULT_RESOLVED_VERSION);
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", NO_SOURCE_INPUT);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("set to")
+      .contains(DEFAULT_VERSION.toString())
+      .contains(SESSION_SOURCE_NAME);
+    verify(userSession).setSessionVersionForSource(SESSION_SOURCE_NAME, DEFAULT_VERSION);
+  }
+
+  @Test
+  public void useBranchSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.resolveVersionContext(DEFAULT_VERSION))
+      .thenReturn(DEFAULT_RESOLVED_VERSION);
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", DEFAULT_INPUT);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("set to")
+      .contains(DEFAULT_VERSION.toString())
+      .contains(DEFAULT_SOURCE_NAME);
+    verify(userSession).setSessionVersionForSource(DEFAULT_SOURCE_NAME, DEFAULT_VERSION);
+  }
+
+  @Test
+  public void useTagSucceeds() throws ForemanSetupException {
+    // Constants
+    final String tagName = "tagName";
+    final SqlUseVersion input = new SqlUseVersion(
+      SqlParserPos.ZERO,
+      ReferenceType.TAG,
+      new SqlIdentifier(tagName, SqlParserPos.ZERO),
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+    final VersionContext version = VersionContext.ofTag(tagName);
+
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.resolveVersionContext(version))
+      .thenReturn(ResolvedVersionContext.ofTag(tagName, DEFAULT_COMMIT_HASH));
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", input);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("set to")
+      .contains(tagName)
+      .contains(DEFAULT_SOURCE_NAME);
+    verify(userSession).setSessionVersionForSource(DEFAULT_SOURCE_NAME, version);
+  }
+
+  @Test
+  public void useCommitSucceeds() throws ForemanSetupException {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.resolveVersionContext(COMMIT_VERSION))
+      .thenReturn(RESOLVED_COMMIT);
+    when(dataplanePlugin.commitExists(DEFAULT_COMMIT_HASH))
+      .thenReturn(true);
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", COMMIT_INPUT);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("set to")
+      .contains(COMMIT_VERSION.toString())
+      .contains(DEFAULT_SOURCE_NAME);
+    verify(userSession).setSessionVersionForSource(DEFAULT_SOURCE_NAME, COMMIT_VERSION);
+  }
+
+  @Test
+  public void useCommitNotFoundThrows() {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.resolveVersionContext(COMMIT_VERSION))
+      .thenReturn(RESOLVED_COMMIT);
+    when(dataplanePlugin.commitExists(DEFAULT_COMMIT_HASH))
+      .thenReturn(false);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", COMMIT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("Commit")
+      .hasMessageContaining("not found")
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
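+
+  // Note: judging by the stubs above, bare commits are additionally validated through
+  // commitExists(), while branch/tag/reference inputs only pass through resolveVersionContext().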
+
+  @Test
+  public void useReferenceSucceeds() throws ForemanSetupException {
+    // Constants
+    final String referenceName = "refName";
+    final SqlUseVersion input = new SqlUseVersion(
+      SqlParserPos.ZERO,
+      ReferenceType.REFERENCE,
+      new SqlIdentifier(referenceName, SqlParserPos.ZERO),
+      new SqlIdentifier(DEFAULT_SOURCE_NAME, SqlParserPos.ZERO));
+    final VersionContext version = VersionContext.ofRef(referenceName);
+
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    when(dataplanePlugin.resolveVersionContext(version))
+      .thenReturn(DEFAULT_RESOLVED_VERSION);
+
+    // Act
+    List<SimpleCommandResult> result = handler.toResult("", input);
+
+    // Assert
+    assertThat(result).isNotEmpty();
+    assertThat(result.get(0).ok).isTrue();
+    assertThat(result.get(0).summary)
+      .contains("set to")
+      .contains(referenceName)
+      .contains(DEFAULT_SOURCE_NAME);
+    verify(userSession).setSessionVersionForSource(DEFAULT_SOURCE_NAME, version);
+  }
+
+  @Test
+  public void useBranchWrongSourceThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support")
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
+
+  @Test
+  public void useBranchWrongSourceFromContextThrows() {
+    // Arrange
+    setUpSupportKeyAndPluginAndSessionContext();
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(mock(StoragePlugin.class));
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("does not support")
+      .hasMessageContaining(SESSION_SOURCE_NAME);
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
+
+  @Test
+  public void useBranchNullSourceFromContextThrows() {
+    // Arrange
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(null);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", NO_SOURCE_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("was not specified");
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
+
+  @Test
+  public void useBranchNotFoundThrows() {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    doThrow(ReferenceNotFoundException.class)
+      .when(dataplanePlugin)
+      .resolveVersionContext(DEFAULT_VERSION);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("not found")
+      .hasMessageContaining(DEFAULT_VERSION.toString())
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
+
+  @Test
+  public void useBranchTypeConflictThrows() {
+    // Arrange
+    setUpSupportKeyAndPlugin();
+    doThrow(ReferenceTypeConflictException.class)
+      .when(dataplanePlugin)
+      .resolveVersionContext(DEFAULT_VERSION);
+
+    // Act + Assert
+    assertThatThrownBy(() -> handler.toResult("", DEFAULT_INPUT))
+      .isInstanceOf(UserException.class)
+      .hasMessageContaining("is not the requested type")
+      .hasMessageContaining(DEFAULT_VERSION.toString())
+      .hasMessageContaining(DEFAULT_SOURCE_NAME);
+    verify(userSession, never()).setSessionVersionForSource(any(), any());
+  }
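+
+  // Shared stubbing helpers. With strict stubs (see the MockitoRule above), each test
+  // only arranges the collaborators it actually exercises.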
+
+  private void setUpSupportKeyAndPlugin() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(DEFAULT_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+  }
+
+  private void setUpSupportKeyAndPluginAndSessionContext() {
+    when(optionManager.getOption(ENABLE_USE_VERSION_SYNTAX))
+      .thenReturn(true);
+    when(catalog.getSource(SESSION_SOURCE_NAME))
+      .thenReturn(dataplanePlugin);
+    when(userSession.getDefaultSchemaPath())
+      .thenReturn(new NamespaceKey(Arrays.asList(SESSION_SOURCE_NAME, "unusedFolder")));
+  }
+
+}
diff --git a/dac/backend/src/test/java/com/dremio/service/accelerator/BaseTestReflection.java b/dac/backend/src/test/java/com/dremio/service/accelerator/BaseTestReflection.java
index 2454423012..0ad2409079 100644
--- a/dac/backend/src/test/java/com/dremio/service/accelerator/BaseTestReflection.java
+++ b/dac/backend/src/test/java/com/dremio/service/accelerator/BaseTestReflection.java
@@ -37,6 +37,7 @@ import java.util.stream.Collectors;
 import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.GenericType;
 import org.apache.arrow.memory.BufferAllocator;
 import org.junit.AfterClass;
@@ -44,10 +45,10 @@ import org.junit.ClassRule;
 import org.junit.rules.TemporaryFolder;
+import com.dremio.dac.api.Space;
 import com.dremio.dac.explore.model.DatasetPath;
 import com.dremio.dac.explore.model.DatasetUI;
 import com.dremio.dac.model.job.JobDataFragment;
-import com.dremio.dac.model.spaces.Space;
 import com.dremio.dac.model.spaces.SpacePath;
 import com.dremio.dac.server.BaseTestServer;
 import com.dremio.dac.server.JobsServiceTestUtils;
@@ -418,7 +419,8 @@ protected String dumpState(final Materialization m) {
   }
   protected void createSpace(String name) {
-    expectSuccess(getBuilder(getAPIv2().path("space/" + name)).buildPut(Entity.json(new Space(null, name, null, null, null, 0, null))), Space.class);
+    Space newSpace = new Space(null, name, null, null, null);
+    expectSuccess(getBuilder(getPublicAPI(3).path("/catalog/")).buildPost(Entity.json(newSpace)), new GenericType<Space>() {});
   }
   /**
diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestInternalQueryStreamingMode.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestInternalQueryStreamingMode.java
index 1b019a7143..11c789e1e9 100644
--- a/dac/backend/src/test/java/com/dremio/service/jobs/TestInternalQueryStreamingMode.java
+++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestInternalQueryStreamingMode.java
@@ -147,8 +147,8 @@ public static class QueryResultObserver implements StreamObserver<JobEvent> {
     @Override
     public void onNext(JobEvent value) {
       eventCount.getAndIncrement();
-      if (value.hasResultData()) {
-        results.add(value.getResultData());
+      if (value.hasQueryResultData()) {
+        results.add(value.getQueryResultData().getResultData());
       } else if (value.hasFinalJobSummary()) {
         final JobSummary finalSummary = value.getFinalJobSummary();
         if (finalSummary.getJobState() == JobState.COMPLETED) {
diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobDetails.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobDetails.java
index c6d92ce471..8f6bb39acf 100644
--- a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobDetails.java
+++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobDetails.java
@@ -33,6 +33,7 @@ import com.dremio.common.utils.protos.AttemptIdUtils;
 import com.dremio.dac.daemon.TestSpacesStoragePlugin;
 import com.dremio.dac.explore.model.DatasetPath;
+import com.dremio.dac.explore.model.InitialPreviewResponse;
 import com.dremio.dac.model.job.JobInfoDetailsUI;
 import com.dremio.dac.server.BaseTestServer;
 import com.dremio.dac.service.reflection.ReflectionServiceHelper;
@@ -80,7 +81,8 @@ public void setup() throws Exception {
   @Test
   public void testJobDetailsAPI() throws Exception {
     TestSpacesStoragePlugin.setup();
-    getPreview(getDataset(new DatasetPath("testA.dsA1")));
+    final InitialPreviewResponse previewResponse = getPreview(getDataset(new DatasetPath("testA.dsA1")));
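+    // Make sure the preview job has actually finished before the test inspects job details below.
+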
waitForJobComplete(previewResponse.getJobId().getId()); String jobId = "1f3f8dad-f25e-8cbe-e952-1587f1647a00"; String sql = "select * from \" testA.dsA1\""; UUID id = UUID.fromString(jobId); diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobMetadataCollection.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobMetadataCollection.java index c0848828fe..c60603b4af 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobMetadataCollection.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobMetadataCollection.java @@ -18,6 +18,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.util.List; import java.util.UUID; import java.util.concurrent.CountDownLatch; @@ -32,11 +33,14 @@ import com.dremio.exec.client.DremioClient; import com.dremio.exec.proto.UserBitShared.QueryId; import com.dremio.exec.proto.UserBitShared.QueryResult.QueryState; +import com.dremio.exec.proto.UserBitShared.QueryType; import com.dremio.exec.proto.UserProtos.CreatePreparedStatementResp; import com.dremio.exec.proto.UserProtos.GetCatalogsResp; import com.dremio.exec.proto.UserProtos.LikeFilter; import com.dremio.exec.rpc.ConnectionThrottle; import com.dremio.exec.rpc.RpcException; +import com.dremio.options.OptionManager; +import com.dremio.options.OptionValue; import com.dremio.proto.model.attempts.RequestType; import com.dremio.sabot.rpc.user.QueryDataBatch; import com.dremio.sabot.rpc.user.UserResultsListener; @@ -52,10 +56,12 @@ public class TestJobMetadataCollection extends BaseTestServer { private final DremioClient rpc; private final JobsService jobs; + private final OptionManager optionManager; public TestJobMetadataCollection() throws RpcException{ rpc = getRpcClient(); jobs = l(JobsService.class); + optionManager = l(OptionManager.class); } @Test @@ -68,6 +74,8 @@ public void getCatalogs() throws Exception { JobDetailsUI job = getDetails(resp.getQueryId()); assertEquals(RequestType.GET_CATALOGS, job.getRequestType()); + assertEquals("[Get Catalogs] Catalog Filter: .", job.getDescription()); + assertEquals("NA", job.getSql()); } @Test @@ -85,6 +93,8 @@ public void prepare() throws Exception { assertEquals("sys", job.getParentsList().get(0).getDatasetPathList().get(0)); assertEquals(com.dremio.service.job.proto.QueryType.JDBC, job.getQueryType()); + assertEquals("[Prepare Statement] select * from sys.options", job.getDescription()); + assertEquals("select * from sys.options", job.getSql()); final CountDownLatch latch = new CountDownLatch(1); final Pointer id = new Pointer<>(); @@ -117,7 +127,46 @@ public void dataArrived(QueryDataBatch result, ConnectionThrottle throttle) { assertTrue(job2.getSql() != null); } + @Test + public void runSql() throws Exception { + final List resp = rpc.runQuery(QueryType.SQL, "SELECT 1"); + JobDetailsUI job = getDetails(resp.get(0).getHeader().getQueryId()); + + assertEquals(RequestType.RUN_SQL, job.getRequestType()); + assertEquals("SELECT 1", job.getDescription()); + assertEquals("SELECT 1", job.getSql()); + } + @Test + public void runSqlWithTruncation() throws Exception { + optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 5)); + final List resp = rpc.runQuery(QueryType.SQL, "SELECT 1"); + QueryId queryId = resp.get(0).getHeader().getQueryId(); + + // verify SQL is truncated in Jobs search API + Object searchRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("jobs-listing") + .path("v1.0") + ).buildGet(), 
Object.class); + assertTrue(searchRsp.toString().contains("queryText=SELEC, ")); + assertTrue(searchRsp.toString().contains("description=SELEC, ")); + + // verify SQL is not truncated in Job details API + Object detailRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("jobs-listing") + .path("v1.0") + .path(toId(queryId).getId()) + .path("jobDetails") + .queryParam("detailLevel", "0") + ).buildGet(), Object.class); + assertTrue(detailRsp.toString().contains("queryText=SELECT 1, ")); + assertTrue(detailRsp.toString().contains("description=SELECT 1, ")); + optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 0)); + } private JobDetailsUI getDetails(QueryId id) throws JobNotFoundException { JobDetailsRequest request = JobDetailsRequest.newBuilder() diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobService.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobService.java index f803925438..6cffb29a90 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobService.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobService.java @@ -86,6 +86,7 @@ import com.dremio.exec.planner.DremioVolcanoPlanner; import com.dremio.exec.proto.SearchProtos; import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType; import com.dremio.exec.proto.beans.AttemptEvent; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; @@ -94,10 +95,13 @@ import com.dremio.exec.testing.ExecutionControls; import com.dremio.exec.testing.Injection; import com.dremio.exec.work.foreman.AttemptManager; +import com.dremio.exec.work.protector.AttemptAnalyser; import com.dremio.exec.work.protector.ForemenWorkManager; +import com.dremio.options.OptionManager; import com.dremio.options.OptionValue; import com.dremio.options.OptionValue.OptionType; import com.dremio.proto.model.attempts.AttemptReason; +import com.dremio.proto.model.attempts.RequestType; import com.dremio.resource.exception.ResourceUnavailableException; import com.dremio.sabot.exec.CancelQueryContext; import com.dremio.sabot.exec.CoordinatorHeapClawBackStrategy; @@ -109,6 +113,7 @@ import com.dremio.service.job.JobsWithParentDatasetRequest; import com.dremio.service.job.SearchJobsRequest; import com.dremio.service.job.VersionedDatasetPath; +import com.dremio.service.job.proto.ExtraJobInfo; import com.dremio.service.job.proto.JobAttempt; import com.dremio.service.job.proto.JobDetails; import com.dremio.service.job.proto.JobId; @@ -136,6 +141,7 @@ public class TestJobService extends BaseTestServer { private HybridJobsService jobsService; private LocalJobsService localJobsService; private ForemenWorkManager foremenWorkManager; + private OptionManager optionManager; @Before public void setup() throws Exception { @@ -143,6 +149,7 @@ public void setup() throws Exception { jobsService = (HybridJobsService) l(JobsService.class); localJobsService = l(LocalJobsService.class); foremenWorkManager = l(ForemenWorkManager.class); + optionManager = l(OptionManager.class); } private com.dremio.service.job.JobDetails getJobDetails(Job job) { @@ -160,6 +167,24 @@ public static void failFunction() { throw UserException.dataReadError().message("expected failure").buildSilently(); } + @Test + public void testInvalidSQLQuery() throws Exception { + final CompletionListener completionListener = new CompletionListener(); + + final JobRequest request = JobRequest.newBuilder() + .setSqlQuery(new 
SqlQuery("SELECT xyz", null, DEFAULT_USERNAME)) + .build(); + final JobId jobId = jobsService.submitJob(toSubmitJobRequest(request), completionListener) + .getJobId(); + try { + completionListener.await(); + fail("Query submission is expected to fail"); + } catch (Exception e) { + } + + assertEquals("pb_" + ErrorType.VALIDATION.toString(), AttemptAnalyser.LAST_ATTEMPT_COMPLETION_STATE); + } + // Test cancelling query past the planning phase. @Test public void testCancel() throws Exception { @@ -186,10 +211,102 @@ public void testCancel() throws Exception { ).buildPost(entity(null, JSON)), NotificationResponse.class); completionListener.await(); + // query cancelled by user + assertEquals(UserException.AttemptCompletionState.CLIENT_CANCELLED.toString(), AttemptAnalyser.LAST_ATTEMPT_COMPLETION_STATE); assertEquals("Job cancellation requested", response.getMessage()); assertEquals(NotificationResponse.ResponseType.OK, response.getType()); } + @Test + public void testSqlTruncation() throws Exception { + optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 5)); + final CompletionListener completionListener = new CompletionListener(); + + final JobRequest request = JobRequest.newBuilder() + .setSqlQuery(new SqlQuery("SELECT 1", null, DEFAULT_USERNAME)) + .build(); + final JobId jobId = jobsService.submitJob(toSubmitJobRequest(request), completionListener) + .getJobId(); + + completionListener.await(); + + // verify SQL is truncated in Jobs Search API + Object searchRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("jobs-listing") + .path("v1.0") + ).buildGet(), Object.class); + assertTrue(searchRsp.toString().contains("queryText=SELEC, ")); + assertTrue(searchRsp.toString().contains("description=SELEC, ")); + + // verify SQL is not truncated in Job details API + Object detailRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("jobs-listing") + .path("v1.0") + .path(jobId.getId()) + .path("jobDetails") + .queryParam("detailLevel", "0") + ).buildGet(), Object.class); + assertTrue(detailRsp.toString().contains("queryText=SELECT 1, ")); + assertTrue(detailRsp.toString().contains("description=SELECT 1, ")); + + // verify SQL is truncated in old Jobs Search API + Object oldSearchRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("jobs") + ).buildGet(), Object.class); + assertTrue(oldSearchRsp.toString().contains("description=SELEC, ")); + + // verify SQL is not truncated in old Job summary API + Object summaryRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("job") + .path(jobId.getId()) + .path("summary") + ).buildGet(), Object.class); + assertTrue(summaryRsp.toString().contains("description=SELECT 1, ")); + + // verify SQL is not truncated in old Job Details API + Object oldDetailRsp = expectSuccess( + getBuilder( + getAPIv2() + .path("job") + .path(jobId.getId()) + .path("details") + ).buildGet(), Object.class); + assertTrue(oldDetailRsp.toString().contains("sql=SELECT 1, ")); + assertTrue(oldDetailRsp.toString().contains("description=SELECT 1, ")); + assertEquals(UserException.AttemptCompletionState.SUCCESS.toString(), AttemptAnalyser.LAST_ATTEMPT_COMPLETION_STATE); + } + + @Test + public void testSqlTruncationDisable() throws Exception { + optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 0)); + final CompletionListener completionListener = new CompletionListener(); + + final JobRequest request = JobRequest.newBuilder() + .setSqlQuery(new SqlQuery("SELECT 1", null, 
DEFAULT_USERNAME))
+      .build();
+    jobsService.submitJob(toSubmitJobRequest(request), completionListener);
+
+    completionListener.await();
+
+    // verify SQL is not truncated in Jobs Search API, when jobs.sql.truncate.length option is set to 0
+    Object response = expectSuccess(
+      getBuilder(
+        getAPIv2()
+          .path("jobs-listing")
+          .path("v1.0")
+      ).buildGet(), Object.class);
+    assertTrue(response.toString().contains("queryText=SELECT 1, "));
+    assertTrue(response.toString().contains("description=SELECT 1, "));
+  }
+
   @Test
   public void testErrorOnCancellingACompletedJob() throws Exception {
     final JobSubmittedListener jobSubmittedListener = new JobSubmittedListener();
@@ -277,6 +394,8 @@ public void testResourceAllocationError() throws Exception {
           .build()
         )).isInstanceOf(RuntimeException.class)
         .hasMessageContaining("Job has been cancelled");
+
+      assertEquals(UserException.AttemptCompletionState.ENGINE_TIMEOUT.toString(), AttemptAnalyser.LAST_ATTEMPT_COMPLETION_STATE);
     } finally {
       // reset, irrespective any exception, so that other test cases are not affected.
       ExecutionControls.setControlsOptionMapper(new ObjectMapper());
@@ -301,6 +420,7 @@ public void testCancelPlanning() throws Exception {
       AttemptEvent.State.PLANNING,
       AttemptEvent.State.FAILED
     };
+    assertEquals(UserException.AttemptCompletionState.HEAP_MONITOR_C.toString(), AttemptAnalyser.LAST_ATTEMPT_COMPLETION_STATE);
     assertArrayEquals("Since we paused during planning, there should be AttemptEvent.State.PLANNING" +
       " before AttemptEvent.State.FAILED.",
       expectedAttemptStates, observedAttemptStates);
@@ -797,6 +917,7 @@ private Job createJob(final String id, final List datasetPath, final Str
       .setStartTime(start)
       .setFinishTime(end)
       .setQueryType(queryType)
+      .setRequestType(RequestType.RUN_SQL)
       .setResourceSchedulingInfo(new ResourceSchedulingInfo().setQueueName("SMALL")
         .setRuleName("ruleSmall"));
@@ -1379,6 +1500,39 @@ public void testJobDependenciesCleanup() throws Exception {
     assertNotNull("Job2 must be kept in the database", jobsService.getJobDetails(request2));
   }
+
+  @Test
+  public void testExtraJobInfoCleanup() throws Exception {
+    jobsService = (HybridJobsService) l(JobsService.class);
+    optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 3));
+    SqlQuery ctas = getQueryFromSQL("SHOW SCHEMAS");
+    final com.dremio.service.job.JobDetails jobDetails0 = getJobDetails(ctas, "ds0", DatasetVersion.newVersion());
+    getJobDetails(ctas, "ds1", DatasetVersion.newVersion());
+    Thread.sleep(20);
+    long beforeJob2TS = System.currentTimeMillis();
+    getJobDetails(ctas, "ds2", DatasetVersion.newVersion());
+    Thread.sleep(20);
+    long diffBeforeJob2 = System.currentTimeMillis() - beforeJob2TS;
+
+    LegacyKVStoreProvider provider = l(LegacyKVStoreProvider.class);
+    LegacyKVStore<JobId, ExtraJobInfo> extraJobInfoStore = provider.getStore(ExtraJobInfoStoreCreator.class);
+    ExtraJobInfo extraJobInfo0 = extraJobInfoStore.get(JobsProtoUtil.toStuff(jobDetails0.getJobId()));
+    assertEquals("SHOW SCHEMAS", extraJobInfo0.getSql());
+
+    final List externalCleaners = Collections.singletonList(
+      l(LocalJobsService.class).new OnlineProfileCleaner());
+    String report =
+      LocalJobsService.deleteOldJobsAndDependencies(externalCleaners, provider, diffBeforeJob2);
+    String expectedReport = ""
+      + "Completed. Deleted 2 jobs."
+ + System.lineSeparator() + "\tJobAttempts: 2, Attempts with failure: 0" + + System.lineSeparator() + "\t" + LocalJobsService.OnlineProfileCleaner.class.getSimpleName() + " executions: 2, failures: 0" + + System.lineSeparator(); + assertEquals(expectedReport, report); + + assertEquals(null, extraJobInfoStore.get(JobsProtoUtil.toStuff(jobDetails0.getJobId()))); + optionManager.setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, "jobs.sql.truncate.length", 0)); + } + public static void cleanJobs() { final LegacyKVStoreProvider provider = l(LegacyKVStoreProvider.class); final List externalCleaners = Collections.singletonList( @@ -1467,6 +1621,7 @@ private static JobInfo newJobInfo(final JobInfo templateJobInfo, long start, lon .setStartTime(start) .setFinishTime(end) .setFailureInfo(failureInfo) + .setRequestType(templateJobInfo.getRequestType()) .setDatasetPathList(templateJobInfo.getDatasetPathList()); } @@ -1477,6 +1632,7 @@ private static JobInfo newJobInfo(final JobInfo templateJobInfo, long start, lon .setStartTime(start) .setFinishTime(end) .setFailureInfo(failureInfo) + .setRequestType(templateJobInfo.getRequestType()) .setResourceSchedulingInfo(new ResourceSchedulingInfo().setResourceSchedulingStart(schedulingStart).setResourceSchedulingEnd(schedulingEnd)) .setDatasetPathList(templateJobInfo.getDatasetPathList()); } diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsListingResource.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsListingResource.java index 8e1b29e2b0..f77e1c9adf 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsListingResource.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsListingResource.java @@ -33,6 +33,7 @@ import com.dremio.common.utils.protos.AttemptIdUtils; import com.dremio.dac.daemon.TestSpacesStoragePlugin; import com.dremio.dac.explore.model.DatasetPath; +import com.dremio.dac.explore.model.InitialPreviewResponse; import com.dremio.dac.model.job.JobsListingUI; import com.dremio.dac.server.BaseTestServer; import com.dremio.exec.proto.UserBitShared; @@ -73,7 +74,8 @@ public void setup() throws Exception { @Test public void testJobsListingAPI() throws Exception { TestSpacesStoragePlugin.setup(); - getPreview(getDataset(new DatasetPath("testA.dsA1"))); + final InitialPreviewResponse previewResponse = getPreview(getDataset(new DatasetPath("testA.dsA1"))); + waitForJobComplete(previewResponse.getJobId().getId()); String jobId = "1f3f8dad-f25e-8cbe-e952-1587f1647a00"; String sql = "select * from \" testA.dsA1\""; UUID id = UUID.fromString(jobId); diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsServiceUtil.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsServiceUtil.java index cc3220182b..1dcbc52087 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsServiceUtil.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestJobsServiceUtil.java @@ -23,8 +23,11 @@ import java.util.ArrayList; import java.util.Collections; +import org.apache.calcite.runtime.CalciteContextException; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.validate.SqlValidatorException; +import org.apache.calcite.tools.ValidationException; import org.junit.Test; import com.dremio.common.exceptions.UserException; @@ -77,6 +80,41 @@ public void convertExceptionToFailureInfo1() { JobFailureInfo.Error error = jobFailureInfo.getErrorsList().get(0); 
assertEquals(errString, error.getMessage()); } + @Test + public void testValidationErrorMessageForMissingContext() { + String genericError = "unable to validate sql node"; + + String queryString = "SELECT * FROM trips"; + // index location of "trips" inside the queryString + int startLineNumber = 1; + int startColumnNumber = 15; + int endLineNumber = 1; + int endColumnNumber = 19; + String contextExceptionLocation = String.format("From line %s, column %s to line %s, column %s", + startLineNumber, startColumnNumber, endLineNumber, endColumnNumber); + + String actualError = "Object 'trips' not found"; + + // create the exception + SqlValidatorException sqlValidatorException = new SqlValidatorException(actualError, null); + CalciteContextException calciteContextException = new CalciteContextException(contextExceptionLocation, + sqlValidatorException, startLineNumber, startColumnNumber, endLineNumber, endColumnNumber); + ValidationException validationException = new ValidationException(genericError, calciteContextException); + + // build the expected exception and convert it to failure info + UserException userException = SqlExceptionHelper.validationError(queryString, validationException) + .buildSilently(); + String verboseError = userException.getVerboseMessage(false); + JobFailureInfo jobFailureInfo = JobsServiceUtil.toFailureInfo(verboseError); + assertEquals(JobFailureInfo.Type.VALIDATION, jobFailureInfo.getType()); + JobFailureInfo.Error error = jobFailureInfo.getErrorsList().get(0); + + assertEquals(actualError + ". Please check that it exists in the selected context.", error.getMessage()); + assertEquals(startLineNumber, (int) error.getStartLine()); + assertEquals(startColumnNumber, (int) error.getStartColumn()); + assertEquals(endLineNumber, (int) error.getEndLine()); + assertEquals(endColumnNumber, (int) error.getEndColumn()); + } @Test public void testToSubmitJobRequest() { diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestLocalJobsServiceStartup.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestLocalJobsServiceStartup.java index 73ac40c48f..8a36327977 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestLocalJobsServiceStartup.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestLocalJobsServiceStartup.java @@ -37,6 +37,7 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import com.dremio.common.logging.StructuredLogger; import com.dremio.datastore.api.LegacyIndexedStore; import com.dremio.datastore.api.LegacyIndexedStore.LegacyFindByCondition; import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; @@ -56,6 +57,7 @@ */ public class TestLocalJobsServiceStartup { private LegacyIndexedStore jobStore; + private StructuredLogger jobResultLogger; private Collection availableCoords; private static final String issuingAddress = "issuingAddress"; private static final com.dremio.exec.proto.beans.NodeEndpoint nodeEndpoint = @@ -86,6 +88,8 @@ public class TestLocalJobsServiceStartup { public void beforeEach() { jobStore = (LegacyIndexedStore) mock(LegacyIndexedStore.class); + jobResultLogger = (StructuredLogger) mock(StructuredLogger.class); + when(jobStore.find(any(LegacyFindByCondition.class))) .thenReturn(Sets.difference(EnumSet.allOf(JobState.class), finalJobStates) .stream() @@ -111,7 +115,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { public void cleanupJobStateOnStartUp() throws Exception { availableCoords = issuerRestart(); - 
LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords); + LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords, jobResultLogger); assertTrue("all job states must be final, or handled by the above method", allJobsCleanedUp(returns)); @@ -124,7 +128,7 @@ public void cleanupJobsWithIssuingCoordPresentOnStartup() throws Exception { // The issuing coordinator is present, so no jobs are cleaned up on startup availableCoords = issuerPresent(); - LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords); + LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords, jobResultLogger); assertTrue("All job states are final and not issued by the current restarted coordinator", noJobsCleanedUp(returns)); @@ -136,7 +140,7 @@ public void cleanupJobsIssuingCoordRestartOnStartup() throws Exception { // so its jobs are cleaned up availableCoords = issuerRestart(); - LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords); + LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords, jobResultLogger); assertTrue("All job states are final and issued by the current restarted coordinator, " + "and must have failed", allJobsCleanedUp(returns)); @@ -149,7 +153,7 @@ public void cleanupJobsWithIssuingCoordPresentRecurrent() throws Exception { // The issuing coordinator is present during the cleanup task, so no jobs are cleaned up availableCoords = issuerPresent(); - LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords); + LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords, jobResultLogger); assertTrue("All job states must be final, and jobs issued by a present coordinator, ", noJobsCleanedUp(returns)); @@ -160,7 +164,7 @@ public void cleanupJobsWithIssuingCoordAbsentRecurrent() throws Exception { // The issuing coordinator is absent during the cleanup task, so all jobs are cleaned up availableCoords = issuerAbsent(); - LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords); + LocalJobsService.setAbandonedJobsToFailedState(jobStore, availableCoords, jobResultLogger); assertTrue("All job states must be final, and jobs issued by an absent coordinator, ", allJobsCleanedUp(returns)); diff --git a/dac/backend/src/test/java/com/dremio/service/jobs/TestReflectionJob.java b/dac/backend/src/test/java/com/dremio/service/jobs/TestReflectionJob.java index a3a52ce4c2..ed5d75777c 100644 --- a/dac/backend/src/test/java/com/dremio/service/jobs/TestReflectionJob.java +++ b/dac/backend/src/test/java/com/dremio/service/jobs/TestReflectionJob.java @@ -265,7 +265,7 @@ public void testSearchJob() throws Exception { ReflectionId reflectionId = reflectionEntry.getId(); Iterable jobSummaries = searchJobs(reflectionId.getId(), DEFAULT_USERNAME); - jobSummaries.forEach(jobSummary -> assertTrue(jobSummary.getDescription().contains(reflectionId.getId()))); + jobSummaries.forEach(jobSummary -> assertTrue(jobSummary.getSql().contains(reflectionId.getId()))); assertEquals(2, Iterables.size(jobSummaries)); } diff --git a/services/functions/src/main/resources/function_specs/undocumented/CARDINALITY.yaml b/dac/backend/src/test/resources-nessie/META-INF/nessie-compatibility.properties similarity index 85% rename from services/functions/src/main/resources/function_specs/undocumented/CARDINALITY.yaml rename to dac/backend/src/test/resources-nessie/META-INF/nessie-compatibility.properties index 733905bf43..d3c660255d 100644 --- 
a/services/functions/src/main/resources/function_specs/undocumented/CARDINALITY.yaml +++ b/dac/backend/src/test/resources-nessie/META-INF/nessie-compatibility.properties @@ -14,8 +14,4 @@ # limitations under the License. # ---- -name: "CARDINALITY" -signatures: [] -functionCategories: [] -description: "" +nessie.versions=@nessie.version@ diff --git a/dac/backend/src/test/resources/copyinto/file1.csv b/dac/backend/src/test/resources/copyinto/file1.csv new file mode 100644 index 0000000000..f6031ce66e --- /dev/null +++ b/dac/backend/src/test/resources/copyinto/file1.csv @@ -0,0 +1,4 @@ +id, name, distance +1,str1,34.45 +2,str2,5.41 +3,str3,67.42 diff --git a/dac/backend/src/test/resources/copyinto/file1.json b/dac/backend/src/test/resources/copyinto/file1.json new file mode 100644 index 0000000000..3b48a359a5 --- /dev/null +++ b/dac/backend/src/test/resources/copyinto/file1.json @@ -0,0 +1,17 @@ +[ + { + "id": 1, + "name": "str1", + "distance": 1.23 + }, + { + "id": 2, + "name": "str2", + "distance": 13.27 + }, + { + "id": 3, + "name": "str3", + "distance": 56.42 + } +] diff --git a/dac/common/pom.xml b/dac/common/pom.xml index ecf77f4c72..4dd436ab06 100644 --- a/dac/common/pom.xml +++ b/dac/common/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-dac-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-dac-common diff --git a/dac/daemon/pom.xml b/dac/daemon/pom.xml index ab477c584f..347d586594 100644 --- a/dac/daemon/pom.xml +++ b/dac/daemon/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-dac-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-dac-daemon @@ -282,6 +282,11 @@ dremio-ce-elasticsearch-plugin ${project.version} + + com.dremio.community.contrib + db2-shade + ${project.version} + com.dremio.community.plugins dremio-ce-parquet-plugin @@ -387,6 +392,11 @@ dremio-gcs-plugin ${project.version} + + com.dremio.plugins + dremio-dataplane-plugin + ${project.version} + diff --git a/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminCommandRunner.java b/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminCommandRunner.java index 6a10235993..808624f1bd 100644 --- a/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminCommandRunner.java +++ b/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminCommandRunner.java @@ -26,6 +26,8 @@ import com.dremio.common.scanner.ClassPathScanner; import com.dremio.common.scanner.persistence.ScanResult; import com.dremio.dac.server.DACConfig; +import com.dremio.hadoop.security.alias.DremioCredentialProviderFactory; +import com.dremio.services.credentials.CredentialsService; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; @@ -35,7 +37,8 @@ public final class AdminCommandRunner { public static void main(String[] args) throws Exception { - final SabotConfig sabotConfig = DACConfig.newConfig().getConfig().getSabotConfig(); + final DACConfig dacConfig = DACConfig.newConfig(); + final SabotConfig sabotConfig = dacConfig.getConfig().getSabotConfig(); final ScanResult classpathScan = ClassPathScanner.fromPrescan(sabotConfig); final List> adminCommands = classpathScan.getAnnotatedClasses(AdminCommand.class); @@ -65,7 +68,10 @@ public static void main(String[] args) throws Exception { System.exit(2); } - try { + try (CredentialsService credentialsService = CredentialsService.newInstance(dacConfig.getConfig(), classpathScan)) + { + credentialsService.start(); + DremioCredentialProviderFactory.configure(() -> credentialsService); runCommand(commandName, command, 
Arrays.copyOfRange(args, 1, args.length)); } catch (Exception e) { AdminLogger.log(String.format("Failed to run '%s' command: %s", commandName, e.getMessage())); diff --git a/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminLogger.java b/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminLogger.java index 822cedd152..e2f3b8527e 100644 --- a/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminLogger.java +++ b/dac/daemon/src/main/java/com/dremio/dac/cmd/AdminLogger.java @@ -21,7 +21,7 @@ public final class AdminLogger { - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger("admin"); + public static final org.slf4j.Logger INSTANCE = org.slf4j.LoggerFactory.getLogger("admin"); private AdminLogger(){} @@ -33,7 +33,7 @@ private AdminLogger(){} * @param args Var args that are embedded in the msg for logging */ public static void log(String msg, Object... args) { - logger.info(msg, args); + INSTANCE.info(msg, args); } /** @@ -41,6 +41,6 @@ public static void log(String msg, Object... args) { * @param t Throwable to log */ public static void log(String msg, Throwable t) { - logger.info(msg, t); + INSTANCE.info(msg, t); } } diff --git a/dac/daemon/src/main/java/com/dremio/dac/cmd/Backup.java b/dac/daemon/src/main/java/com/dremio/dac/cmd/Backup.java index a83dadbd42..e73e7058fc 100644 --- a/dac/daemon/src/main/java/com/dremio/dac/cmd/Backup.java +++ b/dac/daemon/src/main/java/com/dremio/dac/cmd/Backup.java @@ -95,6 +95,10 @@ static final class BackupManagerOptions { hidden = true) private boolean sameProcess = false; + @Parameter(names = {"-c", "--compression"}, description = "choose backup compression method. Available options : " + + "snappy,lz4.", hidden = true) + private String compression = ""; + } public static BackupStats createBackup( @@ -105,11 +109,12 @@ public static BackupStats createBackup( boolean checkSSLCertificates, URI uri, boolean binary, - boolean includeProfiles + boolean includeProfiles, + String compression ) throws IOException, GeneralSecurityException { final WebClient client = new WebClient(dacConfig, credentialsServiceProvider, userName, password, checkSSLCertificates); - BackupOptions options = new BackupOptions(uri.toString(), binary, includeProfiles); + BackupOptions options = new BackupOptions(uri.toString(), binary, includeProfiles, compression); return client.buildPost(BackupStats.class, "/backup", options); } @@ -125,7 +130,7 @@ static CheckpointInfo createCheckpoint( ) throws IOException, GeneralSecurityException { final WebClient client = new WebClient(dacConfig, credentialsServiceProvider, userName, password, checkSSLCertificates); - BackupOptions options = new BackupOptions(uri.toString(), binary, includeProfiles); + BackupOptions options = new BackupOptions(uri.toString(), binary, includeProfiles, ""); return client.buildPost(CheckpointInfo.class, "/backup/checkpoint", options); } @@ -234,7 +239,7 @@ public static BackupResult doMain(String[] args, DACConfig dacConfig) { if (!options.sameProcess) { LOGGER.info("Running backup using REST API"); BackupStats backupStats = createBackup(dacConfig, () -> credService, options.userName, options.password, - checkSSLCertificates, target, !options.json, options.profiles); + checkSSLCertificates, target, !options.json, options.profiles, options.compression); AdminLogger.log("Backup created at {}, dremio tables {}, uploaded files {}", backupStats.getBackupPath(), backupStats.getTables(), backupStats.getFiles()); result.setBackupStats(backupStats); @@ -267,7 +272,7 @@ private static BackupStats 
backupUsingCliProcess(DACConfig dacConfig, BackupMana final FileSystem fs = HadoopFileSystem.get(backupDestinationDirPath, new Configuration()); final BackupOptions backupOptions = new BackupOptions(checkpoint.getBackupDestinationDir(), !options.json, - options.profiles); + options.profiles, options.compression); final Optional optionalKvStoreProvider = CmdUtils.getReadOnlyKVStoreProvider(dacConfig.getConfig().withValue(DremioConfig.DB_PATH_STRING, diff --git a/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/DeleteSnowflakeCommunitySource.java b/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/DeleteSnowflakeCommunitySource.java new file mode 100644 index 0000000000..dadd5d0fc7 --- /dev/null +++ b/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/DeleteSnowflakeCommunitySource.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.dac.cmd.upgrade; + +import static java.util.Collections.emptyList; + +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceServiceImpl; + +/** + * The pre-23.1 Dremio users had to use Snowflake Community connector. Dremio 23.1 started to + * include 1st-party Snowflake connector provided by Dremio. This connector has different ARP + * definitions, so when a Snowflake source is created with a community connector — it can't be + * deserialized with protobuf and fails with ProtobufException, and Dremio fails to start. For + * more details see DX-59245. + * DX-60529 is to improve the UX when the + * + * upgrade instruction was not followed, and the removal of the source is done + * programmatically. + */ +public class DeleteSnowflakeCommunitySource extends UpgradeTask { + public DeleteSnowflakeCommunitySource() { + super("Clean up after Snowflake community edition plugin", emptyList()); + } + + static final String taskUUID = "dcb07715-ebd9-432d-b948-601679b9577b"; + private static final String ANSI_RED_BACKGROUND = "\u001B[41m"; + private static final String ANSI_WHITE_TEXT = "\u001B[37m"; + private static final String ANSI_RESET = "\u001B[0m"; + private static final String ACTION_REQUIRED_MESSAGE = ANSI_RED_BACKGROUND + ANSI_WHITE_TEXT + + "[ACTION REQUIRED]\n\n" + + "It appears that you had a Snowflake source created by the Snowflake Community connector. " + + "The Community connector is not compatible with the Dremio Snowflake connector.\n" + + "The source created by the Community connector was removed.\n\n" + + "Please re-add your Snowflake source manually." + + ANSI_RESET + "\n\n"; + + @Override + public String getTaskUUID() { return taskUUID; } + + @Override + public void upgrade(UpgradeContext context) throws Exception { + NamespaceServiceImpl namespaceService = + new NamespaceServiceImpl(context.getLegacyKVStoreProvider()); + + // Will detect incompatible Snowflake sources created with the community connector by catching + // ProtobufException when attempting to read source configuration. 
Then we delete such + // sources in the catch block. + namespaceService.getSources().stream() + .filter(s -> "SNOWFLAKE".equals(s.getType())) + .forEach(snowflakeConfig -> { + //noinspection CatchMayIgnoreException + try { + context.getConnectionReader().getConnectionConf(snowflakeConfig); + } catch (Exception e) { + if (e.getCause() instanceof io.protostuff.ProtobufException) { + System.out.print(ACTION_REQUIRED_MESSAGE); + try { + namespaceService.deleteSource(snowflakeConfig.getKey(), snowflakeConfig.getTag()); + } catch (NamespaceException ex) { + throw new RuntimeException(ex); + } + } + } + }); + } +} diff --git a/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/UpdateExternalReflectionHash.java b/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/UpdateExternalReflectionHash.java index 11d1cfdff4..76bbd98ebd 100644 --- a/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/UpdateExternalReflectionHash.java +++ b/dac/daemon/src/main/java/com/dremio/dac/cmd/upgrade/UpdateExternalReflectionHash.java @@ -17,14 +17,17 @@ import java.util.stream.StreamSupport; +import javax.inject.Provider; + import com.dremio.common.Version; import com.dremio.dac.cmd.AdminLogger; +import com.dremio.exec.store.CatalogService; import com.dremio.service.DirectProvider; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.NamespaceServiceImpl; import com.dremio.service.namespace.dataset.proto.DatasetConfig; -import com.dremio.service.reflection.ReflectionUtils; +import com.dremio.service.reflection.DatasetHashUtils; import com.dremio.service.reflection.proto.ExternalReflection; import com.dremio.service.reflection.store.ExternalReflectionStore; import com.google.common.collect.ImmutableList; @@ -39,6 +42,7 @@ public class UpdateExternalReflectionHash extends UpgradeTask implements LegacyU static final String taskUUID = "79312f25-49d6-40e7-8096-7e132e1b64c4"; private NamespaceService namespace; + private Provider catalogServiceProvider; private ExternalReflectionStore store; public UpdateExternalReflectionHash() { @@ -90,7 +94,7 @@ private Integer computeDatasetHash(String datasetId) { } try { - return ReflectionUtils.computeDatasetHash(dataset, namespace, false); + return DatasetHashUtils.computeDatasetHash(dataset, catalogServiceProvider.get(), false); } catch (NamespaceException e) { return null; } diff --git a/dac/daemon/src/main/java/com/dremio/dac/daemon/DremioDaemon.java b/dac/daemon/src/main/java/com/dremio/dac/daemon/DremioDaemon.java index cc82b899ea..79a4c3d0c4 100644 --- a/dac/daemon/src/main/java/com/dremio/dac/daemon/DremioDaemon.java +++ b/dac/daemon/src/main/java/com/dremio/dac/daemon/DremioDaemon.java @@ -89,6 +89,7 @@ protected void ensureUpgradeSupported(Version storeVersion) { public static void main(String[] args) throws Exception { try (TimedBlock b = Timer.time("main")) { + logger.info("Starting dremio daemon"); final DACConfig config = DACConfig.newConfig(); final SabotConfig sabotConfig = config.getConfig().getSabotConfig(); final ScanResult classPathScan = ClassPathScanner.fromPrescan(sabotConfig); diff --git a/dac/daemon/src/test/java/com/dremio/dac/cmd/ITBackupManager.java b/dac/daemon/src/test/java/com/dremio/dac/cmd/ITBackupManager.java index 34a614ed7a..8119655ccd 100644 --- a/dac/daemon/src/test/java/com/dremio/dac/cmd/ITBackupManager.java +++ b/dac/daemon/src/test/java/com/dremio/dac/cmd/ITBackupManager.java @@ -16,10 +16,10 @@ package com.dremio.dac.cmd; import static 
java.nio.charset.StandardCharsets.UTF_8; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; @@ -32,11 +32,11 @@ import java.util.Optional; import java.util.stream.Stream; +import javax.annotation.Nonnull; + import org.apache.arrow.memory.BufferAllocator; -import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FilenameUtils; import org.apache.hadoop.conf.Configuration; -import org.jetbrains.annotations.NotNull; import org.junit.Assume; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -157,8 +157,7 @@ public static Iterable data() { return Arrays.asList("json", "binary"); } - @Test - public void testBackup() throws Exception { + private void testBackup(String compression) throws Exception { boolean binary = "binary".equals(mode); int httpPort = getCurrentDremioDaemon().getWebServer().getPort(); DACConfig dacConfig = ITBackupManager.dacConfig.httpPort(httpPort); @@ -170,7 +169,7 @@ public void testBackup() throws Exception { // take backup 1 CheckPoint cp1 = checkPoint(); Path backupDir1 = Path.of(BackupRestoreUtil.createBackup( - fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), binary, false), + fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), binary, false, compression), localKVStoreProvider, homeFileStore, null).getBackupPath()); // add dataset, delete dataset, upload file @@ -191,7 +190,7 @@ fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), bina // take backup 2 using rest api final URI backupPath = BaseTestServer.folder1.newFolder().getAbsoluteFile().toURI(); Path backupDir2 = Path.of( - Backup.createBackup(dacConfig, () -> null, DEFAULT_USERNAME, DEFAULT_PASSWORD, false, backupPath, binary, false) + Backup.createBackup(dacConfig, () -> null, DEFAULT_USERNAME, DEFAULT_PASSWORD, false, backupPath, binary, false, compression) .getBackupPath()); // destroy everything @@ -257,6 +256,31 @@ fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), bina } } + /** + * Test backup and restore for all the compression methods available. + * + * @throws Exception + */ + @Test + public void testBackup() throws Exception { + testBackup(""); + } + + @Test + public void testSnappyCompressionBackup() throws Exception { + testBackup("snappy"); + } + + @Test + public void testLZ4CompressionBackup() throws Exception { + testBackup("lz4"); + } + + @Test + public void testNullCompressionBackup() throws Exception { + testBackup(null); + } + /** * Test backup and restore with large sql (exceeding SimpleDocumentWriter.MAX_STRING_LENGTH) * containing only ascii characters. 
@@ -341,7 +365,7 @@ private void backupRestoreTestHelper(String dsName1, String dsName2, String sql) final String tempPath = TEMP_FOLDER.getRoot().getAbsolutePath(); Path backupDir1 = Path.of(BackupRestoreUtil.createBackup( - fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), binary, false), + fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), binary, false, ""), localKVStoreProvider, homeFileStore, null).getBackupPath()); // Do some things @@ -417,7 +441,7 @@ fs, new BackupOptions(BaseTestServer.folder1.newFolder().getAbsolutePath(), bina startDaemon(dacConfig); } - @NotNull + @Nonnull private static Optional findLastModifiedBackup(String workingDir) throws IOException { try (Stream stream = java.nio.file.Files.list(Paths.get(workingDir))) { return stream @@ -465,19 +489,18 @@ private static final class CheckPoint { private List spaces; private List homes; private List datasets; - private List users; + private List users; private List virtualDatasetVersions; private List jobs; private void checkEquals(CheckPoint o) { - assertTrue(CollectionUtils.isEqualCollection(sources, o.sources)); - assertTrue(CollectionUtils.isEqualCollection(spaces, o.spaces)); - assertTrue(CollectionUtils.isEqualCollection(homes, o.homes)); - assertTrue(CollectionUtils.isEqualCollection(datasets, o.datasets)); - assertTrue(CollectionUtils.isEqualCollection(users, o.users)); - assertTrue(CollectionUtils.isEqualCollection(virtualDatasetVersions, o.virtualDatasetVersions)); - assertTrue(CollectionUtils.isEqualCollection(jobs, o.jobs)); + assertThat(sources).containsExactlyInAnyOrderElementsOf(o.sources); + assertThat(spaces).containsExactlyInAnyOrderElementsOf(o.spaces); + assertThat(homes).containsExactlyInAnyOrderElementsOf(o.homes); + assertThat(datasets).containsExactlyInAnyOrderElementsOf(o.datasets); + assertThat(users).containsExactlyInAnyOrderElementsOf(o.users); + assertThat(virtualDatasetVersions).containsExactlyInAnyOrderElementsOf(o.virtualDatasetVersions); + assertThat(jobs).containsExactlyInAnyOrderElementsOf(o.jobs); } } - } diff --git a/dac/daemon/src/test/java/com/dremio/dac/cmd/upgrade/TestUpgrade.java b/dac/daemon/src/test/java/com/dremio/dac/cmd/upgrade/TestUpgrade.java index 3ab09b571f..c40481c32f 100644 --- a/dac/daemon/src/test/java/com/dremio/dac/cmd/upgrade/TestUpgrade.java +++ b/dac/daemon/src/test/java/com/dremio/dac/cmd/upgrade/TestUpgrade.java @@ -410,7 +410,7 @@ public void testTasksOrder() { // testNoDuplicateUUID() test - it will generate one // tasks will not include TestUpgradeFailORSuccessTask and TestUpgradeTask // because they don't have default ctor - assertThat(tasks).hasSize(11); + assertThat(tasks).hasSize(12); assertThat(tasks.get(0)).isInstanceOf(ReIndexAllStores.class); assertThat(tasks.get(1)).isInstanceOf(UpdateDatasetSplitIdTask.class); assertThat(tasks.get(2)).isInstanceOf(DeleteHistoryOfRenamedDatasets.class); @@ -422,6 +422,7 @@ public void testTasksOrder() { assertThat(tasks.get(8)).isInstanceOf(SetExportType.class); assertThat(tasks.get(9)).isInstanceOf(TopPriorityTask.class); assertThat(tasks.get(10)).isInstanceOf(LowPriorityTask.class); + assertThat(tasks.get(11)).isInstanceOf(DeleteSnowflakeCommunitySource.class); } @Test @@ -501,7 +502,7 @@ public void testDependenciesResolver() throws Exception { // testNoDuplicateUUID() test - it will generate one // tasks will not include TestUpgradeFailORSuccessTask and TestUpgradeTask // because they don't have default ctor - assertThat(resolvedTasks).hasSize(11); + 
assertThat(resolvedTasks).hasSize(12); assertThat(resolvedTasks.get(0)).isInstanceOf(ReIndexAllStores.class); assertThat(resolvedTasks.get(1)).isInstanceOf(UpdateDatasetSplitIdTask.class); assertThat(resolvedTasks.get(2)).isInstanceOf(DeleteHistoryOfRenamedDatasets.class); @@ -513,6 +514,7 @@ public void testDependenciesResolver() throws Exception { assertThat(resolvedTasks.get(8)).isInstanceOf(SetExportType.class); assertThat(resolvedTasks.get(9)).isInstanceOf(TopPriorityTask.class); assertThat(resolvedTasks.get(10)).isInstanceOf(LowPriorityTask.class); + assertThat(resolvedTasks.get(11)).isInstanceOf(DeleteSnowflakeCommunitySource.class); } /** diff --git a/dac/pom.xml b/dac/pom.xml index 5342e36304..6975b49d37 100644 --- a/dac/pom.xml +++ b/dac/pom.xml @@ -22,7 +22,7 @@ <parent> <groupId>com.dremio</groupId> <artifactId>dremio-parent</artifactId> - <version>24.0.0-202302100528110223-3a169b7c</version> + <version>24.1.0-202306130653310132-d30779f6</version> </parent> <artifactId>dremio-dac-parent</artifactId> diff --git a/dac/ui-common/lang/common/en-US.yaml b/dac/ui-common/lang/common/en-US.yaml index 53793dfde3..99ce13c81f 100644 --- a/dac/ui-common/lang/common/en-US.yaml +++ b/dac/ui-common/lang/common/en-US.yaml @@ -19,6 +19,13 @@ Common.Actions.Accept: Accept Common.Actions.Cancel: Cancel Common.Actions.OK: OK +Common.Actions.Yes: "Yes" + +# Route leave warning dialog messages +Common.RouteLeaveDialog.Title: Unsaved Changes +Common.RouteLeaveDialog.Message: Are you sure you want to leave without saving changes? +Common.RouteLeaveDialog.Actions.Leave: Leave +Common.RouteLeaveDialog.Actions.Stay: Stay Common.Columns.Count: "{numColumns, plural, =0 {No columns} =1 {1 column} other {# columns}}" @@ -26,7 +33,18 @@ Common.Columns.Count: "{numColumns, plural, =0 {No columns} =1 {1 column} other Common.Errors.Persists: If the problem persists, please contact Dremio support. Common.Errors.Unexpected: An unexpected error occurred. Common.Errors.404: The page you are looking for doesn’t exist. +Common.Errors.UnexpectedError.Root: An unexpected error occurred when we tried to load the page. Common.Settings: Settings Common.LoadingMore: Loading more… + +NetworkConnectivity.Offline: Your internet connection appears to be offline. +NetworkConnectivity.Unreachable: There was a problem communicating with Dremio. Please verify that you’re on a network that can reach {server_origin}. +NetworkConnectivity.WebsocketFailure: There was a problem establishing a WebSocket connection with Dremio. + +# Privileges table messages +Admin.Privileges.OwnerChangeTitle: Make this user/role the owner? +Admin.Privileges.OwnerChangeContent: The new owner can perform all actions on this object and any objects within it. Actions include modifying object settings, granting/revoking user and role access, and deleting the object. You might also lose the ability to change privileges settings. +Admin.Privileges.RemoveGranteeTitle: Remove user/role? +Admin.Privileges.RemoveGranteeContent: “{name}” will not be able to perform any actions on this object. Are you sure? diff --git a/dac/ui-common/lang/sonar/en-US.yaml b/dac/ui-common/lang/sonar/en-US.yaml index dd208b3847..78bcab342c 100644 --- a/dac/ui-common/lang/sonar/en-US.yaml +++ b/dac/ui-common/lang/sonar/en-US.yaml @@ -95,3 +95,4 @@ Sonar.Reflection.Column.AcceleratedCount.Label: "Accelerated Count" Sonar.Reflection.Column.AcceleratedCount.Hint: "Number of user jobs that were accelerated by the reflection."
Sonar.Reflection.Column.RefreshHistory.Label: "Refresh Job History" Sonar.Reflection.Column.RefreshHistory.Link: "History" +Sonar.Reflection.TableError: Something went wrong when we tried to display the list of Reflections. diff --git a/dac/ui-common/mocks/reflections/getReflectionSummary.ts b/dac/ui-common/mocks/reflections/getReflectionSummary.ts new file mode 100644 index 0000000000..e87cc34f90 --- /dev/null +++ b/dac/ui-common/mocks/reflections/getReflectionSummary.ts @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { rest } from "msw"; +import { + ReflectionSummaries, + ReflectionSummary, +} from "../../src/sonar/reflections/ReflectionSummary.type"; +import { sampleReflections } from "./sampleReflections"; + +const originalMockData = sampleReflections; +let mockData = originalMockData; + +export const setMockData = (newMockData: ReflectionSummary[]) => { + mockData = newMockData; +}; + +export const restoreMockData = () => { + mockData = originalMockData; +}; + +export const getReflectionSummary = rest.get( + "/v0/projects/:projectId/reflection-summary", + (_req, res, ctx) => { + const response: ReflectionSummaries = { + canAlterReflections: true, + data: mockData, + }; + return res(ctx.delay(150), ctx.json(response)); + } +); diff --git a/dac/ui-common/mocks/reflections/index.ts b/dac/ui-common/mocks/reflections/index.ts new file mode 100644 index 0000000000..f44b1bc7ce --- /dev/null +++ b/dac/ui-common/mocks/reflections/index.ts @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { getReflectionSummary } from "./getReflectionSummary"; + +export { getReflectionSummary }; diff --git a/dac/ui-common/mocks/reflections/sampleReflections.ts b/dac/ui-common/mocks/reflections/sampleReflections.ts new file mode 100644 index 0000000000..ade046902d --- /dev/null +++ b/dac/ui-common/mocks/reflections/sampleReflections.ts @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { ReflectionSummary } from "../../src/sonar/reflections/ReflectionSummary.type"; + +export const scheduledReflection: ReflectionSummary = { + createdAt: new Date("2023-03-22T16:34:52.637Z"), + updatedAt: new Date("2023-03-22T16:34:52.637Z"), + id: "c182cd16-5e14-4d55-bad8-703f05bcef8a", + reflectionType: "AGGREGATION" as any, + name: "Aggregation Reflection", + currentSizeBytes: 0, + outputRecords: -1, + totalSizeBytes: 0, + datasetId: + '{"tableKey":["performance","star","time_dim_v"],"contentId":"0194e844-7c2c-457d-9ef0-6d4abde9399d","versionContext":{"type":"BRANCH","value":"main"}}', + datasetType: "VIRTUAL_DATASET" as any, + datasetPath: ["performance", "star", "time_dim_v"], + status: { + configStatus: "OK" as any, + refreshStatus: "SCHEDULED" as any, + availabilityStatus: "NONE" as any, + combinedStatus: "CANNOT_ACCELERATE_SCHEDULED" as any, + refreshMethod: "NONE" as any, + failureCount: 0, + lastDataFetchAt: null, + expiresAt: null, + lastRefreshDurationMillis: -1, + }, + consideredCount: 0, + matchedCount: 0, + chosenCount: 0, + chosenJobsLink: + "/jobs?filters=%7B%22chr%22%3A%5B%22a8fecb50f307-8dab-55d4-41e5-61dc281c%22%5D%2C%22qt%22%3A%5B%22UI%22%2C%22EXTERNAL%22%2C%22ACCELERATION%22%5D%7D", + isArrowCachingEnabled: false, + isCanView: true, + isCanAlter: true, + isEnabled: true, +}; + +export const runningReflection: ReflectionSummary = { + createdAt: new Date("2023-03-22T16:34:52.637Z"), + updatedAt: new Date("2023-03-22T16:34:52.637Z"), + id: "6137df2e-85ae-4b2e-8735-727f59ac96c6", + reflectionType: "AGGREGATION" as any, + name: "Aggregation Reflection", + currentSizeBytes: 0, + outputRecords: -1, + totalSizeBytes: 0, + datasetId: + '{"tableKey":["performance","star","time_dim_v"],"contentId":"0194e844-7c2c-457d-9ef0-6d4abde9399d","versionContext":{"type":"BRANCH","value":"main"}}', + datasetType: "VIRTUAL_DATASET" as any, + datasetPath: ["performance", "star", "time_dim_v"], + status: { + configStatus: "OK" as any, + refreshStatus: "RUNNING" as any, + availabilityStatus: "NONE" as any, + combinedStatus: "REFRESHING" as any, + refreshMethod: "NONE" as any, + failureCount: 0, + lastDataFetchAt: null, + expiresAt: null, + lastRefreshDurationMillis: -1, + }, + consideredCount: 0, + matchedCount: 0, + chosenCount: 0, + chosenJobsLink: + "/jobs?filters=%7B%22chr%22%3A%5B%22a8fecb50f307-8dab-55d4-41e5-61dc281c%22%5D%2C%22qt%22%3A%5B%22UI%22%2C%22EXTERNAL%22%2C%22ACCELERATION%22%5D%7D", + isCanView: true, + isCanAlter: true, + isArrowCachingEnabled: false, + isEnabled: true, +}; + +export const sampleReflections: ReflectionSummary[] = [ + scheduledReflection, + runningReflection, +]; diff --git a/dac/ui-common/package.json b/dac/ui-common/package.json index 4be642aadd..f7fe0ee2da 100644 --- a/dac/ui-common/package.json +++ b/dac/ui-common/package.json @@ -8,15 +8,15 @@ "dist-esm": "swc ./src --config-file config/.swcrc-esm --out-dir dist-esm && tsc --emitDeclarationOnly --declaration --declarationMap false --declarationDir dist-esm", "dist-lang": "ts-node ./scripts/build-lang.ts", "lint": "eslint src", - "postinstall": "stat ./dist-antlr >> /dev/null 
2>&1 || pnpm run dist-antlr", - "prepack": "npm-run-all dist-cjs dist-esm dist-lang", + "prepack": "npm-run-all dist-antlr dist-cjs dist-esm dist-lang", "test": "jest" }, "files": [ "dist-antlr", "dist-cjs", "dist-esm", - "dist-lang" + "dist-lang", + "mocks" ], "exports": { "./components/*": { @@ -32,6 +32,7 @@ "require": "./dist-cjs/errors/*.js" }, "./lang/*": "./dist-lang/*", + "./mocks/*": "./mocks/*", "./paths/*": { "import": "./dist-esm/paths/*", "require": "./dist-cjs/paths/*" @@ -47,16 +48,24 @@ "./utilities/*": { "import": "./dist-esm/utilities/*", "require": "./dist-cjs/utilities/*" + }, + "./arctic/*": { + "import": "./dist-esm/arctic/*", + "require": "./dist-cjs/arctic/*" } }, "dependencies": { + "@types/lodash": "^4.14.191", "@types/react": "^18", + "antlr4-c3": "^2.2.3", "antlr4ts": "^0.5.0-alpha.4", "define-route": "^0.3.1", "dremio-ui-lib": "link:../ui-lib", "intl-messageformat": "^10.2.1", - "leantable": "^0.4.10", + "leantable": "^0.4.11", + "lodash": "^4.17.21", "moize": "^6.1.3", + "msw": "^1.2.0", "react-smart-promise": "^1.0.4" }, "devDependencies": { @@ -73,13 +82,25 @@ "@types/js-yaml": "^4.0.5", "@types/node": "^18.8.0", "@types/testing-library__jest-dom": "^5.14.5", + "@typescript-eslint/eslint-plugin": "^5", + "@typescript-eslint/parser": "^5", "antlr4ts-cli": "^0.5.0-alpha.4", "eslint": "^8.24.0", "eslint-config-dremio": "link:../ui-tools/eslint-config-dremio", + "eslint-plugin-jest": "^26", + "eslint-plugin-jest-dom": "^4", + "eslint-plugin-jsx-a11y": "^6", + "eslint-plugin-lit": "^1", + "eslint-plugin-mocha": "^10", + "eslint-plugin-promise": "^6", + "eslint-plugin-react": "^7", + "eslint-plugin-react-hooks": "^4", + "eslint-plugin-testing-library": "^5", "glob": "^8.0.3", "jest": "^29.1.2", "jest-environment-jsdom": "^29.1.2", "jest-junit": "^14.0.1", + "jest-silent-reporter": "^0.5.0", "js-yaml": "^4.1.0", "jsdom": "^20.0.0", "npm-run-all": "^4.1.5", @@ -103,5 +124,5 @@ "dremio/react-testing-library" ] }, - "packageManager": "pnpm@7.2.1" + "packageManager": "pnpm@8.1.0" } diff --git a/dac/ui-common/pnpm-lock.yaml b/dac/ui-common/pnpm-lock.yaml index be776233f0..dd6fd08516 100644 --- a/dac/ui-common/pnpm-lock.yaml +++ b/dac/ui-common/pnpm-lock.yaml @@ -1,90 +1,172 @@ -lockfileVersion: 5.4 - -specifiers: - '@formatjs/icu-messageformat-parser': ^2.1.10 - '@swc/cli': ^0.1.57 - '@swc/core': ^1.3.18 - '@swc/jest': ^0.2.23 - '@testing-library/dom': ^8.18.1 - '@testing-library/jest-dom': ^5.16.5 - '@testing-library/react': ^13.4.0 - '@testing-library/user-event': ^14.4.3 - '@types/glob': ^8.0.0 - '@types/jest': ^29.1.1 - '@types/js-yaml': ^4.0.5 - '@types/node': ^18.8.0 - '@types/react': ^18 - '@types/testing-library__jest-dom': ^5.14.5 - antlr4ts: ^0.5.0-alpha.4 - antlr4ts-cli: ^0.5.0-alpha.4 - define-route: ^0.3.1 - dremio-ui-lib: link:../ui-lib - eslint: ^8.24.0 - eslint-config-dremio: link:../ui-tools/eslint-config-dremio - glob: ^8.0.3 - intl-messageformat: ^10.2.1 - jest: ^29.1.2 - jest-environment-jsdom: ^29.1.2 - jest-junit: ^14.0.1 - js-yaml: ^4.1.0 - jsdom: ^20.0.0 - leantable: ^0.4.10 - moize: ^6.1.3 - npm-run-all: ^4.1.5 - prettier: ^2.7.1 - react: ^18.2.0 - react-dom: ^18.2.0 - react-smart-promise: ^1.0.4 - ts-node: ^10.9.1 - typescript: ^4.8.4 +lockfileVersion: '6.0' dependencies: - '@types/react': 18.0.24 - antlr4ts: 0.5.0-alpha.4 - define-route: 0.3.1 - dremio-ui-lib: link:../ui-lib - intl-messageformat: 10.2.1 - leantable: 0.4.10_react@18.2.0 - moize: 6.1.3 - react-smart-promise: 1.0.4_react@18.2.0 + '@types/lodash': + specifier: ^4.14.191 + version: 
4.14.191 + '@types/react': + specifier: ^18 + version: 18.0.24 + antlr4-c3: + specifier: ^2.2.3 + version: 2.2.3 + antlr4ts: + specifier: ^0.5.0-alpha.4 + version: 0.5.0-alpha.4 + define-route: + specifier: ^0.3.1 + version: 0.3.1 + dremio-ui-lib: + specifier: link:../ui-lib + version: link:../ui-lib + intl-messageformat: + specifier: ^10.2.1 + version: 10.2.1 + leantable: + specifier: ^0.4.11 + version: 0.4.11(react@18.2.0) + lodash: + specifier: ^4.17.21 + version: 4.17.21 + moize: + specifier: ^6.1.3 + version: 6.1.3 + msw: + specifier: ^1.2.0 + version: 1.2.0(typescript@4.8.4) + react-smart-promise: + specifier: ^1.0.4 + version: 1.0.4(react@18.2.0) devDependencies: - '@formatjs/icu-messageformat-parser': 2.1.10 - '@swc/cli': 0.1.57_@swc+core@1.3.18 - '@swc/core': 1.3.18 - '@swc/jest': 0.2.23_@swc+core@1.3.18 - '@testing-library/dom': 8.18.1 - '@testing-library/jest-dom': 5.16.5 - '@testing-library/react': 13.4.0_biqbaboplfbrettd7655fr4n2y - '@testing-library/user-event': 14.4.3_znccgeejomvff3jrsk3ljovfpu - '@types/glob': 8.0.0 - '@types/jest': 29.1.1 - '@types/js-yaml': 4.0.5 - '@types/node': 18.8.0 - '@types/testing-library__jest-dom': 5.14.5 - antlr4ts-cli: 0.5.0-alpha.4 - eslint: 8.24.0 - eslint-config-dremio: link:../ui-tools/eslint-config-dremio - glob: 8.0.3 - jest: 29.1.2_wnseany3vswo6p7nhyzogpjzqe - jest-environment-jsdom: 29.1.2 - jest-junit: 14.0.1 - js-yaml: 4.1.0 - jsdom: 20.0.1 - npm-run-all: 4.1.5 - prettier: 2.7.1 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - ts-node: 10.9.1_6ma5wccgcz7ykn2lbqtwbuz2ty - typescript: 4.8.4 + '@formatjs/icu-messageformat-parser': + specifier: ^2.1.10 + version: 2.1.10 + '@swc/cli': + specifier: ^0.1.57 + version: 0.1.57(@swc/core@1.3.18) + '@swc/core': + specifier: ^1.3.18 + version: 1.3.18 + '@swc/jest': + specifier: ^0.2.23 + version: 0.2.23(@swc/core@1.3.18) + '@testing-library/dom': + specifier: ^8.18.1 + version: 8.18.1 + '@testing-library/jest-dom': + specifier: ^5.16.5 + version: 5.16.5 + '@testing-library/react': + specifier: ^13.4.0 + version: 13.4.0(react-dom@18.2.0)(react@18.2.0) + '@testing-library/user-event': + specifier: ^14.4.3 + version: 14.4.3(@testing-library/dom@8.18.1) + '@types/glob': + specifier: ^8.0.0 + version: 8.0.0 + '@types/jest': + specifier: ^29.1.1 + version: 29.1.1 + '@types/js-yaml': + specifier: ^4.0.5 + version: 4.0.5 + '@types/node': + specifier: ^18.8.0 + version: 18.8.0 + '@types/testing-library__jest-dom': + specifier: ^5.14.5 + version: 5.14.5 + '@typescript-eslint/eslint-plugin': + specifier: ^5 + version: 5.49.0(@typescript-eslint/parser@5.49.0)(eslint@8.24.0)(typescript@4.8.4) + '@typescript-eslint/parser': + specifier: ^5 + version: 5.49.0(eslint@8.24.0)(typescript@4.8.4) + antlr4ts-cli: + specifier: ^0.5.0-alpha.4 + version: 0.5.0-alpha.4 + eslint: + specifier: ^8.24.0 + version: 8.24.0 + eslint-config-dremio: + specifier: link:../ui-tools/eslint-config-dremio + version: link:../ui-tools/eslint-config-dremio + eslint-plugin-jest: + specifier: ^26 + version: 26.9.0(@typescript-eslint/eslint-plugin@5.49.0)(eslint@8.24.0)(jest@29.1.2)(typescript@4.8.4) + eslint-plugin-jest-dom: + specifier: ^4 + version: 4.0.3(eslint@8.24.0) + eslint-plugin-jsx-a11y: + specifier: ^6 + version: 6.7.1(eslint@8.24.0) + eslint-plugin-lit: + specifier: ^1 + version: 1.8.2(eslint@8.24.0) + eslint-plugin-mocha: + specifier: ^10 + version: 10.1.0(eslint@8.24.0) + eslint-plugin-promise: + specifier: ^6 + version: 6.1.1(eslint@8.24.0) + eslint-plugin-react: + specifier: ^7 + version: 7.32.1(eslint@8.24.0) + 
eslint-plugin-react-hooks: + specifier: ^4 + version: 4.6.0(eslint@8.24.0) + eslint-plugin-testing-library: + specifier: ^5 + version: 5.10.0(eslint@8.24.0)(typescript@4.8.4) + glob: + specifier: ^8.0.3 + version: 8.0.3 + jest: + specifier: ^29.1.2 + version: 29.1.2(@types/node@18.8.0)(ts-node@10.9.1) + jest-environment-jsdom: + specifier: ^29.1.2 + version: 29.1.2 + jest-junit: + specifier: ^14.0.1 + version: 14.0.1 + jest-silent-reporter: + specifier: ^0.5.0 + version: 0.5.0 + js-yaml: + specifier: ^4.1.0 + version: 4.1.0 + jsdom: + specifier: ^20.0.0 + version: 20.0.1 + npm-run-all: + specifier: ^4.1.5 + version: 4.1.5 + prettier: + specifier: ^2.7.1 + version: 2.7.1 + react: + specifier: ^18.2.0 + version: 18.2.0 + react-dom: + specifier: ^18.2.0 + version: 18.2.0(react@18.2.0) + ts-node: + specifier: ^10.9.1 + version: 10.9.1(@swc/core@1.3.18)(@types/node@18.8.0)(typescript@4.8.4) + typescript: + specifier: ^4.8.4 + version: 4.8.4 packages: - /@adobe/css-tools/4.0.1: + /@adobe/css-tools@4.0.1: resolution: {integrity: sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==} dev: true - /@ampproject/remapping/2.2.0: + /@ampproject/remapping@2.2.0: resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} engines: {node: '>=6.0.0'} dependencies: @@ -92,26 +174,26 @@ packages: '@jridgewell/trace-mapping': 0.3.15 dev: true - /@babel/code-frame/7.18.6: + /@babel/code-frame@7.18.6: resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/highlight': 7.18.6 dev: true - /@babel/compat-data/7.19.3: + /@babel/compat-data@7.19.3: resolution: {integrity: sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==} engines: {node: '>=6.9.0'} dev: true - /@babel/core/7.19.3: + /@babel/core@7.19.3: resolution: {integrity: sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==} engines: {node: '>=6.9.0'} dependencies: '@ampproject/remapping': 2.2.0 '@babel/code-frame': 7.18.6 '@babel/generator': 7.19.3 - '@babel/helper-compilation-targets': 7.19.3_@babel+core@7.19.3 + '@babel/helper-compilation-targets': 7.19.3(@babel/core@7.19.3) '@babel/helper-module-transforms': 7.19.0 '@babel/helpers': 7.19.0 '@babel/parser': 7.19.3 @@ -127,7 +209,7 @@ packages: - supports-color dev: true - /@babel/generator/7.19.3: + /@babel/generator@7.19.3: resolution: {integrity: sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==} engines: {node: '>=6.9.0'} dependencies: @@ -136,7 +218,7 @@ packages: jsesc: 2.5.2 dev: true - /@babel/helper-compilation-targets/7.19.3_@babel+core@7.19.3: + /@babel/helper-compilation-targets@7.19.3(@babel/core@7.19.3): resolution: {integrity: sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==} engines: {node: '>=6.9.0'} peerDependencies: @@ -149,12 +231,12 @@ packages: semver: 6.3.0 dev: true - /@babel/helper-environment-visitor/7.18.9: + /@babel/helper-environment-visitor@7.18.9: resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-function-name/7.19.0: + /@babel/helper-function-name@7.19.0: resolution: {integrity: 
sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} engines: {node: '>=6.9.0'} dependencies: @@ -162,21 +244,21 @@ packages: '@babel/types': 7.19.3 dev: true - /@babel/helper-hoist-variables/7.18.6: + /@babel/helper-hoist-variables@7.18.6: resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.3 dev: true - /@babel/helper-module-imports/7.18.6: + /@babel/helper-module-imports@7.18.6: resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.3 dev: true - /@babel/helper-module-transforms/7.19.0: + /@babel/helper-module-transforms@7.19.0: resolution: {integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==} engines: {node: '>=6.9.0'} dependencies: @@ -192,41 +274,41 @@ packages: - supports-color dev: true - /@babel/helper-plugin-utils/7.19.0: + /@babel/helper-plugin-utils@7.19.0: resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-simple-access/7.18.6: + /@babel/helper-simple-access@7.18.6: resolution: {integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.3 dev: true - /@babel/helper-split-export-declaration/7.18.6: + /@babel/helper-split-export-declaration@7.18.6: resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.3 dev: true - /@babel/helper-string-parser/7.18.10: + /@babel/helper-string-parser@7.18.10: resolution: {integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-validator-identifier/7.19.1: + /@babel/helper-validator-identifier@7.19.1: resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-validator-option/7.18.6: + /@babel/helper-validator-option@7.18.6: resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helpers/7.19.0: + /@babel/helpers@7.19.0: resolution: {integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==} engines: {node: '>=6.9.0'} dependencies: @@ -237,7 +319,7 @@ packages: - supports-color dev: true - /@babel/highlight/7.18.6: + /@babel/highlight@7.18.6: resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} engines: {node: '>=6.9.0'} dependencies: @@ -246,7 +328,7 @@ packages: js-tokens: 4.0.0 dev: true - /@babel/parser/7.19.3: + /@babel/parser@7.19.3: resolution: {integrity: sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==} engines: {node: '>=6.0.0'} hasBin: true @@ -254,7 +336,7 @@ packages: '@babel/types': 7.19.3 dev: true - /@babel/plugin-syntax-async-generators/7.8.4_@babel+core@7.19.3: + /@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.19.3): 
resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -263,7 +345,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-bigint/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -272,7 +354,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-class-properties/7.12.13_@babel+core@7.19.3: + /@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.19.3): resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -281,7 +363,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-import-meta/7.10.4_@babel+core@7.19.3: + /@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.19.3): resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -290,7 +372,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-json-strings/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -299,7 +381,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-jsx/7.18.6_@babel+core@7.19.3: + /@babel/plugin-syntax-jsx@7.18.6(@babel/core@7.19.3): resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==} engines: {node: '>=6.9.0'} peerDependencies: @@ -309,7 +391,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-logical-assignment-operators/7.10.4_@babel+core@7.19.3: + /@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.19.3): resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -318,7 +400,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-nullish-coalescing-operator/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -327,7 +409,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-numeric-separator/7.10.4_@babel+core@7.19.3: + /@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.19.3): resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -336,7 +418,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-object-rest-spread/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -345,7 
+427,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-optional-catch-binding/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -354,7 +436,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-optional-chaining/7.8.3_@babel+core@7.19.3: + /@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.19.3): resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} peerDependencies: '@babel/core': ^7.0.0-0 @@ -363,7 +445,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-top-level-await/7.14.5_@babel+core@7.19.3: + /@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.19.3): resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} engines: {node: '>=6.9.0'} peerDependencies: @@ -373,7 +455,7 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-syntax-typescript/7.18.6_@babel+core@7.19.3: + /@babel/plugin-syntax-typescript@7.18.6(@babel/core@7.19.3): resolution: {integrity: sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==} engines: {node: '>=6.9.0'} peerDependencies: @@ -383,14 +465,21 @@ packages: '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/runtime/7.19.0: + /@babel/runtime@7.19.0: resolution: {integrity: sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==} engines: {node: '>=6.9.0'} dependencies: regenerator-runtime: 0.13.9 dev: true - /@babel/template/7.18.10: + /@babel/runtime@7.20.13: + resolution: {integrity: sha512-gt3PKXs0DBoL9xCvOIIZ2NEqAGZqHjAnmVbfQtB620V0uReIQutpel14KcneZuer7UioY8ALKZ7iocavvzTNFA==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: true + + /@babel/template@7.18.10: resolution: {integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==} engines: {node: '>=6.9.0'} dependencies: @@ -399,7 +488,7 @@ packages: '@babel/types': 7.19.3 dev: true - /@babel/traverse/7.19.3: + /@babel/traverse@7.19.3: resolution: {integrity: sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==} engines: {node: '>=6.9.0'} dependencies: @@ -417,7 +506,7 @@ packages: - supports-color dev: true - /@babel/types/7.19.3: + /@babel/types@7.19.3: resolution: {integrity: sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw==} engines: {node: '>=6.9.0'} dependencies: @@ -426,18 +515,18 @@ packages: to-fast-properties: 2.0.0 dev: true - /@bcoe/v8-coverage/0.2.3: + /@bcoe/v8-coverage@0.2.3: resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} dev: true - /@cspotcode/source-map-support/0.8.1: + /@cspotcode/source-map-support@0.8.1: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} dependencies: '@jridgewell/trace-mapping': 0.3.9 dev: true - /@eslint/eslintrc/1.3.2: + /@eslint/eslintrc@1.3.2: resolution: {integrity: sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ==} 
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: @@ -454,37 +543,37 @@ packages: - supports-color dev: true - /@formatjs/ecma402-abstract/1.13.0: + /@formatjs/ecma402-abstract@1.13.0: resolution: {integrity: sha512-CQ8Ykd51jYD1n05dtoX6ns6B9n/+6ZAxnWUAonvHC4kkuAemROYBhHkEB4tm1uVrRlE7gLDqXkAnY51Y0pRCWQ==} dependencies: '@formatjs/intl-localematcher': 0.2.31 tslib: 2.4.0 - /@formatjs/fast-memoize/1.2.6: + /@formatjs/fast-memoize@1.2.6: resolution: {integrity: sha512-9CWZ3+wCkClKHX+i5j+NyoBVqGf0pIskTo6Xl6ihGokYM2yqSSS68JIgeo+99UIHc+7vi9L3/SDSz/dWI9SNlA==} dependencies: tslib: 2.4.0 dev: false - /@formatjs/icu-messageformat-parser/2.1.10: + /@formatjs/icu-messageformat-parser@2.1.10: resolution: {integrity: sha512-KkRMxhifWkRC45dhM9tqm0GXbb6NPYTGVYY3xx891IKc6p++DQrZTnmkVSNNO47OEERLfuP2KkPFPJBuu8z/wg==} dependencies: '@formatjs/ecma402-abstract': 1.13.0 '@formatjs/icu-skeleton-parser': 1.3.14 tslib: 2.4.0 - /@formatjs/icu-skeleton-parser/1.3.14: + /@formatjs/icu-skeleton-parser@1.3.14: resolution: {integrity: sha512-7bv60HQQcBb3+TSj+45tOb/CHV5z1hOpwdtS50jsSBXfB+YpGhnoRsZxSRksXeCxMy6xn6tA6VY2601BrrK+OA==} dependencies: '@formatjs/ecma402-abstract': 1.13.0 tslib: 2.4.0 - /@formatjs/intl-localematcher/0.2.31: + /@formatjs/intl-localematcher@0.2.31: resolution: {integrity: sha512-9QTjdSBpQ7wHShZgsNzNig5qT3rCPvmZogS/wXZzKotns5skbXgs0I7J8cuN0PPqXyynvNVuN+iOKhNS2eb+ZA==} dependencies: tslib: 2.4.0 - /@humanwhocodes/config-array/0.10.7: + /@humanwhocodes/config-array@0.10.7: resolution: {integrity: sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w==} engines: {node: '>=10.10.0'} dependencies: @@ -495,20 +584,20 @@ packages: - supports-color dev: true - /@humanwhocodes/gitignore-to-minimatch/1.0.2: + /@humanwhocodes/gitignore-to-minimatch@1.0.2: resolution: {integrity: sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA==} dev: true - /@humanwhocodes/module-importer/1.0.1: + /@humanwhocodes/module-importer@1.0.1: resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} engines: {node: '>=12.22'} dev: true - /@humanwhocodes/object-schema/1.2.1: + /@humanwhocodes/object-schema@1.2.1: resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} dev: true - /@istanbuljs/load-nyc-config/1.1.0: + /@istanbuljs/load-nyc-config@1.1.0: resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} engines: {node: '>=8'} dependencies: @@ -519,12 +608,12 @@ packages: resolve-from: 5.0.0 dev: true - /@istanbuljs/schema/0.1.3: + /@istanbuljs/schema@0.1.3: resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} engines: {node: '>=8'} dev: true - /@jest/console/29.1.2: + /@jest/console@29.1.2: resolution: {integrity: sha512-ujEBCcYs82BTmRxqfHMQggSlkUZP63AE5YEaTPj7eFyJOzukkTorstOUC7L6nE3w5SYadGVAnTsQ/ZjTGL0qYQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -536,7 +625,7 @@ packages: slash: 3.0.0 dev: true - /@jest/core/29.1.2_ts-node@10.9.1: + /@jest/core@29.1.2(ts-node@10.9.1): resolution: {integrity: sha512-sCO2Va1gikvQU2ynDN8V4+6wB7iVrD2CvT0zaRst4rglf56yLly0NQ9nuRRAWFeimRf+tCdFsb1Vk1N9LrrMPA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: @@ -557,7 +646,7 @@ packages: exit: 0.1.2 graceful-fs: 4.2.10 jest-changed-files: 29.0.0 - 
jest-config: 29.1.2_wnseany3vswo6p7nhyzogpjzqe + jest-config: 29.1.2(@types/node@18.8.0)(ts-node@10.9.1) jest-haste-map: 29.1.2 jest-message-util: 29.1.2 jest-regex-util: 29.0.0 @@ -578,14 +667,14 @@ packages: - ts-node dev: true - /@jest/create-cache-key-function/27.5.1: + /@jest/create-cache-key-function@27.5.1: resolution: {integrity: sha512-dmH1yW+makpTSURTy8VzdUwFnfQh1G8R+DxO2Ho2FFmBbKFEVm+3jWdvFhE2VqB/LATCTokkP0dotjyQyw5/AQ==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/types': 27.5.1 dev: true - /@jest/environment/29.1.2: + /@jest/environment@29.1.2: resolution: {integrity: sha512-rG7xZ2UeOfvOVzoLIJ0ZmvPl4tBEQ2n73CZJSlzUjPw4or1oSWC0s0Rk0ZX+pIBJ04aVr6hLWFn1DFtrnf8MhQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -595,14 +684,14 @@ packages: jest-mock: 29.1.2 dev: true - /@jest/expect-utils/29.1.2: + /@jest/expect-utils@29.1.2: resolution: {integrity: sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: jest-get-type: 29.0.0 dev: true - /@jest/expect/29.1.2: + /@jest/expect@29.1.2: resolution: {integrity: sha512-FXw/UmaZsyfRyvZw3M6POgSNqwmuOXJuzdNiMWW9LCYo0GRoRDhg+R5iq5higmRTHQY7hx32+j7WHwinRmoILQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -612,7 +701,7 @@ packages: - supports-color dev: true - /@jest/fake-timers/29.1.2: + /@jest/fake-timers@29.1.2: resolution: {integrity: sha512-GppaEqS+QQYegedxVMpCe2xCXxxeYwQ7RsNx55zc8f+1q1qevkZGKequfTASI7ejmg9WwI+SJCrHe9X11bLL9Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -624,7 +713,7 @@ packages: jest-util: 29.1.2 dev: true - /@jest/globals/29.1.2: + /@jest/globals@29.1.2: resolution: {integrity: sha512-uMgfERpJYoQmykAd0ffyMq8wignN4SvLUG6orJQRe9WAlTRc9cdpCaE/29qurXixYJVZWUqIBXhSk8v5xN1V9g==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -636,7 +725,7 @@ packages: - supports-color dev: true - /@jest/reporters/29.1.2: + /@jest/reporters@29.1.2: resolution: {integrity: sha512-X4fiwwyxy9mnfpxL0g9DD0KcTmEIqP0jUdnc2cfa9riHy+I6Gwwp5vOZiwyg0vZxfSDxrOlK9S4+340W4d+DAA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: @@ -674,14 +763,14 @@ packages: - supports-color dev: true - /@jest/schemas/29.0.0: + /@jest/schemas@29.0.0: resolution: {integrity: sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@sinclair/typebox': 0.24.44 dev: true - /@jest/source-map/29.0.0: + /@jest/source-map@29.0.0: resolution: {integrity: sha512-nOr+0EM8GiHf34mq2GcJyz/gYFyLQ2INDhAylrZJ9mMWoW21mLBfZa0BUVPPMxVYrLjeiRe2Z7kWXOGnS0TFhQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -690,7 +779,7 @@ packages: graceful-fs: 4.2.10 dev: true - /@jest/test-result/29.1.2: + /@jest/test-result@29.1.2: resolution: {integrity: sha512-jjYYjjumCJjH9hHCoMhA8PCl1OxNeGgAoZ7yuGYILRJX9NjgzTN0pCT5qAoYR4jfOP8htIByvAlz9vfNSSBoVg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -700,7 +789,7 @@ packages: collect-v8-coverage: 1.0.1 dev: true - /@jest/test-sequencer/29.1.2: + /@jest/test-sequencer@29.1.2: resolution: {integrity: sha512-fU6dsUqqm8sA+cd85BmeF7Gu9DsXVWFdGn9taxM6xN1cKdcP/ivSgXh5QucFRFz1oZxKv3/9DYYbq0ULly3P/Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -710,7 +799,7 @@ packages: slash: 3.0.0 dev: true - /@jest/transform/29.1.2: + /@jest/transform@29.1.2: 
resolution: {integrity: sha512-2uaUuVHTitmkx1tHF+eBjb4p7UuzBG7SXIaA/hNIkaMP6K+gXYGxP38ZcrofzqN0HeZ7A90oqsOa97WU7WZkSw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -733,7 +822,18 @@ packages: - supports-color dev: true - /@jest/types/27.5.1: + /@jest/types@26.6.2: + resolution: {integrity: sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==} + engines: {node: '>= 10.14.2'} + dependencies: + '@types/istanbul-lib-coverage': 2.0.4 + '@types/istanbul-reports': 3.0.1 + '@types/node': 18.8.0 + '@types/yargs': 15.0.15 + chalk: 4.1.2 + dev: true + + /@jest/types@27.5.1: resolution: {integrity: sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: @@ -744,7 +844,7 @@ packages: chalk: 4.1.2 dev: true - /@jest/types/29.1.2: + /@jest/types@29.1.2: resolution: {integrity: sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -756,7 +856,7 @@ packages: chalk: 4.1.2 dev: true - /@jridgewell/gen-mapping/0.1.1: + /@jridgewell/gen-mapping@0.1.1: resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} engines: {node: '>=6.0.0'} dependencies: @@ -764,7 +864,7 @@ packages: '@jridgewell/sourcemap-codec': 1.4.14 dev: true - /@jridgewell/gen-mapping/0.3.2: + /@jridgewell/gen-mapping@0.3.2: resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} engines: {node: '>=6.0.0'} dependencies: @@ -773,35 +873,59 @@ packages: '@jridgewell/trace-mapping': 0.3.15 dev: true - /@jridgewell/resolve-uri/3.1.0: + /@jridgewell/resolve-uri@3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} engines: {node: '>=6.0.0'} dev: true - /@jridgewell/set-array/1.1.2: + /@jridgewell/set-array@1.1.2: resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} engines: {node: '>=6.0.0'} dev: true - /@jridgewell/sourcemap-codec/1.4.14: + /@jridgewell/sourcemap-codec@1.4.14: resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} dev: true - /@jridgewell/trace-mapping/0.3.15: + /@jridgewell/trace-mapping@0.3.15: resolution: {integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==} dependencies: '@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 dev: true - /@jridgewell/trace-mapping/0.3.9: + /@jridgewell/trace-mapping@0.3.9: resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} dependencies: '@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 dev: true - /@nodelib/fs.scandir/2.1.5: + /@mswjs/cookies@0.2.2: + resolution: {integrity: sha512-mlN83YSrcFgk7Dm1Mys40DLssI1KdJji2CMKN8eOlBqsTADYzj2+jWzsANsUTFbxDMWPD5e9bfA1RGqBpS3O1g==} + engines: {node: '>=14'} + dependencies: + '@types/set-cookie-parser': 2.4.2 + set-cookie-parser: 2.6.0 + dev: false + + /@mswjs/interceptors@0.17.9: + resolution: {integrity: sha512-4LVGt03RobMH/7ZrbHqRxQrS9cc2uh+iNKSj8UWr8M26A2i793ju+csaB5zaqYltqJmA2jUq4VeYfKmVqvsXQg==} + engines: {node: '>=14'} + dependencies: + '@open-draft/until': 1.0.3 + 
'@types/debug': 4.1.7 + '@xmldom/xmldom': 0.8.6 + debug: 4.3.4 + headers-polyfill: 3.1.2 + outvariant: 1.3.0 + strict-event-emitter: 0.2.8 + web-encoding: 1.1.5 + transitivePeerDependencies: + - supports-color + dev: false + + /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} dependencies: @@ -809,12 +933,12 @@ packages: run-parallel: 1.2.0 dev: true - /@nodelib/fs.stat/2.0.5: + /@nodelib/fs.stat@2.0.5: resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} dev: true - /@nodelib/fs.walk/1.2.8: + /@nodelib/fs.walk@1.2.8: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} dependencies: @@ -822,23 +946,27 @@ packages: fastq: 1.13.0 dev: true - /@sinclair/typebox/0.24.44: + /@open-draft/until@1.0.3: + resolution: {integrity: sha512-Aq58f5HiWdyDlFffbbSjAlv596h/cOnt2DO1w3DOC7OJ5EHs0hd/nycJfiu9RJbT6Yk6F1knnRRXNSpxoIVZ9Q==} + dev: false + + /@sinclair/typebox@0.24.44: resolution: {integrity: sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg==} dev: true - /@sinonjs/commons/1.8.3: + /@sinonjs/commons@1.8.3: resolution: {integrity: sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==} dependencies: type-detect: 4.0.8 dev: true - /@sinonjs/fake-timers/9.1.2: + /@sinonjs/fake-timers@9.1.2: resolution: {integrity: sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==} dependencies: '@sinonjs/commons': 1.8.3 dev: true - /@swc/cli/0.1.57_@swc+core@1.3.18: + /@swc/cli@0.1.57(@swc/core@1.3.18): resolution: {integrity: sha512-HxM8TqYHhAg+zp7+RdTU69bnkl4MWdt1ygyp6BDIPjTiaJVH6Dizn2ezbgDS8mnFZI1FyhKvxU/bbaUs8XhzQg==} engines: {node: '>= 12.13'} hasBin: true @@ -856,7 +984,7 @@ packages: source-map: 0.7.4 dev: true - /@swc/core-darwin-arm64/1.3.18: + /@swc/core-darwin-arm64@1.3.18: resolution: {integrity: sha512-4UEQ+LyzDFTszEy4LCU50h4cjVNJcNwD87aVBT/8i6YXj5dyMki/TrkIQ6Bhv7g5beg2GRncB2ndjN66r8I8+w==} engines: {node: '>=10'} cpu: [arm64] @@ -865,7 +993,7 @@ packages: dev: true optional: true - /@swc/core-darwin-x64/1.3.18: + /@swc/core-darwin-x64@1.3.18: resolution: {integrity: sha512-DSCd7eVr+4ukffNnvhrFmUoCF0VLOXPgGmdwm6u0irLWOLtr2VZNZcf7UF+t/Y9jPKmXz3OY6lVgwtjxZhiklQ==} engines: {node: '>=10'} cpu: [x64] @@ -874,7 +1002,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm-gnueabihf/1.3.18: + /@swc/core-linux-arm-gnueabihf@1.3.18: resolution: {integrity: sha512-9dy6qJiWAls9OrBvrWbFDbjEkuOPrEP6OsKyrQWTMqLjCLwgLa3g4yC0YtPdUa/A8uyNVKtRcq+NXoKW+mP/QQ==} engines: {node: '>=10'} cpu: [arm] @@ -883,7 +1011,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm64-gnu/1.3.18: + /@swc/core-linux-arm64-gnu@1.3.18: resolution: {integrity: sha512-8FZjiUSM4JBQTD4sV7Y6BNMdo0oDlqa8xYVaAimuIBL8ixD/Fb+0GIxKdB59yKRVQyuXJRa6Pwzd7zk3wY5T0Q==} engines: {node: '>=10'} cpu: [arm64] @@ -892,7 +1020,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm64-musl/1.3.18: + /@swc/core-linux-arm64-musl@1.3.18: resolution: {integrity: sha512-0zNqfFeAHZp37lu+lTVvZKfDM10EIoYJtv9sWz+0EA5mkzwj4NtC3ialTIjcPAyJ9Oq4zBtToW2hv7qEtyBHZw==} engines: {node: '>=10'} cpu: [arm64] @@ -901,7 +1029,7 @@ packages: dev: true optional: true - /@swc/core-linux-x64-gnu/1.3.18: + 
/@swc/core-linux-x64-gnu@1.3.18: resolution: {integrity: sha512-PA3Cc97Kc6W6RtpBLeJaoXLCRL5dJLYd2dszf+f5hGHHJybh6eXGIU0ZkZr898NUHoL8fT6Mg6I4JCNImq/yBg==} engines: {node: '>=10'} cpu: [x64] @@ -910,7 +1038,7 @@ packages: dev: true optional: true - /@swc/core-linux-x64-musl/1.3.18: + /@swc/core-linux-x64-musl@1.3.18: resolution: {integrity: sha512-RiZXHwED8cfD/zoBG01iY8YZtOF/8t9XHZ1JqCx9PWOMjXD3Vc8F2I7bp1Qg6ahzWEaP+2+/rqGO1kSwaJjJLw==} engines: {node: '>=10'} cpu: [x64] @@ -919,7 +1047,7 @@ packages: dev: true optional: true - /@swc/core-win32-arm64-msvc/1.3.18: + /@swc/core-win32-arm64-msvc@1.3.18: resolution: {integrity: sha512-G1Lu/sP+v34lwsGFreklnCdxygMLmobyLY31cNPd0i47ZwgrGowuTV34Mcqfc4AWRkayqVAIlb/WWIZ1+qemcA==} engines: {node: '>=10'} cpu: [arm64] @@ -928,7 +1056,7 @@ packages: dev: true optional: true - /@swc/core-win32-ia32-msvc/1.3.18: + /@swc/core-win32-ia32-msvc@1.3.18: resolution: {integrity: sha512-Uu+m5BPemw5ZiG6LaF+pP0qFQuIXF55wMZNa0Dbl/16hF7ci6q941MT6CqeK5LQQ52FVVqeYO5lDk5CggaA3Mw==} engines: {node: '>=10'} cpu: [ia32] @@ -937,7 +1065,7 @@ packages: dev: true optional: true - /@swc/core-win32-x64-msvc/1.3.18: + /@swc/core-win32-x64-msvc@1.3.18: resolution: {integrity: sha512-9o8uFNsPmWB5FFQSDCsI/KVBSHuAILEwB/hMvbUxKtZeSWAQTm5BqbNPi6X11KJ3MdyoJn7zPejj3grL3dcd/w==} engines: {node: '>=10'} cpu: [x64] @@ -946,7 +1074,7 @@ packages: dev: true optional: true - /@swc/core/1.3.18: + /@swc/core@1.3.18: resolution: {integrity: sha512-VChk3ldLhmVoX3Hd2M3Y4j960T0lo2Zus60iZoWST6P65RVPt8BatFVVPAB9dABy1dB5zn1BCpHlH85yXVysQw==} engines: {node: '>=10'} hasBin: true @@ -964,7 +1092,7 @@ packages: '@swc/core-win32-x64-msvc': 1.3.18 dev: true - /@swc/jest/0.2.23_@swc+core@1.3.18: + /@swc/jest@0.2.23(@swc/core@1.3.18): resolution: {integrity: sha512-ZLj17XjHbPtNsgqjm83qizENw05emLkKGu3WuPUttcy9hkngl0/kcc7fDbcSBpADS0GUtsO+iKPjZFWVAtJSlA==} engines: {npm: '>= 7.0.0'} peerDependencies: @@ -975,7 +1103,7 @@ packages: jsonc-parser: 3.2.0 dev: true - /@testing-library/dom/8.18.1: + /@testing-library/dom@8.18.1: resolution: {integrity: sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg==} engines: {node: '>=12'} dependencies: @@ -989,7 +1117,7 @@ packages: pretty-format: 27.5.1 dev: true - /@testing-library/jest-dom/5.16.5: + /@testing-library/jest-dom@5.16.5: resolution: {integrity: sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA==} engines: {node: '>=8', npm: '>=6', yarn: '>=1'} dependencies: @@ -1004,7 +1132,7 @@ packages: redent: 3.0.0 dev: true - /@testing-library/react/13.4.0_biqbaboplfbrettd7655fr4n2y: + /@testing-library/react@13.4.0(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw==} engines: {node: '>=12'} peerDependencies: @@ -1015,10 +1143,10 @@ packages: '@testing-library/dom': 8.18.1 '@types/react-dom': 18.0.6 react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 + react-dom: 18.2.0(react@18.2.0) dev: true - /@testing-library/user-event/14.4.3_znccgeejomvff3jrsk3ljovfpu: + /@testing-library/user-event@14.4.3(@testing-library/dom@8.18.1): resolution: {integrity: sha512-kCUc5MEwaEMakkO5x7aoD+DLi02ehmEM2QCGWvNqAS1dV/fAvORWEjnjsEIvml59M7Y5kCkWN6fCCyPOe8OL6Q==} engines: {node: '>=12', npm: '>=6'} peerDependencies: @@ -1027,32 +1155,32 @@ packages: '@testing-library/dom': 8.18.1 dev: true - /@tootallnate/once/2.0.0: + /@tootallnate/once@2.0.0: resolution: {integrity: 
sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} engines: {node: '>= 10'} dev: true - /@tsconfig/node10/1.0.9: + /@tsconfig/node10@1.0.9: resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} dev: true - /@tsconfig/node12/1.0.11: + /@tsconfig/node12@1.0.11: resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} dev: true - /@tsconfig/node14/1.0.3: + /@tsconfig/node14@1.0.3: resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} dev: true - /@tsconfig/node16/1.0.3: + /@tsconfig/node16@1.0.3: resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} dev: true - /@types/aria-query/4.2.2: + /@types/aria-query@4.2.2: resolution: {integrity: sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig==} dev: true - /@types/babel__core/7.1.19: + /@types/babel__core@7.1.19: resolution: {integrity: sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw==} dependencies: '@babel/parser': 7.19.3 @@ -1062,66 +1190,80 @@ packages: '@types/babel__traverse': 7.18.2 dev: true - /@types/babel__generator/7.6.4: + /@types/babel__generator@7.6.4: resolution: {integrity: sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==} dependencies: '@babel/types': 7.19.3 dev: true - /@types/babel__template/7.4.1: + /@types/babel__template@7.4.1: resolution: {integrity: sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==} dependencies: '@babel/parser': 7.19.3 '@babel/types': 7.19.3 dev: true - /@types/babel__traverse/7.18.2: + /@types/babel__traverse@7.18.2: resolution: {integrity: sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg==} dependencies: '@babel/types': 7.19.3 dev: true - /@types/glob/8.0.0: + /@types/cookie@0.4.1: + resolution: {integrity: sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q==} + dev: false + + /@types/debug@4.1.7: + resolution: {integrity: sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==} + dependencies: + '@types/ms': 0.7.31 + dev: false + + /@types/glob@8.0.0: resolution: {integrity: sha512-l6NQsDDyQUVeoTynNpC9uRvCUint/gSUXQA2euwmTuWGvPY5LSDUu6tkCtJB2SvGQlJQzLaKqcGZP4//7EDveA==} dependencies: '@types/minimatch': 5.1.2 '@types/node': 18.8.0 dev: true - /@types/graceful-fs/4.1.5: + /@types/graceful-fs@4.1.5: resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} dependencies: '@types/node': 18.8.0 dev: true - /@types/istanbul-lib-coverage/2.0.4: + /@types/istanbul-lib-coverage@2.0.4: resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} dev: true - /@types/istanbul-lib-report/3.0.0: + /@types/istanbul-lib-report@3.0.0: resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} dependencies: '@types/istanbul-lib-coverage': 2.0.4 dev: true - /@types/istanbul-reports/3.0.1: + /@types/istanbul-reports@3.0.1: resolution: {integrity: 
sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} dependencies: '@types/istanbul-lib-report': 3.0.0 dev: true - /@types/jest/29.1.1: + /@types/jest@29.1.1: resolution: {integrity: sha512-U9Ey07dGWl6fUFaIaUQUKWG5NoKi/zizeVQCGV8s4nSU0jPgqphVZvS64+8BtWYvrc3ZGw6wo943NSYPxkrp/g==} dependencies: expect: 29.1.2 pretty-format: 29.1.2 dev: true - /@types/js-yaml/4.0.5: + /@types/js-levenshtein@1.1.1: + resolution: {integrity: sha512-qC4bCqYGy1y/NP7dDVr7KJarn+PbX1nSpwA7JXdu0HxT3QYjO8MJ+cntENtHFVy2dRAyBV23OZ6MxsW1AM1L8g==} + dev: false + + /@types/js-yaml@4.0.5: resolution: {integrity: sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==} dev: true - /@types/jsdom/20.0.0: + /@types/jsdom@20.0.0: resolution: {integrity: sha512-YfAchFs0yM1QPDrLm2VHe+WHGtqms3NXnXAMolrgrVP6fgBHHXy1ozAbo/dFtPNtZC/m66bPiCTWYmqp1F14gA==} dependencies: '@types/node': 18.8.0 @@ -1129,28 +1271,39 @@ packages: parse5: 7.1.1 dev: true - /@types/minimatch/5.1.2: + /@types/json-schema@7.0.11: + resolution: {integrity: sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==} + dev: true + + /@types/lodash@4.14.191: + resolution: {integrity: sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==} + dev: false + + /@types/minimatch@5.1.2: resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} dev: true - /@types/node/18.8.0: + /@types/ms@0.7.31: + resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} + dev: false + + /@types/node@18.8.0: resolution: {integrity: sha512-u+h43R6U8xXDt2vzUaVP3VwjjLyOJk6uEciZS8OSyziUQGOwmk+l+4drxcsDboHXwyTaqS1INebghmWMRxq3LA==} - dev: true - /@types/prettier/2.7.1: + /@types/prettier@2.7.1: resolution: {integrity: sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==} dev: true - /@types/prop-types/15.7.5: + /@types/prop-types@15.7.5: resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==} - /@types/react-dom/18.0.6: + /@types/react-dom@18.0.6: resolution: {integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==} dependencies: '@types/react': 18.0.24 dev: true - /@types/react/16.14.34: + /@types/react@16.14.34: resolution: {integrity: sha512-b99nWeGGReLh6aKBppghVqp93dFJtgtDOzc8NXM6hewD8PQ2zZG5kBLgbx+VJr7Q7WBMjHxaIl3dwpwwPIUgyA==} dependencies: '@types/prop-types': 15.7.5 @@ -1158,58 +1311,214 @@ packages: csstype: 3.1.1 dev: false - /@types/react/18.0.24: + /@types/react@18.0.24: resolution: {integrity: sha512-wRJWT6ouziGUy+9uX0aW4YOJxAY0bG6/AOk5AW5QSvZqI7dk6VBIbXvcVgIw/W5Jrl24f77df98GEKTJGOLx7Q==} dependencies: '@types/prop-types': 15.7.5 '@types/scheduler': 0.16.2 csstype: 3.1.1 - /@types/scheduler/0.16.2: + /@types/scheduler@0.16.2: resolution: {integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==} - /@types/stack-utils/2.0.1: + /@types/semver@7.3.13: + resolution: {integrity: sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==} + dev: true + + /@types/set-cookie-parser@2.4.2: + resolution: {integrity: sha512-fBZgytwhYAUkj/jC/FAV4RQ5EerRup1YQsXQCh8rZfiHkc4UahC192oH0smGwsXol3cL3A5oETuAHeQHmhXM4w==} + dependencies: + '@types/node': 18.8.0 + dev: false + + 
/@types/stack-utils@2.0.1: resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} dev: true - /@types/testing-library__jest-dom/5.14.5: + /@types/testing-library__jest-dom@5.14.5: resolution: {integrity: sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ==} dependencies: '@types/jest': 29.1.1 dev: true - /@types/tough-cookie/4.0.2: + /@types/tough-cookie@4.0.2: resolution: {integrity: sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==} dev: true - /@types/yargs-parser/21.0.0: + /@types/yargs-parser@21.0.0: resolution: {integrity: sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==} dev: true - /@types/yargs/16.0.4: + /@types/yargs@15.0.15: + resolution: {integrity: sha512-IziEYMU9XoVj8hWg7k+UJrXALkGFjWJhn5QFEv9q4p+v40oZhSuC135M38st8XPjICL7Ey4TV64ferBGUoJhBg==} + dependencies: + '@types/yargs-parser': 21.0.0 + dev: true + + /@types/yargs@16.0.4: resolution: {integrity: sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==} dependencies: '@types/yargs-parser': 21.0.0 dev: true - /@types/yargs/17.0.13: + /@types/yargs@17.0.13: resolution: {integrity: sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==} dependencies: '@types/yargs-parser': 21.0.0 dev: true - /abab/2.0.6: + /@typescript-eslint/eslint-plugin@5.49.0(@typescript-eslint/parser@5.49.0)(eslint@8.24.0)(typescript@4.8.4): + resolution: {integrity: sha512-IhxabIpcf++TBaBa1h7jtOWyon80SXPRLDq0dVz5SLFC/eW6tofkw/O7Ar3lkx5z5U6wzbKDrl2larprp5kk5Q==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + '@typescript-eslint/parser': ^5.0.0 + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/parser': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + '@typescript-eslint/scope-manager': 5.49.0 + '@typescript-eslint/type-utils': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + '@typescript-eslint/utils': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + debug: 4.3.4 + eslint: 8.24.0 + ignore: 5.2.0 + natural-compare-lite: 1.4.0 + regexpp: 3.2.0 + semver: 7.3.7 + tsutils: 3.21.0(typescript@4.8.4) + typescript: 4.8.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/parser@5.49.0(eslint@8.24.0)(typescript@4.8.4): + resolution: {integrity: sha512-veDlZN9mUhGqU31Qiv2qEp+XrJj5fgZpJ8PW30sHU+j/8/e5ruAhLaVDAeznS7A7i4ucb/s8IozpDtt9NqCkZg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/scope-manager': 5.49.0 + '@typescript-eslint/types': 5.49.0 + '@typescript-eslint/typescript-estree': 5.49.0(typescript@4.8.4) + debug: 4.3.4 + eslint: 8.24.0 + typescript: 4.8.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/scope-manager@5.49.0: + resolution: {integrity: sha512-clpROBOiMIzpbWNxCe1xDK14uPZh35u4QaZO1GddilEzoCLAEz4szb51rBpdgurs5k2YzPtJeTEN3qVbG+LRUQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.49.0 + '@typescript-eslint/visitor-keys': 5.49.0 + dev: true + + /@typescript-eslint/type-utils@5.49.0(eslint@8.24.0)(typescript@4.8.4): + resolution: {integrity: 
sha512-eUgLTYq0tR0FGU5g1YHm4rt5H/+V2IPVkP0cBmbhRyEmyGe4XvJ2YJ6sYTmONfjmdMqyMLad7SB8GvblbeESZA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: '*' + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/typescript-estree': 5.49.0(typescript@4.8.4) + '@typescript-eslint/utils': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + debug: 4.3.4 + eslint: 8.24.0 + tsutils: 3.21.0(typescript@4.8.4) + typescript: 4.8.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/types@5.49.0: + resolution: {integrity: sha512-7If46kusG+sSnEpu0yOz2xFv5nRz158nzEXnJFCGVEHWnuzolXKwrH5Bsf9zsNlOQkyZuk0BZKKoJQI+1JPBBg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + + /@typescript-eslint/typescript-estree@5.49.0(typescript@4.8.4): + resolution: {integrity: sha512-PBdx+V7deZT/3GjNYPVQv1Nc0U46dAHbIuOG8AZ3on3vuEKiPDwFE/lG1snN2eUB9IhF7EyF7K1hmTcLztNIsA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/types': 5.49.0 + '@typescript-eslint/visitor-keys': 5.49.0 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.3.7 + tsutils: 3.21.0(typescript@4.8.4) + typescript: 4.8.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/utils@5.49.0(eslint@8.24.0)(typescript@4.8.4): + resolution: {integrity: sha512-cPJue/4Si25FViIb74sHCLtM4nTSBXtLx1d3/QT6mirQ/c65bV8arBEebBJJizfq8W2YyMoPI/WWPFWitmNqnQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + '@types/json-schema': 7.0.11 + '@types/semver': 7.3.13 + '@typescript-eslint/scope-manager': 5.49.0 + '@typescript-eslint/types': 5.49.0 + '@typescript-eslint/typescript-estree': 5.49.0(typescript@4.8.4) + eslint: 8.24.0 + eslint-scope: 5.1.1 + eslint-utils: 3.0.0(eslint@8.24.0) + semver: 7.3.7 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + + /@typescript-eslint/visitor-keys@5.49.0: + resolution: {integrity: sha512-v9jBMjpNWyn8B6k/Mjt6VbUS4J1GvUlR4x3Y+ibnP1z7y7V4n0WRz+50DY6+Myj0UaXVSuUlHohO+eZ8IJEnkg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.49.0 + eslint-visitor-keys: 3.3.0 + dev: true + + /@xmldom/xmldom@0.8.6: + resolution: {integrity: sha512-uRjjusqpoqfmRkTaNuLJ2VohVr67Q5YwDATW3VU7PfzTj6IRaihGrYI7zckGZjxQPBIp63nfvJbM+Yu5ICh0Bg==} + engines: {node: '>=10.0.0'} + dev: false + + /@zxing/text-encoding@0.9.0: + resolution: {integrity: sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==} + requiresBuild: true + dev: false + optional: true + + /abab@2.0.6: resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==} dev: true - /acorn-globals/7.0.1: + /acorn-globals@7.0.1: resolution: {integrity: sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==} dependencies: acorn: 8.8.0 acorn-walk: 8.2.0 dev: true - /acorn-jsx/5.3.2_acorn@8.8.0: + /acorn-jsx@5.3.2(acorn@8.8.0): resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 @@ -1217,18 +1526,18 @@ packages: acorn: 8.8.0 dev: true - /acorn-walk/8.2.0: + /acorn-walk@8.2.0: resolution: {integrity: 
sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} engines: {node: '>=0.4.0'} dev: true - /acorn/8.8.0: + /acorn@8.8.0: resolution: {integrity: sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==} engines: {node: '>=0.4.0'} hasBin: true dev: true - /agent-base/6.0.2: + /agent-base@6.0.2: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} dependencies: @@ -1237,7 +1546,7 @@ packages: - supports-color dev: true - /ajv/6.12.6: + /ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} dependencies: fast-deep-equal: 3.1.3 @@ -1246,83 +1555,141 @@ packages: uri-js: 4.4.1 dev: true - /ansi-escapes/4.3.2: + /ansi-escapes@4.3.2: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} dependencies: type-fest: 0.21.3 - dev: true - /ansi-regex/5.0.1: + /ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} - dev: true - /ansi-styles/3.2.1: + /ansi-styles@3.2.1: resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} dependencies: color-convert: 1.9.3 dev: true - /ansi-styles/4.3.0: + /ansi-styles@4.3.0: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} dependencies: color-convert: 2.0.1 - dev: true - /ansi-styles/5.2.0: + /ansi-styles@5.2.0: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} dev: true - /antlr4ts-cli/0.5.0-alpha.4: + /antlr4-c3@2.2.3: + resolution: {integrity: sha512-/mTYKetSRVU+eWRHRLv9XZ1pkuuYM8T8T7C6P9oCOQpQSbUGtLab7mFxKUwBIg3exdjjFfMfKWs5Tzv2egCfUA==} + dependencies: + antlr4ts: 0.5.0-alpha.4 + dev: false + + /antlr4ts-cli@0.5.0-alpha.4: resolution: {integrity: sha512-lVPVBTA2CVHRYILSKilL6Jd4hAumhSZZWA7UbQNQrmaSSj7dPmmYaN4bOmZG79cOy0lS00i4LY68JZZjZMWVrw==} hasBin: true dev: true - /antlr4ts/0.5.0-alpha.4: + /antlr4ts@0.5.0-alpha.4: resolution: {integrity: sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==} dev: false - /anymatch/3.1.2: + /anymatch@3.1.2: resolution: {integrity: sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==} engines: {node: '>= 8'} dependencies: normalize-path: 3.0.0 picomatch: 2.3.1 - dev: true - /arg/4.1.3: + /arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} dev: true - /argparse/1.0.10: + /argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} dependencies: sprintf-js: 1.0.3 dev: true - /argparse/2.0.1: + /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} dev: true - /aria-query/5.0.2: + /aria-query@5.0.2: resolution: {integrity: sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q==} engines: {node: '>=6.0'} dev: true - /array-union/2.1.0: + 
/aria-query@5.1.3: + resolution: {integrity: sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==} + dependencies: + deep-equal: 2.2.0 + dev: true + + /array-includes@3.1.6: + resolution: {integrity: sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + get-intrinsic: 1.1.3 + is-string: 1.0.7 + dev: true + + /array-union@2.1.0: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} dev: true - /asynckit/0.4.0: + /array.prototype.flatmap@1.3.1: + resolution: {integrity: sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + es-shim-unscopables: 1.0.0 + dev: true + + /array.prototype.tosorted@1.1.1: + resolution: {integrity: sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + es-shim-unscopables: 1.0.0 + get-intrinsic: 1.1.3 + dev: true + + /ast-types-flow@0.0.7: + resolution: {integrity: sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==} + dev: true + + /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} dev: true - /babel-jest/29.1.2_@babel+core@7.19.3: + /available-typed-arrays@1.0.5: + resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} + engines: {node: '>= 0.4'} + + /axe-core@4.6.3: + resolution: {integrity: sha512-/BQzOX780JhsxDnPpH4ZiyrJAzcd8AfzFPkv+89veFSr1rcMjuq2JDCwypKaPeB6ljHp9KjXhPpjgCvQlWYuqg==} + engines: {node: '>=4'} + dev: true + + /axobject-query@3.1.1: + resolution: {integrity: sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==} + dependencies: + deep-equal: 2.2.0 + dev: true + + /babel-jest@29.1.2(@babel/core@7.19.3): resolution: {integrity: sha512-IuG+F3HTHryJb7gacC7SQ59A9kO56BctUsT67uJHp1mMCHUOMXpDwOHWGifWqdWVknN2WNkCVQELPjXx0aLJ9Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: @@ -1332,7 +1699,7 @@ packages: '@jest/transform': 29.1.2 '@types/babel__core': 7.1.19 babel-plugin-istanbul: 6.1.1 - babel-preset-jest: 29.0.2_@babel+core@7.19.3 + babel-preset-jest: 29.0.2(@babel/core@7.19.3) chalk: 4.1.2 graceful-fs: 4.2.10 slash: 3.0.0 @@ -1340,7 +1707,7 @@ packages: - supports-color dev: true - /babel-plugin-istanbul/6.1.1: + /babel-plugin-istanbul@6.1.1: resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} engines: {node: '>=8'} dependencies: @@ -1353,7 +1720,7 @@ packages: - supports-color dev: true - /babel-plugin-jest-hoist/29.0.2: + /babel-plugin-jest-hoist@29.0.2: resolution: {integrity: sha512-eBr2ynAEFjcebVvu8Ktx580BD1QKCrBG1XwEUTXJe285p9HA/4hOhfWCFRQhTKSyBV0VzjhG7H91Eifz9s29hg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -1363,27 +1730,27 @@ packages: '@types/babel__traverse': 7.18.2 dev: true - /babel-preset-current-node-syntax/1.0.1_@babel+core@7.19.3: + /babel-preset-current-node-syntax@1.0.1(@babel/core@7.19.3): resolution: {integrity: 
sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==} peerDependencies: '@babel/core': ^7.0.0 dependencies: '@babel/core': 7.19.3 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.19.3 - '@babel/plugin-syntax-bigint': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-class-properties': 7.12.13_@babel+core@7.19.3 - '@babel/plugin-syntax-import-meta': 7.10.4_@babel+core@7.19.3 - '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4_@babel+core@7.19.3 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.19.3 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.19.3 - '@babel/plugin-syntax-top-level-await': 7.14.5_@babel+core@7.19.3 - dev: true - - /babel-preset-jest/29.0.2_@babel+core@7.19.3: + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.19.3) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.19.3) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.19.3) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.19.3) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.19.3) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.19.3) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.19.3) + dev: true + + /babel-preset-jest@29.0.2(@babel/core@7.19.3): resolution: {integrity: sha512-BeVXp7rH5TK96ofyEnHjznjLMQ2nAeDJ+QzxKnHAAMs0RgrQsCywjAN8m4mOm5Di0pxU//3AoEeJJrerMH5UeA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: @@ -1391,34 +1758,50 @@ packages: dependencies: '@babel/core': 7.19.3 babel-plugin-jest-hoist: 29.0.2 - babel-preset-current-node-syntax: 1.0.1_@babel+core@7.19.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.3) dev: true - /balanced-match/1.0.2: + /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} dev: true - /brace-expansion/1.1.11: + /base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + dev: false + + /binary-extensions@2.2.0: + resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + engines: {node: '>=8'} + dev: false + + /bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: false + + /brace-expansion@1.1.11: resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 dev: true - /brace-expansion/2.0.1: + /brace-expansion@2.0.1: resolution: {integrity: 
sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} dependencies: balanced-match: 1.0.2 dev: true - /braces/3.0.2: + /braces@3.0.2: resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} engines: {node: '>=8'} dependencies: fill-range: 7.0.1 - dev: true - /browserslist/4.21.4: + /browserslist@4.21.4: resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -1426,46 +1809,52 @@ packages: caniuse-lite: 1.0.30001414 electron-to-chromium: 1.4.270 node-releases: 2.0.6 - update-browserslist-db: 1.0.9_browserslist@4.21.4 + update-browserslist-db: 1.0.9(browserslist@4.21.4) dev: true - /bser/2.1.1: + /bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} dependencies: node-int64: 0.4.0 dev: true - /buffer-from/1.1.2: + /buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} dev: true - /call-bind/1.0.2: + /buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: false + + /call-bind@1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: function-bind: 1.1.1 get-intrinsic: 1.1.3 - dev: true - /callsites/3.1.0: + /callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} dev: true - /camelcase/5.3.1: + /camelcase@5.3.1: resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} engines: {node: '>=6'} dev: true - /camelcase/6.3.0: + /camelcase@6.3.0: resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} engines: {node: '>=10'} dev: true - /caniuse-lite/1.0.30001414: + /caniuse-lite@1.0.30001414: resolution: {integrity: sha512-t55jfSaWjCdocnFdKQoO+d2ct9C59UZg4dY3OnUlSZ447r8pUtIKdp0hpAzrGFultmTC+Us+KpKi4GZl/LXlFg==} dev: true - /chalk/2.4.2: + /chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} dependencies: @@ -1474,7 +1863,7 @@ packages: supports-color: 5.5.0 dev: true - /chalk/3.0.0: + /chalk@3.0.0: resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} engines: {node: '>=8'} dependencies: @@ -1482,98 +1871,152 @@ packages: supports-color: 7.2.0 dev: true - /chalk/4.1.2: + /chalk@4.1.1: + resolution: {integrity: sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: false + + /chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} dependencies: ansi-styles: 4.3.0 supports-color: 7.2.0 - dev: true - /char-regex/1.0.2: + /char-regex@1.0.2: resolution: {integrity: 
sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} engines: {node: '>=10'} dev: true - /ci-info/3.4.0: + /chardet@0.7.0: + resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} + dev: false + + /chokidar@3.5.3: + resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + engines: {node: '>= 8.10.0'} + dependencies: + anymatch: 3.1.2 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.2 + dev: false + + /ci-info@2.0.0: + resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} + dev: true + + /ci-info@3.4.0: resolution: {integrity: sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug==} dev: true - /cjs-module-lexer/1.2.2: + /cjs-module-lexer@1.2.2: resolution: {integrity: sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} dev: true - /cliui/8.0.1: + /cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + dependencies: + restore-cursor: 3.1.0 + dev: false + + /cli-spinners@2.7.0: + resolution: {integrity: sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw==} + engines: {node: '>=6'} + dev: false + + /cli-width@3.0.0: + resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} + engines: {node: '>= 10'} + dev: false + + /cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} dependencies: string-width: 4.2.3 strip-ansi: 6.0.1 wrap-ansi: 7.0.0 - dev: true - /clsx/1.2.1: + /clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + dev: false + + /clsx@1.2.1: resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} engines: {node: '>=6'} dev: false - /co/4.6.0: + /co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} dev: true - /collect-v8-coverage/1.0.1: + /collect-v8-coverage@1.0.1: resolution: {integrity: sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==} dev: true - /color-convert/1.9.3: + /color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} dependencies: color-name: 1.1.3 dev: true - /color-convert/2.0.1: + /color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} dependencies: color-name: 1.1.4 - dev: true - /color-name/1.1.3: + /color-name@1.1.3: resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} dev: true - /color-name/1.1.4: + /color-name@1.1.4: resolution: {integrity: 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - dev: true - /combined-stream/1.0.8: + /combined-stream@1.0.8: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 dev: true - /commander/7.2.0: + /commander@7.2.0: resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} engines: {node: '>= 10'} dev: true - /concat-map/0.0.1: + /concat-map@0.0.1: resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=} dev: true - /convert-source-map/1.8.0: + /convert-source-map@1.8.0: resolution: {integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==} dependencies: safe-buffer: 5.1.2 dev: true - /create-require/1.1.1: + /cookie@0.4.2: + resolution: {integrity: sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==} + engines: {node: '>= 0.6'} + dev: false + + /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} dev: true - /cross-spawn/6.0.5: + /cross-spawn@6.0.5: resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==} engines: {node: '>=4.8'} dependencies: @@ -1584,7 +2027,7 @@ packages: which: 1.3.1 dev: true - /cross-spawn/7.0.3: + /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} dependencies: @@ -1593,29 +2036,33 @@ packages: which: 2.0.2 dev: true - /css.escape/1.5.1: + /css.escape@1.5.1: resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==} dev: true - /cssom/0.3.8: + /cssom@0.3.8: resolution: {integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==} dev: true - /cssom/0.5.0: + /cssom@0.5.0: resolution: {integrity: sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==} dev: true - /cssstyle/2.3.0: + /cssstyle@2.3.0: resolution: {integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==} engines: {node: '>=8'} dependencies: cssom: 0.3.8 dev: true - /csstype/3.1.1: + /csstype@3.1.1: resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==} - /data-urls/3.0.2: + /damerau-levenshtein@1.0.8: + resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==} + dev: true + + /data-urls@3.0.2: resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==} engines: {node: '>=12'} dependencies: @@ -1624,7 +2071,7 @@ packages: whatwg-url: 11.0.0 dev: true - /debug/4.3.4: + /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} peerDependencies: @@ -1634,26 +2081,53 @@ packages: optional: true dependencies: ms: 2.1.2 - dev: true - /decimal.js/10.4.1: + /decimal.js@10.4.1: resolution: {integrity: sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw==} dev: true - /dedent/0.7.0: + 
/dedent@0.7.0: resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} dev: true - /deep-is/0.1.4: + /deep-equal@2.2.0: + resolution: {integrity: sha512-RdpzE0Hv4lhowpIUKKMJfeH6C1pXdtT1/it80ubgWqwI3qpuxUBpC1S4hnHg+zjnuOoDkzUtUCEEkG+XG5l3Mw==} + dependencies: + call-bind: 1.0.2 + es-get-iterator: 1.1.3 + get-intrinsic: 1.1.3 + is-arguments: 1.1.1 + is-array-buffer: 3.0.1 + is-date-object: 1.0.5 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + isarray: 2.0.5 + object-is: 1.1.5 + object-keys: 1.1.1 + object.assign: 4.1.4 + regexp.prototype.flags: 1.4.3 + side-channel: 1.0.4 + which-boxed-primitive: 1.0.2 + which-collection: 1.0.1 + which-typed-array: 1.1.9 + dev: true + + /deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} dev: true - /deepmerge/4.2.2: + /deepmerge@4.2.2: resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} engines: {node: '>=0.10.0'} dev: true - /define-properties/1.1.4: + /defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + dependencies: + clone: 1.0.4 + dev: false + + /define-properties@1.1.4: resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} engines: {node: '>= 0.4'} dependencies: @@ -1661,110 +2135,158 @@ packages: object-keys: 1.1.1 dev: true - /define-route/0.3.1: + /define-route@0.3.1: resolution: {integrity: sha512-8lVnNT4K571CXccwzMGjDBU7vy1y+NhjMsxTizaR53OjV0QGr2QuJ0sMuyYU4bPDwYeAQjT7s//7yW+0QkqpmQ==} dev: false - /delayed-stream/1.0.0: + /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} dev: true - /detect-newline/3.1.0: + /detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} engines: {node: '>=8'} dev: true - /diff-sequences/29.0.0: + /diff-sequences@29.0.0: resolution: {integrity: sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true - /diff/4.0.2: + /diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} dev: true - /dir-glob/3.0.1: + /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} dependencies: path-type: 4.0.0 dev: true - /doctrine/3.0.0: + /doctrine@2.1.0: + resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} + engines: {node: '>=0.10.0'} + dependencies: + esutils: 2.0.3 + dev: true + + /doctrine@3.0.0: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} dependencies: esutils: 2.0.3 dev: true - /dom-accessibility-api/0.5.14: + /dom-accessibility-api@0.5.14: resolution: {integrity: sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg==} dev: true - /domexception/4.0.0: + /domexception@4.0.0: resolution: {integrity: 
sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==} engines: {node: '>=12'} dependencies: webidl-conversions: 7.0.0 dev: true - /electron-to-chromium/1.4.270: + /electron-to-chromium@1.4.270: resolution: {integrity: sha512-KNhIzgLiJmDDC444dj9vEOpZEgsV96ult9Iff98Vanumn+ShJHd5se8aX6KeVxdc0YQeqdrezBZv89rleDbvSg==} dev: true - /emittery/0.10.2: + /emittery@0.10.2: resolution: {integrity: sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw==} engines: {node: '>=12'} dev: true - /emoji-regex/8.0.0: + /emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + /emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} dev: true - /entities/4.4.0: + /entities@4.4.0: resolution: {integrity: sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==} engines: {node: '>=0.12'} dev: true - /error-ex/1.3.2: + /error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} dependencies: is-arrayish: 0.2.1 dev: true - /es-abstract/1.20.3: - resolution: {integrity: sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==} + /es-abstract@1.21.1: + resolution: {integrity: sha512-QudMsPOz86xYz/1dG1OuGBKOELjCh99IIWHLzy5znUB6j8xG2yMA7bfTV86VSqKF+Y/H08vQPR+9jyXpuC6hfg==} engines: {node: '>= 0.4'} dependencies: + available-typed-arrays: 1.0.5 call-bind: 1.0.2 + es-set-tostringtag: 2.0.1 es-to-primitive: 1.2.1 function-bind: 1.1.1 function.prototype.name: 1.1.5 get-intrinsic: 1.1.3 get-symbol-description: 1.0.0 + globalthis: 1.0.3 + gopd: 1.0.1 has: 1.0.3 has-property-descriptors: 1.0.0 + has-proto: 1.0.1 has-symbols: 1.0.3 - internal-slot: 1.0.3 + internal-slot: 1.0.4 + is-array-buffer: 3.0.1 is-callable: 1.2.7 is-negative-zero: 2.0.2 is-regex: 1.1.4 is-shared-array-buffer: 1.0.2 is-string: 1.0.7 + is-typed-array: 1.1.10 is-weakref: 1.0.2 object-inspect: 1.12.2 object-keys: 1.1.1 object.assign: 4.1.4 regexp.prototype.flags: 1.4.3 safe-regex-test: 1.0.0 - string.prototype.trimend: 1.0.5 - string.prototype.trimstart: 1.0.5 + string.prototype.trimend: 1.0.6 + string.prototype.trimstart: 1.0.6 + typed-array-length: 1.0.4 unbox-primitive: 1.0.2 + which-typed-array: 1.1.9 + dev: true + + /es-get-iterator@1.1.3: + resolution: {integrity: sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + has-symbols: 1.0.3 + is-arguments: 1.1.1 + is-map: 2.0.2 + is-set: 2.0.2 + is-string: 1.0.7 + isarray: 2.0.5 + stop-iteration-iterator: 1.0.0 dev: true - /es-to-primitive/1.2.1: + /es-set-tostringtag@2.0.1: + resolution: {integrity: sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.1.3 + has: 1.0.3 + has-tostringtag: 1.0.0 + dev: true + + /es-shim-unscopables@1.0.0: + resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==} + dependencies: + has: 1.0.3 + dev: true + + /es-to-primitive@1.2.1: resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} engines: {node: '>= 0.4'} dependencies: @@ -1773,27 
+2295,25 @@ packages: is-symbol: 1.0.4 dev: true - /escalade/3.1.1: + /escalade@3.1.1: resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} engines: {node: '>=6'} - dev: true - /escape-string-regexp/1.0.5: + /escape-string-regexp@1.0.5: resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} engines: {node: '>=0.8.0'} - dev: true - /escape-string-regexp/2.0.0: + /escape-string-regexp@2.0.0: resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} engines: {node: '>=8'} dev: true - /escape-string-regexp/4.0.0: + /escape-string-regexp@4.0.0: resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} engines: {node: '>=10'} dev: true - /escodegen/2.0.0: + /escodegen@2.0.0: resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} engines: {node: '>=6.0'} hasBin: true @@ -1806,7 +2326,152 @@ packages: source-map: 0.6.1 dev: true - /eslint-scope/7.1.1: + /eslint-plugin-jest-dom@4.0.3(eslint@8.24.0): + resolution: {integrity: sha512-9j+n8uj0+V0tmsoS7bYC7fLhQmIvjRqRYEcbDSi+TKPsTThLLXCyj5swMSSf/hTleeMktACnn+HFqXBr5gbcbA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0, npm: '>=6', yarn: '>=1'} + peerDependencies: + eslint: ^6.8.0 || ^7.0.0 || ^8.0.0 + dependencies: + '@babel/runtime': 7.19.0 + '@testing-library/dom': 8.18.1 + eslint: 8.24.0 + requireindex: 1.2.0 + dev: true + + /eslint-plugin-jest@26.9.0(@typescript-eslint/eslint-plugin@5.49.0)(eslint@8.24.0)(jest@29.1.2)(typescript@4.8.4): + resolution: {integrity: sha512-TWJxWGp1J628gxh2KhaH1H1paEdgE2J61BBF1I59c6xWeL5+D1BzMxGDN/nXAfX+aSkR5u80K+XhskK6Gwq9ng==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + '@typescript-eslint/eslint-plugin': ^5.0.0 + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + jest: '*' + peerDependenciesMeta: + '@typescript-eslint/eslint-plugin': + optional: true + jest: + optional: true + dependencies: + '@typescript-eslint/eslint-plugin': 5.49.0(@typescript-eslint/parser@5.49.0)(eslint@8.24.0)(typescript@4.8.4) + '@typescript-eslint/utils': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + eslint: 8.24.0 + jest: 29.1.2(@types/node@18.8.0)(ts-node@10.9.1) + transitivePeerDependencies: + - supports-color + - typescript + dev: true + + /eslint-plugin-jsx-a11y@6.7.1(eslint@8.24.0): + resolution: {integrity: sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA==} + engines: {node: '>=4.0'} + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 + dependencies: + '@babel/runtime': 7.20.13 + aria-query: 5.1.3 + array-includes: 3.1.6 + array.prototype.flatmap: 1.3.1 + ast-types-flow: 0.0.7 + axe-core: 4.6.3 + axobject-query: 3.1.1 + damerau-levenshtein: 1.0.8 + emoji-regex: 9.2.2 + eslint: 8.24.0 + has: 1.0.3 + jsx-ast-utils: 3.3.3 + language-tags: 1.0.5 + minimatch: 3.1.2 + object.entries: 1.1.6 + object.fromentries: 2.0.6 + semver: 6.3.0 + dev: true + + /eslint-plugin-lit@1.8.2(eslint@8.24.0): + resolution: {integrity: sha512-4mOGcSRNEPMh7AN2F7Iy6no36nuFgyYOsnTRhFw1k8xyy1Zm6QOp788ywDvJqy+eelFbLPBhq20Qr55a887Dmw==} + engines: {node: '>= 12'} + peerDependencies: + eslint: '>= 5' + dependencies: + eslint: 8.24.0 + parse5: 6.0.1 + parse5-htmlparser2-tree-adapter: 6.0.1 + requireindex: 1.2.0 + dev: true + + /eslint-plugin-mocha@10.1.0(eslint@8.24.0): + 
resolution: {integrity: sha512-xLqqWUF17llsogVOC+8C6/jvQ+4IoOREbN7ZCHuOHuD6cT5cDD4h7f2LgsZuzMAiwswWE21tO7ExaknHVDrSkw==} + engines: {node: '>=14.0.0'} + peerDependencies: + eslint: '>=7.0.0' + dependencies: + eslint: 8.24.0 + eslint-utils: 3.0.0(eslint@8.24.0) + rambda: 7.4.0 + dev: true + + /eslint-plugin-promise@6.1.1(eslint@8.24.0): + resolution: {integrity: sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + dependencies: + eslint: 8.24.0 + dev: true + + /eslint-plugin-react-hooks@4.6.0(eslint@8.24.0): + resolution: {integrity: sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==} + engines: {node: '>=10'} + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 + dependencies: + eslint: 8.24.0 + dev: true + + /eslint-plugin-react@7.32.1(eslint@8.24.0): + resolution: {integrity: sha512-vOjdgyd0ZHBXNsmvU+785xY8Bfe57EFbTYYk8XrROzWpr9QBvpjITvAXt9xqcE6+8cjR/g1+mfumPToxsl1www==} + engines: {node: '>=4'} + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 + dependencies: + array-includes: 3.1.6 + array.prototype.flatmap: 1.3.1 + array.prototype.tosorted: 1.1.1 + doctrine: 2.1.0 + eslint: 8.24.0 + estraverse: 5.3.0 + jsx-ast-utils: 3.3.3 + minimatch: 3.1.2 + object.entries: 1.1.6 + object.fromentries: 2.0.6 + object.hasown: 1.1.2 + object.values: 1.1.6 + prop-types: 15.8.1 + resolve: 2.0.0-next.4 + semver: 6.3.0 + string.prototype.matchall: 4.0.8 + dev: true + + /eslint-plugin-testing-library@5.10.0(eslint@8.24.0)(typescript@4.8.4): + resolution: {integrity: sha512-aTOsCAEI9trrX3TLOnsskfhe57DmsjP/yMKLPqg4ftdRvfR4qut2PGWUa8TwP7whZbwMzJjh98tgAPcE8vdHow==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0, npm: '>=6'} + peerDependencies: + eslint: ^7.5.0 || ^8.0.0 + dependencies: + '@typescript-eslint/utils': 5.49.0(eslint@8.24.0)(typescript@4.8.4) + eslint: 8.24.0 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + + /eslint-scope@5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + dev: true + + /eslint-scope@7.1.1: resolution: {integrity: sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: @@ -1814,7 +2479,7 @@ packages: estraverse: 5.3.0 dev: true - /eslint-utils/3.0.0_eslint@8.24.0: + /eslint-utils@3.0.0(eslint@8.24.0): resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==} engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0} peerDependencies: @@ -1824,17 +2489,17 @@ packages: eslint-visitor-keys: 2.1.0 dev: true - /eslint-visitor-keys/2.1.0: + /eslint-visitor-keys@2.1.0: resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} engines: {node: '>=10'} dev: true - /eslint-visitor-keys/3.3.0: + /eslint-visitor-keys@3.3.0: resolution: {integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dev: true - /eslint/8.24.0: + /eslint@8.24.0: resolution: {integrity: 
sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} hasBin: true @@ -1850,7 +2515,7 @@ packages: doctrine: 3.0.0 escape-string-regexp: 4.0.0 eslint-scope: 7.1.1 - eslint-utils: 3.0.0_eslint@8.24.0 + eslint-utils: 3.0.0(eslint@8.24.0) eslint-visitor-keys: 3.3.0 espree: 9.4.0 esquery: 1.4.0 @@ -1882,46 +2547,56 @@ packages: - supports-color dev: true - /espree/9.4.0: + /espree@9.4.0: resolution: {integrity: sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: acorn: 8.8.0 - acorn-jsx: 5.3.2_acorn@8.8.0 + acorn-jsx: 5.3.2(acorn@8.8.0) eslint-visitor-keys: 3.3.0 dev: true - /esprima/4.0.1: + /esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} hasBin: true dev: true - /esquery/1.4.0: + /esquery@1.4.0: resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==} engines: {node: '>=0.10'} dependencies: estraverse: 5.3.0 dev: true - /esrecurse/4.3.0: + /esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} dependencies: estraverse: 5.3.0 dev: true - /estraverse/5.3.0: + /estraverse@4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} + dev: true + + /estraverse@5.3.0: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} dev: true - /esutils/2.0.3: + /esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} dev: true - /execa/5.1.1: + /events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + dev: false + + /execa@5.1.1: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} dependencies: @@ -1936,12 +2611,12 @@ packages: strip-final-newline: 2.0.0 dev: true - /exit/0.1.2: + /exit@0.1.2: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} engines: {node: '>= 0.8.0'} dev: true - /expect/29.1.2: + /expect@29.1.2: resolution: {integrity: sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -1952,15 +2627,24 @@ packages: jest-util: 29.1.2 dev: true - /fast-deep-equal/3.1.3: + /external-editor@3.1.0: + resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} + engines: {node: '>=4'} + dependencies: + chardet: 0.7.0 + iconv-lite: 0.4.24 + tmp: 0.0.33 + dev: false + + /fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} dev: true - /fast-equals/3.0.3: + /fast-equals@3.0.3: resolution: {integrity: sha512-NCe8qxnZFARSHGztGMZOO/PC1qa5MIFB5Hp66WdzbCRAz8U8US3bx1UTgLS49efBQPcUtO9gf5oVEY8o7y/7Kg==} dev: false - 
/fast-glob/3.2.12: + /fast-glob@3.2.12: resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} engines: {node: '>=8.6.0'} dependencies: @@ -1971,41 +2655,47 @@ packages: micromatch: 4.0.5 dev: true - /fast-json-stable-stringify/2.1.0: + /fast-json-stable-stringify@2.1.0: resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} dev: true - /fast-levenshtein/2.0.6: + /fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} dev: true - /fastq/1.13.0: + /fastq@1.13.0: resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} dependencies: reusify: 1.0.4 dev: true - /fb-watchman/2.0.2: + /fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} dependencies: bser: 2.1.1 dev: true - /file-entry-cache/6.0.1: + /figures@3.2.0: + resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} + engines: {node: '>=8'} + dependencies: + escape-string-regexp: 1.0.5 + dev: false + + /file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} engines: {node: ^10.12.0 || >=12.0.0} dependencies: flat-cache: 3.0.4 dev: true - /fill-range/7.0.1: + /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} dependencies: to-regex-range: 5.0.1 - dev: true - /find-up/4.1.0: + /find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} dependencies: @@ -2013,7 +2703,7 @@ packages: path-exists: 4.0.0 dev: true - /find-up/5.0.0: + /find-up@5.0.0: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} engines: {node: '>=10'} dependencies: @@ -2021,7 +2711,7 @@ packages: path-exists: 4.0.0 dev: true - /flat-cache/3.0.4: + /flat-cache@3.0.4: resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} engines: {node: ^10.12.0 || >=12.0.0} dependencies: @@ -2029,11 +2719,16 @@ packages: rimraf: 3.0.2 dev: true - /flatted/3.2.7: + /flatted@3.2.7: resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} dev: true - /form-data/4.0.0: + /for-each@0.3.3: + resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} + dependencies: + is-callable: 1.2.7 + + /form-data@4.0.0: resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} engines: {node: '>= 6'} dependencies: @@ -2042,65 +2737,61 @@ packages: mime-types: 2.1.35 dev: true - /fs.realpath/1.0.0: + /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true - /fsevents/2.3.2: + /fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] 
requiresBuild: true - dev: true optional: true - /function-bind/1.1.1: + /function-bind@1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - dev: true - /function.prototype.name/1.1.5: + /function.prototype.name@1.1.5: resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.4 - es-abstract: 1.20.3 + es-abstract: 1.21.1 functions-have-names: 1.2.3 dev: true - /functions-have-names/1.2.3: + /functions-have-names@1.2.3: resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} dev: true - /gensync/1.0.0-beta.2: + /gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} dev: true - /get-caller-file/2.0.5: + /get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - dev: true - /get-intrinsic/1.1.3: + /get-intrinsic@1.1.3: resolution: {integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==} dependencies: function-bind: 1.1.1 has: 1.0.3 has-symbols: 1.0.3 - dev: true - /get-package-type/0.1.0: + /get-package-type@0.1.0: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} dev: true - /get-stream/6.0.1: + /get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} dev: true - /get-symbol-description/1.0.0: + /get-symbol-description@1.0.0: resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} engines: {node: '>= 0.4'} dependencies: @@ -2108,21 +2799,20 @@ packages: get-intrinsic: 1.1.3 dev: true - /glob-parent/5.1.2: + /glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} dependencies: is-glob: 4.0.3 - dev: true - /glob-parent/6.0.2: + /glob-parent@6.0.2: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} engines: {node: '>=10.13.0'} dependencies: is-glob: 4.0.3 dev: true - /glob/7.2.3: + /glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} dependencies: fs.realpath: 1.0.0 @@ -2133,7 +2823,7 @@ packages: path-is-absolute: 1.0.1 dev: true - /glob/8.0.3: + /glob@8.0.3: resolution: {integrity: sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==} engines: {node: '>=12'} dependencies: @@ -2144,19 +2834,26 @@ packages: once: 1.4.0 dev: true - /globals/11.12.0: + /globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} engines: {node: '>=4'} dev: true - /globals/13.17.0: + /globals@13.17.0: resolution: {integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==} engines: {node: '>=8'} dependencies: type-fest: 0.20.2 dev: true - /globby/11.1.0: + 
/globalthis@1.0.3: + resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} + engines: {node: '>= 0.4'} + dependencies: + define-properties: 1.1.4 + dev: true + + /globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} dependencies: @@ -2168,69 +2865,84 @@ packages: slash: 3.0.0 dev: true - /graceful-fs/4.2.10: + /gopd@1.0.1: + resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} + dependencies: + get-intrinsic: 1.1.3 + + /graceful-fs@4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} dev: true - /grapheme-splitter/1.0.4: + /grapheme-splitter@1.0.4: resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} dev: true - /has-bigints/1.0.2: + /graphql@16.6.0: + resolution: {integrity: sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + dev: false + + /has-bigints@1.0.2: resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} dev: true - /has-flag/3.0.0: + /has-flag@3.0.0: resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} engines: {node: '>=4'} dev: true - /has-flag/4.0.0: + /has-flag@4.0.0: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} - dev: true - /has-property-descriptors/1.0.0: + /has-property-descriptors@1.0.0: resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} dependencies: get-intrinsic: 1.1.3 dev: true - /has-symbols/1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + /has-proto@1.0.1: + resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} engines: {node: '>= 0.4'} dev: true - /has-tostringtag/1.0.0: + /has-symbols@1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + + /has-tostringtag@1.0.0: resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} engines: {node: '>= 0.4'} dependencies: has-symbols: 1.0.3 - dev: true - /has/1.0.3: + /has@1.0.3: resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} engines: {node: '>= 0.4.0'} dependencies: function-bind: 1.1.1 - dev: true - /hosted-git-info/2.8.9: + /headers-polyfill@3.1.2: + resolution: {integrity: sha512-tWCK4biJ6hcLqTviLXVR9DTRfYGQMXEIUj3gwJ2rZ5wO/at3XtkI4g8mCvFdUF9l1KMBNCfmNAdnahm1cgavQA==} + dev: false + + /hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} dev: true - /html-encoding-sniffer/3.0.0: + /html-encoding-sniffer@3.0.0: resolution: {integrity: sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==} engines: {node: '>=12'} 
dependencies: whatwg-encoding: 2.0.0 dev: true - /html-escaper/2.0.2: + /html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} dev: true - /http-proxy-agent/5.0.0: + /http-proxy-agent@5.0.0: resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} engines: {node: '>= 6'} dependencies: @@ -2241,7 +2953,7 @@ packages: - supports-color dev: true - /https-proxy-agent/5.0.1: + /https-proxy-agent@5.0.1: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} engines: {node: '>= 6'} dependencies: @@ -2251,24 +2963,35 @@ packages: - supports-color dev: true - /human-signals/2.1.0: + /human-signals@2.1.0: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} dev: true - /iconv-lite/0.6.3: + /iconv-lite@0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: false + + /iconv-lite@0.6.3: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} dependencies: safer-buffer: 2.1.2 dev: true - /ignore/5.2.0: + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: false + + /ignore@5.2.0: resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==} engines: {node: '>= 4'} dev: true - /import-fresh/3.3.0: + /import-fresh@3.3.0: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} engines: {node: '>=6'} dependencies: @@ -2276,7 +2999,7 @@ packages: resolve-from: 4.0.0 dev: true - /import-local/3.1.0: + /import-local@3.1.0: resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} engines: {node: '>=8'} hasBin: true @@ -2285,28 +3008,48 @@ packages: resolve-cwd: 3.0.0 dev: true - /imurmurhash/0.1.4: + /imurmurhash@0.1.4: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} engines: {node: '>=0.8.19'} dev: true - /indent-string/4.0.0: + /indent-string@4.0.0: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} dev: true - /inflight/1.0.6: + /inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} dependencies: once: 1.4.0 wrappy: 1.0.2 dev: true - /inherits/2.0.4: + /inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true - /internal-slot/1.0.3: + /inquirer@8.2.5: + resolution: {integrity: sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ==} + engines: {node: '>=12.0.0'} + dependencies: + ansi-escapes: 4.3.2 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-width: 3.0.0 + external-editor: 3.1.0 + figures: 3.2.0 + lodash: 4.17.21 + mute-stream: 0.0.8 + ora: 5.4.1 + run-async: 2.4.1 + rxjs: 7.5.7 + string-width: 4.2.3 + 
strip-ansi: 6.0.1 + through: 2.3.8 + wrap-ansi: 7.0.0 + dev: false + + /internal-slot@1.0.3: resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} engines: {node: '>= 0.4'} dependencies: @@ -2315,7 +3058,16 @@ packages: side-channel: 1.0.4 dev: true - /intl-messageformat/10.2.1: + /internal-slot@1.0.4: + resolution: {integrity: sha512-tA8URYccNzMo94s5MQZgH8NB/XTa6HsOo0MLfXTKKEnHVVdegzaQoFZ7Jp44bdvLvY2waT5dc+j5ICEswhi7UQ==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.1.3 + has: 1.0.3 + side-channel: 1.0.4 + dev: true + + /intl-messageformat@10.2.1: resolution: {integrity: sha512-1lrJG2qKzcC1TVzYu1VuB1yiY68LU5rwpbHa2THCzA67Vutkz7+1lv5U20K3Lz5RAiH78zxNztMEtchokMWv8A==} dependencies: '@formatjs/ecma402-abstract': 1.13.0 @@ -2324,17 +3076,39 @@ packages: tslib: 2.4.0 dev: false - /is-arrayish/0.2.1: + /is-arguments@1.1.1: + resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + + /is-array-buffer@3.0.1: + resolution: {integrity: sha512-ASfLknmY8Xa2XtB4wmbz13Wu202baeA18cJBCeCy0wXUHZF0IPyVEXqKEcd+t2fNSLLL1vC6k7lxZEojNbISXQ==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + is-typed-array: 1.1.10 + dev: true + + /is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} dev: true - /is-bigint/1.0.4: + /is-bigint@1.0.4: resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} dependencies: has-bigints: 1.0.2 dev: true - /is-boolean-object/1.1.2: + /is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + dependencies: + binary-extensions: 2.2.0 + dev: false + + /is-boolean-object@1.1.2: resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} engines: {node: '>= 0.4'} dependencies: @@ -2342,68 +3116,90 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-callable/1.2.7: + /is-callable@1.2.7: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} engines: {node: '>= 0.4'} + + /is-ci@2.0.0: + resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} + hasBin: true + dependencies: + ci-info: 2.0.0 dev: true - /is-core-module/2.10.0: + /is-core-module@2.10.0: resolution: {integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==} dependencies: has: 1.0.3 dev: true - /is-date-object/1.0.5: + /is-date-object@1.0.5: resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} engines: {node: '>= 0.4'} dependencies: has-tostringtag: 1.0.0 dev: true - /is-extglob/2.1.1: + /is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - dev: true - /is-fullwidth-code-point/3.0.0: + /is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} - dev: true - /is-generator-fn/2.1.0: + 
/is-generator-fn@2.1.0: resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} engines: {node: '>=6'} dev: true - /is-glob/4.0.3: + /is-generator-function@1.0.10: + resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: false + + /is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 + + /is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: {node: '>=8'} + dev: false + + /is-map@2.0.2: + resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} dev: true - /is-negative-zero/2.0.2: + /is-negative-zero@2.0.2: resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} engines: {node: '>= 0.4'} dev: true - /is-number-object/1.0.7: + /is-node-process@1.0.1: + resolution: {integrity: sha512-5IcdXuf++TTNt3oGl9EBdkvndXA8gmc4bz/Y+mdEpWh3Mcn/+kOw6hI7LD5CocqJWMzeb0I0ClndRVNdEPuJXQ==} + dev: false + + /is-number-object@1.0.7: resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} engines: {node: '>= 0.4'} dependencies: has-tostringtag: 1.0.0 dev: true - /is-number/7.0.0: + /is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} - dev: true - /is-potential-custom-element-name/1.0.1: + /is-potential-custom-element-name@1.0.1: resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} dev: true - /is-regex/1.1.4: + /is-regex@1.1.4: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} engines: {node: '>= 0.4'} dependencies: @@ -2411,47 +3207,81 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-shared-array-buffer/1.0.2: + /is-set@2.0.2: + resolution: {integrity: sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==} + dev: true + + /is-shared-array-buffer@1.0.2: resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} dependencies: call-bind: 1.0.2 dev: true - /is-stream/2.0.1: + /is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} dev: true - /is-string/1.0.7: + /is-string@1.0.7: resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} engines: {node: '>= 0.4'} dependencies: has-tostringtag: 1.0.0 dev: true - /is-symbol/1.0.4: + /is-symbol@1.0.4: resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} engines: {node: '>= 0.4'} dependencies: has-symbols: 1.0.3 dev: true - /is-weakref/1.0.2: + /is-typed-array@1.1.10: + resolution: {integrity: sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==} + engines: {node: '>= 0.4'} + dependencies: + 
available-typed-arrays: 1.0.5 + call-bind: 1.0.2 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.0 + + /is-unicode-supported@0.1.0: + resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} + engines: {node: '>=10'} + dev: false + + /is-weakmap@2.0.1: + resolution: {integrity: sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==} + dev: true + + /is-weakref@1.0.2: resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} dependencies: call-bind: 1.0.2 dev: true - /isexe/2.0.0: + /is-weakset@2.0.2: + resolution: {integrity: sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + dev: true + + /isarray@2.0.5: + resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} + dev: true + + /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true - /istanbul-lib-coverage/3.2.0: + /istanbul-lib-coverage@3.2.0: resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} engines: {node: '>=8'} dev: true - /istanbul-lib-instrument/5.2.0: + /istanbul-lib-instrument@5.2.0: resolution: {integrity: sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A==} engines: {node: '>=8'} dependencies: @@ -2464,7 +3294,7 @@ packages: - supports-color dev: true - /istanbul-lib-report/3.0.0: + /istanbul-lib-report@3.0.0: resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} engines: {node: '>=8'} dependencies: @@ -2473,7 +3303,7 @@ packages: supports-color: 7.2.0 dev: true - /istanbul-lib-source-maps/4.0.1: + /istanbul-lib-source-maps@4.0.1: resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} engines: {node: '>=10'} dependencies: @@ -2484,7 +3314,7 @@ packages: - supports-color dev: true - /istanbul-reports/3.1.5: + /istanbul-reports@3.1.5: resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} engines: {node: '>=8'} dependencies: @@ -2492,7 +3322,7 @@ packages: istanbul-lib-report: 3.0.0 dev: true - /jest-changed-files/29.0.0: + /jest-changed-files@29.0.0: resolution: {integrity: sha512-28/iDMDrUpGoCitTURuDqUzWQoWmOmOKOFST1mi2lwh62X4BFf6khgH3uSuo1e49X/UDjuApAj3w0wLOex4VPQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2500,7 +3330,7 @@ packages: p-limit: 3.1.0 dev: true - /jest-circus/29.1.2: + /jest-circus@29.1.2: resolution: {integrity: sha512-ajQOdxY6mT9GtnfJRZBRYS7toNIJayiiyjDyoZcnvPRUPwJ58JX0ci0PKAKUo2C1RyzlHw0jabjLGKksO42JGA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2527,7 +3357,7 @@ packages: - supports-color dev: true - /jest-cli/29.1.2_wnseany3vswo6p7nhyzogpjzqe: + /jest-cli@29.1.2(@types/node@18.8.0)(ts-node@10.9.1): resolution: {integrity: sha512-vsvBfQ7oS2o4MJdAH+4u9z76Vw5Q8WBQF5MchDbkylNknZdrPTX1Ix7YRJyTlOWqRaS7ue/cEAn+E4V1MWyMzw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true @@ -2537,14 +3367,14 @@ packages: node-notifier: optional: true dependencies: - '@jest/core': 29.1.2_ts-node@10.9.1 + '@jest/core': 
29.1.2(ts-node@10.9.1) '@jest/test-result': 29.1.2 '@jest/types': 29.1.2 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.10 import-local: 3.1.0 - jest-config: 29.1.2_wnseany3vswo6p7nhyzogpjzqe + jest-config: 29.1.2(@types/node@18.8.0)(ts-node@10.9.1) jest-util: 29.1.2 jest-validate: 29.1.2 prompts: 2.4.2 @@ -2555,7 +3385,7 @@ packages: - ts-node dev: true - /jest-config/29.1.2_wnseany3vswo6p7nhyzogpjzqe: + /jest-config@29.1.2(@types/node@18.8.0)(ts-node@10.9.1): resolution: {integrity: sha512-EC3Zi86HJUOz+2YWQcJYQXlf0zuBhJoeyxLM6vb6qJsVmpP7KcCP1JnyF0iaqTaXdBP8Rlwsvs7hnKWQWWLwwA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: @@ -2571,7 +3401,7 @@ packages: '@jest/test-sequencer': 29.1.2 '@jest/types': 29.1.2 '@types/node': 18.8.0 - babel-jest: 29.1.2_@babel+core@7.19.3 + babel-jest: 29.1.2(@babel/core@7.19.3) chalk: 4.1.2 ci-info: 3.4.0 deepmerge: 4.2.2 @@ -2590,12 +3420,12 @@ packages: pretty-format: 29.1.2 slash: 3.0.0 strip-json-comments: 3.1.1 - ts-node: 10.9.1_6ma5wccgcz7ykn2lbqtwbuz2ty + ts-node: 10.9.1(@swc/core@1.3.18)(@types/node@18.8.0)(typescript@4.8.4) transitivePeerDependencies: - supports-color dev: true - /jest-diff/29.1.2: + /jest-diff@29.1.2: resolution: {integrity: sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2605,14 +3435,14 @@ packages: pretty-format: 29.1.2 dev: true - /jest-docblock/29.0.0: + /jest-docblock@29.0.0: resolution: {integrity: sha512-s5Kpra/kLzbqu9dEjov30kj1n4tfu3e7Pl8v+f8jOkeWNqM6Ds8jRaJfZow3ducoQUrf2Z4rs2N5S3zXnb83gw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: detect-newline: 3.1.0 dev: true - /jest-each/29.1.2: + /jest-each@29.1.2: resolution: {integrity: sha512-AmTQp9b2etNeEwMyr4jc0Ql/LIX/dhbgP21gHAizya2X6rUspHn2gysMXaj6iwWuOJ2sYRgP8c1P4cXswgvS1A==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2623,7 +3453,7 @@ packages: pretty-format: 29.1.2 dev: true - /jest-environment-jsdom/29.1.2: + /jest-environment-jsdom@29.1.2: resolution: {integrity: sha512-D+XNIKia5+uDjSMwL/G1l6N9MCb7LymKI8FpcLo7kkISjc/Sa9w+dXXEa7u1Wijo3f8sVLqfxdGqYtRhmca+Xw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2642,7 +3472,7 @@ packages: - utf-8-validate dev: true - /jest-environment-node/29.1.2: + /jest-environment-node@29.1.2: resolution: {integrity: sha512-C59yVbdpY8682u6k/lh8SUMDJPbOyCHOTgLVVi1USWFxtNV+J8fyIwzkg+RJIVI30EKhKiAGNxYaFr3z6eyNhQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2654,12 +3484,12 @@ packages: jest-util: 29.1.2 dev: true - /jest-get-type/29.0.0: + /jest-get-type@29.0.0: resolution: {integrity: sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true - /jest-haste-map/29.1.2: + /jest-haste-map@29.1.2: resolution: {integrity: sha512-xSjbY8/BF11Jh3hGSPfYTa/qBFrm3TPM7WU8pU93m2gqzORVLkHFWvuZmFsTEBPRKndfewXhMOuzJNHyJIZGsw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2678,7 +3508,7 @@ packages: fsevents: 2.3.2 dev: true - /jest-junit/14.0.1: + /jest-junit@14.0.1: resolution: {integrity: sha512-h7/wwzPbllgpQhhVcRzRC76/cc89GlazThoV1fDxcALkf26IIlRsu/AcTG64f4nR2WPE3Cbd+i/sVf+NCUHrWQ==} engines: {node: '>=10.12.0'} dependencies: @@ -2688,7 +3518,7 @@ packages: xml: 1.0.1 dev: true - /jest-leak-detector/29.1.2: + /jest-leak-detector@29.1.2: resolution: {integrity: 
sha512-TG5gAZJpgmZtjb6oWxBLf2N6CfQ73iwCe6cofu/Uqv9iiAm6g502CAnGtxQaTfpHECBdVEMRBhomSXeLnoKjiQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2696,7 +3526,7 @@ packages: pretty-format: 29.1.2 dev: true - /jest-matcher-utils/29.1.2: + /jest-matcher-utils@29.1.2: resolution: {integrity: sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2706,7 +3536,7 @@ packages: pretty-format: 29.1.2 dev: true - /jest-message-util/29.1.2: + /jest-message-util@29.1.2: resolution: {integrity: sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2721,7 +3551,7 @@ packages: stack-utils: 2.0.5 dev: true - /jest-mock/29.1.2: + /jest-mock@29.1.2: resolution: {integrity: sha512-PFDAdjjWbjPUtQPkQufvniXIS3N9Tv7tbibePEjIIprzjgo0qQlyUiVMrT4vL8FaSJo1QXifQUOuPH3HQC/aMA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2730,7 +3560,7 @@ packages: jest-util: 29.1.2 dev: true - /jest-pnp-resolver/1.2.2_jest-resolve@29.1.2: + /jest-pnp-resolver@1.2.2(jest-resolve@29.1.2): resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} engines: {node: '>=6'} peerDependencies: @@ -2742,12 +3572,12 @@ packages: jest-resolve: 29.1.2 dev: true - /jest-regex-util/29.0.0: + /jest-regex-util@29.0.0: resolution: {integrity: sha512-BV7VW7Sy0fInHWN93MMPtlClweYv2qrSCwfeFWmpribGZtQPWNvRSq9XOVgOEjU1iBGRKXUZil0o2AH7Iy9Lug==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true - /jest-resolve-dependencies/29.1.2: + /jest-resolve-dependencies@29.1.2: resolution: {integrity: sha512-44yYi+yHqNmH3OoWZvPgmeeiwKxhKV/0CfrzaKLSkZG9gT973PX8i+m8j6pDrTYhhHoiKfF3YUFg/6AeuHw4HQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2757,14 +3587,14 @@ packages: - supports-color dev: true - /jest-resolve/29.1.2: + /jest-resolve@29.1.2: resolution: {integrity: sha512-7fcOr+k7UYSVRJYhSmJHIid3AnDBcLQX3VmT9OSbPWsWz1MfT7bcoerMhADKGvKCoMpOHUQaDHtQoNp/P9JMGg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: chalk: 4.1.2 graceful-fs: 4.2.10 jest-haste-map: 29.1.2 - jest-pnp-resolver: 1.2.2_jest-resolve@29.1.2 + jest-pnp-resolver: 1.2.2(jest-resolve@29.1.2) jest-util: 29.1.2 jest-validate: 29.1.2 resolve: 1.22.1 @@ -2772,7 +3602,7 @@ packages: slash: 3.0.0 dev: true - /jest-runner/29.1.2: + /jest-runner@29.1.2: resolution: {integrity: sha512-yy3LEWw8KuBCmg7sCGDIqKwJlULBuNIQa2eFSVgVASWdXbMYZ9H/X0tnXt70XFoGf92W2sOQDOIFAA6f2BG04Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2801,7 +3631,7 @@ packages: - supports-color dev: true - /jest-runtime/29.1.2: + /jest-runtime@29.1.2: resolution: {integrity: sha512-jr8VJLIf+cYc+8hbrpt412n5jX3tiXmpPSYTGnwcvNemY+EOuLNiYnHJ3Kp25rkaAcTWOEI4ZdOIQcwYcXIAZw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2831,14 +3661,21 @@ packages: - supports-color dev: true - /jest-snapshot/29.1.2: + /jest-silent-reporter@0.5.0: + resolution: {integrity: sha512-epdLt8Oj0a1AyRiR6F8zx/1SVT1Mi7VU3y4wB2uOBHs/ohIquC7v2eeja7UN54uRPyHInIKWdL+RdG228n5pJQ==} + dependencies: + chalk: 4.1.2 + jest-util: 26.6.2 + dev: true + + /jest-snapshot@29.1.2: resolution: {integrity: sha512-rYFomGpVMdBlfwTYxkUp3sjD6usptvZcONFYNqVlaz4EpHPnDvlWjvmOQ9OCSNKqYZqLM2aS3wq01tWujLg7gg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@babel/core': 
7.19.3 '@babel/generator': 7.19.3 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.19.3 - '@babel/plugin-syntax-typescript': 7.18.6_@babel+core@7.19.3 + '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.19.3) + '@babel/plugin-syntax-typescript': 7.18.6(@babel/core@7.19.3) '@babel/traverse': 7.19.3 '@babel/types': 7.19.3 '@jest/expect-utils': 29.1.2 @@ -2846,7 +3683,7 @@ packages: '@jest/types': 29.1.2 '@types/babel__traverse': 7.18.2 '@types/prettier': 2.7.1 - babel-preset-current-node-syntax: 1.0.1_@babel+core@7.19.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.3) chalk: 4.1.2 expect: 29.1.2 graceful-fs: 4.2.10 @@ -2863,7 +3700,19 @@ packages: - supports-color dev: true - /jest-util/29.1.2: + /jest-util@26.6.2: + resolution: {integrity: sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q==} + engines: {node: '>= 10.14.2'} + dependencies: + '@jest/types': 26.6.2 + '@types/node': 18.8.0 + chalk: 4.1.2 + graceful-fs: 4.2.10 + is-ci: 2.0.0 + micromatch: 4.0.5 + dev: true + + /jest-util@29.1.2: resolution: {integrity: sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2875,7 +3724,7 @@ packages: picomatch: 2.3.1 dev: true - /jest-validate/29.1.2: + /jest-validate@29.1.2: resolution: {integrity: sha512-k71pOslNlV8fVyI+mEySy2pq9KdXdgZtm7NHrBX8LghJayc3wWZH0Yr0mtYNGaCU4F1OLPXRkwZR0dBm/ClshA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2887,7 +3736,7 @@ packages: pretty-format: 29.1.2 dev: true - /jest-watcher/29.1.2: + /jest-watcher@29.1.2: resolution: {integrity: sha512-6JUIUKVdAvcxC6bM8/dMgqY2N4lbT+jZVsxh0hCJRbwkIEnbr/aPjMQ28fNDI5lB51Klh00MWZZeVf27KBUj5w==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2901,7 +3750,7 @@ packages: string-length: 4.0.2 dev: true - /jest-worker/29.1.2: + /jest-worker@29.1.2: resolution: {integrity: sha512-AdTZJxKjTSPHbXT/AIOjQVmoFx0LHFcVabWu0sxI7PAy7rFf8c0upyvgBKgguVXdM4vY74JdwkyD4hSmpTW8jA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -2911,7 +3760,7 @@ packages: supports-color: 8.1.1 dev: true - /jest/29.1.2_wnseany3vswo6p7nhyzogpjzqe: + /jest@29.1.2(@types/node@18.8.0)(ts-node@10.9.1): resolution: {integrity: sha512-5wEIPpCezgORnqf+rCaYD1SK+mNN7NsstWzIsuvsnrhR/hSxXWd82oI7DkrbJ+XTD28/eG8SmxdGvukrGGK6Tw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true @@ -2921,24 +3770,29 @@ packages: node-notifier: optional: true dependencies: - '@jest/core': 29.1.2_ts-node@10.9.1 + '@jest/core': 29.1.2(ts-node@10.9.1) '@jest/types': 29.1.2 import-local: 3.1.0 - jest-cli: 29.1.2_wnseany3vswo6p7nhyzogpjzqe + jest-cli: 29.1.2(@types/node@18.8.0)(ts-node@10.9.1) transitivePeerDependencies: - '@types/node' - supports-color - ts-node dev: true - /js-sdsl/4.1.5: + /js-levenshtein@1.1.6: + resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==} + engines: {node: '>=0.10.0'} + dev: false + + /js-sdsl@4.1.5: resolution: {integrity: sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q==} dev: true - /js-tokens/4.0.0: + /js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - /js-yaml/3.14.1: + /js-yaml@3.14.1: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true 
dependencies: @@ -2946,14 +3800,14 @@ packages: esprima: 4.0.1 dev: true - /js-yaml/4.1.0: + /js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true dependencies: argparse: 2.0.1 dev: true - /jsdom/20.0.1: + /jsdom@20.0.1: resolution: {integrity: sha512-pksjj7Rqoa+wdpkKcLzQRHhJCEE42qQhl/xLMUKHgoSejaKOdaXEAnqs6uDNwMl/fciHTzKeR8Wm8cw7N+g98A==} engines: {node: '>=14'} peerDependencies: @@ -2994,45 +3848,63 @@ packages: - utf-8-validate dev: true - /jsesc/2.5.2: + /jsesc@2.5.2: resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} engines: {node: '>=4'} hasBin: true dev: true - /json-parse-better-errors/1.0.2: + /json-parse-better-errors@1.0.2: resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} dev: true - /json-parse-even-better-errors/2.3.1: + /json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true - /json-schema-traverse/0.4.1: + /json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} dev: true - /json-stable-stringify-without-jsonify/1.0.1: + /json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} dev: true - /json5/2.2.1: + /json5@2.2.1: resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} engines: {node: '>=6'} hasBin: true dev: true - /jsonc-parser/3.2.0: + /jsonc-parser@3.2.0: resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} dev: true - /kleur/3.0.3: + /jsx-ast-utils@3.3.3: + resolution: {integrity: sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==} + engines: {node: '>=4.0'} + dependencies: + array-includes: 3.1.6 + object.assign: 4.1.4 + dev: true + + /kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} dev: true - /leantable/0.4.10_react@18.2.0: - resolution: {integrity: sha512-aPJJnO32ikBtSYVTOvgVu6JBgsxFpT2vPLXprPHI2U+EAwYDmTTTd847w8pJ7fi2bec1tO8sbkx/IJkzyu2ovg==} + /language-subtag-registry@0.3.22: + resolution: {integrity: sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==} + dev: true + + /language-tags@1.0.5: + resolution: {integrity: sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==} + dependencies: + language-subtag-registry: 0.3.22 + dev: true + + /leantable@0.4.11(react@18.2.0): + resolution: {integrity: sha512-9szt8sSt61uUWsN2VcDmzArP5scSSkbKVJP/SG5TOBWXZnM/j32oescBHRHmAF5QcQp+GLTMn+0sf++85kfseg==} peerDependencies: react: '>= 16.8.0' dependencies: @@ -3041,12 +3913,12 @@ packages: rxjs: 7.5.7 dev: false - /leven/3.1.0: + /leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} dev: true - /levn/0.3.0: + /levn@0.3.0: resolution: {integrity: sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==} engines: {node: '>= 
0.8.0'} dependencies: @@ -3054,7 +3926,7 @@ packages: type-check: 0.3.2 dev: true - /levn/0.4.1: + /levn@0.4.1: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} dependencies: @@ -3062,11 +3934,11 @@ packages: type-check: 0.4.0 dev: true - /lines-and-columns/1.2.4: + /lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} dev: true - /load-json-file/4.0.0: + /load-json-file@4.0.0: resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==} engines: {node: '>=4'} dependencies: @@ -3076,82 +3948,89 @@ packages: strip-bom: 3.0.0 dev: true - /locate-path/5.0.0: + /locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} dependencies: p-locate: 4.1.0 dev: true - /locate-path/6.0.0: + /locate-path@6.0.0: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} dependencies: p-locate: 5.0.0 dev: true - /lodash.merge/4.6.2: + /lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} dev: true - /lodash/4.17.21: + /lodash@4.17.21: resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - dev: true - /loose-envify/1.4.0: + /log-symbols@4.1.0: + resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} + engines: {node: '>=10'} + dependencies: + chalk: 4.1.2 + is-unicode-supported: 0.1.0 + dev: false + + /loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true dependencies: js-tokens: 4.0.0 - /lru-cache/6.0.0: + /lru-cache@6.0.0: resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} engines: {node: '>=10'} dependencies: yallist: 4.0.0 dev: true - /lz-string/1.4.4: + /lz-string@1.4.4: resolution: {integrity: sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ==} hasBin: true dev: true - /make-dir/3.1.0: + /make-dir@3.1.0: resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} engines: {node: '>=8'} dependencies: semver: 6.3.0 dev: true - /make-error/1.3.6: + /make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} dev: true - /makeerror/1.0.12: + /makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} dependencies: tmpl: 1.0.5 dev: true - /memorystream/0.3.1: + /memorystream@0.3.1: resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==} engines: {node: '>= 0.10.0'} dev: true - /merge-stream/2.0.0: + /merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} dev: true - /merge2/1.4.1: + /merge2@1.4.1: resolution: {integrity: 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} dev: true - /micro-memoize/4.0.11: + /micro-memoize@4.0.11: resolution: {integrity: sha512-CjxsaYe4j43df32DtzzNCwanPqZjZDwuQAZilsCYpa2ZVtSPDjHXbTlR4gsEZRyO9/twHs0b7HLjvy/sowl7sA==} dev: false - /micromatch/4.0.5: + /micromatch@4.0.5: resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} engines: {node: '>=8.6'} dependencies: @@ -3159,75 +4038,129 @@ packages: picomatch: 2.3.1 dev: true - /mime-db/1.52.0: + /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} dev: true - /mime-types/2.1.35: + /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 dev: true - /mimic-fn/2.1.0: + /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} - dev: true - /min-indent/1.0.1: + /min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} dev: true - /minimatch/3.1.2: + /minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} dependencies: brace-expansion: 1.1.11 dev: true - /minimatch/5.1.0: + /minimatch@5.1.0: resolution: {integrity: sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==} engines: {node: '>=10'} dependencies: brace-expansion: 2.0.1 dev: true - /mkdirp/1.0.4: + /mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} engines: {node: '>=10'} hasBin: true dev: true - /moize/6.1.3: + /moize@6.1.3: resolution: {integrity: sha512-Cn+1T5Ypieeo46fn8X98V2gHj2VSRohVPjvT8BRvNANJJC3UOeege/G84xA/3S9c5qA4p9jOdSB1jfhumwe8qw==} dependencies: fast-equals: 3.0.3 micro-memoize: 4.0.11 dev: false - /ms/2.1.2: + /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + + /msw@1.2.0(typescript@4.8.4): + resolution: {integrity: sha512-2nbGxmG8Zk/eCiWCAFtgz5kwPE5YnTlOcRBggFkykqkBSPQE+kvlHJElOzrTG21Tb0ZGXWgn0PI5kyANnOpgug==} + engines: {node: '>=14'} + hasBin: true + requiresBuild: true + peerDependencies: + typescript: '>= 4.4.x <= 5.0.x' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@mswjs/cookies': 0.2.2 + '@mswjs/interceptors': 0.17.9 + '@open-draft/until': 1.0.3 + '@types/cookie': 0.4.1 + '@types/js-levenshtein': 1.1.1 + chalk: 4.1.1 + chokidar: 3.5.3 + cookie: 0.4.2 + graphql: 16.6.0 + headers-polyfill: 3.1.2 + inquirer: 8.2.5 + is-node-process: 1.0.1 + js-levenshtein: 1.1.6 + node-fetch: 2.6.9 + outvariant: 1.3.0 + path-to-regexp: 6.2.1 + strict-event-emitter: 0.4.6 + type-fest: 2.19.0 + typescript: 4.8.4 + yargs: 17.6.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + dev: false + + /natural-compare-lite@1.4.0: + resolution: {integrity: 
sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==} dev: true - /natural-compare/1.4.0: + /natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} dev: true - /nice-try/1.0.5: + /nice-try@1.0.5: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true - /node-int64/0.4.0: + /node-fetch@2.6.9: + resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + + /node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true - /node-releases/2.0.6: + /node-releases@2.0.6: resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} dev: true - /normalize-package-data/2.5.0: + /normalize-package-data@2.5.0: resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} dependencies: hosted-git-info: 2.8.9 @@ -3236,12 +4169,11 @@ packages: validate-npm-package-license: 3.0.4 dev: true - /normalize-path/3.0.0: + /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} - dev: true - /npm-run-all/4.1.5: + /npm-run-all@4.1.5: resolution: {integrity: sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==} engines: {node: '>= 4'} hasBin: true @@ -3257,27 +4189,40 @@ packages: string.prototype.padend: 3.1.3 dev: true - /npm-run-path/4.0.1: + /npm-run-path@4.0.1: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} dependencies: path-key: 3.1.1 dev: true - /nwsapi/2.2.2: + /nwsapi@2.2.2: resolution: {integrity: sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==} dev: true - /object-inspect/1.12.2: + /object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + dev: true + + /object-inspect@1.12.2: resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} dev: true - /object-keys/1.1.1: + /object-is@1.1.5: + resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + dev: true + + /object-keys@1.1.1: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} dev: true - /object.assign/4.1.4: + /object.assign@4.1.4: resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} engines: {node: '>= 0.4'} dependencies: @@ -3287,20 +4232,53 @@ packages: object-keys: 1.1.1 dev: true - /once/1.4.0: + /object.entries@1.1.6: + resolution: {integrity: 
sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + dev: true + + /object.fromentries@2.0.6: + resolution: {integrity: sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + dev: true + + /object.hasown@1.1.2: + resolution: {integrity: sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==} + dependencies: + define-properties: 1.1.4 + es-abstract: 1.21.1 + dev: true + + /object.values@1.1.6: + resolution: {integrity: sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + dev: true + + /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} dependencies: wrappy: 1.0.2 dev: true - /onetime/5.1.2: + /onetime@5.1.2: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} dependencies: mimic-fn: 2.1.0 - dev: true - /optionator/0.8.3: + /optionator@0.8.3: resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==} engines: {node: '>= 0.8.0'} dependencies: @@ -3312,7 +4290,7 @@ packages: word-wrap: 1.2.3 dev: true - /optionator/0.9.1: + /optionator@0.9.1: resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} engines: {node: '>= 0.8.0'} dependencies: @@ -3324,47 +4302,71 @@ packages: word-wrap: 1.2.3 dev: true - /p-limit/2.3.0: + /ora@5.4.1: + resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} + engines: {node: '>=10'} + dependencies: + bl: 4.1.0 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-spinners: 2.7.0 + is-interactive: 1.0.0 + is-unicode-supported: 0.1.0 + log-symbols: 4.1.0 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + dev: false + + /os-tmpdir@1.0.2: + resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} + engines: {node: '>=0.10.0'} + dev: false + + /outvariant@1.3.0: + resolution: {integrity: sha512-yeWM9k6UPfG/nzxdaPlJkB2p08hCg4xP6Lx99F+vP8YF7xyZVfTmJjrrNalkmzudD4WFvNLVudQikqUmF8zhVQ==} + dev: false + + /p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} dependencies: p-try: 2.2.0 dev: true - /p-limit/3.1.0: + /p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} dependencies: yocto-queue: 0.1.0 dev: true - /p-locate/4.1.0: + /p-locate@4.1.0: resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} engines: {node: '>=8'} dependencies: p-limit: 2.3.0 dev: true - /p-locate/5.0.0: + /p-locate@5.0.0: resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} engines: {node: '>=10'} dependencies: p-limit: 3.1.0 dev: true - /p-try/2.2.0: + 
/p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} dev: true - /parent-module/1.0.1: + /parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} dependencies: callsites: 3.1.0 dev: true - /parse-json/4.0.0: + /parse-json@4.0.0: resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} engines: {node: '>=4'} dependencies: @@ -3372,7 +4374,7 @@ packages: json-parse-better-errors: 1.0.2 dev: true - /parse-json/5.2.0: + /parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} dependencies: @@ -3382,97 +4384,110 @@ packages: lines-and-columns: 1.2.4 dev: true - /parse5/7.1.1: + /parse5-htmlparser2-tree-adapter@6.0.1: + resolution: {integrity: sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==} + dependencies: + parse5: 6.0.1 + dev: true + + /parse5@6.0.1: + resolution: {integrity: sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==} + dev: true + + /parse5@7.1.1: resolution: {integrity: sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg==} dependencies: entities: 4.4.0 dev: true - /path-exists/4.0.0: + /path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} dev: true - /path-is-absolute/1.0.1: + /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} engines: {node: '>=0.10.0'} dev: true - /path-key/2.0.1: + /path-key@2.0.1: resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==} engines: {node: '>=4'} dev: true - /path-key/3.1.1: + /path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} dev: true - /path-parse/1.0.7: + /path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} dev: true - /path-type/3.0.0: + /path-to-regexp@6.2.1: + resolution: {integrity: sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==} + dev: false + + /path-type@3.0.0: resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==} engines: {node: '>=4'} dependencies: pify: 3.0.0 dev: true - /path-type/4.0.0: + /path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} dev: true - /picocolors/1.0.0: + /picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} dev: true - /picomatch/2.3.1: + /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} - dev: true - /pidtree/0.3.1: + /pidtree@0.3.1: resolution: {integrity: 
sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} engines: {node: '>=0.10'} hasBin: true dev: true - /pify/3.0.0: + /pify@3.0.0: resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==} engines: {node: '>=4'} dev: true - /pirates/4.0.5: + /pirates@4.0.5: resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} engines: {node: '>= 6'} dev: true - /pkg-dir/4.2.0: + /pkg-dir@4.2.0: resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} engines: {node: '>=8'} dependencies: find-up: 4.1.0 dev: true - /prelude-ls/1.1.2: + /prelude-ls@1.1.2: resolution: {integrity: sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==} engines: {node: '>= 0.8.0'} dev: true - /prelude-ls/1.2.1: + /prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} dev: true - /prettier/2.7.1: + /prettier@2.7.1: resolution: {integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==} engines: {node: '>=10.13.0'} hasBin: true dev: true - /pretty-format/27.5.1: + /pretty-format@27.5.1: resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: @@ -3481,7 +4496,7 @@ packages: react-is: 17.0.2 dev: true - /pretty-format/29.1.2: + /pretty-format@29.1.2: resolution: {integrity: sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: @@ -3490,7 +4505,7 @@ packages: react-is: 18.2.0 dev: true - /prompts/2.4.2: + /prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} dependencies: @@ -3498,24 +4513,36 @@ packages: sisteransi: 1.0.5 dev: true - /psl/1.9.0: + /prop-types@15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + dev: true + + /psl@1.9.0: resolution: {integrity: sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==} dev: true - /punycode/2.1.1: + /punycode@2.1.1: resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==} engines: {node: '>=6'} dev: true - /querystringify/2.2.0: + /querystringify@2.2.0: resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} dev: true - /queue-microtask/1.2.3: + /queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true - /react-dom/18.2.0_react@18.2.0: + /rambda@7.4.0: + resolution: {integrity: sha512-A9hihu7dUTLOUCM+I8E61V4kRXnN4DwYeK0DwCBydC1MqNI1PidyAtbtpsJlBBzK4icSctEcCQ1bGcLpBuETUQ==} + dev: true + + /react-dom@18.2.0(react@18.2.0): resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==} peerDependencies: react: ^18.2.0 @@ -3525,15 
+4552,19 @@ packages: scheduler: 0.23.0 dev: true - /react-is/17.0.2: + /react-is@16.13.1: + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + dev: true + + /react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} dev: true - /react-is/18.2.0: + /react-is@18.2.0: resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} dev: true - /react-smart-promise/1.0.4_react@18.2.0: + /react-smart-promise@1.0.4(react@18.2.0): resolution: {integrity: sha512-ZVV3n0WlroxgdtnWBFc3wLcsGrSt7s0wGTuBp8p9vWkbJFJ4SfvKAl9YHiy5hfsJFRFpyZ67Q5umjE1fiRPUgA==} peerDependencies: react: ^16.8.0 || ^17 || ^18 @@ -3542,13 +4573,13 @@ packages: react: 18.2.0 dev: false - /react/18.2.0: + /react@18.2.0: resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} engines: {node: '>=0.10.0'} dependencies: loose-envify: 1.4.0 - /read-pkg/3.0.0: + /read-pkg@3.0.0: resolution: {integrity: sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==} engines: {node: '>=4'} dependencies: @@ -3557,7 +4588,23 @@ packages: path-type: 3.0.0 dev: true - /redent/3.0.0: + /readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: false + + /readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + dev: false + + /redent@3.0.0: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} dependencies: @@ -3565,11 +4612,15 @@ packages: strip-indent: 3.0.0 dev: true - /regenerator-runtime/0.13.9: + /regenerator-runtime@0.13.11: + resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} + dev: true + + /regenerator-runtime@0.13.9: resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==} dev: true - /regexp.prototype.flags/1.4.3: + /regexp.prototype.flags@1.4.3: resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} engines: {node: '>= 0.4'} dependencies: @@ -3578,43 +4629,47 @@ packages: functions-have-names: 1.2.3 dev: true - /regexpp/3.2.0: + /regexpp@3.2.0: resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} engines: {node: '>=8'} dev: true - /require-directory/2.1.1: + /require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} + + /requireindex@1.2.0: + resolution: {integrity: sha512-L9jEkOi3ASd9PYit2cwRfyppc9NoABujTP8/5gFcbERmo5jUoAKovIC3fsF17pkTnGsrByysqX+Kxd2OTNI1ww==} + engines: {node: '>=0.10.5'} dev: true - /requires-port/1.0.0: + /requires-port@1.0.0: resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} dev: true - /resolve-cwd/3.0.0: + /resolve-cwd@3.0.0: 
resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} engines: {node: '>=8'} dependencies: resolve-from: 5.0.0 dev: true - /resolve-from/4.0.0: + /resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} dev: true - /resolve-from/5.0.0: + /resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} dev: true - /resolve.exports/1.1.0: + /resolve.exports@1.1.0: resolution: {integrity: sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==} engines: {node: '>=10'} dev: true - /resolve/1.22.1: + /resolve@1.22.1: resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} hasBin: true dependencies: @@ -3623,35 +4678,61 @@ packages: supports-preserve-symlinks-flag: 1.0.0 dev: true - /reusify/1.0.4: + /resolve@2.0.0-next.4: + resolution: {integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==} + hasBin: true + dependencies: + is-core-module: 2.10.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: true + + /restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + dev: false + + /reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rimraf/3.0.2: + /rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} hasBin: true dependencies: glob: 7.2.3 dev: true - /run-parallel/1.2.0: + /run-async@2.4.1: + resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} + engines: {node: '>=0.12.0'} + dev: false + + /run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} dependencies: queue-microtask: 1.2.3 dev: true - /rxjs/7.5.7: + /rxjs@7.5.7: resolution: {integrity: sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==} dependencies: tslib: 2.4.0 dev: false - /safe-buffer/5.1.2: + /safe-buffer@5.1.2: resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} dev: true - /safe-regex-test/1.0.0: + /safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + dev: false + + /safe-regex-test@1.0.0: resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} dependencies: call-bind: 1.0.2 @@ -3659,34 +4740,33 @@ packages: is-regex: 1.1.4 dev: true - /safer-buffer/2.1.2: + /safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - dev: true - /saxes/6.0.0: + /saxes@6.0.0: resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} engines: {node: '>=v12.22.7'} 
dependencies: xmlchars: 2.2.0 dev: true - /scheduler/0.23.0: + /scheduler@0.23.0: resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==} dependencies: loose-envify: 1.4.0 dev: true - /semver/5.7.1: + /semver@5.7.1: resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} hasBin: true dev: true - /semver/6.3.0: + /semver@6.3.0: resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} hasBin: true dev: true - /semver/7.3.7: + /semver@7.3.7: resolution: {integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==} engines: {node: '>=10'} hasBin: true @@ -3694,35 +4774,39 @@ packages: lru-cache: 6.0.0 dev: true - /shebang-command/1.2.0: + /set-cookie-parser@2.6.0: + resolution: {integrity: sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==} + dev: false + + /shebang-command@1.2.0: resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} engines: {node: '>=0.10.0'} dependencies: shebang-regex: 1.0.0 dev: true - /shebang-command/2.0.0: + /shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} dependencies: shebang-regex: 3.0.0 dev: true - /shebang-regex/1.0.0: + /shebang-regex@1.0.0: resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==} engines: {node: '>=0.10.0'} dev: true - /shebang-regex/3.0.0: + /shebang-regex@3.0.0: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} dev: true - /shell-quote/1.7.3: + /shell-quote@1.7.3: resolution: {integrity: sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==} dev: true - /side-channel/1.0.4: + /side-channel@1.0.4: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} dependencies: call-bind: 1.0.2 @@ -3730,70 +4814,86 @@ packages: object-inspect: 1.12.2 dev: true - /signal-exit/3.0.7: + /signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true - /sisteransi/1.0.5: + /sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} dev: true - /slash/3.0.0: + /slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} dev: true - /source-map-support/0.5.13: + /source-map-support@0.5.13: resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} dependencies: buffer-from: 1.1.2 source-map: 0.6.1 dev: true - /source-map/0.6.1: + /source-map@0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} dev: true - /source-map/0.7.4: + /source-map@0.7.4: resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} engines: {node: '>= 8'} dev: true - 
/spdx-correct/3.1.1: + /spdx-correct@3.1.1: resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} dependencies: spdx-expression-parse: 3.0.1 spdx-license-ids: 3.0.12 dev: true - /spdx-exceptions/2.3.0: + /spdx-exceptions@2.3.0: resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==} dev: true - /spdx-expression-parse/3.0.1: + /spdx-expression-parse@3.0.1: resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} dependencies: spdx-exceptions: 2.3.0 spdx-license-ids: 3.0.12 dev: true - /spdx-license-ids/3.0.12: + /spdx-license-ids@3.0.12: resolution: {integrity: sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==} dev: true - /sprintf-js/1.0.3: + /sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} dev: true - /stack-utils/2.0.5: + /stack-utils@2.0.5: resolution: {integrity: sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==} engines: {node: '>=10'} dependencies: escape-string-regexp: 2.0.0 dev: true - /string-length/4.0.2: + /stop-iteration-iterator@1.0.0: + resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==} + engines: {node: '>= 0.4'} + dependencies: + internal-slot: 1.0.4 + dev: true + + /strict-event-emitter@0.2.8: + resolution: {integrity: sha512-KDf/ujU8Zud3YaLtMCcTI4xkZlZVIYxTLr+XIULexP+77EEVWixeXroLUXQXiVtH4XH2W7jr/3PT1v3zBuvc3A==} + dependencies: + events: 3.3.0 + dev: false + + /strict-event-emitter@0.4.6: + resolution: {integrity: sha512-12KWeb+wixJohmnwNFerbyiBrAlq5qJLwIt38etRtKtmmHyDSoGlIqFE9wx+4IwG0aDjI7GV8tc8ZccjWZZtTg==} + dev: false + + /string-length@4.0.2: resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} engines: {node: '>=10'} dependencies: @@ -3801,96 +4901,112 @@ packages: strip-ansi: 6.0.1 dev: true - /string-width/4.2.3: + /string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} dependencies: emoji-regex: 8.0.0 is-fullwidth-code-point: 3.0.0 strip-ansi: 6.0.1 + + /string.prototype.matchall@4.0.8: + resolution: {integrity: sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.21.1 + get-intrinsic: 1.1.3 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + regexp.prototype.flags: 1.4.3 + side-channel: 1.0.4 dev: true - /string.prototype.padend/3.1.3: + /string.prototype.padend@3.1.3: resolution: {integrity: sha512-jNIIeokznm8SD/TZISQsZKYu7RJyheFNt84DUPrh482GC8RVp2MKqm2O5oBRdGxbDQoXrhhWtPIWQOiy20svUg==} engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 define-properties: 1.1.4 - es-abstract: 1.20.3 + es-abstract: 1.21.1 dev: true - /string.prototype.trimend/1.0.5: - resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==} + /string.prototype.trimend@1.0.6: + resolution: {integrity: sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==} dependencies: call-bind: 1.0.2 define-properties: 1.1.4 - es-abstract: 1.20.3 + es-abstract: 1.21.1 
dev: true - /string.prototype.trimstart/1.0.5: - resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==} + /string.prototype.trimstart@1.0.6: + resolution: {integrity: sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==} dependencies: call-bind: 1.0.2 define-properties: 1.1.4 - es-abstract: 1.20.3 + es-abstract: 1.21.1 dev: true - /strip-ansi/6.0.1: + /string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + dependencies: + safe-buffer: 5.2.1 + dev: false + + /strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} dependencies: ansi-regex: 5.0.1 - dev: true - /strip-bom/3.0.0: + /strip-bom@3.0.0: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} dev: true - /strip-bom/4.0.0: + /strip-bom@4.0.0: resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} engines: {node: '>=8'} dev: true - /strip-final-newline/2.0.0: + /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} dev: true - /strip-indent/3.0.0: + /strip-indent@3.0.0: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} dependencies: min-indent: 1.0.1 dev: true - /strip-json-comments/3.1.1: + /strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} dev: true - /supports-color/5.5.0: + /supports-color@5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} dependencies: has-flag: 3.0.0 dev: true - /supports-color/7.2.0: + /supports-color@7.2.0: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} dependencies: has-flag: 4.0.0 - dev: true - /supports-color/8.1.1: + /supports-color@8.1.1: resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} engines: {node: '>=10'} dependencies: has-flag: 4.0.0 dev: true - /supports-hyperlinks/2.3.0: + /supports-hyperlinks@2.3.0: resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} engines: {node: '>=8'} dependencies: @@ -3898,16 +5014,16 @@ packages: supports-color: 7.2.0 dev: true - /supports-preserve-symlinks-flag/1.0.0: + /supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} dev: true - /symbol-tree/3.2.4: + /symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} dev: true - /terminal-link/2.1.1: + /terminal-link@2.1.1: resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} engines: {node: '>=8'} dependencies: @@ -3915,7 +5031,7 @@ 
packages: supports-hyperlinks: 2.3.0 dev: true - /test-exclude/6.0.0: + /test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} dependencies: @@ -3924,27 +5040,37 @@ packages: minimatch: 3.1.2 dev: true - /text-table/0.2.0: + /text-table@0.2.0: resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} dev: true - /tmpl/1.0.5: + /through@2.3.8: + resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} + dev: false + + /tmp@0.0.33: + resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} + engines: {node: '>=0.6.0'} + dependencies: + os-tmpdir: 1.0.2 + dev: false + + /tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} dev: true - /to-fast-properties/2.0.0: + /to-fast-properties@2.0.0: resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} engines: {node: '>=4'} dev: true - /to-regex-range/5.0.1: + /to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} dependencies: is-number: 7.0.0 - dev: true - /tough-cookie/4.1.2: + /tough-cookie@4.1.2: resolution: {integrity: sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==} engines: {node: '>=6'} dependencies: @@ -3954,14 +5080,18 @@ packages: url-parse: 1.5.10 dev: true - /tr46/3.0.0: + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false + + /tr46@3.0.0: resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==} engines: {node: '>=12'} dependencies: punycode: 2.1.1 dev: true - /ts-node/10.9.1_6ma5wccgcz7ykn2lbqtwbuz2ty: + /ts-node@10.9.1(@swc/core@1.3.18)(@types/node@18.8.0)(typescript@4.8.4): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} hasBin: true peerDependencies: @@ -3993,45 +5123,70 @@ packages: yn: 3.1.1 dev: true - /tslib/2.4.0: + /tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + dev: true + + /tslib@2.4.0: resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} - /type-check/0.3.2: + /tsutils@3.21.0(typescript@4.8.4): + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + dependencies: + tslib: 1.14.1 + typescript: 4.8.4 + dev: true + + /type-check@0.3.2: resolution: {integrity: sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==} engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.1.2 dev: true - /type-check/0.4.0: + /type-check@0.4.0: resolution: {integrity: 
sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.2.1 dev: true - /type-detect/4.0.8: + /type-detect@4.0.8: resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} engines: {node: '>=4'} dev: true - /type-fest/0.20.2: + /type-fest@0.20.2: resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} engines: {node: '>=10'} dev: true - /type-fest/0.21.3: + /type-fest@0.21.3: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} + + /type-fest@2.19.0: + resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} + engines: {node: '>=12.20'} + dev: false + + /typed-array-length@1.0.4: + resolution: {integrity: sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==} + dependencies: + call-bind: 1.0.2 + for-each: 0.3.3 + is-typed-array: 1.1.10 dev: true - /typescript/4.8.4: + /typescript@4.8.4: resolution: {integrity: sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==} engines: {node: '>=4.2.0'} hasBin: true - dev: true - /unbox-primitive/1.0.2: + /unbox-primitive@1.0.2: resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} dependencies: call-bind: 1.0.2 @@ -4040,12 +5195,12 @@ packages: which-boxed-primitive: 1.0.2 dev: true - /universalify/0.2.0: + /universalify@0.2.0: resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} engines: {node: '>= 4.0.0'} dev: true - /update-browserslist-db/1.0.9_browserslist@4.21.4: + /update-browserslist-db@1.0.9(browserslist@4.21.4): resolution: {integrity: sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==} hasBin: true peerDependencies: @@ -4056,29 +5211,43 @@ packages: picocolors: 1.0.0 dev: true - /uri-js/4.4.1: + /uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} dependencies: punycode: 2.1.1 dev: true - /url-parse/1.5.10: + /url-parse@1.5.10: resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==} dependencies: querystringify: 2.2.0 requires-port: 1.0.0 dev: true - /uuid/8.3.2: + /util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + dev: false + + /util@0.12.5: + resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==} + dependencies: + inherits: 2.0.4 + is-arguments: 1.1.1 + is-generator-function: 1.0.10 + is-typed-array: 1.1.10 + which-typed-array: 1.1.9 + dev: false + + /uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true dev: true - /v8-compile-cache-lib/3.0.1: + /v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} dev: true - /v8-to-istanbul/9.0.1: + /v8-to-istanbul@9.0.1: resolution: {integrity: 
sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} engines: {node: '>=10.12.0'} dependencies: @@ -4087,44 +5256,62 @@ packages: convert-source-map: 1.8.0 dev: true - /validate-npm-package-license/3.0.4: + /validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} dependencies: spdx-correct: 3.1.1 spdx-expression-parse: 3.0.1 dev: true - /w3c-xmlserializer/3.0.0: + /w3c-xmlserializer@3.0.0: resolution: {integrity: sha512-3WFqGEgSXIyGhOmAFtlicJNMjEps8b1MG31NCA0/vOF9+nKMUW1ckhi9cnNHmf88Rzw5V+dwIwsm2C7X8k9aQg==} engines: {node: '>=12'} dependencies: xml-name-validator: 4.0.0 dev: true - /walker/1.0.8: + /walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} dependencies: makeerror: 1.0.12 dev: true - /webidl-conversions/7.0.0: + /wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + dependencies: + defaults: 1.0.4 + dev: false + + /web-encoding@1.1.5: + resolution: {integrity: sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==} + dependencies: + util: 0.12.5 + optionalDependencies: + '@zxing/text-encoding': 0.9.0 + dev: false + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false + + /webidl-conversions@7.0.0: resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} engines: {node: '>=12'} dev: true - /whatwg-encoding/2.0.0: + /whatwg-encoding@2.0.0: resolution: {integrity: sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==} engines: {node: '>=12'} dependencies: iconv-lite: 0.6.3 dev: true - /whatwg-mimetype/3.0.0: + /whatwg-mimetype@3.0.0: resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==} engines: {node: '>=12'} dev: true - /whatwg-url/11.0.0: + /whatwg-url@11.0.0: resolution: {integrity: sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==} engines: {node: '>=12'} dependencies: @@ -4132,7 +5319,14 @@ packages: webidl-conversions: 7.0.0 dev: true - /which-boxed-primitive/1.0.2: + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: false + + /which-boxed-primitive@1.0.2: resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} dependencies: is-bigint: 1.0.4 @@ -4142,14 +5336,34 @@ packages: is-symbol: 1.0.4 dev: true - /which/1.3.1: + /which-collection@1.0.1: + resolution: {integrity: sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==} + dependencies: + is-map: 2.0.2 + is-set: 2.0.2 + is-weakmap: 2.0.1 + is-weakset: 2.0.2 + dev: true + + /which-typed-array@1.1.9: + resolution: {integrity: sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==} + engines: {node: '>= 0.4'} + dependencies: + available-typed-arrays: 1.0.5 + call-bind: 1.0.2 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.0 + is-typed-array: 
1.1.10 + + /which@1.3.1: resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} hasBin: true dependencies: isexe: 2.0.0 dev: true - /which/2.0.2: + /which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} hasBin: true @@ -4157,25 +5371,24 @@ packages: isexe: 2.0.0 dev: true - /word-wrap/1.2.3: + /word-wrap@1.2.3: resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} engines: {node: '>=0.10.0'} dev: true - /wrap-ansi/7.0.0: + /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} dependencies: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 - dev: true - /wrappy/1.0.2: + /wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} dev: true - /write-file-atomic/4.0.2: + /write-file-atomic@4.0.2: resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} dependencies: @@ -4183,7 +5396,7 @@ packages: signal-exit: 3.0.7 dev: true - /ws/8.9.0: + /ws@8.9.0: resolution: {integrity: sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg==} engines: {node: '>=10.0.0'} peerDependencies: @@ -4196,34 +5409,32 @@ packages: optional: true dev: true - /xml-name-validator/4.0.0: + /xml-name-validator@4.0.0: resolution: {integrity: sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==} engines: {node: '>=12'} dev: true - /xml/1.0.1: + /xml@1.0.1: resolution: {integrity: sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==} dev: true - /xmlchars/2.2.0: + /xmlchars@2.2.0: resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} dev: true - /y18n/5.0.8: + /y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} - dev: true - /yallist/4.0.0: + /yallist@4.0.0: resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} dev: true - /yargs-parser/21.1.1: + /yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} - dev: true - /yargs/17.6.0: + /yargs@17.6.0: resolution: {integrity: sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==} engines: {node: '>=12'} dependencies: @@ -4234,14 +5445,13 @@ packages: string-width: 4.2.3 y18n: 5.0.8 yargs-parser: 21.1.1 - dev: true - /yn/3.1.1: + /yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} dev: true - /yocto-queue/0.1.0: + /yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} dev: true diff --git a/dac/ui-common/pom.xml b/dac/ui-common/pom.xml index 9927e44afe..980bbf7b1c 100644 --- a/dac/ui-common/pom.xml +++ b/dac/ui-common/pom.xml @@ 
-21,7 +21,7 @@ com.dremio dremio-dac-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-ui-common @@ -83,7 +83,7 @@ --> test - process-sources + test npm @@ -93,13 +93,13 @@ test-results.xml ui-common - run test -- --ci --reporters=jest-junit + run test -- --ci --reporters=jest-silent-reporter --reporters=jest-junit ${skipTests} build - generate-resources + compile npm @@ -119,7 +119,10 @@ false - ${project.basedir}/dist-* + ${project.basedir} + + dist-*/ + false diff --git a/dac/ui-common/sql-grammar/DremioParser.g4 b/dac/ui-common/sql-grammar/DremioParser.g4 index fd7a19950d..45b90550e3 100644 --- a/dac/ui-common/sql-grammar/DremioParser.g4 +++ b/dac/ui-common/sql-grammar/DremioParser.g4 @@ -333,8 +333,6 @@ privilege : | CREATE PROJECT | CREATE CATALOG | CONFIGURE SECURITY - | CREATE OAUTH APPLICATION - | CREATE EXTERNAL TOKENS PROVIDER | INSERT | TRUNCATE | DELETE @@ -819,7 +817,7 @@ sqlTypeName2 : (BINARY VARYING? | VARBINARY) precisionOpt ; sqlTypeName3 : ((DECIMAL | DEC | NUMERIC) | ANY) (LPAREN unsignedIntLiteral (COMMA unsignedIntLiteral)? RPAREN)? - | DOUBLE PRECISION? (LPAREN unsignedIntLiteral (COMMA unsignedIntLiteral)? RPAREN)? + | DOUBLE PRECISION? ; jdbcOdbcDataTypeName : diff --git a/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/ArcticCatalogJobsTable.tsx b/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/ArcticCatalogJobsTable.tsx new file mode 100644 index 0000000000..1cadafbc88 --- /dev/null +++ b/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/ArcticCatalogJobsTable.tsx @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useCallback, useMemo, useRef } from "react"; +import { getArcticJobColumns } from "./utils"; +import { Table, columnSorting, infiniteScrolling } from "leantable/react"; +import { createTable } from "leantable/core"; +import { SortDirection } from "../../../components/TableCells/ColumnSortIcon"; + +type ArcticCatalogJobsTableProps = { + jobs: any[]; + order: SortDirection; + onScrolledBottom: () => void; +}; + +const ArcticCatalogJobsTable = ({ + jobs, + order, + onScrolledBottom = () => {}, +}: ArcticCatalogJobsTableProps) => { + const columns = useMemo(() => getArcticJobColumns(order), [order]); + + const scrolledBottomRef = useRef(onScrolledBottom); + scrolledBottomRef.current = onScrolledBottom; + + const arcticCatalogJobsTable = useMemo(() => { + return createTable([ + columnSorting(), + infiniteScrolling(() => scrolledBottomRef.current()), + ]); + }, []); + + const getRow = useCallback( + (rowIndex: number) => { + const data = jobs[rowIndex]; + return { + id: data?.id || rowIndex, + data: data || null, + }; + }, + [jobs] + ); + + return ( +
      + ); +}; + +export default ArcticCatalogJobsTable; diff --git a/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/utils.tsx b/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/utils.tsx new file mode 100644 index 0000000000..16b316f1e3 --- /dev/null +++ b/dac/ui-common/src/arctic/components/ArcticCatalogJobsTable/utils.tsx @@ -0,0 +1,242 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { type Column } from "leantable/react"; +import { CopyButton, Skeleton, Tooltip } from "dremio-ui-lib/components"; +// @ts-ignore +import { Tooltip as RichTooltip } from "dremio-ui-lib"; +import { + ColumnSortIcon, + SortDirection, +} from "../../../components/TableCells/ColumnSortIcon"; +import ArcticDataset, { DatasetType } from "../../../components/ArcticDataset"; +import DatasetSummaryOverlay from "../../../components/DatasetSummaryOverlay"; + +const JobTypeLabels: any = { + ["OPTIMIZE"]: "Optimize", + ["VACUUM"]: "Vacuum", +}; + +const JobStatusIcons: any = { + SETUP: "setup", + QUEUED: "queued", + STARTING: "starting", + RUNNING: { src: "running", className: "spinner" }, + COMPLETED: "job-completed", + CANCELLED: "canceled", + FAILED: "error-solid", +}; + +const JobStatusLabels: any = { + SETUP: "Setup", + QUEUED: "Queued", + STARTING: "Starting", + RUNNING: "Running", + COMPLETED: "Completed", + CANCELLED: "Cancelled", + FAILED: "Failed", +}; + +const getJobStatusLabel = (job: any): string | JSX.Element => { + const status = JobStatusLabels[job.state]; + if (job.state === "FAILED") { + return `${status}. ${job?.errorMessage ? `(${job?.errorMessage})` : ""}`; + } else if (job.state === "COMPLETED") { + if (job.type === "OPTIMIZE" && job?.metrics) { + return ( + <> +
      {status}.
      +
      {`Files rewritten: ${job.metrics.rewrittenDataFiles}`}
      +
      {`New data files: ${job.metrics.newDataFiles}`}
      +
      {`Delete files rewritten: ${job.metrics.rewrittenDeleteFiles}`}
      + + ); + } else if (job.type === "VACUUM") { + const deletedDataFiles = job?.metrics?.deletedDataFiles + ? `Files deleted: ${job.metrics.deletedDataFiles ?? ""}` + : ""; + const retentionPeriod = job?.config?.retentionPeriodMinutes + ? `Data retention period: ${job.config.retentionPeriodMinutes} minutes` + : ""; + return ( + <> + {status}. {deletedDataFiles &&
      {deletedDataFiles}
      } + {retentionPeriod &&
      {retentionPeriod}
      } + + ); + } else { + return status; + } + } else { + return status; + } +}; + +const ArcticCatalogJobTarget = (props: { item: any }) => { + const { item } = props; + const tablePath = item.config?.tableId?.split("."); + const name = tablePath?.pop(); + const path = tablePath?.join("."); + const target = + item.type === "OPTIMIZE" + ? `${tablePath[tablePath.length - 1]} (ref: ${item.config.reference})` + : item.catalogName; + return item.type === "OPTIMIZE" ? ( + + } + > +
      + +
      {name}
      +
      +
      + ) : ( +
      + {/* @ts-ignore */} + +
      {target}
      +
      + ); +}; + +export const getArcticJobColumns = (order: SortDirection): Column[] => [ + { + id: "target", + renderHeaderCell: () => "Target", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return ; + }, + }, + { + id: "jobStatus", + renderHeaderCell: () => "Status", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + const icon = JobStatusIcons[row.data.state]; + return ( + +
      + {/* @ts-ignore */} + + {JobStatusLabels[row.data.state]} +
      +
      + ); + }, + }, + { + id: "user", + renderHeaderCell: () => "User", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return <>{row.data.username}; + }, + }, + { + id: "jobType", + renderHeaderCell: () => "Job Type", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return <>{JobTypeLabels[row.data.type]}; + }, + }, + { + id: "engineSize", + renderHeaderCell: () => "Engine Size", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return <>{row.data.engineSize}; + }, + }, + { + id: "startTime", + renderHeaderCell: () => { + return ( + <> + Start Time + + ); + }, + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return <>{row.data.startedAt + " "}; + }, + }, + { + id: "duration", + renderHeaderCell: () => "Duration", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return <>{row.data.duration}; + }, + }, + { + id: "jobId", + renderHeaderCell: () => "Job ID", + renderCell: (row: any) => { + if (!row.data) { + return ; + } + return ( + <> + {row.data.id} + + + ); + }, + }, +]; diff --git a/dac/ui-common/src/components/ArcticDataset.tsx b/dac/ui-common/src/components/ArcticDataset.tsx new file mode 100644 index 0000000000..4138ca5606 --- /dev/null +++ b/dac/ui-common/src/components/ArcticDataset.tsx @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * + * @export + * @enum {string} + */ +export enum DatasetType { + Unknown = "UNKNOWN", + IcebergTable = "ICEBERG_TABLE", + DeltaLakeTable = "DELTA_LAKE_TABLE", + IcebergView = "ICEBERG_VIEW", + Namespace = "NAMESPACE", +} + +const getDatasetType = (type: string) => { + if (type === DatasetType.IcebergTable) { + return "iceberg-table"; + } else if (type === DatasetType.IcebergView) { + return "iceberg-view"; + } else return "iceberg-table"; +}; + +const ArcticDataset = (props: { type: string }) => { + return ( + + ); +}; + +export default ArcticDataset; diff --git a/dac/ui-common/src/components/ContainerSplash.tsx b/dac/ui-common/src/components/ContainerSplash.tsx index f1fd8c8aea..385c30785d 100644 --- a/dac/ui-common/src/components/ContainerSplash.tsx +++ b/dac/ui-common/src/components/ContainerSplash.tsx @@ -22,7 +22,7 @@ type ContainerSplashProps = { export const ContainerSplash = (props: ContainerSplashProps): JSX.Element => { return ( -
      +
      { + return ( +
      +
      + + {props.name} +
      +
      + {props.path} +
      +
      + Ref: {/* @ts-ignore */} + + {props.reference} +
      +
      + ); +}; + +export default DatasetSummaryOverlay; diff --git a/dac/ui-common/src/components/LeaveContext/LeaveContext.tsx b/dac/ui-common/src/components/LeaveContext/LeaveContext.tsx new file mode 100644 index 0000000000..81c7c69f07 --- /dev/null +++ b/dac/ui-common/src/components/LeaveContext/LeaveContext.tsx @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getIntlContext } from "../../contexts/IntlContext"; +import { + Button, + DialogContent, + ModalContainer, +} from "dremio-ui-lib/components"; +import { + createContext, + useContext, + useEffect, + useMemo, + useRef, + useState, +} from "react"; + +export const leaveContext = createContext({ + setNextLocation: (nextLocation: any) => {}, + stay: () => {}, + leave: () => {}, + isAllowed: (nextLocation: any) => {}, + isOpen: false, +}); + +export const useLeaveModal = ({ router, route, isDirty }: any) => { + const leaveModal = useContext(leaveContext); + useEffect(() => { + if (!isDirty) return; + + // setRouteLeaveHook returns a remove function, run it during effect cleanup + return router.setRouteLeaveHook(route, (nextLocation: any) => { + const allowed = leaveModal.isAllowed(nextLocation); + leaveModal.setNextLocation(nextLocation); + return allowed; + }); + }, [leaveModal, isDirty]); +}; + +type LeaveContextProviderProps = { + children: JSX.Element; + setNextLocation: (nextLocation: any) => void; +}; + +export const LeaveContextProvider = (props: LeaveContextProviderProps) => { + const [isOpen, setIsOpen] = useState(false); + const nextRequestedLocation = useRef(null); + const acceptedKey = useRef(null); + const { t } = getIntlContext(); + + const leave = () => { + setIsOpen(false); + acceptedKey.current = nextRequestedLocation.current; + props.setNextLocation(nextRequestedLocation.current); //Use browser history.push in consuming code + nextRequestedLocation.current = null; + acceptedKey.current = null; + }; + + const contextValue = useMemo( + () => ({ + isOpen, + isAllowed: (nextLocation: any) => { + //@ts-ignore + return nextLocation.pathname === acceptedKey.current?.pathname; + }, + setNextLocation: (nextLocation: any) => { + if (nextRequestedLocation.current) { + return; + } + nextRequestedLocation.current = nextLocation; + setIsOpen(true); + }, + stay: () => { + setIsOpen(false); + nextRequestedLocation.current = null; + }, + leave, + }), + [isOpen] + ); + + return ( + + {props.children} + {}} close={contextValue.stay}> + + + +
      + } + > + <>{t("Common.RouteLeaveDialog.Message")} + + + + ); +}; diff --git a/dac/ui-common/src/components/LoadingMoreBanner.tsx b/dac/ui-common/src/components/LoadingMoreBanner.tsx index 61603d7642..159c0c1458 100644 --- a/dac/ui-common/src/components/LoadingMoreBanner.tsx +++ b/dac/ui-common/src/components/LoadingMoreBanner.tsx @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { Spinner } from "dremio-ui-lib/dist-esm"; +import { Spinner } from "dremio-ui-lib/components"; import { getIntlContext } from "../contexts/IntlContext"; export const LoadingMoreBanner = () => { return ( diff --git a/dac/ui-common/src/components/MessageDialog.tsx b/dac/ui-common/src/components/MessageDialog.tsx index 5f3771027c..f055b590e2 100644 --- a/dac/ui-common/src/components/MessageDialog.tsx +++ b/dac/ui-common/src/components/MessageDialog.tsx @@ -20,7 +20,7 @@ import { type ModalContainerProps, type DialogContentProps, IconButton, -} from "dremio-ui-lib/dist-esm"; +} from "dremio-ui-lib/components"; type MessageDialogProps = ModalContainerProps & DialogContentProps & { expandable: boolean }; diff --git a/dac/ui-common/src/components/NetworkConnectivityBanner.tsx b/dac/ui-common/src/components/NetworkConnectivityBanner.tsx new file mode 100644 index 0000000000..048c137187 --- /dev/null +++ b/dac/ui-common/src/components/NetworkConnectivityBanner.tsx @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useNetworkAvailable } from "../utilities/useNetworkAvailable"; +import { SectionMessage } from "dremio-ui-lib/components"; +import { getIntlContext } from "../contexts/IntlContext"; + +export const NetworkConnectivityBanner = () => { + const networkAvailable = useNetworkAvailable(); + + if (!networkAvailable) { + return ( + + {getIntlContext().t("NetworkConnectivity.Offline")} + + ); + } + + return null; +}; diff --git a/dac/ui-common/src/components/PrivilegesTable/PrivilegesTable.tsx b/dac/ui-common/src/components/PrivilegesTable/PrivilegesTable.tsx new file mode 100644 index 0000000000..0d25ddb14c --- /dev/null +++ b/dac/ui-common/src/components/PrivilegesTable/PrivilegesTable.tsx @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { useMemo, useRef } from "react"; +import { createTable } from "leantable/core"; +import { Table, columnSorting, infiniteScrolling } from "leantable/react"; + +type PrivilegesTableProps = { + columns: any; + getRow: (rowIndex: number) => any; + rowCount: number; +}; + +export const PrivilegesTable = (props: PrivilegesTableProps) => { + const { columns, getRow, rowCount } = props; + + const scrolledBottomRef = useRef(() => {}); + scrolledBottomRef.current = () => {}; + + const privilegesTable = useMemo(() => { + return createTable([ + columnSorting(), + infiniteScrolling(() => scrolledBottomRef.current()), + ]); + }, []); + + return ( +
      + ); +}; diff --git a/dac/ui-common/src/components/PrivilegesTable/components/GrantOwnershipDialog/GrantOwnershipDialog.tsx b/dac/ui-common/src/components/PrivilegesTable/components/GrantOwnershipDialog/GrantOwnershipDialog.tsx new file mode 100644 index 0000000000..993507ffbb --- /dev/null +++ b/dac/ui-common/src/components/PrivilegesTable/components/GrantOwnershipDialog/GrantOwnershipDialog.tsx @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { getIntlContext } from "../../../../contexts/IntlContext"; +import { + ModalContainer, + DialogContent, + Button as NewButton, +} from "dremio-ui-lib/components"; + +type GrantOwnershipDialogProps = { + open: boolean; + closeDialog: () => void; + className: string; + granteeId?: string; + onUpdate: (id: string) => void; +}; + +const GrantOwnershipDialog = ({ + open, + closeDialog, + className, + granteeId, + onUpdate, +}: GrantOwnershipDialogProps) => { + const { t } = getIntlContext(); + const handleChangeOwner = async () => { + if (granteeId) { + onUpdate(granteeId); + } + closeDialog(); + }; + + return ( + {}} isOpen={open} close={closeDialog}> + + + {t("Common.Actions.Cancel")} + + + {t("Common.Actions.Yes")} + + + } + className={className} + > + + {t("Admin.Privileges.OwnerChangeContent")} + + + + ); +}; + +export default GrantOwnershipDialog; diff --git a/dac/ui-common/src/components/PrivilegesTable/components/RemoveGranteeDialog/RemoveGranteeDialog.tsx b/dac/ui-common/src/components/PrivilegesTable/components/RemoveGranteeDialog/RemoveGranteeDialog.tsx new file mode 100644 index 0000000000..8fab89172c --- /dev/null +++ b/dac/ui-common/src/components/PrivilegesTable/components/RemoveGranteeDialog/RemoveGranteeDialog.tsx @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { getIntlContext } from "../../../../contexts/IntlContext"; +import { + ModalContainer, + DialogContent, + Button as NewButton, +} from "dremio-ui-lib/components"; + +type RemoveGranteeDialogProps = { + open: boolean; + closeDialog: () => void; + className: string; + onRemove: (id: string) => void; + grantee?: any; +}; + +const RemoveGranteeDialog = ({ + open, + closeDialog, + className, + grantee, + onRemove, +}: RemoveGranteeDialogProps) => { + const { t } = getIntlContext(); + const handleRemoveOwner = () => { + onRemove(grantee?.granteeId); + closeDialog(); + }; + + return ( + {}} isOpen={open} close={closeDialog}> + + + {t("Common.Actions.Cancel")} + + + {t("Common.Actions.Yes")} + + + } + className={className} + > + + {t("Admin.Privileges.RemoveGranteeContent", { name: grantee?.name })} + + + + ); +}; + +export default RemoveGranteeDialog; diff --git a/dac/ui-common/src/components/PrivilegesTable/privilegesTableColumns.tsx b/dac/ui-common/src/components/PrivilegesTable/privilegesTableColumns.tsx new file mode 100644 index 0000000000..67d093e852 --- /dev/null +++ b/dac/ui-common/src/components/PrivilegesTable/privilegesTableColumns.tsx @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Skeleton } from "dremio-ui-lib/components"; +// @ts-ignore +import { Tooltip } from "dremio-ui-lib"; + +export const renderPrivilegesColumns = ({ + nameColumnLabel, + availablePrivileges, + renderPrivilegeTooltip, + renderNameCell, + renderCheckboxCell, +}: { + nameColumnLabel: string; + availablePrivileges: string[]; + renderPrivilegeTooltip: (privilege: string) => string; + renderNameCell: (data: any) => JSX.Element; + renderCheckboxCell: (data: any, privilege: string) => JSX.Element; +}) => { + return [ + { + id: nameColumnLabel, + renderHeaderCell: () => nameColumnLabel, + class: "leantable-sticky-column leantable-sticky-column--left", + renderCell: (row: any) => + row.data ? ( + renderNameCell(row.data) + ) : ( +
      + + +
      + ), + }, + ...(availablePrivileges ?? []).map((privilege: string) => ({ + id: privilege, + renderHeaderCell: () => { + return privilege ? ( + + {privilege.split("_").join(" ")} + + ) : ( + + ); + }, + renderCell: (row: any) => + row.data ? ( + renderCheckboxCell(row.data, privilege) + ) : ( + + ), + })), + { + id: "ExtraSpace", + renderHeaderCell: () => "", + renderCell: () => "", + }, + ]; +}; diff --git a/dac/ui-common/src/components/TableCells/ColumnSortIcon.tsx b/dac/ui-common/src/components/TableCells/ColumnSortIcon.tsx index 284a9271d9..f85723d4a6 100644 --- a/dac/ui-common/src/components/TableCells/ColumnSortIcon.tsx +++ b/dac/ui-common/src/components/TableCells/ColumnSortIcon.tsx @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -type SortDirection = "ascending" | "descending" | "none"; +export type SortDirection = "ascending" | "descending" | "none"; export const ColumnSortIcon = (props: { sortDirection: SortDirection }) => { switch (props.sortDirection) { case "ascending": diff --git a/dac/ui-lib/themes/dremio-light/mantine.scss b/dac/ui-common/src/components/TableCells/SortableEngineHeaderCell.tsx similarity index 60% rename from dac/ui-lib/themes/dremio-light/mantine.scss rename to dac/ui-common/src/components/TableCells/SortableEngineHeaderCell.tsx index 05cedc806e..4ba1ddc81b 100644 --- a/dac/ui-lib/themes/dremio-light/mantine.scss +++ b/dac/ui-common/src/components/TableCells/SortableEngineHeaderCell.tsx @@ -13,19 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import { useColumnSortDirection } from "leantable/react"; +import { ColumnSortIcon } from "./ColumnSortIcon"; -/* stylelint-disable selector-class-pattern */ -.mantine-Input-invalid { - border-color: var(--dremio--color--status--error--foreground) !important; - color: inherit !important; -} +type Props = { + children: React.ReactNode; + sortDirection: "ascending" | "descending" | "none"; +}; -.mantine-InputWrapper-error { - color: var(--dremio--color--status--error--foreground) !important; -} - -.mantine-InputWrapper-label { - font-weight: 400 !important; - margin-block-end: 2px !important; - inline-size: 100%; -} +export const SortableEngineHeaderCell = (props: Props) => ( +
      + {props.children} + +
      +); diff --git a/dac/ui-common/src/components/VersionContext.tsx b/dac/ui-common/src/components/VersionContext.tsx new file mode 100644 index 0000000000..a15da880d2 --- /dev/null +++ b/dac/ui-common/src/components/VersionContext.tsx @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { getShortHash } from "../utilities/versionContext"; + +export enum VersionContextTypes { + BRANCH = "BRANCH", + TAG = "TAG", + COMMIT_HASH_ONLY = "COMMIT_HASH_ONLY", + SNAPSHOT_ID = "SNAPSHOT_ID", + TIMESTAMP = "TIMESTAMP", + LATEST_VERSION = "LATEST_VERSION", + NOT_SPECIFIED = "NOT_SPECIFIED", + REFERENCE = "REFERENCE", +} + +const VERSION_CONTEXT = { + [VersionContextTypes.BRANCH]: "branch", + [VersionContextTypes.TAG]: "tag", + [VersionContextTypes.COMMIT_HASH_ONLY]: "commit", + [VersionContextTypes.SNAPSHOT_ID]: "snapshot", + [VersionContextTypes.TIMESTAMP]: "timestamp", + [VersionContextTypes.LATEST_VERSION]: "branch", + [VersionContextTypes.NOT_SPECIFIED]: "branch", + [VersionContextTypes.REFERENCE]: "branch", +}; + +export type VersionContextType = { + type: keyof typeof VersionContextTypes; + value: string; +}; + +const VersionContext = ({ + versionContext, + className, + withRefKeyword, +}: { + versionContext: VersionContextType; + className?: string; + withRefKeyword?: boolean; +}) => { + const { type, value } = versionContext; + const prefix = + type === VersionContextTypes.COMMIT_HASH_ONLY || + type === VersionContextTypes.BRANCH || + type === VersionContextTypes.TAG || + withRefKeyword + ? "Ref:" + : ""; + const refValue = + type === VersionContextTypes.COMMIT_HASH_ONLY + ? getShortHash(value || "") + : value; + + return ( +

      + {prefix} + {/* @ts-ignore */} + + {refValue} +

      + ); +}; + +export default VersionContext; diff --git a/dac/ui-common/src/contexts/SessionContext.ts b/dac/ui-common/src/contexts/SessionContext.ts index e71e056ac0..efb1e421e5 100644 --- a/dac/ui-common/src/contexts/SessionContext.ts +++ b/dac/ui-common/src/contexts/SessionContext.ts @@ -18,6 +18,7 @@ export type SessionContext = { handleInvalidSession: () => void; handleLogout: () => void; getSessionIdentifier: () => string; + getOrganizationId?: () => string; }; let sessionContext: SessionContext; diff --git a/dac/ui-common/src/contexts/SonarContext.ts b/dac/ui-common/src/contexts/SonarContext.ts index c14a272eed..67fe897b16 100644 --- a/dac/ui-common/src/contexts/SonarContext.ts +++ b/dac/ui-common/src/contexts/SonarContext.ts @@ -22,6 +22,8 @@ export type SonarContext = { //TODO Type needs to be made generic getProjectBaseRoute: () => any; + + getProjectVendorType: () => "AWS" | "AZURE"; }; let sonarContext: SonarContext; diff --git a/dac/ui-common/src/paths/admin.ts b/dac/ui-common/src/paths/admin.ts index b7ebe0b182..f659123aba 100644 --- a/dac/ui-common/src/paths/admin.ts +++ b/dac/ui-common/src/paths/admin.ts @@ -41,6 +41,6 @@ export const engineRouting = admin.extend(() => "engine-routing"); export const roles = admin.extend(() => "roles"); export const roleId = roles.extend((params: RolesParam) => `${params.roleId}`); export const biApplications = admin.extend(() => "bi-applications"); -export const sql = admin.extend(() => "sql"); +export const preferences = admin.extend(() => "preferences"); export const general = admin.extend(() => "general"); export const errorHandling = admin.extend(() => "*"); diff --git a/dac/ui-common/src/paths/common.ts b/dac/ui-common/src/paths/common.ts index 5e96dd9877..8894db96a5 100644 --- a/dac/ui-common/src/paths/common.ts +++ b/dac/ui-common/src/paths/common.ts @@ -50,6 +50,9 @@ export const dataplane = sources.extend(() => "dataplane/list"); export const arcticSource = sources.extend( (params: SourceNameParam) => `arctic/${params.sourceName}` ); +export const nessieSource = sources.extend( + (params: SourceNameParam) => `nessie/${params.sourceName}` +); export const source = projectBase.extend( (params: ResourceIdParam) => `source/${params.resourceId}` ); diff --git a/dac/ui-common/src/paths/organization.ts b/dac/ui-common/src/paths/organization.ts index 30aa38bdfe..a3b6c773ab 100644 --- a/dac/ui-common/src/paths/organization.ts +++ b/dac/ui-common/src/paths/organization.ts @@ -23,24 +23,25 @@ type InviteParam = { inviteId: string }; export const organization = defineRoute(() => "/organization"); /** Org Settings */ -export const setting = organization.extend(() => "settings"); -export const general = setting.extend(() => "general"); -export const projects = setting.extend(() => "projects"); -export const clouds = setting.extend(() => "clouds"); -export const billing = setting.extend(() => "billing"); +export const settings = organization.extend(() => "settings"); +export const general = settings.extend(() => "general"); +export const projects = settings.extend(() => "projects"); +export const clouds = settings.extend(() => "clouds"); +export const billing = settings.extend(() => "billing"); export const billingAccountId = billing.extend( (params: BillingAccountIdParam) => `${params.billingAccountId}` ); -export const usage = setting.extend(() => "usage"); -export const authentication = setting.extend(() => "authentication"); -export const biApplications = setting.extend(() => "bi-applications"); -export const externalTokens = 
setting.extend(() => "external-tokens"); -export const oauthApps = setting.extend(() => "oauth-applications"); -export const users = setting.extend(() => "users"); +export const usage = settings.extend(() => "usage"); +export const authentication = settings.extend(() => "authentication"); +export const biApplications = settings.extend(() => "bi-applications"); +export const externalTokens = settings.extend(() => "external-tokens"); +export const oauthApps = settings.extend(() => "oauth-applications"); +export const users = settings.extend(() => "users"); export const userId = users.extend((params: UserIdParam) => `${params.userId}`); -export const roles = setting.extend(() => "roles"); +export const roles = settings.extend(() => "roles"); +export const privileges = settings.extend(() => "privileges"); export const roleId = roles.extend((params: RoleIdParam) => `${params.roleId}`); -export const errorHandling = setting.extend(() => "*"); +export const errorHandling = settings.extend(() => "*"); export const invite = defineRoute( (params: InviteParam) => `/invite/${params.inviteId}` diff --git a/dac/ui-common/src/sonar/components/ReflectionStatus.tsx b/dac/ui-common/src/sonar/components/ReflectionStatus.tsx index a89a4a76f3..7b34cd0ffe 100644 --- a/dac/ui-common/src/sonar/components/ReflectionStatus.tsx +++ b/dac/ui-common/src/sonar/components/ReflectionStatus.tsx @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { Spinner, Tooltip } from "dremio-ui-lib/dist-esm"; +import { Spinner, Tooltip } from "dremio-ui-lib/components"; import { getIntlContext } from "../../contexts/IntlContext"; const ICON_BASE = "/static/icons/dremio"; diff --git a/dac/ui-common/src/sonar/components/ReflectionType.tsx b/dac/ui-common/src/sonar/components/ReflectionType.tsx index 4fd3b803be..b69d244090 100644 --- a/dac/ui-common/src/sonar/components/ReflectionType.tsx +++ b/dac/ui-common/src/sonar/components/ReflectionType.tsx @@ -13,24 +13,27 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -const ICON_BASE = "/static/icons/dremio"; -type ReflectionTypeType = "RAW" | "AGGREGATION"; +import { ReflectionSummary } from "../reflections/ReflectionSummary.type"; + +const ICON_BASE = "/static/icons/dremio"; type ReflectionTypeProps = { - type: ReflectionTypeType; + type: ReflectionSummary.ReflectionTypeEnum; }; -const getConfigForReflectionType = (type: ReflectionTypeType) => { +const getConfigForReflectionType = ( + type: ReflectionSummary.ReflectionTypeEnum +) => { const icon = (() => { switch (type) { - case "RAW": + case ReflectionSummary.ReflectionTypeEnum.RAW: return { iconName: "interface/reflection-raw-mode", // iconTooltip: "Raw Reflection", label: "Raw", }; - case "AGGREGATION": + case ReflectionSummary.ReflectionTypeEnum.AGGREGATION: return { iconName: "interface/reflection-aggregate", // iconTooltip: "Aggregation Reflection", diff --git a/dac/ui-common/src/sonar/components/ReflectionsTable/ReflectionsTable.tsx b/dac/ui-common/src/sonar/components/ReflectionsTable/ReflectionsTable.tsx index 8f192b4535..f70c721206 100644 --- a/dac/ui-common/src/sonar/components/ReflectionsTable/ReflectionsTable.tsx +++ b/dac/ui-common/src/sonar/components/ReflectionsTable/ReflectionsTable.tsx @@ -26,17 +26,13 @@ type ReflectionsTableProps = { columns: any; onScrolledBottom?: () => void; onColumnsSorted: (sortedColumns: any) => void; - reflections: any[]; pollingCache: Map; + getRow: (rowIndex: number) => any; + rowCount: number; }; export const ReflectionsTable = (props: ReflectionsTableProps) => { - const { - columns, - reflections, - pollingCache = new Map(), - onScrolledBottom = () => {}, - } = props; + const { columns, onScrolledBottom = () => {}, getRow, rowCount } = props; const scrolledBottomRef = useRef(onScrolledBottom); scrolledBottomRef.current = onScrolledBottom; @@ -48,22 +44,6 @@ export const ReflectionsTable = (props: ReflectionsTableProps) => { ]); }, []); - const getRow = useCallback( - (rowIndex: number) => { - const data = reflections[rowIndex]; - const polledData = - data?.id && pollingCache.has(data.id) - ? pollingCache.get(data.id) - : data; - - return { - id: data?.id || rowIndex, - data: polledData || null, - }; - }, - [reflections, pollingCache] - ); - const sortedColumns = useExternalStoreState( reflectionsTable.store, (state: any) => state.sortedColumns @@ -79,7 +59,7 @@ export const ReflectionsTable = (props: ReflectionsTableProps) => { className="leantable--fixed-header" columns={columns} getRow={getRow} - rowCount={reflections.length} + rowCount={rowCount} /> ); }; diff --git a/dac/ui-common/src/sonar/components/ReflectionsTable/reflectionsTableColumns.tsx b/dac/ui-common/src/sonar/components/ReflectionsTable/reflectionsTableColumns.tsx index f9f8f8205a..a135035a9b 100644 --- a/dac/ui-common/src/sonar/components/ReflectionsTable/reflectionsTableColumns.tsx +++ b/dac/ui-common/src/sonar/components/ReflectionsTable/reflectionsTableColumns.tsx @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import { type Column } from "leantable/react"; -import { IconButton, Skeleton, Tooltip } from "dremio-ui-lib/dist-esm"; +import { IconButton, Skeleton, Tooltip } from "dremio-ui-lib/components"; import { ReflectionType } from "../ReflectionType"; import { formatBytes } from "../../../utilities/formatBytes"; import { formatDuration } from "../../../utilities/formatDuration"; @@ -25,6 +25,9 @@ import { getIntlContext } from "../../../contexts/IntlContext"; import { SortableHeaderCell } from "../../../components/TableCells/SortableHeaderCell"; import { NullCell } from "../../../components/TableCells/NullCell"; import { ClickableCell } from "../../../components/TableCells/ClickableCell"; +import { jobs } from "../../../paths/jobs"; +import { getSonarContext } from "../../../contexts/SonarContext"; +import { type ReflectionSummary } from "../../reflections/ReflectionSummary.type"; export const getReflectionColumnLabels = () => { const { t } = getIntlContext(); @@ -58,7 +61,13 @@ export const reflectionsTableColumns = ({ onReflectionDelete: (id: string) => void; onRowClick: (id: string) => void; renderDataset: any; -}): Column[] => { +}): Column< + ReflectionSummary & { + chosenJobsFilters: any; + consideredJobsFilters: any; + matchedJobsFilters: any; + } +>[] => { const reflectionColumnLabels = getReflectionColumnLabels(); return [ { @@ -71,7 +80,16 @@ export const reflectionsTableColumns = ({ ), renderCell: (row) => { if (!row.data) { - return ; + return ( +
      + + +
      + ); } return ( onRowClick(row.id)}> @@ -95,7 +113,14 @@ export const reflectionsTableColumns = ({ row.data ? ( ) : ( - +
      + + +
      ), sortable: true, }, @@ -110,7 +135,16 @@ export const reflectionsTableColumns = ({ }, renderCell: (row) => { if (!row.data) { - return ; + return ( +
      + + +
      + ); } return renderDataset(row); }, @@ -383,7 +417,14 @@ export const reflectionsTableColumns = ({ {row.data ? ( canViewJobs ? ( - + {row.data.consideredCount.toLocaleString()} ) : ( @@ -419,7 +460,14 @@ export const reflectionsTableColumns = ({ {row.data ? ( canViewJobs ? ( - + {row.data.matchedCount.toLocaleString()} ) : ( @@ -459,7 +507,14 @@ export const reflectionsTableColumns = ({ {row.data ? ( canViewJobs ? ( - + {row.data.chosenCount.toLocaleString()} ) : ( diff --git a/dac/ui-common/src/sonar/components/RemoveReflectionDialog.tsx b/dac/ui-common/src/sonar/components/RemoveReflectionDialog.tsx index bfeb7136c5..32096b6003 100644 --- a/dac/ui-common/src/sonar/components/RemoveReflectionDialog.tsx +++ b/dac/ui-common/src/sonar/components/RemoveReflectionDialog.tsx @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { DialogContent, Button } from "dremio-ui-lib/dist-esm"; +import { DialogContent, Button } from "dremio-ui-lib/components"; import { getIntlContext } from "../../contexts/IntlContext"; type RemoveReflectionDialogProps = { diff --git a/dac/ui-common/src/sonar/types/ReflectionSummary.type.ts b/dac/ui-common/src/sonar/reflections/ReflectionSummary.type.ts similarity index 96% rename from dac/ui-common/src/sonar/types/ReflectionSummary.type.ts rename to dac/ui-common/src/sonar/reflections/ReflectionSummary.type.ts index 4f1d633520..dede0c1f84 100644 --- a/dac/ui-common/src/sonar/types/ReflectionSummary.type.ts +++ b/dac/ui-common/src/sonar/reflections/ReflectionSummary.type.ts @@ -119,31 +119,31 @@ export interface ReflectionSummary { * @type {number} * @memberof ReflectionSummary */ - currentSizeBytes?: number; + currentSizeBytes: number; /** * Output records of the latest reflection. * @type {number} * @memberof ReflectionSummary */ - outputRecordCount?: number; + outputRecords: number; /** * The data size (in bytes) of all reflection jobs that have not been pruned (if any exist). The value is generated by Dremio and is immutable. * @type {number} * @memberof ReflectionSummary */ - totalSizeBytes?: number; + totalSizeBytes: number; /** * Whether to allow using the reflection to accelerate queries. * @type {boolean} * @memberof ReflectionSummary */ - enabled: boolean; + isEnabled: boolean; /** * DEPRECATED - Whether Dremio converts data from your reflection’s Parquet files to the Apache Arrow format when copying that data to executor nodes. * @type {boolean} * @memberof ReflectionSummary */ - arrowCachingEnabled?: boolean; + isArrowCachingEnabled?: boolean; /** * The id of the dataset the reflection is for. Immutable after creation. 
* @type {string} @@ -167,25 +167,25 @@ export interface ReflectionSummary { * @type {ReflectionSummaryStatus} * @memberof ReflectionSummary */ - status?: ReflectionSummaryStatus; + status: ReflectionSummaryStatus; /** * Whether current user has view privilege on the reflection * @type {boolean} * @memberof ReflectionSummary */ - canView: boolean; + isCanView: boolean; /** * Whether current user has alter privilege on the reflection * @type {boolean} * @memberof ReflectionSummary */ - canAlter: boolean; + isCanAlter: boolean; /** * Number of jobs that considered this reflection because of overlapping tables or views * @type {number} * @memberof ReflectionSummary */ - consideredCount?: number; + consideredCount: number; /** * Link to jobs that considered this reflection * @type {string} @@ -197,7 +197,7 @@ export interface ReflectionSummary { * @type {number} * @memberof ReflectionSummary */ - matchedCount?: number; + matchedCount: number; /** * Link to jobs that matched this reflection * @type {string} @@ -209,7 +209,7 @@ export interface ReflectionSummary { * @type {number} * @memberof ReflectionSummary */ - chosenCount?: number; + chosenCount: number; /** * Link to jobs that chose this reflection * @type {string} @@ -229,7 +229,7 @@ export namespace ReflectionSummary { */ export enum ReflectionTypeEnum { RAW = "RAW", - AGGREGATE = "AGGREGATE", + AGGREGATION = "AGGREGATION", } /** * @export @@ -270,7 +270,7 @@ export interface ReflectionSummaryStatus { * @type {Date} * @memberof ReflectionSummaryStatus */ - expiresAt: Date; + expiresAt: Date | null; /** * Number of consecutive reflection creation failures. The value is generated by Dremio and is immutable. * @type {number} @@ -282,7 +282,7 @@ export interface ReflectionSummaryStatus { * @type {Date} * @memberof ReflectionSummaryStatus */ - lastDataFetchAt: Date; + lastDataFetchAt: Date | null; /** * Latest refresh status. The value is generated by Dremio and is immutable. * @type {string} diff --git a/dac/ui-common/src/sonar/reflections/reflectionIsRunning.ts b/dac/ui-common/src/sonar/reflections/reflectionIsRunning.ts new file mode 100644 index 0000000000..d33f91a87e --- /dev/null +++ b/dac/ui-common/src/sonar/reflections/reflectionIsRunning.ts @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { + type ReflectionSummary, + ReflectionSummaryStatus, +} from "./ReflectionSummary.type"; + +export const reflectionIsRunning = (reflection: ReflectionSummary) => + reflection.status.refreshStatus === + ReflectionSummaryStatus.RefreshStatusEnum.RUNNING || + (reflection.isEnabled && + reflection.status.configStatus === + ReflectionSummaryStatus.ConfigStatusEnum.OK && + reflection.status.refreshStatus === + ReflectionSummaryStatus.RefreshStatusEnum.SCHEDULED && + reflection.status.availabilityStatus === + ReflectionSummaryStatus.AvailabilityStatusEnum.NONE); diff --git a/dac/ui-common/src/types/VersionContext.types.ts b/dac/ui-common/src/types/VersionContext.types.ts new file mode 100644 index 0000000000..729a243ee3 --- /dev/null +++ b/dac/ui-common/src/types/VersionContext.types.ts @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export type VersionContext = { + type: "BRANCH" | "TAG" | "DETACHED"; + value: string; +}; diff --git a/dac/ui-common/src/utilities/appFetch.ts b/dac/ui-common/src/utilities/appFetch.ts new file mode 100644 index 0000000000..5e88a0b073 --- /dev/null +++ b/dac/ui-common/src/utilities/appFetch.ts @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
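// Usage sketch (not part of this change): the reflectionIsRunning predicate above
// reports a reflection as running while a refresh job is active, or while an
// enabled, OK-configured reflection is scheduled but has no data available yet.
// Filtering a polled summary list with it might look like this; the helper name
// and the list source are assumptions:
//
//   import { type ReflectionSummary } from "./ReflectionSummary.type";
//   import { reflectionIsRunning } from "./reflectionIsRunning";
//
//   const anyReflectionsRunning = (summaries: ReflectionSummary[]): boolean =>
//     summaries.some(reflectionIsRunning);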
+ */ + +import { UnauthorizedError } from "../errors/UnauthorizedError"; +import { HttpError } from "../errors/HttpError"; +import { narrowHttpError } from "../errors/narrowHttpError"; +import { getSessionContext } from "../contexts/SessionContext.js"; +import { BadGatewayError } from "../errors/BadGatewayError"; +import { ServiceUnavailableError } from "../errors/ServiceUnavailableError"; +import { GatewayTimeoutError } from "../errors/GatewayTimeoutError"; +import { waitForServerReachable } from "../utilities/waitForServerReachable.js"; + +const MAX_RETRIES = 4; + +const getRetryDuration = (error: HttpError, triesRemaining: number): number => { + if (error.res.headers.get("retry-after")) { + // ignore for now + } + return 2 ** (MAX_RETRIES - triesRemaining) * 1000; +}; + +const isRetryableError = (error: HttpError) => { + return ( + error instanceof BadGatewayError || + error instanceof ServiceUnavailableError || + error instanceof GatewayTimeoutError + ); +}; + +export const appFetch = (input: RequestInfo | URL, init?: RequestInit) => { + const startFetch = ( + triesRemaining: number + ): ReturnType => { + return fetch(input, init) + .then(async (res) => { + if (!res.ok) { + const error = await narrowHttpError(new HttpError(res)); + if (isRetryableError(error) && triesRemaining) { + await new Promise((resolve) => + setTimeout(resolve, getRetryDuration(error, triesRemaining)) + ); + return startFetch(triesRemaining - 1); + } + throw error; + } + return res; + }) + .catch((err) => { + if (err instanceof TypeError || err.message === "Failed to fetch") { + return waitForServerReachable().then(() => + startFetch(triesRemaining) + ); + } + + if (err instanceof UnauthorizedError) { + getSessionContext().handleInvalidSession(); + return new Promise(() => {}); + } + + throw err; + }); + }; + + return startFetch(MAX_RETRIES); +}; + +/** + * @deprecated This version of `appFetch` allows old error handling code that expects to decode + * the response itself to continue working. + */ +export const appFetchWithoutErrorHandling = ( + input: RequestInfo | URL, + init?: RequestInit +) => { + const startFetch = ( + triesRemaining: number + ): ReturnType => { + return fetch(input, init) + .then(async (res) => { + if (!res.ok) { + throw res; + } + return res; + }) + .catch((err) => { + if (err instanceof TypeError || err.message === "Failed to fetch") { + return waitForServerReachable().then(() => + startFetch(triesRemaining) + ); + } + + throw err; + }); + }; + + return startFetch(MAX_RETRIES); +}; diff --git a/dac/ui-lib/.storybook/preview.js b/dac/ui-common/src/utilities/datasetReference.ts similarity index 67% rename from dac/ui-lib/.storybook/preview.js rename to dac/ui-common/src/utilities/datasetReference.ts index ac273bf903..1a18c403e4 100644 --- a/dac/ui-lib/.storybook/preview.js +++ b/dac/ui-common/src/utilities/datasetReference.ts @@ -13,13 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
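// Usage sketch (not part of this change): appFetch above resolves with a plain
// Response, retrying 502/503/504 responses with exponential backoff (1s, 2s, 4s,
// 8s across MAX_RETRIES = 4 retries) and waiting for the server to become
// reachable again before retrying network-level failures. The endpoint path and
// response shape below are assumptions:
//
//   import { appFetch } from "./appFetch";
//
//   type JobSummary = { id: string; state: string };
//
//   const fetchJobs = (): Promise<JobSummary[]> =>
//     appFetch("/api/jobs").then((res) => res.json());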
*/ -import "!style-loader!css-loader!sass-loader!../themes/dremio-light/index.scss"; -export const parameters = { - actions: { argTypesRegex: "^on[A-Z].*" }, - controls: { - matchers: { - color: /(background|color)$/i, - date: /Date$/, - }, - }, + +import { type VersionContext } from "../types/VersionContext.types"; + +export const getVersionContextFromId = ( + id: string +): VersionContext | undefined => { + try { + const datasetReference = JSON.parse(id); + return datasetReference.versionContext; + } catch (e) { + return; + } }; diff --git a/dac/ui-common/src/utilities/useNetworkAvailable.ts b/dac/ui-common/src/utilities/useNetworkAvailable.ts new file mode 100644 index 0000000000..2d823ee1b1 --- /dev/null +++ b/dac/ui-common/src/utilities/useNetworkAvailable.ts @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useEffect, useState } from "react"; + +export const isNetworkAvailable = () => window.navigator.onLine; + +export const useNetworkAvailable = () => { + const [networkAvailable, setNetworkAvailable] = useState(isNetworkAvailable); + + useEffect(() => { + const handleOnline = () => { + setNetworkAvailable(true); + }; + + const handleOffline = () => { + setNetworkAvailable(false); + }; + + window.addEventListener("online", handleOnline); + window.addEventListener("offline", handleOffline); + + return () => { + window.removeEventListener("online", handleOnline); + window.removeEventListener("offline", handleOffline); + }; + }, []); + + return networkAvailable; +}; + +export const waitForNetworkAvailable = (): Promise => { + if (isNetworkAvailable()) { + return Promise.resolve(); + } + return new Promise((resolve) => { + window.addEventListener( + "online", + () => { + resolve(); + }, + { once: true } + ); + }); +}; diff --git a/dac/ui-common/src/utilities/versionContext.ts b/dac/ui-common/src/utilities/versionContext.ts new file mode 100644 index 0000000000..610a3ebfaf --- /dev/null +++ b/dac/ui-common/src/utilities/versionContext.ts @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { VersionContext } from "../types/VersionContext.types"; + +export function getShortHash(hash?: string) { + return hash && hash.length > 6 ? 
hash.substring(0, 8) : hash; +} + +export function isDefaultBranch(versionContext?: VersionContext) { + return versionContext?.type === "BRANCH" && versionContext.value === "main"; +} diff --git a/dac/ui-common/src/utilities/waitForServerReachable.ts b/dac/ui-common/src/utilities/waitForServerReachable.ts new file mode 100644 index 0000000000..79d97b4f0c --- /dev/null +++ b/dac/ui-common/src/utilities/waitForServerReachable.ts @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import moize from "moize"; +import { waitForNetworkAvailable } from "./useNetworkAvailable"; + +const CHECK_INTERVAL = 1500; + +/** + * Check if the UI application server (origin) is reachable without + * a fetch error. The response should be memoized so that if this + * function gets called in multiple places we only make one request at a time. + */ +const canReachOrigin = moize.promise( + () => + fetch(window.location.origin) + .then(() => true) + .catch(() => false), + { maxAge: CHECK_INTERVAL / 2 } +); + +/** + * In some cases the network may time out in which case it can be + * better to retry the request rather than continuing to wait. + */ +const failAfterTimeout = (timeout: number) => + new Promise((_, reject) => setTimeout(() => reject(), timeout)) + .then(() => true) + .catch(() => false); + +export const isServerReachable = (): Promise => + Promise.race([canReachOrigin(), failAfterTimeout(10000)]); + +/** + * Returns a promise that resolves once connectivity is restored + */ +export const waitForServerReachable = (): Promise => { + return new Promise((resolve) => { + const run = () => { + setTimeout(async () => { + await waitForNetworkAvailable(); + if (await isServerReachable()) { + resolve(); + } else { + run(); + } + }, CHECK_INTERVAL); + }; + run(); + }); +}; diff --git a/dac/ui-lib/.gitignore b/dac/ui-lib/.gitignore index 03c957c719..8560016dde 100644 --- a/dac/ui-lib/.gitignore +++ b/dac/ui-lib/.gitignore @@ -23,7 +23,7 @@ yarn-error.log* package-lock.json dist-* -example/manifest.json +iconmanifest.json !.config !.npmrc !.storybook diff --git a/dac/ui-lib/.npmrc b/dac/ui-lib/.npmrc deleted file mode 100644 index 11659fdad3..0000000000 --- a/dac/ui-lib/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -# At this time we're packing node_modules into a .jar so we can't use symlinks to the global store -node-linker=hoisted diff --git a/dac/ui-lib/components/Button/Button.tsx b/dac/ui-lib/components/Button/Button.tsx index 91a963867d..387da60a0f 100644 --- a/dac/ui-lib/components/Button/Button.tsx +++ b/dac/ui-lib/components/Button/Button.tsx @@ -19,6 +19,7 @@ import * as React from "react"; import clsx from "clsx"; import { forwardRef, type ReactNode } from "react"; import { Spinner } from "../Spinner/Spinner"; +import { Tooltip } from "../Tooltip/Tooltip"; type ButtonProps = { as?: string | React.FunctionComponent | React.ComponentClass; @@ -48,6 +49,11 @@ type ButtonProps = { */ success?: boolean; + /** + * If defined, a tooltip will 
be added to the button + */ + tooltip?: string; + /** * The visual style of the button */ @@ -84,6 +90,7 @@ export const Button = forwardRef( variant, pending, type = "button", + tooltip, ...rest } = props; const buttonProps = @@ -109,7 +116,13 @@ export const Button = forwardRef( }, <> {prefix &&
      {prefix}
      } -
      {getContent(props)}
      + {tooltip ? ( + +
      {getContent(props)}
      +
      + ) : ( +
      {getContent(props)}
      + )} {suffix &&
      {suffix}
      } ); diff --git a/dac/ui-lib/components/Checkbox.tsx b/dac/ui-lib/components/Checkbox.tsx new file mode 100644 index 0000000000..764150ca6b --- /dev/null +++ b/dac/ui-lib/components/Checkbox.tsx @@ -0,0 +1,18 @@ +import clsx from "clsx"; +import * as React from "react"; + +type CheckboxProps = { + className?: string; + label?: string; +}; + +export const Checkbox = (props: CheckboxProps) => ( + +); diff --git a/dac/ui-lib/components/CopyButton.tsx b/dac/ui-lib/components/CopyButton.tsx index 0041f475f3..38d33bd183 100644 --- a/dac/ui-lib/components/CopyButton.tsx +++ b/dac/ui-lib/components/CopyButton.tsx @@ -17,24 +17,38 @@ import * as React from "react"; import { IconButton } from "./IconButton"; import { CopyContainer } from "./CopyContainer"; +import { type Placement } from "@floating-ui/react-dom-interactions"; type Props = { contents: string; + className?: string; + size?: "S" | "L"; + placement?: Placement; }; export const CopyButton = (props: Props) => { + const { size = "S", contents, className, placement } = props; + let copyButtonStyle; + switch (size) { + case "S": + copyButtonStyle = { + blockSize: "1.25em", + inlineSize: "1.25em", + }; + break; + case "L": + copyButtonStyle = { + blockSize: "1.75em", + inlineSize: "1.75em", + }; + break; + } + return ( - - + + {/*@ts-ignore*/} - + {/*@ts-ignore*/} diff --git a/dac/ui-lib/components/CopyContainer.tsx b/dac/ui-lib/components/CopyContainer.tsx index 4640e2b31d..9caa7b23d7 100644 --- a/dac/ui-lib/components/CopyContainer.tsx +++ b/dac/ui-lib/components/CopyContainer.tsx @@ -17,6 +17,8 @@ import * as React from "react"; import { Tooltip } from "./Tooltip/Tooltip"; import { useHasClipboardPermissions } from "./utilities/useHasClipboardPermissions"; +import copy from "copy-to-clipboard"; +import { type Placement } from "@floating-ui/react-dom-interactions"; const writeToClipboard = (text: string): Promise => { return navigator.clipboard.writeText(text); @@ -25,19 +27,16 @@ const writeToClipboard = (text: string): Promise => { type Props = { children: JSX.Element; contents: string; + placement?: Placement; }; export const CopyContainer = (props: Props) => { const hasPermission = useHasClipboardPermissions(); const [hasCopied, setHasCopied] = React.useState(false); - if (!hasPermission) { - return null; - } - return ( Copied : Copy} onClose={() => { setHasCopied(false); @@ -45,7 +44,9 @@ export const CopyContainer = (props: Props) => { > {React.cloneElement(props.children, { onClick: () => { - writeToClipboard(props.contents); + hasPermission + ? writeToClipboard(props.contents) + : copy(props.contents); setHasCopied(true); }, })} diff --git a/dac/ui-lib/components/ErrorDisplay/CollapsibleStacktrace.tsx b/dac/ui-lib/components/ErrorDisplay/CollapsibleStacktrace.tsx new file mode 100644 index 0000000000..6c326212ab --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/CollapsibleStacktrace.tsx @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
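// Usage sketch (not part of this change): with the CopyContainer change above, a
// missing Clipboard API permission no longer renders nothing; the component now
// falls back to the execCommand-based copy-to-clipboard package instead. The
// wrapped trigger element is cloned and given the onClick handler; the button
// markup below is an assumption:
//
//   import { CopyContainer } from "./CopyContainer";
//
//   const ShareLink = (props: { url: string }) => (
//     <CopyContainer contents={props.url} placement="top">
//       <button type="button">Copy link</button>
//     </CopyContainer>
//   );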
+ */ +//@ts-nocheck +/* eslint-disable react/prop-types */ +import * as React from "react"; +import { useCollapsed } from "./useCollapsed"; +import { IconButton } from "../IconButton"; + +export const CollapsibleStacktrace = (props) => { + const [collapsed, toggleCollapsed] = useCollapsed(true); + + return ( + +

      + + + {props.title} + +

      + +
      + ); +}; diff --git a/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.tsx b/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.tsx new file mode 100644 index 0000000000..802dff521e --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.tsx @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as React from "react"; +import type { ErrorDisplayProps } from "./ErrorDisplay.type"; +import { renderDevInfo } from "./renderDevInfo"; +import { renderProdInfo } from "./renderProdInfo"; + +const narwhalErrorIcon = ( + //@ts-ignore + +); + +/** + * An error overlay component providing support information (in production mode) + * or stack traces (in development mode). Can be used inside of an ErrorBoundary + * or rendered directly as a standalone component. + */ +export const ErrorDisplay: React.FC = (props) => { + return ( +
      +
      +
      + {narwhalErrorIcon} +
      {props.title}
      +
      + {props.production + ? renderProdInfo({ + supportMessage: props.supportMessage, + renderSupportInfo: props.renderSupportInfo, + }) + : renderDevInfo({ error: props.error, errorInfo: props.errorInfo })} +
      +
      + ); +}; diff --git a/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.type.ts b/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.type.ts new file mode 100644 index 0000000000..d1e8183bc0 --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/ErrorDisplay.type.ts @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export type ErrorDisplayProps = { + details?: React.ReactNode; + error: Error; + errorInfo?: { componentStack: string }; + production?: boolean; + renderSupportInfo?: () => JSX.Element; + + // A customized support message depending on the product edition + supportMessage: string; + title: string; +}; diff --git a/dac/ui-lib/components/ErrorDisplay/renderDevInfo.tsx b/dac/ui-lib/components/ErrorDisplay/renderDevInfo.tsx new file mode 100644 index 0000000000..5b47278bbc --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/renderDevInfo.tsx @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +//@ts-nocheck +import * as React from "react"; +import { CollapsibleStacktrace } from "./CollapsibleStacktrace"; +export const renderDevInfo = ({ error, errorInfo }) => { + return ( + +
      {error.message}
      + {errorInfo?.componentStack && ( + + )} +
      + ); +}; diff --git a/dac/ui-lib/components/ErrorDisplay/renderProdInfo.tsx b/dac/ui-lib/components/ErrorDisplay/renderProdInfo.tsx new file mode 100644 index 0000000000..5018828f9d --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/renderProdInfo.tsx @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +//@ts-nocheck +import * as React from "react"; +export const renderProdInfo = ({ renderSupportInfo, supportMessage }) => { + return ( + +

      + {supportMessage} +

      + {renderSupportInfo?.()} +
      + ); +}; diff --git a/dac/ui-lib/components/ErrorDisplay/useCollapsed.ts b/dac/ui-lib/components/ErrorDisplay/useCollapsed.ts new file mode 100644 index 0000000000..5436e0bdb8 --- /dev/null +++ b/dac/ui-lib/components/ErrorDisplay/useCollapsed.ts @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useCallback, useState } from "react"; + +export const useCollapsed = (initialState = false) => { + const [collapsed, setCollapsed] = useState(initialState); + + const toggleCollapsed = useCallback(() => { + setCollapsed((prev) => !prev); + }, [setCollapsed]); + + return [collapsed, toggleCollapsed]; +}; diff --git a/dac/ui-lib/components/FloatingContainer.tsx b/dac/ui-lib/components/FloatingContainer.tsx new file mode 100644 index 0000000000..dab3af9e21 --- /dev/null +++ b/dac/ui-lib/components/FloatingContainer.tsx @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//@ts-nocheck +import React from "react"; +import clsx from "clsx"; +import { + useFloating, + flip, + autoUpdate, + size, + offset, +} from "@floating-ui/react-dom"; +import { CSSTransition } from "react-transition-group"; +import { cloneElement } from "react"; +import mergeRefs from "react-merge-refs"; + +export const FloatingContainer = (props) => { + const { x, y, strategy, refs } = useFloating({ + middleware: [ + offset(4), + flip(), + size({ + apply({ rects, elements }) { + Object.assign(elements.floating.style, { + width: `${rects.reference.width}px`, + }); + }, + }), + ], + strategy: "fixed", + whileElementsMounted: autoUpdate, + }); + const { trigger, children, isOpen } = props; + return ( + <> + {cloneElement(trigger, { + ref: mergeRefs([trigger.ref, refs.setReference]), + })} + + refs.floating.current.addEventListener("transitionend", done, false) + } + > + {cloneElement(children, { + className: clsx("float-container", children.props.className), + ref: mergeRefs([children.ref, refs.setFloating]), + style: { + ...children.props.style, + ...(!isOpen && { display: "none" }), + position: strategy, + top: y ?? 0, + left: x ?? 
0, + }, + })} + + + ); +}; diff --git a/dac/ui-lib/components/IconButton.tsx b/dac/ui-lib/components/IconButton.tsx index 3aa9211f95..58bad38b62 100644 --- a/dac/ui-lib/components/IconButton.tsx +++ b/dac/ui-lib/components/IconButton.tsx @@ -84,6 +84,7 @@ export const IconButton = React.forwardRef( onClose={onTooltipClose} portal={tooltipPortal} delay={tooltipDelay} + shouldWrapChildren > {ButtonElement}
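// Usage sketch (not part of this change): `shouldWrapChildren`, passed by
// IconButton's Tooltip usage above and defined in the Tooltip diff further down,
// wraps the trigger in a span so the tooltip still opens over a disabled element,
// which otherwise swallows the pointer events the floating reference listens for.
// A sketch assuming IconButton forwards its `tooltip` prop and native button
// attributes; the copy and icon child are invented:

import * as React from "react";
import { IconButton } from "./IconButton";

const DeleteReflectionButton = (props: {
  canDelete: boolean;
  onDelete: () => void;
}) => (
  <IconButton
    tooltip={props.canDelete ? "Delete" : "Insufficient privileges"}
    disabled={!props.canDelete}
    onClick={props.onDelete}
  >
    {/* icon child assumed */}
    <span aria-hidden="true">✕</span>
  </IconButton>
);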
      diff --git a/dac/ui-lib/components/Input.tsx b/dac/ui-lib/components/Input.tsx new file mode 100644 index 0000000000..e6b0025f32 --- /dev/null +++ b/dac/ui-lib/components/Input.tsx @@ -0,0 +1,54 @@ +//@ts-nocheck +import { forwardRef, HTMLProps, useRef, useState } from "react"; +import mergeRefs from "react-merge-refs"; +import clsx from "clsx"; + +type InputProps = Omit, "prefix"> & { + wrapperRef?: any; + clearable?: boolean; + prefix?: JSX.Element; + suffix?: JSX.Element; +}; + +export const Input = forwardRef((props, ref) => { + const inputEl = useRef(null); + const [internalValue, setInternalValue] = useState(props.value); + const { wrapperRef, clearable, prefix, suffix, style, className, ...rest } = + props; + const handleClear = (e) => { + e.preventDefault(); + e.stopPropagation(); + setInternalValue(""); + inputEl.current.value = ""; + inputEl.current.dispatchEvent(new Event("input", { bubbles: true })); + inputEl.current.focus(); + }; + return ( +
      + {prefix} + { + setInternalValue(e.target.value); + props.onChange?.(e); + }} + /> + {suffix} + {clearable && internalValue?.length > 0 && ( + + )} +
      + ); +}); diff --git a/dac/ui-lib/components/MessageDetails.tsx b/dac/ui-lib/components/MessageDetails.tsx new file mode 100644 index 0000000000..cc3d48e414 --- /dev/null +++ b/dac/ui-lib/components/MessageDetails.tsx @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* eslint-disable react/prop-types */ +import * as React from "react"; +import { forwardRef } from "react"; +import clsx from "clsx"; + +type Props = { + renderButton?: ( + show: boolean, + setShow: (newVal: boolean) => void + ) => JSX.Element; + message: JSX.Element | string; + details?: JSX.Element | string; + show?: boolean; // Initialize shown +} & React.HTMLAttributes; + +/** + * For use with SectionMessage, MessageDetails provides a way to progressively disclose error/warning/info details to the user. + */ + +export const MessageDetails = forwardRef( + (props, ref): JSX.Element => { + const { + className, + renderButton = (show: boolean, setShow: (newVal: boolean) => void) => { + const onClick = (e: any) => { + e.preventDefault(); + e.stopPropagation(); + setShow(!show); + }; + return ( + + {show ? "Show less" : "Show more"} + + ); + }, + message, + details, + show: showProp = false, + ...rest + } = props; + const [show, setShow] = React.useState(showProp); + + return ( +
      +
      +
      + {message} + {details && ( + + {renderButton(show, setShow)} + + )} +
      + {show && ( +
      + {details} +
      + )} +
      +
      + ); + } +); diff --git a/dac/ui-lib/components/PasswordInput.tsx b/dac/ui-lib/components/PasswordInput.tsx new file mode 100644 index 0000000000..fb05eeb92c --- /dev/null +++ b/dac/ui-lib/components/PasswordInput.tsx @@ -0,0 +1,40 @@ +//@ts-nocheck +import { forwardRef, useState } from "react"; +import { Input } from "./Input"; +export const PasswordInput = forwardRef((props, ref) => { + const { ...rest } = props; + const [revealed, setRevealed] = useState(false); + const handleReveal = (e) => { + e.stopPropagation(); + e.preventDefault(); + setRevealed((x) => !x); + }; + return ( + + {revealed ? ( + + ) : ( + + )} + + } + /> + ); +}); diff --git a/dac/ui-lib/components/Radio.tsx b/dac/ui-lib/components/Radio.tsx new file mode 100644 index 0000000000..c746153538 --- /dev/null +++ b/dac/ui-lib/components/Radio.tsx @@ -0,0 +1,18 @@ +import clsx from "clsx"; +import * as React from "react"; + +type RadioProps = { + className?: string; + label?: string; +}; + +export const Radio = (props: RadioProps) => ( + +); diff --git a/dac/ui-lib/components/SectionMessage.tsx b/dac/ui-lib/components/SectionMessage.tsx index dc535bbfb2..02f1e3e802 100644 --- a/dac/ui-lib/components/SectionMessage.tsx +++ b/dac/ui-lib/components/SectionMessage.tsx @@ -28,13 +28,13 @@ type SectionMessageAppearance = type Props = { appearance: SectionMessageAppearance; children?: JSX.Element | JSX.Element[] | string; - title: JSX.Element | string; + title?: JSX.Element | string; } & React.HTMLAttributes; const appearanceIcons: Record = { - information: "", + information: "interface/warning", success: "engine-state/running-engine", - warning: "", + warning: "interface/warning", danger: "engine-state/stopped", discovery: "", }; @@ -59,7 +59,9 @@ export const SectionMessage = forwardRef(
      -
      {title}
      + {title && ( +
      {title}
      + )} {children}
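// Usage sketch (not part of this change): combining the now-optional
// SectionMessage `title` above with the MessageDetails component added earlier
// to progressively disclose a long error. The strings and failure shape are
// assumptions:

import * as React from "react";
import { SectionMessage } from "./SectionMessage";
import { MessageDetails } from "./MessageDetails";

const RefreshFailedMessage = (props: { summary: string; details: string }) => (
  <SectionMessage appearance="danger">
    <MessageDetails message={props.summary} details={props.details} />
  </SectionMessage>
);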
      diff --git a/dac/ui-lib/components/Select.tsx b/dac/ui-lib/components/Select.tsx new file mode 100644 index 0000000000..40032b3399 --- /dev/null +++ b/dac/ui-lib/components/Select.tsx @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//@ts-nocheck +import React from "react"; +import { useSelect as useDownshiftSelect } from "downshift"; +import { useState } from "react"; +import { FloatingContainer } from "./FloatingContainer"; +import { type ReactNode } from "react"; + +type SelectProps = { + onChange: (value: T) => void; + options: T; + renderOptionLabel: (value: T[number]) => ReactNode; + renderButtonLabel: (value: T[number] | null) => ReactNode; + value: T[number] | null; + disabled?: boolean; + onOpened?: () => void; + onClosed?: () => void; + renderLoading?: () => ReactNode; +}; + +export const useSelect = ( + initial: T | null = null +) => { + const [selected, setSelected] = useState(initial); + + return { + value: selected, + onChange: (value: T) => { + setSelected(value); + }, + }; +}; + +export const Select = (props: SelectProps) => { + const { + isOpen, + selectedItem, + getToggleButtonProps, + getMenuProps, + highlightedIndex, + getItemProps, + } = useDownshiftSelect({ + items: props.options || [], + selectedItem: props.value || null, + onSelectedItemChange: ({ selectedItem }) => { + props.onChange?.(selectedItem || null); + }, + onIsOpenChange: ({ isOpen }) => { + if (isOpen) { + props.onOpened?.(); + } else { + props.onClosed?.(); + } + }, + }); + return ( + + {props.renderButtonLabel(selectedItem)} + + + } + > +
        + {!props.options && props.renderLoading()} + {!!props.options && + props.options.map((item, index) => ( +
+ {props.renderOptionLabel(item)}
+ ))}
      +
      + ); +}; diff --git a/dac/ui-lib/components/Tag.tsx b/dac/ui-lib/components/Tag.tsx index 8e26e40856..fcb96c3383 100644 --- a/dac/ui-lib/components/Tag.tsx +++ b/dac/ui-lib/components/Tag.tsx @@ -22,18 +22,13 @@ import clsx from "clsx"; type TagProps = { className?: string; - variant: "success" | "info" | "warning" | "danger"; }; export const Tag = forwardRef>( (props, ref) => { - const { className, children, variant, ...rest } = props; + const { className, children, ...rest } = props; return ( - + {children} ); diff --git a/dac/ui-lib/components/Tooltip/Tooltip.tsx b/dac/ui-lib/components/Tooltip/Tooltip.tsx index d5139eedb9..45a2251415 100644 --- a/dac/ui-lib/components/Tooltip/Tooltip.tsx +++ b/dac/ui-lib/components/Tooltip/Tooltip.tsx @@ -49,6 +49,10 @@ type TooltipProps = { placement?: Placement; portal?: boolean; + /** + * Use when a disabled element does not show the tooltip + */ + shouldWrapChildren?: boolean; /** * Called when the tooltip is closed */ @@ -70,6 +74,7 @@ export const Tooltip = (props: TooltipProps): JSX.Element => { interactive = false, placement = "bottom", portal = false, + shouldWrapChildren = false, onClose, onOpen, } = props; @@ -89,7 +94,7 @@ export const Tooltip = (props: TooltipProps): JSX.Element => { const { x, y, context, floating, reference, strategy, middlewareData } = useFloating({ middleware: [ - offset(16), + offset(8), flip(), shift({ padding: 8 }), arrow({ element: arrowElRef }), @@ -177,9 +182,18 @@ export const Tooltip = (props: TooltipProps): JSX.Element => { return ( <> - {React.cloneElement( - children, - getReferenceProps({ ref, ...children.props }) + {shouldWrapChildren ? ( + + {React.cloneElement(children, { ...children.props })} + + ) : ( + React.cloneElement( + children, + getReferenceProps({ ref, ...children.props }) + ) )} {portal ? createPortal(tooltipContent, document.body!) 
: tooltipContent} diff --git a/dac/ui-lib/components/index.ts b/dac/ui-lib/components/index.ts index 492c9307a9..d57e1066fa 100644 --- a/dac/ui-lib/components/index.ts +++ b/dac/ui-lib/components/index.ts @@ -18,18 +18,26 @@ export * from "./Avatar"; export * from "./AvatarGroup"; export * from "./Button/Button"; export * from "./Card/Card"; +export * from "./Checkbox"; export * from "./CopyButton"; export * from "./CopyContainer"; export * from "./Dialog/DialogContent"; export * from "./Drawer/Drawer"; +export * from "./ErrorDisplay/ErrorDisplay"; export * from "./ExternalLink"; export * from "./icon/configureDremioIcon"; export * from "./IconButton"; +export * from "./Input"; +export * from "./FloatingContainer"; export * from "./mantineTheme"; +export * from "./PasswordInput"; export * from "./ModalContainer/ModalContainer"; export * from "./Page/Page"; +export * from "./Radio"; export * from "./Section/Section"; export * from "./SectionMessage"; +export * from "./MessageDetails"; +export * from "./Select"; export * from "./SegmentedControl"; export * from "./Skeleton"; export * from "./Spinner/Spinner"; diff --git a/dac/ui-lib/components/mantineTheme.ts b/dac/ui-lib/components/mantineTheme.ts index cea727dfd4..ef885d361f 100644 --- a/dac/ui-lib/components/mantineTheme.ts +++ b/dac/ui-lib/components/mantineTheme.ts @@ -17,13 +17,13 @@ export const mantineTheme = { colors: { primary: [ - "var(--dremio--color--primary--100)", - "var(--dremio--color--primary--150)", - "var(--dremio--color--primary--200)", - "var(--dremio--color--primary--500)", - "var(--dremio--color--primary--600)", - "var(--dremio--color--primary--700)", - "var(--dremio--color--primary--800)", + "var(--color--brand--25)", + "var(--color--brand--50)", + "var(--color--brand--100)", + "var(--color--brand--300)", + "var(--color--brand--400)", + "var(--color--brand--600)", + "var(--color--brand--800)", ], }, fontFamily: "var(--dremio--font-family)", diff --git a/dac/ui-lib/example/icon_preview.html b/dac/ui-lib/example/icon_preview.html deleted file mode 100644 index 9d25996db6..0000000000 --- a/dac/ui-lib/example/icon_preview.html +++ /dev/null @@ -1,39 +0,0 @@ - - - Dremio UI Icon Preview - - - -
        -

        - - - diff --git a/dac/ui-lib/icons/dremio/brand/arctic-jobs.svg b/dac/ui-lib/icons/dremio/brand/arctic-jobs.svg new file mode 100644 index 0000000000..4d885fcff4 --- /dev/null +++ b/dac/ui-lib/icons/dremio/brand/arctic-jobs.svg @@ -0,0 +1,4 @@ + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/Date.svg b/dac/ui-lib/icons/dremio/data-types/Date.svg index 8c40e908f9..210e06eabd 100644 --- a/dac/ui-lib/icons/dremio/data-types/Date.svg +++ b/dac/ui-lib/icons/dremio/data-types/Date.svg @@ -1,24 +1,4 @@ - - - Created with Sketch. - - - - - - - - - - - - - - - - - - - - + + + diff --git a/dac/ui-lib/icons/dremio/data-types/Time.svg b/dac/ui-lib/icons/dremio/data-types/Time.svg index 5bc93afd7a..af10cdf7fc 100644 --- a/dac/ui-lib/icons/dremio/data-types/Time.svg +++ b/dac/ui-lib/icons/dremio/data-types/Time.svg @@ -1,18 +1,4 @@ - - - Created with Sketch. - - - - - - - - - - - - - - + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeArrayAndList.svg b/dac/ui-lib/icons/dremio/data-types/TypeArrayAndList.svg new file mode 100644 index 0000000000..be7091337c --- /dev/null +++ b/dac/ui-lib/icons/dremio/data-types/TypeArrayAndList.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeBinary.svg b/dac/ui-lib/icons/dremio/data-types/TypeBinary.svg index a2571da0b0..e5212f56da 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeBinary.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeBinary.svg @@ -1,15 +1,8 @@ - - - Created with Sketch. - - - - - - - - - - - + + + + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeBoolean.svg b/dac/ui-lib/icons/dremio/data-types/TypeBoolean.svg index 4f9fe025bf..f2b8825905 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeBoolean.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeBoolean.svg @@ -1,16 +1,4 @@ - - - Created with Sketch. - - - - - - - - - - - - + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeDateTime.svg b/dac/ui-lib/icons/dremio/data-types/TypeDateTime.svg index 2ec516c4d9..2e22b5d141 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeDateTime.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeDateTime.svg @@ -1,28 +1,6 @@ - - - Created with Sketch. - - - - - - - - - - - - - - - - - - - - - - - - + + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeDecimal.svg b/dac/ui-lib/icons/dremio/data-types/TypeDecimal.svg index 1b9d972fc7..8463e375dd 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeDecimal.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeDecimal.svg @@ -1,22 +1,6 @@ - - - Created with Sketch. - - - - - - - - - - - - - - - - - - + + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeFloat.svg b/dac/ui-lib/icons/dremio/data-types/TypeFloat.svg index 4fc7d48201..f54ec241fa 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeFloat.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeFloat.svg @@ -1,21 +1,5 @@ - - - Created with Sketch. - - - - - - - - - - - - - - - - - + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeFormula.svg b/dac/ui-lib/icons/dremio/data-types/TypeFormula.svg new file mode 100644 index 0000000000..25a28100f2 --- /dev/null +++ b/dac/ui-lib/icons/dremio/data-types/TypeFormula.svg @@ -0,0 +1,4 @@ + + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeGeo.svg b/dac/ui-lib/icons/dremio/data-types/TypeGeo.svg index 05875539c8..6367fc4279 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeGeo.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeGeo.svg @@ -1,22 +1,3 @@ - - - Created with Sketch. 
- - - - - - - - - - - - - - - - - - + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeInteger.svg b/dac/ui-lib/icons/dremio/data-types/TypeInteger.svg index 782aff04c3..02bd73ad3b 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeInteger.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeInteger.svg @@ -1,15 +1,3 @@ - - - Created with Sketch. - - - - - - - - - - - + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeList.svg b/dac/ui-lib/icons/dremio/data-types/TypeList.svg index 3335ee447b..b5a20b68d2 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeList.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeList.svg @@ -1,19 +1,4 @@ - - - Created with Sketch. - - - - - - - - - - - - - - - + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeMap.svg b/dac/ui-lib/icons/dremio/data-types/TypeMap.svg index 88621c9a31..1ee3098267 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeMap.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeMap.svg @@ -1,4 +1,4 @@ - + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeNull.svg b/dac/ui-lib/icons/dremio/data-types/TypeNull.svg new file mode 100644 index 0000000000..d48d7e47cf --- /dev/null +++ b/dac/ui-lib/icons/dremio/data-types/TypeNull.svg @@ -0,0 +1,3 @@ + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeStruct.svg b/dac/ui-lib/icons/dremio/data-types/TypeStruct.svg index efcdc729cc..4c689f80d2 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeStruct.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeStruct.svg @@ -1,4 +1,4 @@ - + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeTable.svg b/dac/ui-lib/icons/dremio/data-types/TypeTable.svg new file mode 100644 index 0000000000..9a6590d661 --- /dev/null +++ b/dac/ui-lib/icons/dremio/data-types/TypeTable.svg @@ -0,0 +1,3 @@ + + + diff --git a/dac/ui-lib/icons/dremio/data-types/TypeText.svg b/dac/ui-lib/icons/dremio/data-types/TypeText.svg index 3ef5cd7e9d..79950e9441 100644 --- a/dac/ui-lib/icons/dremio/data-types/TypeText.svg +++ b/dac/ui-lib/icons/dremio/data-types/TypeText.svg @@ -1,15 +1,6 @@ - - - Created with Sketch. 
- - - - - - - - - - - + + + + + diff --git a/dac/ui-lib/icons/dremio/entities/iceberg-table.svg b/dac/ui-lib/icons/dremio/entities/iceberg-table.svg new file mode 100644 index 0000000000..e321859a11 --- /dev/null +++ b/dac/ui-lib/icons/dremio/entities/iceberg-table.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/entities/iceberg-view.svg b/dac/ui-lib/icons/dremio/entities/iceberg-view.svg new file mode 100644 index 0000000000..c389ab6206 --- /dev/null +++ b/dac/ui-lib/icons/dremio/entities/iceberg-view.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/interface/angle-up-down.svg b/dac/ui-lib/icons/dremio/interface/angle-up-down.svg new file mode 100644 index 0000000000..71347bc18e --- /dev/null +++ b/dac/ui-lib/icons/dremio/interface/angle-up-down.svg @@ -0,0 +1 @@ + diff --git a/dac/ui-lib/icons/dremio/interface/data-optimization.svg b/dac/ui-lib/icons/dremio/interface/data-optimization.svg new file mode 100644 index 0000000000..6945a789ff --- /dev/null +++ b/dac/ui-lib/icons/dremio/interface/data-optimization.svg @@ -0,0 +1,4 @@ + + + + diff --git a/dac/ui-lib/icons/dremio/interface/history.svg b/dac/ui-lib/icons/dremio/interface/history.svg index b1ead3c4eb..145be80493 100644 --- a/dac/ui-lib/icons/dremio/interface/history.svg +++ b/dac/ui-lib/icons/dremio/interface/history.svg @@ -1,4 +1,3 @@ -` diff --git a/dac/ui-lib/icons/dremio/interface/meta.svg b/dac/ui-lib/icons/dremio/interface/meta.svg new file mode 100644 index 0000000000..3554a2d819 --- /dev/null +++ b/dac/ui-lib/icons/dremio/interface/meta.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/dac/ui-lib/icons/dremio/interface/reflections-outlined.svg b/dac/ui-lib/icons/dremio/interface/reflections-outlined.svg new file mode 100644 index 0000000000..a886c37477 --- /dev/null +++ b/dac/ui-lib/icons/dremio/interface/reflections-outlined.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/scripts/CollapseLeft.svg b/dac/ui-lib/icons/dremio/scripts/CollapseLeft.svg index 008ecb3eed..c01bf27d8c 100644 --- a/dac/ui-lib/icons/dremio/scripts/CollapseLeft.svg +++ b/dac/ui-lib/icons/dremio/scripts/CollapseLeft.svg @@ -1,3 +1,3 @@ - + diff --git a/dac/ui-lib/icons/dremio/scripts/CollapseRight.svg b/dac/ui-lib/icons/dremio/scripts/CollapseRight.svg index 6d6716c787..75d4eec149 100644 --- a/dac/ui-lib/icons/dremio/scripts/CollapseRight.svg +++ b/dac/ui-lib/icons/dremio/scripts/CollapseRight.svg @@ -1,3 +1,3 @@ - + diff --git a/dac/ui-lib/icons/dremio/settings/preferences.svg b/dac/ui-lib/icons/dremio/settings/preferences.svg new file mode 100644 index 0000000000..6076b9da20 --- /dev/null +++ b/dac/ui-lib/icons/dremio/settings/preferences.svg @@ -0,0 +1,3 @@ + + + diff --git a/dac/ui-lib/icons/dremio/sources/ADL.svg b/dac/ui-lib/icons/dremio/sources/ADL.svg index d98361fe25..e4d621a37d 100644 --- a/dac/ui-lib/icons/dremio/sources/ADL.svg +++ b/dac/ui-lib/icons/dremio/sources/ADL.svg @@ -1,11 +1,8 @@ - - - - - - - - - - + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/AMAZONELASTIC.svg b/dac/ui-lib/icons/dremio/sources/AMAZONELASTIC.svg index 46bb41c9d4..cfb730b8d2 100644 --- a/dac/ui-lib/icons/dremio/sources/AMAZONELASTIC.svg +++ b/dac/ui-lib/icons/dremio/sources/AMAZONELASTIC.svg @@ -1,21 +1,15 @@ - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/AWSGLUE.svg 
b/dac/ui-lib/icons/dremio/sources/AWSGLUE.svg index 3d30c44ce1..6af14824e3 100644 --- a/dac/ui-lib/icons/dremio/sources/AWSGLUE.svg +++ b/dac/ui-lib/icons/dremio/sources/AWSGLUE.svg @@ -1,19 +1,10 @@ - - - - - - - - - - - - - - - - - + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/AZURE_STORAGE.svg b/dac/ui-lib/icons/dremio/sources/AZURE_STORAGE.svg index 3b8cde9d5b..51b495a104 100644 --- a/dac/ui-lib/icons/dremio/sources/AZURE_STORAGE.svg +++ b/dac/ui-lib/icons/dremio/sources/AZURE_STORAGE.svg @@ -1,10 +1,3 @@ - - - - - - - - - + + diff --git a/dac/ui-lib/icons/dremio/sources/DREMIOTODREMIO.svg b/dac/ui-lib/icons/dremio/sources/DREMIOTODREMIO.svg index 7b27732f0a..d99d7d6f4c 100644 --- a/dac/ui-lib/icons/dremio/sources/DREMIOTODREMIO.svg +++ b/dac/ui-lib/icons/dremio/sources/DREMIOTODREMIO.svg @@ -1,18 +1,16 @@ - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/ELASTIC.svg b/dac/ui-lib/icons/dremio/sources/ELASTIC.svg index d16a539b95..7b140cc093 100644 --- a/dac/ui-lib/icons/dremio/sources/ELASTIC.svg +++ b/dac/ui-lib/icons/dremio/sources/ELASTIC.svg @@ -1,20 +1,16 @@ - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/GCS.svg b/dac/ui-lib/icons/dremio/sources/GCS.svg index cf92f2914b..81ed1b5bb9 100644 --- a/dac/ui-lib/icons/dremio/sources/GCS.svg +++ b/dac/ui-lib/icons/dremio/sources/GCS.svg @@ -1,14 +1,11 @@ - - - - - - - - - - - - - + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/HDFS.svg b/dac/ui-lib/icons/dremio/sources/HDFS.svg index c7de1dcab3..30df8afd99 100644 --- a/dac/ui-lib/icons/dremio/sources/HDFS.svg +++ b/dac/ui-lib/icons/dremio/sources/HDFS.svg @@ -1,68 +1,19 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/HIVE.svg b/dac/ui-lib/icons/dremio/sources/HIVE.svg index 22ffaf0ce6..c2593f5bf8 100644 --- a/dac/ui-lib/icons/dremio/sources/HIVE.svg +++ b/dac/ui-lib/icons/dremio/sources/HIVE.svg @@ -1,61 +1,30 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/HIVE3.svg b/dac/ui-lib/icons/dremio/sources/HIVE3.svg index 33763b42c8..2f2b2e0c86 100644 --- a/dac/ui-lib/icons/dremio/sources/HIVE3.svg +++ b/dac/ui-lib/icons/dremio/sources/HIVE3.svg @@ -1,61 +1,30 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/MONGO.svg b/dac/ui-lib/icons/dremio/sources/MONGO.svg index f1e7408f8c..83712a47e2 100644 --- a/dac/ui-lib/icons/dremio/sources/MONGO.svg +++ b/dac/ui-lib/icons/dremio/sources/MONGO.svg @@ -1,18 +1,5 @@ - - - - - - - - - - - - - - - - - + + + + diff --git a/dac/ui-lib/icons/dremio/sources/MSSQL.svg b/dac/ui-lib/icons/dremio/sources/MSSQL.svg index 950263637f..1a8eb1da53 100644 --- a/dac/ui-lib/icons/dremio/sources/MSSQL.svg +++ b/dac/ui-lib/icons/dremio/sources/MSSQL.svg @@ -1,34 +1,19 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/MYSQL.svg b/dac/ui-lib/icons/dremio/sources/MYSQL.svg index b6556115be..0ad93a05a3 
100644 --- a/dac/ui-lib/icons/dremio/sources/MYSQL.svg +++ b/dac/ui-lib/icons/dremio/sources/MYSQL.svg @@ -1,37 +1,3 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + diff --git a/dac/ui-lib/icons/dremio/sources/NAS.svg b/dac/ui-lib/icons/dremio/sources/NAS.svg index 807543a6e5..290aa8a1d2 100644 --- a/dac/ui-lib/icons/dremio/sources/NAS.svg +++ b/dac/ui-lib/icons/dremio/sources/NAS.svg @@ -1,19 +1,4 @@ - - - - - - - - - - - - - - - - - - + + + diff --git a/dac/ui-lib/icons/dremio/sources/OPENSEARCH.svg b/dac/ui-lib/icons/dremio/sources/OPENSEARCH.svg new file mode 100644 index 0000000000..6a78d1a6c8 --- /dev/null +++ b/dac/ui-lib/icons/dremio/sources/OPENSEARCH.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/ORACLE.svg b/dac/ui-lib/icons/dremio/sources/ORACLE.svg index 0dbfb884c3..47db3d75a4 100644 --- a/dac/ui-lib/icons/dremio/sources/ORACLE.svg +++ b/dac/ui-lib/icons/dremio/sources/ORACLE.svg @@ -1,25 +1,3 @@ - - - - - - - - - - - - - - - - - - - - - - - - + + diff --git a/dac/ui-lib/icons/dremio/sources/POSTGRES.svg b/dac/ui-lib/icons/dremio/sources/POSTGRES.svg index 31517e895a..457f76185d 100644 --- a/dac/ui-lib/icons/dremio/sources/POSTGRES.svg +++ b/dac/ui-lib/icons/dremio/sources/POSTGRES.svg @@ -1,30 +1,16 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/REDSHIFT.svg b/dac/ui-lib/icons/dremio/sources/REDSHIFT.svg index 32bd1e40db..a535d8e131 100644 --- a/dac/ui-lib/icons/dremio/sources/REDSHIFT.svg +++ b/dac/ui-lib/icons/dremio/sources/REDSHIFT.svg @@ -1,23 +1,10 @@ - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/S3.svg b/dac/ui-lib/icons/dremio/sources/S3.svg index 5703986800..edd8a409fb 100644 --- a/dac/ui-lib/icons/dremio/sources/S3.svg +++ b/dac/ui-lib/icons/dremio/sources/S3.svg @@ -1,26 +1,10 @@ - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/SNOWFLAKE.svg b/dac/ui-lib/icons/dremio/sources/SNOWFLAKE.svg index a37d7a3ad7..f1ca271187 100644 --- a/dac/ui-lib/icons/dremio/sources/SNOWFLAKE.svg +++ b/dac/ui-lib/icons/dremio/sources/SNOWFLAKE.svg @@ -1,5 +1,3 @@ - - - - + + diff --git a/dac/ui-lib/icons/dremio/sources/SYNAPSE.svg b/dac/ui-lib/icons/dremio/sources/SYNAPSE.svg index 9e056d2975..25e2e572d1 100644 --- a/dac/ui-lib/icons/dremio/sources/SYNAPSE.svg +++ b/dac/ui-lib/icons/dremio/sources/SYNAPSE.svg @@ -1,196 +1,24 @@ - + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/SampleSource.svg b/dac/ui-lib/icons/dremio/sources/SampleSource.svg index 779442673c..bc37f70fa5 100644 --- a/dac/ui-lib/icons/dremio/sources/SampleSource.svg +++ b/dac/ui-lib/icons/dremio/sources/SampleSource.svg @@ -1,21 +1,16 @@ - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sources/TERADATA.svg b/dac/ui-lib/icons/dremio/sources/TERADATA.svg index f566ca40ac..43344fc852 100644 --- a/dac/ui-lib/icons/dremio/sources/TERADATA.svg +++ b/dac/ui-lib/icons/dremio/sources/TERADATA.svg @@ -1,13 +1,18 @@ - - - - - - - - - - - - + + + + + + + + + + + + + + + + + diff --git a/dac/ui-lib/icons/dremio/sql-editor/panel-hide.svg b/dac/ui-lib/icons/dremio/sql-editor/panel-hide.svg new file mode 100644 index 0000000000..9c8ff96cad --- /dev/null +++ b/dac/ui-lib/icons/dremio/sql-editor/panel-hide.svg @@ -0,0 +1,3 @@ + + + diff --git 
a/dac/ui-lib/icons/dremio/sql-editor/panel-show.svg b/dac/ui-lib/icons/dremio/sql-editor/panel-show.svg new file mode 100644 index 0000000000..b270c6947b --- /dev/null +++ b/dac/ui-lib/icons/dremio/sql-editor/panel-show.svg @@ -0,0 +1,3 @@ + + + diff --git a/dac/ui-lib/icons/dremio/vcs/snapshot.svg b/dac/ui-lib/icons/dremio/vcs/snapshot.svg new file mode 100644 index 0000000000..014b0c1c64 --- /dev/null +++ b/dac/ui-lib/icons/dremio/vcs/snapshot.svg @@ -0,0 +1,4 @@ + + + + diff --git a/dac/ui-lib/icons/dremio/vcs/timestamp.svg b/dac/ui-lib/icons/dremio/vcs/timestamp.svg new file mode 100644 index 0000000000..af95f8cd34 --- /dev/null +++ b/dac/ui-lib/icons/dremio/vcs/timestamp.svg @@ -0,0 +1,3 @@ + + + diff --git a/dac/ui-lib/package.json b/dac/ui-lib/package.json index 823d112897..6fecb64179 100644 --- a/dac/ui-lib/package.json +++ b/dac/ui-lib/package.json @@ -5,6 +5,20 @@ "main": "dist/index.js", "module": "dist/index.modern.js", "source": "src/index.js", + "exports": { + ".": { + "require": "./dist/index.js", + "import": "./dist/index.modern.js" + }, + "./components": { + "import": "./dist-esm/index.js", + "require": "./dist-cjs/index.js" + }, + "./icons/*": "./icons/*", + "./images/*": "./images/*", + "./dist-themes/*": "./dist-themes/*", + "./dist/index.css": "./dist/index.css" + }, "scripts": { "build": "npm-run-all --print-label --parallel dist dist-cjs dist-esm dist-icons dist-themes", "dist": "microbundle-crl --css-modules false --format modern,cjs && tsc --project tsconfig.src.json --emitDeclarationOnly --declaration --declarationMap false --declarationDir dist", @@ -14,15 +28,13 @@ "dist-themes": "node scripts/buildThemes.mjs", "lint:css": "stylelint themes/**/*", "lint:js": "eslint .", - "manifest.json": "ts-node scripts/writeIconManifest.ts", - "test": "npm-run-all --print-label --parallel test:js lint:js lint:css", + "iconmanifest.json": "ts-node scripts/writeIconManifest.ts", + "test": "npm-run-all --print-label --parallel test:js lint:js", "test:js": "cross-env CI=1 node scripts/test.js --env=jsdom", "test:watch": "node scripts/test.js --env=jsdom", "deploy": "gh-pages -d example/build", "predeploy": "cd example && yarn install && yarn run build", - "start": "microbundle-crl watch --no-compress --css-modules false --format modern,cjs", - "storybook": "start-storybook -p 6006", - "build-storybook": "build-storybook" + "start": "microbundle-crl watch --no-compress --css-modules false --format modern,cjs" }, "peerDependencies": { "@mui/material": ">= 5.0.0", @@ -37,14 +49,6 @@ "@emotion/react": "^11.9.3", "@emotion/styled": "^11.9.3", "@mui/material": "^5.9.0", - "@storybook/addon-actions": "^6.5.12", - "@storybook/addon-essentials": "^6.5.12", - "@storybook/addon-interactions": "^6.5.12", - "@storybook/addon-links": "^6.5.12", - "@storybook/builder-webpack4": "^6.5.12", - "@storybook/manager-webpack4": "^6.5.12", - "@storybook/react": "^6.5.12", - "@storybook/testing-library": "0.0.13", "@swc/cli": "^0.1.57", "@swc/core": "^1.2.203", "@types/node": "^18.11.9", @@ -59,6 +63,7 @@ "babel-loader": "^8.2.5", "babel-preset-react-app": "^9.1.2", "cross-env": "^7.0.2", + "css-loader": "^4", "enzyme": "^3.11.0", "eslint": "^6.8.0", "eslint-config-prettier": "^6", @@ -83,8 +88,8 @@ "react-intl": "^2.3.0", "require-from-string": "^2.0.2", "sass": "^1.49.9", - "storybook-addon-sass-postcss": "^0.1.3", - "storybook-dark-mode": "^1.1.2", + "sass-loader": "^10", + "style-loader": "^2", "stylelint": "^14", "stylelint-config-prettier": "^9", "stylelint-config-standard": "^25", @@ -104,10 
+109,15 @@ "images" ], "dependencies": { + "@floating-ui/react-dom": "^1.3.0", "@floating-ui/react-dom-interactions": "^0.9.3", "@types/react": "^18", "clsx": "^1.1.1", + "copy-to-clipboard": "^3.0.8", "dialog-polyfill": "^0.5.6", + "downshift": "^7.2.0", + "invariant": "^2.2.4", + "leantable": "^0.4.12", "lodash": "4.17.21", "react-copy-to-clipboard": "^5.0.3", "react-merge-refs": "1.1.0", @@ -129,7 +139,7 @@ ] } }, - "packageManager": "pnpm@7.2.1", + "packageManager": "pnpm@8.1.0", "jest": { "roots": [ "<rootDir>/src" diff --git a/dac/ui-lib/pnpm-lock.yaml b/dac/ui-lib/pnpm-lock.yaml index 06bf5e9ce5..0fa1ef09a2 100644 --- a/dac/ui-lib/pnpm-lock.yaml +++ b/dac/ui-lib/pnpm-lock.yaml @@ -1,156 +1,219 @@ -lockfileVersion: 5.4 - -specifiers: - '@babel/core': 7.9.0 - '@emotion/react': ^11.9.3 - '@emotion/styled': ^11.9.3 - '@floating-ui/react-dom-interactions': ^0.9.3 - '@mui/material': ^5.9.0 - '@storybook/addon-actions': ^6.5.12 - '@storybook/addon-essentials': ^6.5.12 - '@storybook/addon-interactions': ^6.5.12 - '@storybook/addon-links': ^6.5.12 - '@storybook/builder-webpack4': ^6.5.12 - '@storybook/manager-webpack4': ^6.5.12 - '@storybook/react': ^6.5.12 - '@storybook/testing-library': 0.0.13 - '@swc/cli': ^0.1.57 - '@swc/core': ^1.2.203 - '@types/node': ^18.11.9 - '@types/react': ^18 - '@types/react-dom': ^18 - '@types/react-intl': ^3.0.0 - '@types/react-syntax-highlighter': ^15.5.2 - '@types/react-transition-group': ^4.4.5 - '@typescript-eslint/eslint-plugin': ^4 - '@typescript-eslint/parser': ^4 - '@wojtekmaj/enzyme-adapter-react-17': ^0.6.7 - babel-jest: ^24.9.0 - babel-loader: ^8.2.5 - babel-preset-react-app: ^9.1.2 - clsx: ^1.1.1 - cross-env: ^7.0.2 - dialog-polyfill: ^0.5.6 - enzyme: ^3.11.0 - eslint: ^6.8.0 - eslint-config-prettier: ^6 - eslint-plugin-react: ^7 - eslint-plugin-react-hooks: ^4 - formik: ^2.2.6 - fs-extra: ^10.1.0 - gh-pages: ^2.2.0 - glob: ^7.2.0 - jest: 24.9.0 - jest-environment-jsdom-fourteen: 1.0.1 - jest-resolve: 24.9.0 - jest-watch-typeahead: 0.4.2 - lodash: 4.17.21 - microbundle-crl: ^0.13.10 - npm-run-all: ^4.1.5 - postcss: ^8 - prettier: ~2.5 - prop-types: 15.5.10 - react: ^18.2.0 - react-app-polyfill: ^3.0.0 - react-copy-to-clipboard: ^5.0.3 - react-dom: ^18.2.0 - react-intl: ^2.3.0 - react-merge-refs: 1.1.0 - react-syntax-highlighter: ^15.5.0 - react-transition-group: ^4.4.5 - require-from-string: ^2.0.2 - sass: ^1.49.9 - smart-icon: ^1.4.3 - storybook-addon-sass-postcss: ^0.1.3 - storybook-dark-mode: ^1.1.2 - stylelint: ^14 - stylelint-config-prettier: ^9 - stylelint-config-standard: ^25 - stylelint-config-standard-scss: ^3 - stylelint-use-logical-spec: ^3 - svg-sprite: ^2.0.0 - ts-node: ^10.9.1 - typescript: 4.7.4 - uuid: ^8.3.2 +lockfileVersion: '6.0' dependencies: - '@floating-ui/react-dom-interactions': 0.9.3_2zx2umvpluuhvlq44va5bta2da - '@types/react': 18.0.25 - clsx: 1.1.1 - dialog-polyfill: 0.5.6 - lodash: 4.17.21 - react-copy-to-clipboard: 5.0.3_react@18.2.0 - react-merge-refs: 1.1.0 - react-syntax-highlighter: 15.5.0_react@18.2.0 - react-transition-group: 4.4.5_biqbaboplfbrettd7655fr4n2y - smart-icon: 1.4.3 - uuid: 8.3.2 + '@floating-ui/react-dom': + specifier: ^1.3.0 + version: 1.3.0(react-dom@18.2.0)(react@18.2.0) + '@floating-ui/react-dom-interactions': + specifier: ^0.9.3 + version: 0.9.3(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0) + '@types/react': + specifier: ^18 + version: 18.0.25 + clsx: + specifier: ^1.1.1 + version: 1.1.1 + copy-to-clipboard: + specifier: ^3.0.8 + version: 3.3.1 + dialog-polyfill: + specifier: ^0.5.6 + version: 0.5.6
+ downshift: + specifier: ^7.2.0 + version: 7.2.0(react@18.2.0) + invariant: + specifier: ^2.2.4 + version: 2.2.4 + leantable: + specifier: ^0.4.12 + version: 0.4.12(react@18.2.0) + lodash: + specifier: 4.17.21 + version: 4.17.21 + react-copy-to-clipboard: + specifier: ^5.0.3 + version: 5.0.3(react@18.2.0) + react-merge-refs: + specifier: 1.1.0 + version: 1.1.0 + react-syntax-highlighter: + specifier: ^15.5.0 + version: 15.5.0(react@18.2.0) + react-transition-group: + specifier: ^4.4.5 + version: 4.4.5(react-dom@18.2.0)(react@18.2.0) + smart-icon: + specifier: ^1.4.3 + version: 1.4.3 + uuid: + specifier: ^8.3.2 + version: 8.3.2 devDependencies: - '@babel/core': 7.9.0 - '@emotion/react': 11.9.3_ydn2tsgf5ytsgv3ci7p3vfge5u - '@emotion/styled': 11.9.3_wmwfkcpmce6bptfofq7aisovym - '@mui/material': 5.9.0_52mfwclubjytm6epji3ivfuwoq - '@storybook/addon-actions': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/addon-essentials': 6.5.12_duglcd73h73mao2wnbgtxva32e - '@storybook/addon-interactions': 6.5.12_vzusiue5kdnywyapnxbtdipxcu - '@storybook/addon-links': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/builder-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/manager-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/react': 6.5.12_bwa4o22mlyfulxfxxeljxljceq - '@storybook/testing-library': 0.0.13_biqbaboplfbrettd7655fr4n2y - '@swc/cli': 0.1.57_@swc+core@1.2.203 - '@swc/core': 1.2.203 - '@types/node': 18.11.9 - '@types/react-dom': 18.0.6 - '@types/react-intl': 3.0.0_5z6wstateidnxll3plz2xpyagu - '@types/react-syntax-highlighter': 15.5.2 - '@types/react-transition-group': 4.4.5 - '@typescript-eslint/eslint-plugin': 4.33.0_jzkccvb4lenajp6msx3prptnxi - '@typescript-eslint/parser': 4.33.0_nbcwli3tdjzkqybobvobagytby - '@wojtekmaj/enzyme-adapter-react-17': 0.6.7_todk22eekuihjg65rlnudp4qdi - babel-jest: 24.9.0_@babel+core@7.9.0 - babel-loader: 8.2.5_@babel+core@7.9.0 - babel-preset-react-app: 9.1.2 - cross-env: 7.0.2 - enzyme: 3.11.0 - eslint: 6.8.0 - eslint-config-prettier: 6.15.0_eslint@6.8.0 - eslint-plugin-react: 7.29.4_eslint@6.8.0 - eslint-plugin-react-hooks: 4.3.0_eslint@6.8.0 - formik: 2.2.6_react@18.2.0 - fs-extra: 10.1.0 - gh-pages: 2.2.0 - glob: 7.2.0 - jest: 24.9.0 - jest-environment-jsdom-fourteen: 1.0.1 - jest-resolve: 24.9.0 - jest-watch-typeahead: 0.4.2 - microbundle-crl: 0.13.11 - npm-run-all: 4.1.5 - postcss: 8.4.8 - prettier: 2.5.1 - prop-types: 15.5.10 - react: 18.2.0 - react-app-polyfill: 3.0.0 - react-dom: 18.2.0_react@18.2.0 - react-intl: 2.9.0_5z6wstateidnxll3plz2xpyagu - require-from-string: 2.0.2 - sass: 1.49.9 - storybook-addon-sass-postcss: 0.1.3 - storybook-dark-mode: 1.1.2_biqbaboplfbrettd7655fr4n2y - stylelint: 14.5.3 - stylelint-config-prettier: 9.0.3_stylelint@14.5.3 - stylelint-config-standard: 25.0.0_stylelint@14.5.3 - stylelint-config-standard-scss: 3.0.0_2vmivtitabo2vy5qmz3lkxyvvi - stylelint-use-logical-spec: 3.2.2_stylelint@14.5.3 - svg-sprite: 2.0.0 - ts-node: 10.9.1_f2hgdakwhr3ybdo3xzav4gmxom - typescript: 4.7.4 + '@babel/core': + specifier: 7.9.0 + version: 7.9.0 + '@emotion/react': + specifier: ^11.9.3 + version: 11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0) + '@emotion/styled': + specifier: ^11.9.3 + version: 11.9.3(@babel/core@7.9.0)(@emotion/react@11.9.3)(@types/react@18.0.25)(react@18.2.0) + '@mui/material': + specifier: ^5.9.0 + version: 5.9.0(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0) + '@swc/cli': + specifier: ^0.1.57 + version: 0.1.57(@swc/core@1.2.203) + 
'@swc/core': + specifier: ^1.2.203 + version: 1.2.203 + '@types/node': + specifier: ^18.11.9 + version: 18.11.9 + '@types/react-dom': + specifier: ^18 + version: 18.0.6 + '@types/react-intl': + specifier: ^3.0.0 + version: 3.0.0(prop-types@15.5.10)(react@18.2.0) + '@types/react-syntax-highlighter': + specifier: ^15.5.2 + version: 15.5.2 + '@types/react-transition-group': + specifier: ^4.4.5 + version: 4.4.5 + '@typescript-eslint/eslint-plugin': + specifier: ^4 + version: 4.33.0(@typescript-eslint/parser@4.33.0)(eslint@6.8.0)(typescript@4.7.4) + '@typescript-eslint/parser': + specifier: ^4 + version: 4.33.0(eslint@6.8.0)(typescript@4.7.4) + '@wojtekmaj/enzyme-adapter-react-17': + specifier: ^0.6.7 + version: 0.6.7(enzyme@3.11.0)(react-dom@18.2.0)(react@18.2.0) + babel-jest: + specifier: ^24.9.0 + version: 24.9.0(@babel/core@7.9.0) + babel-loader: + specifier: ^8.2.5 + version: 8.2.5(@babel/core@7.9.0) + babel-preset-react-app: + specifier: ^9.1.2 + version: 9.1.2 + cross-env: + specifier: ^7.0.2 + version: 7.0.2 + css-loader: + specifier: ^4 + version: 4.3.0 + enzyme: + specifier: ^3.11.0 + version: 3.11.0 + eslint: + specifier: ^6.8.0 + version: 6.8.0 + eslint-config-prettier: + specifier: ^6 + version: 6.15.0(eslint@6.8.0) + eslint-plugin-react: + specifier: ^7 + version: 7.29.4(eslint@6.8.0) + eslint-plugin-react-hooks: + specifier: ^4 + version: 4.3.0(eslint@6.8.0) + formik: + specifier: ^2.2.6 + version: 2.2.6(react@18.2.0) + fs-extra: + specifier: ^10.1.0 + version: 10.1.0 + gh-pages: + specifier: ^2.2.0 + version: 2.2.0 + glob: + specifier: ^7.2.0 + version: 7.2.0 + jest: + specifier: 24.9.0 + version: 24.9.0 + jest-environment-jsdom-fourteen: + specifier: 1.0.1 + version: 1.0.1 + jest-resolve: + specifier: 24.9.0 + version: 24.9.0 + jest-watch-typeahead: + specifier: 0.4.2 + version: 0.4.2 + microbundle-crl: + specifier: ^0.13.10 + version: 0.13.11 + npm-run-all: + specifier: ^4.1.5 + version: 4.1.5 + postcss: + specifier: ^8 + version: 8.4.8 + prettier: + specifier: ~2.5 + version: 2.5.1 + prop-types: + specifier: 15.5.10 + version: 15.5.10 + react: + specifier: ^18.2.0 + version: 18.2.0 + react-app-polyfill: + specifier: ^3.0.0 + version: 3.0.0 + react-dom: + specifier: ^18.2.0 + version: 18.2.0(react@18.2.0) + react-intl: + specifier: ^2.3.0 + version: 2.9.0(prop-types@15.5.10)(react@18.2.0) + require-from-string: + specifier: ^2.0.2 + version: 2.0.2 + sass: + specifier: ^1.49.9 + version: 1.49.9 + sass-loader: + specifier: ^10 + version: 10.3.1(sass@1.49.9) + style-loader: + specifier: ^2 + version: 2.0.0 + stylelint: + specifier: ^14 + version: 14.5.3 + stylelint-config-prettier: + specifier: ^9 + version: 9.0.3(stylelint@14.5.3) + stylelint-config-standard: + specifier: ^25 + version: 25.0.0(stylelint@14.5.3) + stylelint-config-standard-scss: + specifier: ^3 + version: 3.0.0(postcss@8.4.8)(stylelint@14.5.3) + stylelint-use-logical-spec: + specifier: ^3 + version: 3.2.2(stylelint@14.5.3) + svg-sprite: + specifier: ^2.0.0 + version: 2.0.0 + ts-node: + specifier: ^10.9.1 + version: 10.9.1(@swc/core@1.2.203)(@types/node@18.11.9)(typescript@4.7.4) + typescript: + specifier: 4.7.4 + version: 4.7.4 packages: - /@ampproject/remapping/2.2.0: + /@ampproject/remapping@2.2.0: resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} engines: {node: '>=6.0.0'} dependencies: @@ -158,36 +221,36 @@ packages: '@jridgewell/trace-mapping': 0.3.15 dev: true - /@babel/code-frame/7.10.4: + /@babel/code-frame@7.10.4: resolution: 
{integrity: sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==} dependencies: '@babel/highlight': 7.10.4 dev: true - /@babel/code-frame/7.16.7: + /@babel/code-frame@7.16.7: resolution: {integrity: sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==} engines: {node: '>=6.9.0'} dependencies: '@babel/highlight': 7.17.12 dev: true - /@babel/code-frame/7.18.6: + /@babel/code-frame@7.18.6: resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/highlight': 7.18.6 dev: true - /@babel/compat-data/7.12.7: + /@babel/compat-data@7.12.7: resolution: {integrity: sha512-YaxPMGs/XIWtYqrdEOZOCPsVWfEoriXopnsz3/i7apYPXQ3698UFhS6dVT1KN5qOsWmVgw/FOrmQgpRaZayGsw==} dev: true - /@babel/compat-data/7.19.1: + /@babel/compat-data@7.19.1: resolution: {integrity: sha512-72a9ghR0gnESIa7jBN53U32FOVCEoztyIlKaNoU05zRhEecduGK9L9c3ww7Mp06JiR+0ls0GBPFJQwwtjn9ksg==} engines: {node: '>=6.9.0'} dev: true - /@babel/core/7.12.9: + /@babel/core@7.12.9: resolution: {integrity: sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==} engines: {node: '>=6.9.0'} dependencies: @@ -202,7 +265,7 @@ packages: convert-source-map: 1.7.0 debug: 4.3.3 gensync: 1.0.0-beta.2 - json5: 2.1.3 + json5: 2.2.1 lodash: 4.17.21 resolve: 1.19.0 semver: 5.7.1 @@ -211,14 +274,14 @@ packages: - supports-color dev: true - /@babel/core/7.19.1: + /@babel/core@7.19.1: resolution: {integrity: sha512-1H8VgqXme4UXCRv7/Wa1bq7RVymKOzC7znjyFM8KiEzwFqcKUKYNoQef4GhdklgNvoBXyW4gYhuBNCM5o1zImw==} engines: {node: '>=6.9.0'} dependencies: '@ampproject/remapping': 2.2.0 '@babel/code-frame': 7.18.6 '@babel/generator': 7.19.0 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 + '@babel/helper-compilation-targets': 7.19.1(@babel/core@7.19.1) '@babel/helper-module-transforms': 7.19.0 '@babel/helpers': 7.19.0 '@babel/parser': 7.19.1 @@ -234,7 +297,7 @@ packages: - supports-color dev: true - /@babel/core/7.9.0: + /@babel/core@7.9.0: resolution: {integrity: sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==} engines: {node: '>=6.9.0'} dependencies: @@ -258,7 +321,7 @@ packages: - supports-color dev: true - /@babel/generator/7.12.5: + /@babel/generator@7.12.5: resolution: {integrity: sha512-m16TQQJ8hPt7E+OS/XVQg/7U184MLXtvuGbCdA7na61vha+ImkyyNM/9DDA0unYCVZn3ZOhng+qz48/KBOT96A==} dependencies: '@babel/types': 7.12.7 @@ -266,7 +329,7 @@ packages: source-map: 0.5.7 dev: true - /@babel/generator/7.19.0: + /@babel/generator@7.19.0: resolution: {integrity: sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg==} engines: {node: '>=6.9.0'} dependencies: @@ -275,35 +338,27 @@ packages: jsesc: 2.5.2 dev: true - /@babel/helper-annotate-as-pure/7.10.4: + /@babel/helper-annotate-as-pure@7.10.4: resolution: {integrity: sha512-XQlqKQP4vXFB7BN8fEEerrmYvHp3fK/rBkRFz9jaJbzK0B1DSfej9Kc7ZzE8Z/OnId1jpJdNAZ3BFQjWG68rcA==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-annotate-as-pure/7.18.6: + /@babel/helper-annotate-as-pure@7.18.6: resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-builder-binary-assignment-operator-visitor/7.10.4: + 
/@babel/helper-builder-binary-assignment-operator-visitor@7.10.4: resolution: {integrity: sha512-L0zGlFrGWZK4PbT8AszSfLTM5sDU1+Az/En9VrdT8/LmEiJt4zXt+Jve9DCAnQcbqDhCI+29y/L93mrDzddCcg==} dependencies: '@babel/helper-explode-assignable-expression': 7.12.1 '@babel/types': 7.19.0 dev: true - /@babel/helper-builder-binary-assignment-operator-visitor/7.18.9: - resolution: {integrity: sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-explode-assignable-expression': 7.18.6 - '@babel/types': 7.19.0 - dev: true - - /@babel/helper-builder-react-jsx-experimental/7.12.4: + /@babel/helper-builder-react-jsx-experimental@7.12.4: resolution: {integrity: sha512-AjEa0jrQqNk7eDQOo0pTfUOwQBMF+xVqrausQwT9/rTKy0g04ggFNaJpaE09IQMn9yExluigWMJcj0WC7bq+Og==} dependencies: '@babel/helper-annotate-as-pure': 7.10.4 @@ -311,14 +366,14 @@ packages: '@babel/types': 7.19.0 dev: true - /@babel/helper-builder-react-jsx/7.10.4: + /@babel/helper-builder-react-jsx@7.10.4: resolution: {integrity: sha512-5nPcIZ7+KKDxT1427oBivl9V9YTal7qk0diccnh7RrcgrT/pGFOjgGw1dgryyx1GvHEpXVfoDF6Ak3rTiWh8Rg==} dependencies: '@babel/helper-annotate-as-pure': 7.10.4 '@babel/types': 7.19.0 dev: true - /@babel/helper-compilation-targets/7.12.5_@babel+core@7.12.9: + /@babel/helper-compilation-targets@7.12.5(@babel/core@7.12.9): resolution: {integrity: sha512-+qH6NrscMolUlzOYngSBMIOQpKUGPPsc61Bu5W10mg84LxZ7cmvnBHzARKbDoFxVvqqAbj6Tg6N7bSrWSPXMyw==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -330,7 +385,7 @@ packages: semver: 5.7.1 dev: true - /@babel/helper-compilation-targets/7.12.5_@babel+core@7.9.0: + /@babel/helper-compilation-targets@7.12.5(@babel/core@7.9.0): resolution: {integrity: sha512-+qH6NrscMolUlzOYngSBMIOQpKUGPPsc61Bu5W10mg84LxZ7cmvnBHzARKbDoFxVvqqAbj6Tg6N7bSrWSPXMyw==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -342,7 +397,7 @@ packages: semver: 5.7.1 dev: true - /@babel/helper-compilation-targets/7.19.1_@babel+core@7.19.1: + /@babel/helper-compilation-targets@7.19.1(@babel/core@7.19.1): resolution: {integrity: sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg==} engines: {node: '>=6.9.0'} peerDependencies: @@ -355,20 +410,7 @@ packages: semver: 6.3.0 dev: true - /@babel/helper-compilation-targets/7.19.1_@babel+core@7.9.0: - resolution: {integrity: sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/compat-data': 7.19.1 - '@babel/core': 7.9.0 - '@babel/helper-validator-option': 7.18.6 - browserslist: 4.21.4 - semver: 6.3.0 - dev: true - - /@babel/helper-create-class-features-plugin/7.12.1_@babel+core@7.12.9: + /@babel/helper-create-class-features-plugin@7.12.1(@babel/core@7.12.9): resolution: {integrity: sha512-hkL++rWeta/OVOBTRJc9a5Azh5mt5WgZUGAKMD8JM141YsE08K//bp1unBBieO6rUKkIPyUE0USQ30jAy3Sk1w==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -383,7 +425,7 @@ packages: - supports-color dev: true - /@babel/helper-create-class-features-plugin/7.12.1_@babel+core@7.9.0: + /@babel/helper-create-class-features-plugin@7.12.1(@babel/core@7.9.0): resolution: {integrity: sha512-hkL++rWeta/OVOBTRJc9a5Azh5mt5WgZUGAKMD8JM141YsE08K//bp1unBBieO6rUKkIPyUE0USQ30jAy3Sk1w==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -398,7 +440,7 @@ packages: - supports-color dev: true - /@babel/helper-create-class-features-plugin/7.18.0_@babel+core@7.12.9: 
+ /@babel/helper-create-class-features-plugin@7.18.0(@babel/core@7.12.9): resolution: {integrity: sha512-Kh8zTGR9de3J63e5nS0rQUdRs/kbtwoeQQ0sriS0lItjC96u8XXZN6lKpuyWd2coKSU13py/y+LTmThLuVX0Pg==} engines: {node: '>=6.9.0'} peerDependencies: @@ -416,25 +458,7 @@ packages: - supports-color dev: true - /@babel/helper-create-class-features-plugin/7.18.0_@babel+core@7.19.1: - resolution: {integrity: sha512-Kh8zTGR9de3J63e5nS0rQUdRs/kbtwoeQQ0sriS0lItjC96u8XXZN6lKpuyWd2coKSU13py/y+LTmThLuVX0Pg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-member-expression-to-functions': 7.17.7 - '@babel/helper-optimise-call-expression': 7.16.7 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-create-class-features-plugin/7.18.0_@babel+core@7.9.0: + /@babel/helper-create-class-features-plugin@7.18.0(@babel/core@7.9.0): resolution: {integrity: sha512-Kh8zTGR9de3J63e5nS0rQUdRs/kbtwoeQQ0sriS0lItjC96u8XXZN6lKpuyWd2coKSU13py/y+LTmThLuVX0Pg==} engines: {node: '>=6.9.0'} peerDependencies: @@ -452,7 +476,7 @@ packages: - supports-color dev: true - /@babel/helper-create-class-features-plugin/7.19.0_@babel+core@7.12.9: + /@babel/helper-create-class-features-plugin@7.19.0(@babel/core@7.12.9): resolution: {integrity: sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw==} engines: {node: '>=6.9.0'} peerDependencies: @@ -470,43 +494,7 @@ packages: - supports-color dev: true - /@babel/helper-create-class-features-plugin/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-member-expression-to-functions': 7.18.9 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-create-class-features-plugin/7.19.0_@babel+core@7.9.0: - resolution: {integrity: sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-member-expression-to-functions': 7.18.9 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-create-regexp-features-plugin/7.12.7_@babel+core@7.12.9: + /@babel/helper-create-regexp-features-plugin@7.12.7(@babel/core@7.12.9): resolution: {integrity: sha512-idnutvQPdpbduutvi3JVfEgcVIHooQnhvhx0Nk9isOINOIGYkZea1Pk2JlJRiUnMefrlvr0vkByATBY/mB4vjQ==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -516,7 +504,7 @@ packages: regexpu-core: 4.7.1 dev: true - 
/@babel/helper-create-regexp-features-plugin/7.12.7_@babel+core@7.9.0: + /@babel/helper-create-regexp-features-plugin@7.12.7(@babel/core@7.9.0): resolution: {integrity: sha512-idnutvQPdpbduutvi3JVfEgcVIHooQnhvhx0Nk9isOINOIGYkZea1Pk2JlJRiUnMefrlvr0vkByATBY/mB4vjQ==} peerDependencies: '@babel/core': ^7.0.0 || 7 @@ -526,29 +514,7 @@ packages: regexpu-core: 4.7.1 dev: true - /@babel/helper-create-regexp-features-plugin/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - regexpu-core: 5.2.1 - dev: true - - /@babel/helper-create-regexp-features-plugin/7.19.0_@babel+core@7.9.0: - resolution: {integrity: sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - regexpu-core: 5.2.1 - dev: true - - /@babel/helper-define-map/7.10.5: + /@babel/helper-define-map@7.10.5: resolution: {integrity: sha512-fMw4kgFB720aQFXSVaXr79pjjcW5puTCM16+rECJ/plGS+zByelE8l9nCpV1GibxTnFVmUuYG9U8wYfQHdzOEQ==} dependencies: '@babel/helper-function-name': 7.19.0 @@ -556,80 +522,23 @@ packages: lodash: 4.17.21 dev: true - /@babel/helper-define-polyfill-provider/0.1.5_@babel+core@7.19.1: - resolution: {integrity: sha512-nXuzCSwlJ/WKr8qxzW816gwyT6VZgiJG17zR40fou70yfAcqjoNyTLl/DQ+FExw5Hx5KNqshmN8Ldl/r2N7cTg==} - peerDependencies: - '@babel/core': ^7.4.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/traverse': 7.19.1 - debug: 4.3.3 - lodash.debounce: 4.0.8 - resolve: 1.19.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-define-polyfill-provider/0.3.3_@babel+core@7.19.1: - resolution: {integrity: sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==} - peerDependencies: - '@babel/core': ^7.4.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - debug: 4.3.3 - lodash.debounce: 4.0.8 - resolve: 1.19.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-define-polyfill-provider/0.3.3_@babel+core@7.9.0: - resolution: {integrity: sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==} - peerDependencies: - '@babel/core': ^7.4.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - debug: 4.3.3 - lodash.debounce: 4.0.8 - resolve: 1.19.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-environment-visitor/7.18.2: + /@babel/helper-environment-visitor@7.18.2: resolution: {integrity: sha512-14GQKWkX9oJzPiQQ7/J36FTXcD4kSp8egKjO9nINlSKiHITRA9q/R74qu8S9xlc/b/yjsJItQUeeh3xnGN0voQ==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-environment-visitor/7.18.9: + /@babel/helper-environment-visitor@7.18.9: resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} 
engines: {node: '>=6.9.0'} dev: true - /@babel/helper-explode-assignable-expression/7.12.1: + /@babel/helper-explode-assignable-expression@7.12.1: resolution: {integrity: sha512-dmUwH8XmlrUpVqgtZ737tK88v07l840z9j3OEhCLwKTkjlvKpfqXVIZ0wpK3aeOxspwGrf/5AP5qLx4rO3w5rA==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-explode-assignable-expression/7.18.6: - resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.19.0 - dev: true - - /@babel/helper-function-name/7.10.4: + /@babel/helper-function-name@7.10.4: resolution: {integrity: sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ==} dependencies: '@babel/helper-get-function-arity': 7.10.4 @@ -637,7 +546,7 @@ packages: '@babel/types': 7.18.7 dev: true - /@babel/helper-function-name/7.17.9: + /@babel/helper-function-name@7.17.9: resolution: {integrity: sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg==} engines: {node: '>=6.9.0'} dependencies: @@ -645,7 +554,7 @@ packages: '@babel/types': 7.19.0 dev: true - /@babel/helper-function-name/7.19.0: + /@babel/helper-function-name@7.19.0: resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} engines: {node: '>=6.9.0'} dependencies: @@ -653,60 +562,60 @@ packages: '@babel/types': 7.19.0 dev: true - /@babel/helper-get-function-arity/7.10.4: + /@babel/helper-get-function-arity@7.10.4: resolution: {integrity: sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-hoist-variables/7.16.7: + /@babel/helper-hoist-variables@7.16.7: resolution: {integrity: sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-hoist-variables/7.18.6: + /@babel/helper-hoist-variables@7.18.6: resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-member-expression-to-functions/7.12.7: + /@babel/helper-member-expression-to-functions@7.12.7: resolution: {integrity: sha512-DCsuPyeWxeHgh1Dus7APn7iza42i/qXqiFPWyBDdOFtvS581JQePsc1F/nD+fHrcswhLlRc2UpYS1NwERxZhHw==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-member-expression-to-functions/7.17.7: + /@babel/helper-member-expression-to-functions@7.17.7: resolution: {integrity: sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-member-expression-to-functions/7.18.9: + /@babel/helper-member-expression-to-functions@7.18.9: resolution: {integrity: sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-module-imports/7.12.5: + /@babel/helper-module-imports@7.12.5: resolution: {integrity: sha512-SR713Ogqg6++uexFRORf/+nPXMmWIn80TALu0uaFb+iQIUoR7bOC7zBWyzBs5b3tBBJXuyD0cRu1F15GyzjOWA==} dependencies: '@babel/types': 7.18.7 dev: true - /@babel/helper-module-imports/7.18.6: + /@babel/helper-module-imports@7.18.6: resolution: 
{integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.18.7 dev: true - /@babel/helper-module-transforms/7.12.1: + /@babel/helper-module-transforms@7.12.1: resolution: {integrity: sha512-QQzehgFAZ2bbISiCpmVGfiGux8YVFXQ0abBic2Envhej22DVXV9nCFaS5hIQbkyo1AdGb+gNME2TSh3hYJVV/w==} dependencies: '@babel/helper-module-imports': 7.12.5 @@ -722,7 +631,7 @@ packages: - supports-color dev: true - /@babel/helper-module-transforms/7.19.0: + /@babel/helper-module-transforms@7.19.0: resolution: {integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==} engines: {node: '>=6.9.0'} dependencies: @@ -738,41 +647,41 @@ packages: - supports-color dev: true - /@babel/helper-optimise-call-expression/7.12.7: + /@babel/helper-optimise-call-expression@7.12.7: resolution: {integrity: sha512-I5xc9oSJ2h59OwyUqjv95HRyzxj53DAubUERgQMrpcCEYQyToeHA+NEcUEsVWB4j53RDeskeBJ0SgRAYHDBckw==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-optimise-call-expression/7.16.7: + /@babel/helper-optimise-call-expression@7.16.7: resolution: {integrity: sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-optimise-call-expression/7.18.6: + /@babel/helper-optimise-call-expression@7.18.6: resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-plugin-utils/7.10.4: + /@babel/helper-plugin-utils@7.10.4: resolution: {integrity: sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==} dev: true - /@babel/helper-plugin-utils/7.18.6: + /@babel/helper-plugin-utils@7.18.6: resolution: {integrity: sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-plugin-utils/7.19.0: + /@babel/helper-plugin-utils@7.19.0: resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-remap-async-to-generator/7.12.1: + /@babel/helper-remap-async-to-generator@7.12.1: resolution: {integrity: sha512-9d0KQCRM8clMPcDwo8SevNs+/9a8yWVVmaE80FGJcEP8N1qToREmWEGnBn8BUlJhYRFz6fqxeRL1sl5Ogsed7A==} dependencies: '@babel/helper-annotate-as-pure': 7.18.6 @@ -782,37 +691,7 @@ packages: - supports-color dev: true - /@babel/helper-remap-async-to-generator/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-wrap-function': 7.19.0 - '@babel/types': 7.19.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-remap-async-to-generator/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - 
'@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-wrap-function': 7.19.0 - '@babel/types': 7.19.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-replace-supers/7.12.5: + /@babel/helper-replace-supers@7.12.5: resolution: {integrity: sha512-5YILoed0ZyIpF4gKcpZitEnXEJ9UoDRki1Ey6xz46rxOzfNMAhVIJMoune1hmPVxh40LRv1+oafz7UsWX+vyWA==} dependencies: '@babel/helper-member-expression-to-functions': 7.12.7 @@ -823,7 +702,7 @@ packages: - supports-color dev: true - /@babel/helper-replace-supers/7.18.2: + /@babel/helper-replace-supers@7.18.2: resolution: {integrity: sha512-XzAIyxx+vFnrOxiQrToSUOzUOn0e1J2Li40ntddek1Y69AXUTXoDJ40/D5RdjFu7s7qHiaeoTiempZcbuVXh2Q==} engines: {node: '>=6.9.0'} dependencies: @@ -836,7 +715,7 @@ packages: - supports-color dev: true - /@babel/helper-replace-supers/7.19.1: + /@babel/helper-replace-supers@7.19.1: resolution: {integrity: sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw==} engines: {node: '>=6.9.0'} dependencies: @@ -849,69 +728,62 @@ packages: - supports-color dev: true - /@babel/helper-simple-access/7.12.1: + /@babel/helper-simple-access@7.12.1: resolution: {integrity: sha512-OxBp7pMrjVewSSC8fXDFrHrBcJATOOFssZwv16F3/6Xtc138GHybBfPbm9kfiqQHKhYQrlamWILwlDCeyMFEaA==} dependencies: '@babel/types': 7.18.7 dev: true - /@babel/helper-simple-access/7.18.6: + /@babel/helper-simple-access@7.18.6: resolution: {integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-skip-transparent-expression-wrappers/7.12.1: + /@babel/helper-skip-transparent-expression-wrappers@7.12.1: resolution: {integrity: sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA==} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-skip-transparent-expression-wrappers/7.18.9: - resolution: {integrity: sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.19.0 - dev: true - - /@babel/helper-split-export-declaration/7.11.0: + /@babel/helper-split-export-declaration@7.11.0: resolution: {integrity: sha512-74Vejvp6mHkGE+m+k5vHY93FX2cAtrw1zXrZXRlG4l410Nm9PxfEiVTn1PjDPV5SnmieiueY4AFg2xqhNFuuZg==} dependencies: '@babel/types': 7.18.7 dev: true - /@babel/helper-split-export-declaration/7.18.6: + /@babel/helper-split-export-declaration@7.18.6: resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} engines: {node: '>=6.9.0'} dependencies: '@babel/types': 7.19.0 dev: true - /@babel/helper-string-parser/7.18.10: + /@babel/helper-string-parser@7.18.10: resolution: {integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-validator-identifier/7.10.4: + /@babel/helper-validator-identifier@7.10.4: resolution: {integrity: sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==} dev: true - /@babel/helper-validator-identifier/7.18.6: + /@babel/helper-validator-identifier@7.18.6: resolution: {integrity: sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-validator-option/7.12.1: + /@babel/helper-validator-option@7.12.1: resolution: 
{integrity: sha512-YpJabsXlJVWP0USHjnC/AQDTLlZERbON577YUVO/wLpqyj6HAtVYnWaQaN0iUN+1/tWn3c+uKKXjRut5115Y2A==} dev: true - /@babel/helper-validator-option/7.18.6: + /@babel/helper-validator-option@7.18.6: resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-wrap-function/7.12.3: + /@babel/helper-wrap-function@7.12.3: resolution: {integrity: sha512-Cvb8IuJDln3rs6tzjW3Y8UeelAOdnpB8xtQ4sme2MSZ9wOxrbThporC0y/EtE16VAtoyEfLM404Xr1e0OOp+ow==} dependencies: '@babel/helper-function-name': 7.19.0 @@ -922,19 +794,7 @@ packages: - supports-color dev: true - /@babel/helper-wrap-function/7.19.0: - resolution: {integrity: sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-function-name': 7.19.0 - '@babel/template': 7.18.10 - '@babel/traverse': 7.19.1 - '@babel/types': 7.19.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helpers/7.12.5: + /@babel/helpers@7.12.5: resolution: {integrity: sha512-lgKGMQlKqA8meJqKsW6rUnc4MdUk35Ln0ATDqdM1a/UpARODdI4j5Y5lVfUScnSNkJcdCRAaWkspykNoFg9sJA==} dependencies: '@babel/template': 7.12.7 @@ -944,7 +804,7 @@ packages: - supports-color dev: true - /@babel/helpers/7.19.0: + /@babel/helpers@7.19.0: resolution: {integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==} engines: {node: '>=6.9.0'} dependencies: @@ -955,7 +815,7 @@ packages: - supports-color dev: true - /@babel/highlight/7.10.4: + /@babel/highlight@7.10.4: resolution: {integrity: sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==} dependencies: '@babel/helper-validator-identifier': 7.18.6 @@ -963,7 +823,7 @@ packages: js-tokens: 4.0.0 dev: true - /@babel/highlight/7.17.12: + /@babel/highlight@7.17.12: resolution: {integrity: sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg==} engines: {node: '>=6.9.0'} dependencies: @@ -972,7 +832,7 @@ packages: js-tokens: 4.0.0 dev: true - /@babel/highlight/7.18.6: + /@babel/highlight@7.18.6: resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} engines: {node: '>=6.9.0'} dependencies: @@ -981,7 +841,7 @@ packages: js-tokens: 4.0.0 dev: true - /@babel/parser/7.12.7: + /@babel/parser@7.12.7: resolution: {integrity: sha512-oWR02Ubp4xTLCAqPRiNIuMVgNO5Aif/xpXtabhzW2HWUD47XJsAB4Zd/Rg30+XeQA3juXigV7hlquOTmwqLiwg==} engines: {node: '>=6.0.0'} hasBin: true @@ -989,7 +849,7 @@ packages: '@babel/types': 7.12.7 dev: true - /@babel/parser/7.19.1: + /@babel/parser@7.19.1: resolution: {integrity: sha512-h7RCSorm1DdTVGJf3P2Mhj3kdnkmF/EiysUkzS2TdgAYqyjFdMQJbVuXOBej2SBJaXan/lIVtT6KkGbyyq753A==} engines: {node: '>=6.0.0'} hasBin: true @@ -997,51 +857,7 @@ packages: '@babel/types': 7.19.0 dev: true - /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/7.18.6_@babel+core@7.9.0: - resolution: {integrity: 
sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.13.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.18.9 - '@babel/plugin-proposal-optional-chaining': 7.18.9_@babel+core@7.19.1 - dev: true - - /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.13.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.18.9 - '@babel/plugin-proposal-optional-chaining': 7.18.9_@babel+core@7.9.0 - dev: true - - /@babel/plugin-proposal-async-generator-functions/7.12.1_@babel+core@7.12.9: + /@babel/plugin-proposal-async-generator-functions@7.12.1(@babel/core@7.12.9): resolution: {integrity: sha512-d+/o30tJxFxrA1lhzJqiUcEJdI6jKlNregCv5bASeGf2Q4MXmnwH7viDo7nhx1/ohf09oaH8j1GVYG/e3Yqk6A==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 @@ -1049,12 +865,12 @@ packages: '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.18.6 '@babel/helper-remap-async-to-generator': 7.12.1 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.12.9 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.12.9) transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-async-generator-functions/7.12.1_@babel+core@7.9.0: + /@babel/plugin-proposal-async-generator-functions@7.12.1(@babel/core@7.9.0): resolution: {integrity: sha512-d+/o30tJxFxrA1lhzJqiUcEJdI6jKlNregCv5bASeGf2Q4MXmnwH7viDo7nhx1/ohf09oaH8j1GVYG/e3Yqk6A==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 @@ -1062,754 +878,638 @@ packages: '@babel/core': 7.9.0 '@babel/helper-plugin-utils': 7.18.6 '@babel/helper-remap-async-to-generator': 7.12.1 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-async-generator-functions/7.19.1_@babel+core@7.19.1: - resolution: {integrity: sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-remap-async-to-generator': 7.18.9_@babel+core@7.19.1 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.19.1 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.9.0) transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-async-generator-functions/7.19.1_@babel+core@7.9.0: - resolution: {integrity: sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-class-properties@7.12.1(@babel/core@7.12.9): + resolution: 
{integrity: sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-remap-async-to-generator': 7.18.9_@babel+core@7.9.0 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.9.0 + '@babel/core': 7.12.9 + '@babel/helper-create-class-features-plugin': 7.18.0(@babel/core@7.12.9) + '@babel/helper-plugin-utils': 7.18.6 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-class-properties/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w==} + /@babel/plugin-proposal-class-properties@7.7.4(@babel/core@7.12.9): + resolution: {integrity: sha512-EcuXeV4Hv1X3+Q1TsuOmyyxeTRiSqurGJ26+I/FW1WbymmRRapVORm6x1Zl3iDIHyRxEs+VXWp6qnlcfcJSbbw==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 - '@babel/helper-create-class-features-plugin': 7.18.0_@babel+core@7.12.9 + '@babel/helper-create-class-features-plugin': 7.12.1(@babel/core@7.12.9) '@babel/helper-plugin-utils': 7.18.6 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-class-properties/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w==} + /@babel/plugin-proposal-class-properties@7.8.3(@babel/core@7.9.0): + resolution: {integrity: sha512-EqFhbo7IosdgPgZggHaNObkmO1kNUe3slaKu54d5OWvy+p9QIKOzK1GAEpAIsZtWVtPXUHSMcT4smvDrCfY4AA==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-class-features-plugin': 7.18.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/core': 7.9.0 + '@babel/helper-create-class-features-plugin': 7.12.1(@babel/core@7.9.0) + '@babel/helper-plugin-utils': 7.10.4 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-class-properties/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-decorators@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/core': 7.12.9 + '@babel/helper-create-class-features-plugin': 7.12.1(@babel/core@7.12.9) + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-decorators': 7.12.1(@babel/core@7.12.9) transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-class-properties/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-decorators@7.8.3(@babel/core@7.9.0): + resolution: {integrity: sha512-e3RvdvS4qPJVTe288DlXjwKflpfy1hr0j5dz5WpIYYeP7vQZg2WfAEIp8k5/Lwis/m5REXEteIz6rrcDtXXG7w==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 
7.19.0 + '@babel/helper-create-class-features-plugin': 7.12.1(@babel/core@7.9.0) + '@babel/helper-plugin-utils': 7.10.4 + '@babel/plugin-syntax-decorators': 7.12.1(@babel/core@7.9.0) transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-proposal-class-properties/7.7.4_@babel+core@7.12.9: - resolution: {integrity: sha512-EcuXeV4Hv1X3+Q1TsuOmyyxeTRiSqurGJ26+I/FW1WbymmRRapVORm6x1Zl3iDIHyRxEs+VXWp6qnlcfcJSbbw==} + /@babel/plugin-proposal-dynamic-import@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-a4rhUSZFuq5W8/OO8H7BL5zspjnc1FLd9hlOxIK/f7qG4a0qsqk8uvF/ywgBA8/OmjsapjpvaEOYItfGG1qIvQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 - '@babel/helper-create-class-features-plugin': 7.12.1_@babel+core@7.12.9 '@babel/helper-plugin-utils': 7.18.6 - transitivePeerDependencies: - - supports-color + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.12.9) dev: true - /@babel/plugin-proposal-class-properties/7.8.3_@babel+core@7.9.0: - resolution: {integrity: sha512-EqFhbo7IosdgPgZggHaNObkmO1kNUe3slaKu54d5OWvy+p9QIKOzK1GAEpAIsZtWVtPXUHSMcT4smvDrCfY4AA==} + /@babel/plugin-proposal-dynamic-import@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-a4rhUSZFuq5W8/OO8H7BL5zspjnc1FLd9hlOxIK/f7qG4a0qsqk8uvF/ywgBA8/OmjsapjpvaEOYItfGG1qIvQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-create-class-features-plugin': 7.12.1_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.10.4 - transitivePeerDependencies: - - supports-color + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.9.0) dev: true - /@babel/plugin-proposal-class-static-block/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-export-namespace-from@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-6CThGf0irEkzujYS5LQcjBx8j/4aQGiVv7J9+2f7pGfxqyKh3WnmVJYW3hdrQjyksErMGBPQrCnHfOtna+WLbw==} peerDependencies: - '@babel/core': ^7.12.0 || 7 + '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.19.1 + '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.19.1 - transitivePeerDependencies: - - supports-color + '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.12.9) dev: true - /@babel/plugin-proposal-class-static-block/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-json-strings@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-GoLDUi6U9ZLzlSda2Df++VSqDJg3CG+dR0+iWsv6XRw1rEq+zwt4DirM9yrxW6XWaTpmai1cWJLMfM8qQJf+yw==} peerDependencies: - '@babel/core': ^7.12.0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-class-static-block': 7.14.5_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-proposal-decorators/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + 
'@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 - '@babel/helper-create-class-features-plugin': 7.12.1_@babel+core@7.12.9 '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-decorators': 7.12.1_@babel+core@7.12.9 - transitivePeerDependencies: - - supports-color + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.12.9) dev: true - /@babel/plugin-proposal-decorators/7.19.1_@babel+core@7.19.1: - resolution: {integrity: sha512-LfIKNBBY7Q1OX5C4xAgRQffOg2OnhAo9fnbcOHgOC9Yytm2Sw+4XqHufRYU86tHomzepxtvuVaNO+3EVKR4ivw==} - engines: {node: '>=6.9.0'} + /@babel/plugin-proposal-json-strings@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-GoLDUi6U9ZLzlSda2Df++VSqDJg3CG+dR0+iWsv6XRw1rEq+zwt4DirM9yrxW6XWaTpmai1cWJLMfM8qQJf+yw==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/plugin-syntax-decorators': 7.19.0_@babel+core@7.19.1 - transitivePeerDependencies: - - supports-color + '@babel/core': 7.9.0 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.9.0) dev: true - /@babel/plugin-proposal-decorators/7.8.3_@babel+core@7.9.0: - resolution: {integrity: sha512-e3RvdvS4qPJVTe288DlXjwKflpfy1hr0j5dz5WpIYYeP7vQZg2WfAEIp8k5/Lwis/m5REXEteIz6rrcDtXXG7w==} + /@babel/plugin-proposal-logical-assignment-operators@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-k8ZmVv0JU+4gcUGeCDZOGd0lCIamU/sMtIiX3UWnUc5yzgq6YUGyEolNYD+MLYKfSzgECPcqetVcJP9Afe/aCA==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-class-features-plugin': 7.12.1_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.10.4 - '@babel/plugin-syntax-decorators': 7.12.1_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color + '@babel/core': 7.12.9 + '@babel/helper-plugin-utils': 7.19.0 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.12.9) dev: true - /@babel/plugin-proposal-dynamic-import/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-a4rhUSZFuq5W8/OO8H7BL5zspjnc1FLd9hlOxIK/f7qG4a0qsqk8uvF/ywgBA8/OmjsapjpvaEOYItfGG1qIvQ==} + /@babel/plugin-proposal-nullish-coalescing-operator@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.12.9 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.12.9) dev: true - /@babel/plugin-proposal-dynamic-import/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-a4rhUSZFuq5W8/OO8H7BL5zspjnc1FLd9hlOxIK/f7qG4a0qsqk8uvF/ywgBA8/OmjsapjpvaEOYItfGG1qIvQ==} + /@babel/plugin-proposal-nullish-coalescing-operator@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.9.0 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.9.0) dev: true - 
[auto-generated lockfile churn, line structure lost in extraction: several hundred diff lines over the `packages:` section of the pnpm lockfile (hunks through `@@ -2278,149 +2048,143 @@ packages:`), regenerating the `@babel/plugin-proposal-*`, `@babel/plugin-syntax-*`, and `@babel/plugin-transform-*` entries. Each hunk follows the same mechanical pattern: the old peer-suffix keys (e.g. `/@babel/plugin-proposal-dynamic-import/7.18.6_@babel+core@7.19.1`) are replaced by the parenthesized peer-qualified keys (e.g. `/@babel/plugin-proposal-nullish-coalescing-operator@7.8.3(@babel/core@7.9.0)`), the `@babel/core@7.19.1`-keyed duplicate entries are largely removed, and dependency pins such as `@babel/helper-plugin-utils` are realigned between 7.10.4, 7.18.6, and 7.19.0. No hand-written changes appear in this range.]
supports-color + '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 dev: true - /@babel/plugin-transform-async-to-generator/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==} - engines: {node: '>=6.9.0'} + /@babel/plugin-transform-spread@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-vuLp8CP0BE18zVYjsEBZ5xoCecMK6LBMMxYzJnh01rxQRvhNhH1csMMmBfNo5tGpGO+NhdSNW2mzIvBu3K1fng==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-module-imports': 7.18.6 '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-remap-async-to-generator': 7.18.9_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color + '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 dev: true - /@babel/plugin-transform-block-scoped-functions/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-5OpxfuYnSgPalRpo8EWGPzIYf0lHBWORCkj5M0oLBwHdlux9Ri36QqGW3/LR13RSVOAoUUMzoPI/jpE4ABcHoA==} + /@babel/plugin-transform-sticky-regex@7.12.7(@babel/core@7.12.9): + resolution: {integrity: sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: @@ -2428,8 +2192,8 @@ packages: '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoped-functions/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-5OpxfuYnSgPalRpo8EWGPzIYf0lHBWORCkj5M0oLBwHdlux9Ri36QqGW3/LR13RSVOAoUUMzoPI/jpE4ABcHoA==} + /@babel/plugin-transform-sticky-regex@7.12.7(@babel/core@7.9.0): + resolution: {integrity: sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: @@ -2437,28 +2201,26 @@ packages: '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoped-functions/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} - engines: {node: '>=6.9.0'} + /@babel/plugin-transform-template-literals@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-b4Zx3KHi+taXB1dVRBhVJtEPi9h1THCeKmae2qP0YdUHIFhVjtpqqNfxeVAa1xeHVhAy4SbHxEwx5cltAu5apw==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/core': 7.12.9 + '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoped-functions/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} - engines: {node: '>=6.9.0'} + /@babel/plugin-transform-template-literals@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-b4Zx3KHi+taXB1dVRBhVJtEPi9h1THCeKmae2qP0YdUHIFhVjtpqqNfxeVAa1xeHVhAy4SbHxEwx5cltAu5apw==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoping/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-zJyAC9sZdE60r1nVQHblcfCj29Dh2Y0DOvlMkcqSo0ckqjiCwNiUezUKw+RjOCwGfpLRwnAeQ2XlLpsnGkvv9w==} + /@babel/plugin-transform-typeof-symbol@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-EPGgpGy+O5Kg5pJFNDKuxt9RdmTgj5sgrus2XVeMp/ZIbOESadgILUbm50SNpghOh3/6yrbsH+NB5+WJTmsA7Q==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 
dependencies: @@ -2466,8 +2228,8 @@ packages: '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoping/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-zJyAC9sZdE60r1nVQHblcfCj29Dh2Y0DOvlMkcqSo0ckqjiCwNiUezUKw+RjOCwGfpLRwnAeQ2XlLpsnGkvv9w==} + /@babel/plugin-transform-typeof-symbol@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-EPGgpGy+O5Kg5pJFNDKuxt9RdmTgj5sgrus2XVeMp/ZIbOESadgILUbm50SNpghOh3/6yrbsH+NB5+WJTmsA7Q==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: @@ -2475,4588 +2237,1166 @@ packages: '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-block-scoping/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw==} + /@babel/plugin-transform-typescript@7.18.4(@babel/core@7.9.0): + resolution: {integrity: sha512-l4vHuSLUajptpHNEOUDEGsnpl9pfRLsN1XUoDQDD/YBuXTM+v37SHGS+c6n4jdcZy96QtuUuSvZYMLSSsjH8Mw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/core': 7.9.0 + '@babel/helper-create-class-features-plugin': 7.18.0(@babel/core@7.9.0) + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-syntax-typescript': 7.17.12(@babel/core@7.9.0) + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-block-scoping/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw==} - engines: {node: '>=6.9.0'} + /@babel/plugin-transform-unicode-escapes@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-I8gNHJLIc7GdApm7wkVnStWssPNbSRMPtgHdmH3sRM1zopz09UWPS4x5V4n1yz/MIWTVnJ9sp6IkuXdWM4w+2Q==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 + '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.19.0 dev: true - /@babel/plugin-transform-classes/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-/74xkA7bVdzQTBeSUhLLJgYIcxw/dpEpCdRDiHgPJ3Mv6uC11UhjpOhl72CgqbBCmt1qtssCyB2xnJm1+PFjog==} + /@babel/plugin-transform-unicode-regex@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-SqH4ClNngh/zGwHZOOQMTD+e8FGWexILV+ePMyiDJttAWRh5dhDL8rcl5lSgU3Huiq6Zn6pWTMvdPAb21Dwdyg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-define-map': 7.10.5 - '@babel/helper-function-name': 7.17.9 - '@babel/helper-optimise-call-expression': 7.16.7 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color + '@babel/helper-create-regexp-features-plugin': 7.12.7(@babel/core@7.12.9) + '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-classes/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-/74xkA7bVdzQTBeSUhLLJgYIcxw/dpEpCdRDiHgPJ3Mv6uC11UhjpOhl72CgqbBCmt1qtssCyB2xnJm1+PFjog==} + /@babel/plugin-transform-unicode-regex@7.12.1(@babel/core@7.9.0): + resolution: {integrity: sha512-SqH4ClNngh/zGwHZOOQMTD+e8FGWexILV+ePMyiDJttAWRh5dhDL8rcl5lSgU3Huiq6Zn6pWTMvdPAb21Dwdyg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-define-map': 7.10.5 - '@babel/helper-function-name': 7.17.9 - 
'@babel/helper-optimise-call-expression': 7.16.7 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color + '@babel/core': 7.9.0 + '@babel/helper-create-regexp-features-plugin': 7.12.7(@babel/core@7.9.0) + '@babel/helper-plugin-utils': 7.18.6 dev: true - /@babel/plugin-transform-classes/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-/74xkA7bVdzQTBeSUhLLJgYIcxw/dpEpCdRDiHgPJ3Mv6uC11UhjpOhl72CgqbBCmt1qtssCyB2xnJm1+PFjog==} + /@babel/preset-env@7.12.7(@babel/core@7.12.9): + resolution: {integrity: sha512-OnNdfAr1FUQg7ksb7bmbKoby4qFOHw6DKWWUNB9KqnnCldxhxJlP+21dpyaWFmf2h0rTbOkXJtAGevY3XW1eew==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-define-map': 7.10.5 - '@babel/helper-function-name': 7.17.9 - '@babel/helper-optimise-call-expression': 7.16.7 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 + '@babel/compat-data': 7.12.7 + '@babel/core': 7.12.9 + '@babel/helper-compilation-targets': 7.12.5(@babel/core@7.12.9) + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-validator-option': 7.12.1 + '@babel/plugin-proposal-async-generator-functions': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-class-properties': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-dynamic-import': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-export-namespace-from': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-json-strings': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-logical-assignment-operators': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-numeric-separator': 7.12.7(@babel/core@7.12.9) + '@babel/plugin-proposal-object-rest-spread': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-optional-catch-binding': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-optional-chaining': 7.12.7(@babel/core@7.12.9) + '@babel/plugin-proposal-private-methods': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-unicode-property-regex': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.12.9) + '@babel/plugin-syntax-class-properties': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.12.9) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.12.9) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.12.9) + '@babel/plugin-syntax-top-level-await': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-arrow-functions': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-async-to-generator': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-block-scoped-functions': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-block-scoping': 7.12.1(@babel/core@7.12.9) + 
'@babel/plugin-transform-classes': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-computed-properties': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-destructuring': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-dotall-regex': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-duplicate-keys': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-exponentiation-operator': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-for-of': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-function-name': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-literals': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-member-expression-literals': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-modules-amd': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-modules-commonjs': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-modules-systemjs': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-modules-umd': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-named-capturing-groups-regex': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-new-target': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-object-super': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-parameters': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-property-literals': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-regenerator': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-reserved-words': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-shorthand-properties': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-spread': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-sticky-regex': 7.12.7(@babel/core@7.12.9) + '@babel/plugin-transform-template-literals': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-typeof-symbol': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-unicode-escapes': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-unicode-regex': 7.12.1(@babel/core@7.12.9) + '@babel/preset-modules': 0.1.4(@babel/core@7.12.9) + '@babel/types': 7.18.7 + core-js-compat: 3.7.0 + semver: 5.7.1 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-transform-classes/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A==} - engines: {node: '>=6.9.0'} + /@babel/preset-env@7.9.0(@babel/core@7.9.0): + resolution: {integrity: sha512-712DeRXT6dyKAM/FMbQTV/FvRCms2hPCx+3weRjZ8iQVQWZejWWk1wwG6ViWMyqb/ouBbGOl5b6aCk0+j1NmsQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 + '@babel/compat-data': 7.12.7 + '@babel/core': 7.9.0 + '@babel/helper-compilation-targets': 7.12.5(@babel/core@7.9.0) + '@babel/helper-module-imports': 7.12.5 + '@babel/helper-plugin-utils': 7.10.4 + '@babel/plugin-proposal-async-generator-functions': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-proposal-dynamic-import': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-proposal-json-strings': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.12.1(@babel/core@7.9.0) + 
'@babel/plugin-proposal-numeric-separator': 7.12.7(@babel/core@7.9.0) + '@babel/plugin-proposal-object-rest-spread': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-proposal-optional-catch-binding': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-proposal-optional-chaining': 7.12.7(@babel/core@7.9.0) + '@babel/plugin-proposal-unicode-property-regex': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.9.0) + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.9.0) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-syntax-top-level-await': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-arrow-functions': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-async-to-generator': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-block-scoped-functions': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-block-scoping': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-classes': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-computed-properties': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-destructuring': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-dotall-regex': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-duplicate-keys': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-exponentiation-operator': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-for-of': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-function-name': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-literals': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-member-expression-literals': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-modules-amd': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-modules-commonjs': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-modules-systemjs': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-modules-umd': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-named-capturing-groups-regex': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-new-target': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-object-super': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-parameters': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-property-literals': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-regenerator': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-reserved-words': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-shorthand-properties': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-spread': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-sticky-regex': 7.12.7(@babel/core@7.9.0) + '@babel/plugin-transform-template-literals': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-typeof-symbol': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-unicode-regex': 7.12.1(@babel/core@7.9.0) + '@babel/preset-modules': 0.1.4(@babel/core@7.9.0) + '@babel/types': 7.12.7 + browserslist: 4.20.4 + core-js-compat: 3.7.0 + invariant: 2.2.4 + levenary: 1.1.1 + semver: 5.7.1 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-transform-classes/7.19.0_@babel+core@7.9.0: - resolution: {integrity: 
sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A==} - engines: {node: '>=6.9.0'} + /@babel/preset-flow@7.12.1(@babel/core@7.12.9): + resolution: {integrity: sha512-UAoyMdioAhM6H99qPoKvpHMzxmNVXno8GYU/7vZmGaHk6/KqfDYL1W0NxszVbJ2EP271b7e6Ox+Vk2A9QsB3Sw==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.9.0 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-optimise-call-expression': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - '@babel/helper-split-export-declaration': 7.18.6 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color + '@babel/core': 7.12.9 + '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-transform-flow-strip-types': 7.12.1(@babel/core@7.12.9) dev: true - /@babel/plugin-transform-computed-properties/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-vVUOYpPWB7BkgUWPo4C44mUQHpTZXakEqFjbv8rQMg7TC6S6ZhGZ3otQcRH6u7+adSlE5i0sp63eMC/XGffrzg==} + /@babel/preset-modules@0.1.4(@babel/core@7.12.9): + resolution: {integrity: sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-proposal-unicode-property-regex': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-dotall-regex': 7.12.1(@babel/core@7.12.9) + '@babel/types': 7.18.7 + esutils: 2.0.3 dev: true - /@babel/plugin-transform-computed-properties/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-vVUOYpPWB7BkgUWPo4C44mUQHpTZXakEqFjbv8rQMg7TC6S6ZhGZ3otQcRH6u7+adSlE5i0sp63eMC/XGffrzg==} + /@babel/preset-modules@0.1.4(@babel/core@7.9.0): + resolution: {integrity: sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 '@babel/helper-plugin-utils': 7.18.6 + '@babel/plugin-proposal-unicode-property-regex': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-dotall-regex': 7.12.1(@babel/core@7.9.0) + '@babel/types': 7.18.7 + esutils: 2.0.3 dev: true - /@babel/plugin-transform-computed-properties/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw==} - engines: {node: '>=6.9.0'} + /@babel/preset-react@7.12.7(@babel/core@7.12.9): + resolution: {integrity: sha512-wKeTdnGUP5AEYCYQIMeXMMwU7j+2opxrG0WzuZfxuuW9nhKvvALBjl67653CWamZJVefuJGI219G591RSldrqQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.19.1 + '@babel/core': 7.12.9 '@babel/helper-plugin-utils': 7.19.0 + '@babel/plugin-transform-react-display-name': 7.18.6(@babel/core@7.12.9) + '@babel/plugin-transform-react-jsx': 7.19.0(@babel/core@7.12.9) + '@babel/plugin-transform-react-jsx-development': 7.18.6(@babel/core@7.12.9) + '@babel/plugin-transform-react-jsx-self': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-react-jsx-source': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-react-pure-annotations': 7.18.6(@babel/core@7.12.9) dev: true - /@babel/plugin-transform-computed-properties/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw==} - engines: 
{node: '>=6.9.0'} + /@babel/preset-react@7.9.1(@babel/core@7.9.0): + resolution: {integrity: sha512-aJBYF23MPj0RNdp/4bHnAP0NVqqZRr9kl0NAOP4nJCex6OYVio59+dnQzsAWFuogdLyeaKA1hmfUIVZkY5J+TQ==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/helper-plugin-utils': 7.10.4 + '@babel/plugin-transform-react-display-name': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-react-jsx': 7.12.7(@babel/core@7.9.0) + '@babel/plugin-transform-react-jsx-development': 7.12.7(@babel/core@7.9.0) + '@babel/plugin-transform-react-jsx-self': 7.12.1(@babel/core@7.9.0) + '@babel/plugin-transform-react-jsx-source': 7.12.1(@babel/core@7.9.0) dev: true - /@babel/plugin-transform-destructuring/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-fRMYFKuzi/rSiYb2uRLiUENJOKq4Gnl+6qOv5f8z0TZXg3llUwUhsNNwrwaT/6dUhJTzNpBr+CUvEWBtfNY1cw==} + /@babel/preset-typescript@7.9.0(@babel/core@7.9.0): + resolution: {integrity: sha512-S4cueFnGrIbvYJgwsVFKdvOmpiL0XGw9MFW9D0vgRys5g36PBhZRL8NX8Gr2akz8XRtzq6HuDXPD/1nniagNUg==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/core': 7.9.0 + '@babel/helper-plugin-utils': 7.10.4 + '@babel/plugin-transform-typescript': 7.18.4(@babel/core@7.9.0) + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-destructuring/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-fRMYFKuzi/rSiYb2uRLiUENJOKq4Gnl+6qOv5f8z0TZXg3llUwUhsNNwrwaT/6dUhJTzNpBr+CUvEWBtfNY1cw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/runtime@7.13.10: + resolution: {integrity: sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw==} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true + regenerator-runtime: 0.13.9 + dev: false - /@babel/plugin-transform-destructuring/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-fRMYFKuzi/rSiYb2uRLiUENJOKq4Gnl+6qOv5f8z0TZXg3llUwUhsNNwrwaT/6dUhJTzNpBr+CUvEWBtfNY1cw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/runtime@7.18.6: + resolution: {integrity: sha512-t9wi7/AW6XtKahAe20Yw0/mMljKq0B1r2fPdvaAdV/KPDZewFXdaaa6K7lxmZBZ8FBNpCiAT6iHPmd6QO9bKfQ==} + engines: {node: '>=6.9.0'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + regenerator-runtime: 0.13.9 + + /@babel/runtime@7.9.0: + resolution: {integrity: sha512-cTIudHnzuWLS56ik4DnRnqqNf8MkdUzV4iFFI1h7Jo9xvrpQROYaAnaSd2mHLQAzzZAPfATynX5ord6YlNYNMA==} + dependencies: + regenerator-runtime: 0.13.9 dev: true - /@babel/plugin-transform-destructuring/7.18.13_@babel+core@7.19.1: - resolution: {integrity: sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/template@7.12.7: + resolution: {integrity: sha512-GkDzmHS6GV7ZeXfJZ0tLRBhZcMcY0/Lnb+eEbXDBfCAcZCjrZKe6p3J4we/D24O9Y8enxWAg1cWwof59yLh2ow==} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/code-frame': 7.16.7 + '@babel/parser': 7.12.7 + '@babel/types': 7.12.7 dev: true - /@babel/plugin-transform-destructuring/7.18.13_@babel+core@7.9.0: - resolution: {integrity: sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow==} + /@babel/template@7.18.10: + resolution: {integrity: 
sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==} engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/code-frame': 7.18.6 + '@babel/parser': 7.19.1 + '@babel/types': 7.19.0 dev: true - /@babel/plugin-transform-dotall-regex/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-B2pXeRKoLszfEW7J4Hg9LoFaWEbr/kzo3teWHmtFCszjRNa/b40f9mfeqZsIDLLt/FjwQ6pz/Gdlwy85xNckBA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/traverse@7.12.9: + resolution: {integrity: sha512-iX9ajqnLdoU1s1nHt36JDI9KG4k+vmI8WgjK5d+aDTwQbL2fUnzedNedssA645Ede3PM2ma1n8Q4h2ohwXgMXw==} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/code-frame': 7.16.7 + '@babel/generator': 7.12.5 + '@babel/helper-function-name': 7.10.4 + '@babel/helper-split-export-declaration': 7.11.0 + '@babel/parser': 7.12.7 + '@babel/types': 7.12.7 + debug: 4.3.3 + globals: 11.12.0 + lodash: 4.17.21 + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-dotall-regex/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-B2pXeRKoLszfEW7J4Hg9LoFaWEbr/kzo3teWHmtFCszjRNa/b40f9mfeqZsIDLLt/FjwQ6pz/Gdlwy85xNckBA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/traverse@7.19.1: + resolution: {integrity: sha512-0j/ZfZMxKukDaag2PtOPDbwuELqIar6lLskVPPJDjXMXjfLb1Obo/1yjxIGqqAJrmfaTIY3z2wFLAQ7qSkLsuA==} + engines: {node: '>=6.9.0'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.19.0 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.19.1 + '@babel/types': 7.19.0 + debug: 4.3.3 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-dotall-regex/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/types@7.12.7: + resolution: {integrity: sha512-MNyI92qZq6jrQkXvtIiykvl4WtoRrVV9MPn+ZfsoEENjiWcBQ3ZSHrkxnJWgWtLX3XXqX5hrSQ+X69wkmesXuQ==} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/helper-validator-identifier': 7.10.4 + lodash: 4.17.21 + to-fast-properties: 2.0.0 dev: true - /@babel/plugin-transform-dotall-regex/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==} + /@babel/types@7.18.7: + resolution: {integrity: sha512-QG3yxTcTIBoAcQmkCs+wAPYZhu7Dk9rXKacINfNbdJDNERTbLQbHGyVG8q/YGMPeCJRIhSY0+fTc5+xuh6WPSQ==} engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@babel/helper-validator-identifier': 7.18.6 + to-fast-properties: 2.0.0 dev: true - /@babel/plugin-transform-duplicate-keys/7.12.1_@babel+core@7.12.9: - resolution: 
{integrity: sha512-iRght0T0HztAb/CazveUpUQrZY+aGKKaWXMJ4uf9YJtqxSUe09j3wteztCUDRHs+SRAL7yMuFqUsLoAKKzgXjw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@babel/types@7.19.0: + resolution: {integrity: sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA==} + engines: {node: '>=6.9.0'} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-string-parser': 7.18.10 + '@babel/helper-validator-identifier': 7.18.6 + to-fast-properties: 2.0.0 dev: true - /@babel/plugin-transform-duplicate-keys/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-iRght0T0HztAb/CazveUpUQrZY+aGKKaWXMJ4uf9YJtqxSUe09j3wteztCUDRHs+SRAL7yMuFqUsLoAKKzgXjw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@cnakazawa/watch@1.0.4: + resolution: {integrity: sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ==} + engines: {node: '>=0.1.95'} + hasBin: true dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + exec-sh: 0.3.4 + minimist: 1.2.5 dev: true - /@babel/plugin-transform-duplicate-keys/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + /@colors/colors@1.5.0: + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + requiresBuild: true dev: true - /@babel/plugin-transform-duplicate-keys/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@cspotcode/source-map-support@0.8.1: + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@jridgewell/trace-mapping': 0.3.9 dev: true - /@babel/plugin-transform-exponentiation-operator/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-7tqwy2bv48q+c1EHbXK0Zx3KXd2RVQp6OC7PbwFNt/dPTAV3Lu5sWtWuAj8owr5wqtWnqHfl2/mJlUmqkChKug==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@dabh/diagnostics@2.0.3: + resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.10.4 - '@babel/helper-plugin-utils': 7.18.6 + colorspace: 1.1.4 + enabled: 2.0.0 + kuler: 2.0.0 dev: true - /@babel/plugin-transform-exponentiation-operator/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-7tqwy2bv48q+c1EHbXK0Zx3KXd2RVQp6OC7PbwFNt/dPTAV3Lu5sWtWuAj8owr5wqtWnqHfl2/mJlUmqkChKug==} + /@emotion/babel-plugin@11.9.2(@babel/core@7.9.0): + resolution: {integrity: sha512-Pr/7HGH6H6yKgnVFNEj2MVlreu3ADqftqjqwUvDy/OJzKFgxKeTQ+eeUf20FOTuHVkDON2iNa25rAXVYtWJCjw==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@babel/core': ^7.0.0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.10.4 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.9.0) + 
'@babel/runtime': 7.18.6 + '@emotion/hash': 0.8.0 + '@emotion/memoize': 0.7.5 + '@emotion/serialize': 1.0.4 + babel-plugin-macros: 2.8.0 + convert-source-map: 1.7.0 + escape-string-regexp: 4.0.0 + find-root: 1.1.0 + source-map: 0.5.7 + stylis: 4.0.13 dev: true - /@babel/plugin-transform-exponentiation-operator/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@emotion/cache@11.9.3: + resolution: {integrity: sha512-0dgkI/JKlCXa+lEXviaMtGBL0ynpx4osh7rjOXE71q9bIF8G+XhJgvi+wDu0B0IdCVx37BffiwXlN9I3UuzFvg==} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.19.0 + '@emotion/memoize': 0.7.5 + '@emotion/sheet': 1.1.1 + '@emotion/utils': 1.1.0 + '@emotion/weak-memoize': 0.2.5 + stylis: 4.0.13 dev: true - /@babel/plugin-transform-exponentiation-operator/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.9 - '@babel/helper-plugin-utils': 7.19.0 + /@emotion/hash@0.8.0: + resolution: {integrity: sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==} dev: true - /@babel/plugin-transform-flow-strip-types/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@emotion/is-prop-valid@1.1.3: + resolution: {integrity: sha512-RFg04p6C+1uO19uG8N+vqanzKqiM9eeV1LDOG3bmkYmuOj7NbKNlFC/4EZq5gnwAIlcC/jOT24f8Td0iax2SXA==} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-flow': 7.12.1_@babel+core@7.12.9 + '@emotion/memoize': 0.7.5 dev: true - /@babel/plugin-transform-flow-strip-types/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-flow': 7.12.1_@babel+core@7.9.0 + /@emotion/memoize@0.7.5: + resolution: {integrity: sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ==} dev: true - /@babel/plugin-transform-flow-strip-types/7.9.0_@babel+core@7.9.0: - resolution: {integrity: sha512-7Qfg0lKQhEHs93FChxVLAvhBshOPQDtJUTVHr/ZwQNRccCm4O9D79r9tVSoV8iNwjP1YgfD+e/fgHcPkN1qEQg==} + /@emotion/react@11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0): + resolution: {integrity: sha512-g9Q1GcTOlzOEjqwuLF/Zd9LC+4FljjPjDfxSM7KmEakm+hsHXk+bYZ2q+/hTJzr0OUNkujo72pXLQvXj6H+GJQ==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@babel/core': ^7.0.0 || 7 + '@types/react': '*' + react: '>=16.8.0 || 18' + peerDependenciesMeta: + '@babel/core': + optional: true + '@types/react': + optional: true dependencies: '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.10.4 - '@babel/plugin-syntax-flow': 7.12.1_@babel+core@7.9.0 + '@babel/runtime': 7.18.6 + '@emotion/babel-plugin': 11.9.2(@babel/core@7.9.0) + '@emotion/cache': 11.9.3 + '@emotion/serialize': 1.0.4 
+ '@emotion/utils': 1.1.0 + '@emotion/weak-memoize': 0.2.5 + '@types/react': 18.0.25 + hoist-non-react-statics: 3.3.2 + react: 18.2.0 dev: true - /@babel/plugin-transform-for-of/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-Zaeq10naAsuHo7heQvyV0ptj4dlZJwZgNAtBYBnu5nNKJoW62m0zKcIEyVECrUKErkUkg6ajMy4ZfnVZciSBhg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@emotion/serialize@1.0.4: + resolution: {integrity: sha512-1JHamSpH8PIfFwAMryO2bNka+y8+KA5yga5Ocf2d7ZEiJjb7xlLW7aknBGZqJLajuLOvJ+72vN+IBSwPlXD1Pg==} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + '@emotion/hash': 0.8.0 + '@emotion/memoize': 0.7.5 + '@emotion/unitless': 0.7.5 + '@emotion/utils': 1.1.0 + csstype: 3.1.0 dev: true - /@babel/plugin-transform-for-of/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-Zaeq10naAsuHo7heQvyV0ptj4dlZJwZgNAtBYBnu5nNKJoW62m0zKcIEyVECrUKErkUkg6ajMy4ZfnVZciSBhg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.18.6 + /@emotion/sheet@1.1.1: + resolution: {integrity: sha512-J3YPccVRMiTZxYAY0IOq3kd+hUP8idY8Kz6B/Cyo+JuXq52Ek+zbPbSQUrVQp95aJ+lsAW7DPL1P2Z+U1jGkKA==} dev: true - /@babel/plugin-transform-for-of/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-Zaeq10naAsuHo7heQvyV0ptj4dlZJwZgNAtBYBnu5nNKJoW62m0zKcIEyVECrUKErkUkg6ajMy4ZfnVZciSBhg==} + /@emotion/styled@11.9.3(@babel/core@7.9.0)(@emotion/react@11.9.3)(@types/react@18.0.25)(react@18.2.0): + resolution: {integrity: sha512-o3sBNwbtoVz9v7WB1/Y/AmXl69YHmei2mrVnK7JgyBJ//Rst5yqPZCecEJlMlJrFeWHp+ki/54uN265V2pEcXA==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@babel/core': ^7.0.0 || 7 + '@emotion/react': ^11.0.0-rc.0 + '@types/react': '*' + react: '>=16.8.0 || 18' + peerDependenciesMeta: + '@babel/core': + optional: true + '@types/react': + optional: true dependencies: '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + '@babel/runtime': 7.18.6 + '@emotion/babel-plugin': 11.9.2(@babel/core@7.9.0) + '@emotion/is-prop-valid': 1.1.3 + '@emotion/react': 11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0) + '@emotion/serialize': 1.0.4 + '@emotion/utils': 1.1.0 + '@types/react': 18.0.25 + react: 18.2.0 dev: true - /@babel/plugin-transform-for-of/7.18.8_@babel+core@7.19.1: - resolution: {integrity: sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + /@emotion/unitless@0.7.5: + resolution: {integrity: sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==} dev: true - /@babel/plugin-transform-for-of/7.18.8_@babel+core@7.9.0: - resolution: {integrity: sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + /@emotion/utils@1.1.0: + resolution: {integrity: sha512-iRLa/Y4Rs5H/f2nimczYmS5kFJEbpiVvgN3XVfZ022IYhuNA1IRSHEizcof88LtCTXtl9S2Cxt32KgaXEu72JQ==} dev: true - /@babel/plugin-transform-function-name/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-JF3UgJUILoFrFMEnOJLJkRHSk6LUSXLmEFsA23aR2O5CSLUxbeUX1IZ1YQ7Sn0aXb601Ncwjx73a+FVqgcljVw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - 
'@babel/helper-function-name': 7.17.9 - '@babel/helper-plugin-utils': 7.18.6 + /@emotion/weak-memoize@0.2.5: + resolution: {integrity: sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA==} dev: true - /@babel/plugin-transform-function-name/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-JF3UgJUILoFrFMEnOJLJkRHSk6LUSXLmEFsA23aR2O5CSLUxbeUX1IZ1YQ7Sn0aXb601Ncwjx73a+FVqgcljVw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-function-name': 7.17.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true + /@floating-ui/core@1.2.1: + resolution: {integrity: sha512-LSqwPZkK3rYfD7GKoIeExXOyYx6Q1O4iqZWwIehDNuv3Dv425FIAE8PRwtAx1imEolFTHgBEcoFHm9MDnYgPCg==} + dev: false - /@babel/plugin-transform-function-name/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@floating-ui/dom@1.2.1: + resolution: {integrity: sha512-Rt45SmRiV8eU+xXSB9t0uMYiQ/ZWGE/jumse2o3i5RGlyvcbqOF4q+1qBnzLE2kZ5JGhq0iMkcGXUKbFe7MpTA==} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true + '@floating-ui/core': 1.2.1 + dev: false - /@babel/plugin-transform-function-name/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==} - engines: {node: '>=6.9.0'} + /@floating-ui/react-dom-interactions@0.9.3(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-oHwFLxySRtmhgwg7ZdWswvDDi+ld4mEtxu6ngOd7mRC5L1Rk6adjSfOBOHDxea+ItAWmds8m6A725sn1HQtUyQ==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + react: '>=16.8.0 || 18' + react-dom: '>=16.8.0 || 18' dependencies: - '@babel/core': 7.9.0 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.9.0 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true + '@floating-ui/react-dom': 1.3.0(react-dom@18.2.0)(react@18.2.0) + aria-hidden: 1.2.0(@types/react@18.0.25)(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + transitivePeerDependencies: + - '@types/react' + dev: false - /@babel/plugin-transform-literals/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-+PxVGA+2Ag6uGgL0A5f+9rklOnnMccwEBzwYFL3EUaKuiyVnUipyXncFcfjSkbimLrODoqki1U9XxZzTvfN7IQ==} + /@floating-ui/react-dom@1.3.0(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-htwHm67Ji5E/pROEAr7f8IKFShuiCKHwUC/UY4vC3I5jiSvGFAYnSYiZO5MlGmads+QqvUkR9ANHEguGrDv72g==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + react: '>=16.8.0 || 18' + react-dom: '>=16.8.0 || 18' dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true + '@floating-ui/dom': 1.2.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false - /@babel/plugin-transform-literals/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-+PxVGA+2Ag6uGgL0A5f+9rklOnnMccwEBzwYFL3EUaKuiyVnUipyXncFcfjSkbimLrODoqki1U9XxZzTvfN7IQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/console@24.9.0: + resolution: {integrity: sha512-Zuj6b8TnKXi3q4ymac8EQfc3ea/uhLeCGThFqXeC8H9/raaH8ARPUTdId+XyGd03Z4In0/VjD2OYFcBF09fNLQ==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + 
'@jest/source-map': 24.9.0 + chalk: 2.4.2 + slash: 2.0.0 dev: true - /@babel/plugin-transform-literals/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/core@24.9.0: + resolution: {integrity: sha512-Fogg3s4wlAr1VX7q+rhV9RVnUv5tD7VuWfYy1+whMiWUrvl7U3QJSJyWcDio9Lq2prqYsZaeTv2Rz24pWGkJ2A==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + '@jest/console': 24.9.0 + '@jest/reporters': 24.9.0 + '@jest/test-result': 24.9.0 + '@jest/transform': 24.9.0 + '@jest/types': 24.9.0 + ansi-escapes: 3.2.0 + chalk: 2.4.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + jest-changed-files: 24.9.0 + jest-config: 24.9.0 + jest-haste-map: 24.9.0 + jest-message-util: 24.9.0 + jest-regex-util: 24.9.0 + jest-resolve: 24.9.0 + jest-resolve-dependencies: 24.9.0 + jest-runner: 24.9.0 + jest-runtime: 24.9.0 + jest-snapshot: 24.9.0 + jest-util: 24.9.0 + jest-validate: 24.9.0 + jest-watcher: 24.9.0 + micromatch: 3.1.10 + p-each-series: 1.0.0 + realpath-native: 1.1.0 + rimraf: 2.7.1 + slash: 2.0.0 + strip-ansi: 5.2.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate dev: true - /@babel/plugin-transform-literals/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/environment@24.9.0: + resolution: {integrity: sha512-5A1QluTPhvdIPFYnO3sZC3smkNeXPVELz7ikPbhUj0bQjB07EoE9qtLrem14ZUYWdVayYbsjVwIiL4WBIMV4aQ==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@jest/fake-timers': 24.9.0 + '@jest/transform': 24.9.0 + '@jest/types': 24.9.0 + jest-mock: 24.9.0 + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-member-expression-literals/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-1sxePl6z9ad0gFMB9KqmYofk34flq62aqMt9NqliS/7hPEpURUCMbyHXrMPlo282iY7nAvUB1aQd5mg79UD9Jg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/fake-timers@24.9.0: + resolution: {integrity: sha512-eWQcNa2YSwzXWIMC5KufBh3oWRIijrQFROsIqt6v/NS9Io/gknw1jsAC9c+ih/RQX4A3O7SeWAhQeN0goKhT9A==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + '@jest/types': 24.9.0 + jest-message-util: 24.9.0 + jest-mock: 24.9.0 + transitivePeerDependencies: + - supports-color dev: true - /@babel/plugin-transform-member-expression-literals/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-1sxePl6z9ad0gFMB9KqmYofk34flq62aqMt9NqliS/7hPEpURUCMbyHXrMPlo282iY7nAvUB1aQd5mg79UD9Jg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/reporters@24.9.0: + resolution: {integrity: sha512-mu4X0yjaHrffOsWmVLzitKmmmWSQ3GGuefgNscUSWNiUNcEOSEQk9k3pERKEQVBb0Cnn88+UESIsZEMH3o88Gw==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + '@jest/environment': 24.9.0 + '@jest/test-result': 24.9.0 + '@jest/transform': 24.9.0 + '@jest/types': 24.9.0 + chalk: 2.4.2 + exit: 0.1.2 + glob: 7.2.3 + istanbul-lib-coverage: 2.0.5 + istanbul-lib-instrument: 3.3.0 + istanbul-lib-report: 2.0.8 + istanbul-lib-source-maps: 3.0.6 + istanbul-reports: 2.2.7 + jest-haste-map: 24.9.0 + jest-resolve: 24.9.0 + jest-runtime: 24.9.0 + jest-util: 24.9.0 
+ jest-worker: 24.9.0 + node-notifier: 5.4.3 + slash: 2.0.0 + source-map: 0.6.1 + string-length: 2.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate dev: true - /@babel/plugin-transform-member-expression-literals/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/source-map@24.9.0: + resolution: {integrity: sha512-/Xw7xGlsZb4MJzNDgB7PW5crou5JqWiBQaz6xyPd3ArOg2nfn/PunV8+olXbbEZzNl591o5rWKE9BRDaFAuIBg==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + callsites: 3.1.0 + graceful-fs: 4.2.10 + source-map: 0.6.1 dev: true - /@babel/plugin-transform-member-expression-literals/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/test-result@24.9.0: + resolution: {integrity: sha512-XEFrHbBonBJ8dGp2JmF8kP/nQI/ImPpygKHwQ/SY+es59Z3L5PI4Qb9TQQMAEeYsThG1xF0k6tmG0tIKATNiiA==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@jest/console': 24.9.0 + '@jest/types': 24.9.0 + '@types/istanbul-lib-coverage': 2.0.3 dev: true - /@babel/plugin-transform-modules-amd/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-tDW8hMkzad5oDtzsB70HIQQRBiTKrhfgwC/KkJeGsaNFTdWhKNt/BiE8c5yj19XiGyrxpbkOfH87qkNg1YGlOQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/test-sequencer@24.9.0: + resolution: {integrity: sha512-6qqsU4o0kW1dvA95qfNog8v8gkRN9ph6Lz7r96IvZpHdNipP2cBcb07J1Z45mz/VIS01OHJ3pY8T5fUY38tg4A==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 + '@jest/test-result': 24.9.0 + jest-haste-map: 24.9.0 + jest-runner: 24.9.0 + jest-runtime: 24.9.0 transitivePeerDependencies: + - bufferutil - supports-color + - utf-8-validate dev: true - /@babel/plugin-transform-modules-amd/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-tDW8hMkzad5oDtzsB70HIQQRBiTKrhfgwC/KkJeGsaNFTdWhKNt/BiE8c5yj19XiGyrxpbkOfH87qkNg1YGlOQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/transform@24.9.0: + resolution: {integrity: sha512-TcQUmyNRxV94S0QpMOnZl0++6RMiqpbH/ZMccFB/amku6Uwvyb1cjYX7xkp5nGNkbX4QPH/FcB6q1HBTHynLmQ==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 + '@babel/core': 7.12.9 + '@jest/types': 24.9.0 + babel-plugin-istanbul: 5.2.0 + chalk: 2.4.2 + convert-source-map: 1.7.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.10 + jest-haste-map: 24.9.0 + jest-regex-util: 24.9.0 + jest-util: 24.9.0 + micromatch: 3.1.10 + pirates: 4.0.1 + realpath-native: 1.1.0 + slash: 2.0.0 + source-map: 0.6.1 + write-file-atomic: 2.4.1 transitivePeerDependencies: - supports-color dev: true - /@babel/plugin-transform-modules-amd/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jest/types@24.9.0: + resolution: {integrity: 
sha512-XKK7ze1apu5JWQ5eZjHITP66AX+QsLlbaJRBGYr8pNzwcAE2JVkwnf0yqjHTsDRcjR0mujy/NmZMXw5kl+kGBw==} + engines: {node: '>= 6'} dependencies: - '@babel/core': 7.19.1 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@types/istanbul-lib-coverage': 2.0.3 + '@types/istanbul-reports': 1.1.2 + '@types/yargs': 13.0.11 dev: true - /@babel/plugin-transform-modules-amd/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jridgewell/gen-mapping@0.1.1: + resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} + engines: {node: '>=6.0.0'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@jridgewell/set-array': 1.1.1 + '@jridgewell/sourcemap-codec': 1.4.13 dev: true - /@babel/plugin-transform-modules-commonjs/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-dY789wq6l0uLY8py9c1B48V8mVL5gZh/+PQ5ZPrylPYsnAvnEMjqsUXkuoDVPeVK+0VyGar+D08107LzDQ6pag==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jridgewell/gen-mapping@0.3.2: + resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} + engines: {node: '>=6.0.0'} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-simple-access': 7.12.1 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@jridgewell/set-array': 1.1.1 + '@jridgewell/sourcemap-codec': 1.4.13 + '@jridgewell/trace-mapping': 0.3.15 dev: true - /@babel/plugin-transform-modules-commonjs/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-dY789wq6l0uLY8py9c1B48V8mVL5gZh/+PQ5ZPrylPYsnAvnEMjqsUXkuoDVPeVK+0VyGar+D08107LzDQ6pag==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-simple-access': 7.12.1 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + /@jridgewell/resolve-uri@3.0.7: + resolution: {integrity: sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==} + engines: {node: '>=6.0.0'} dev: true - /@babel/plugin-transform-modules-commonjs/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-simple-access': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + /@jridgewell/set-array@1.1.1: + resolution: {integrity: sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==} + engines: {node: '>=6.0.0'} dev: true - /@babel/plugin-transform-modules-commonjs/7.18.6_@babel+core@7.9.0: - resolution: {integrity: 
sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-simple-access': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + /@jridgewell/sourcemap-codec@1.4.13: + resolution: {integrity: sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==} dev: true - /@babel/plugin-transform-modules-systemjs/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-Hn7cVvOavVh8yvW6fLwveFqSnd7rbQN3zJvoPNyNaQSvgfKmDBO9U1YL9+PCXGRlZD9tNdWTy5ACKqMuzyn32Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jridgewell/trace-mapping@0.3.15: + resolution: {integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==} dependencies: - '@babel/core': 7.12.9 - '@babel/helper-hoist-variables': 7.16.7 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-validator-identifier': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@jridgewell/resolve-uri': 3.0.7 + '@jridgewell/sourcemap-codec': 1.4.13 dev: true - /@babel/plugin-transform-modules-systemjs/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-Hn7cVvOavVh8yvW6fLwveFqSnd7rbQN3zJvoPNyNaQSvgfKmDBO9U1YL9+PCXGRlZD9tNdWTy5ACKqMuzyn32Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + /@jridgewell/trace-mapping@0.3.9: + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-hoist-variables': 7.16.7 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-validator-identifier': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@jridgewell/resolve-uri': 3.0.7 + '@jridgewell/sourcemap-codec': 1.4.13 dev: true - /@babel/plugin-transform-modules-systemjs/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A==} - engines: {node: '>=6.9.0'} + /@mui/base@5.0.0-alpha.89(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-2g18hzt947qQ3gQQPOPEBfzQmaT2wafVhyJ7ZOZXeU6kKb88MdlHoPkK2lKXCHMBtRGnnsiF36j0rmhQXu0I5g==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 || 18 + react-dom: ^17.0.0 || ^18.0.0 || 18 + peerDependenciesMeta: + '@types/react': + optional: true dependencies: - '@babel/core': 7.19.1 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-validator-identifier': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@babel/runtime': 7.18.6 + '@emotion/is-prop-valid': 1.1.3 + '@mui/types': 7.1.4(@types/react@18.0.25) + '@mui/utils': 5.9.0(react@18.2.0) + '@popperjs/core': 2.11.5 + '@types/react': 18.0.25 + clsx: 1.2.1 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-is: 18.2.0 dev: true - /@babel/plugin-transform-modules-systemjs/7.19.0_@babel+core@7.9.0: - resolution: 
{integrity: sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A==} - engines: {node: '>=6.9.0'} + /@mui/material@5.9.0(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-KZN3QEeCtwSP1IRpDZ7KQghDX7tyxZojADRCn+UKnoq8HUGNMJm2XKdb7hy9/ybaSW4EXQSKXSGg1AjdfS7Cdg==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@emotion/react': ^11.5.0 + '@emotion/styled': ^11.3.0 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 || 18 + react-dom: ^17.0.0 || ^18.0.0 || 18 + peerDependenciesMeta: + '@emotion/react': + optional: true + '@emotion/styled': + optional: true + '@types/react': + optional: true dependencies: - '@babel/core': 7.9.0 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-validator-identifier': 7.18.6 - babel-plugin-dynamic-import-node: 2.3.3 - transitivePeerDependencies: - - supports-color + '@babel/runtime': 7.18.6 + '@emotion/react': 11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0) + '@emotion/styled': 11.9.3(@babel/core@7.9.0)(@emotion/react@11.9.3)(@types/react@18.0.25)(react@18.2.0) + '@mui/base': 5.0.0-alpha.89(@types/react@18.0.25)(react-dom@18.2.0)(react@18.2.0) + '@mui/system': 5.9.0(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(@types/react@18.0.25)(react@18.2.0) + '@mui/types': 7.1.4(@types/react@18.0.25) + '@mui/utils': 5.9.0(react@18.2.0) + '@types/react': 18.0.25 + '@types/react-transition-group': 4.4.5 + clsx: 1.2.1 + csstype: 3.1.0 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-is: 18.2.0 + react-transition-group: 4.4.5(react-dom@18.2.0)(react@18.2.0) dev: true - /@babel/plugin-transform-modules-umd/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-aEIubCS0KHKM0zUos5fIoQm+AZUMt1ZvMpqz0/H5qAQ7vWylr9+PLYurT+Ic7ID/bKLd4q8hDovaG3Zch2uz5Q==} + /@mui/private-theming@5.9.0(@types/react@18.0.25)(react@18.2.0): + resolution: {integrity: sha512-t0ZsWxE/LvX5RH5azjx1esBHbIfD9zjnbSAYkpE59BPpkOrqAYDGoJguL2EPd9LaUb6COmBozmAwNenvI6RJRQ==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 || 18 + peerDependenciesMeta: + '@types/react': + optional: true dependencies: - '@babel/core': 7.12.9 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - transitivePeerDependencies: - - supports-color + '@babel/runtime': 7.18.6 + '@mui/utils': 5.9.0(react@18.2.0) + '@types/react': 18.0.25 + prop-types: 15.8.1 + react: 18.2.0 dev: true - /@babel/plugin-transform-modules-umd/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-aEIubCS0KHKM0zUos5fIoQm+AZUMt1ZvMpqz0/H5qAQ7vWylr9+PLYurT+Ic7ID/bKLd4q8hDovaG3Zch2uz5Q==} + /@mui/styled-engine@5.8.7(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(react@18.2.0): + resolution: {integrity: sha512-tVqtowjbYmiRq+qcqXK731L9eWoL9H8xTRhuTgaDGKdch1zlt4I2UwInUe1w2N9N/u3/jHsFbLcl1Un3uOwpQg==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@emotion/react': ^11.4.1 + '@emotion/styled': ^11.3.0 + react: ^17.0.0 || ^18.0.0 || 18 + peerDependenciesMeta: + '@emotion/react': + optional: true + '@emotion/styled': + optional: true dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.12.1 - '@babel/helper-plugin-utils': 7.18.6 - transitivePeerDependencies: - - supports-color + 
'@babel/runtime': 7.18.6 + '@emotion/cache': 11.9.3 + '@emotion/react': 11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0) + '@emotion/styled': 11.9.3(@babel/core@7.9.0)(@emotion/react@11.9.3)(@types/react@18.0.25)(react@18.2.0) + csstype: 3.1.0 + prop-types: 15.8.1 + react: 18.2.0 dev: true - /@babel/plugin-transform-modules-umd/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==} - engines: {node: '>=6.9.0'} + /@mui/system@5.9.0(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(@types/react@18.0.25)(react@18.2.0): + resolution: {integrity: sha512-KLZDYMmT1usokEJH+raGTh1SbdOx4BVrT+wg8nRpKGNii2sfc3ntuJSKuv3Fu9oeC9xVFTnNBHXKrpJuxeDcqg==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@emotion/react': ^11.5.0 + '@emotion/styled': ^11.3.0 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 || 18 + peerDependenciesMeta: + '@emotion/react': + optional: true + '@emotion/styled': + optional: true + '@types/react': + optional: true dependencies: - '@babel/core': 7.19.1 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - transitivePeerDependencies: - - supports-color + '@babel/runtime': 7.18.6 + '@emotion/react': 11.9.3(@babel/core@7.9.0)(@types/react@18.0.25)(react@18.2.0) + '@emotion/styled': 11.9.3(@babel/core@7.9.0)(@emotion/react@11.9.3)(@types/react@18.0.25)(react@18.2.0) + '@mui/private-theming': 5.9.0(@types/react@18.0.25)(react@18.2.0) + '@mui/styled-engine': 5.8.7(@emotion/react@11.9.3)(@emotion/styled@11.9.3)(react@18.2.0) + '@mui/types': 7.1.4(@types/react@18.0.25) + '@mui/utils': 5.9.0(react@18.2.0) + '@types/react': 18.0.25 + clsx: 1.2.1 + csstype: 3.1.0 + prop-types: 15.8.1 + react: 18.2.0 dev: true - /@babel/plugin-transform-modules-umd/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==} - engines: {node: '>=6.9.0'} + /@mui/types@7.1.4(@types/react@18.0.25): + resolution: {integrity: sha512-uveM3byMbthO+6tXZ1n2zm0W3uJCQYtwt/v5zV5I77v2v18u0ITkb8xwhsDD2i3V2Kye7SaNR6FFJ6lMuY/WqQ==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + '@types/react': '*' + peerDependenciesMeta: + '@types/react': + optional: true dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-transforms': 7.19.0 - '@babel/helper-plugin-utils': 7.19.0 - transitivePeerDependencies: - - supports-color + '@types/react': 18.0.25 dev: true - /@babel/plugin-transform-named-capturing-groups-regex/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-tB43uQ62RHcoDp9v2Nsf+dSM8sbNodbEicbQNA53zHz8pWUhsgHSJCGpt7daXxRydjb0KnfmB+ChXOv3oADp1Q==} + /@mui/utils@5.9.0(react@18.2.0): + resolution: {integrity: sha512-GAaiWP6zBC3RE1NHP9y1c1iKZh5s/nyKKqWxfTrw5lNQY5tWTh9/47F682FuiE5WT1o3h4w/LEkSSIZpMEDzrA==} + engines: {node: '>=12.0.0'} peerDependencies: - '@babel/core': ^7.0.0 || 7 + react: ^17.0.0 || ^18.0.0 || 18 dependencies: - '@babel/core': 7.12.9 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.12.9 + '@babel/runtime': 7.18.6 + '@types/prop-types': 15.7.5 + '@types/react-is': 17.0.3 + prop-types: 15.8.1 + react: 18.2.0 + react-is: 18.2.0 dev: true - /@babel/plugin-transform-named-capturing-groups-regex/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-tB43uQ62RHcoDp9v2Nsf+dSM8sbNodbEicbQNA53zHz8pWUhsgHSJCGpt7daXxRydjb0KnfmB+ChXOv3oADp1Q==} - peerDependencies: - '@babel/core': ^7.0.0 || 7 + 
/@nodelib/fs.scandir@2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.9.0 + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 dev: true - /@babel/plugin-transform-named-capturing-groups-regex/7.19.1_@babel+core@7.19.1: - resolution: {integrity: sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + /@nodelib/fs.stat@2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} dev: true - /@babel/plugin-transform-named-capturing-groups-regex/7.19.1_@babel+core@7.9.0: - resolution: {integrity: sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 || 7 + /@nodelib/fs.walk@1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.13.0 dev: true - /@babel/plugin-transform-new-target/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-+eW/VLcUL5L9IvJH7rT1sT0CzkdUTvPrXC2PXTn/7z7tXLBuKvezYbGdxD5WMRoyvyaujOq2fWoKl869heKjhw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + /@popperjs/core@2.11.5: + resolution: {integrity: sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw==} dev: true - /@babel/plugin-transform-new-target/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-+eW/VLcUL5L9IvJH7rT1sT0CzkdUTvPrXC2PXTn/7z7tXLBuKvezYbGdxD5WMRoyvyaujOq2fWoKl869heKjhw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-new-target/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-new-target/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-object-super/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-AvypiGJH9hsquNUn+RXVcBdeE3KHPZexWRdimhuV59cSoOt5kFBmqlByorAeUlGG2CJWd0U+4ZtNKga/TB0cAw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-replace-supers': 7.18.2 - 
transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-object-super/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-AvypiGJH9hsquNUn+RXVcBdeE3KHPZexWRdimhuV59cSoOt5kFBmqlByorAeUlGG2CJWd0U+4ZtNKga/TB0cAw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-replace-supers': 7.18.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-object-super/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-object-super/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-replace-supers': 7.19.1 - transitivePeerDependencies: - - supports-color + /@resvg/resvg-js-android-arm-eabi@2.1.0: + resolution: {integrity: sha512-JtvWWtC6bYRhyth1qgUgcPQSP+jkwkmUzok/5b/IqKFb6cattMBFFdHnwM8AS+sgzXJKa8LhW48f3FmFQhfdrA==} + engines: {node: '>= 10'} + cpu: [arm] + os: [android] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-parameters/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-xq9C5EQhdPK23ZeCdMxl8bbRnAgHFrw5EOC3KJUsSylZqdkCaFEXxGSBuTSObOpiiHHNyb82es8M1QYgfQGfNg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-android-arm64@2.1.0: + resolution: {integrity: sha512-QXFEoTpoZJZjkFh4+aSD3l+Ivrij3nzgrr4FTayey0hsQypJXmbzB6nuqB1qZwMrXPYqYZ33BoRiwCFoJUw2Ww==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [android] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-parameters/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-xq9C5EQhdPK23ZeCdMxl8bbRnAgHFrw5EOC3KJUsSylZqdkCaFEXxGSBuTSObOpiiHHNyb82es8M1QYgfQGfNg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-darwin-arm64@2.1.0: + resolution: {integrity: sha512-OrYqlmn2g4Pu/dWr+M5t5W8GDKIX3zk0JxDySU1oNWwhqlmZXBuCrx3TP9dVrTpTYx86E5RQcTZWe64wz8dlIQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-parameters/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-xq9C5EQhdPK23ZeCdMxl8bbRnAgHFrw5EOC3KJUsSylZqdkCaFEXxGSBuTSObOpiiHHNyb82es8M1QYgfQGfNg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-darwin-x64@2.1.0: + resolution: {integrity: sha512-95F9BoBS1th79n6Zy1tRMKhPlJuhznnQwAPxRhtw0v4DteRKMzaPFfVH6B9BBaoDCa5VMIxH/wYNKtOxCpYPuw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-parameters/7.18.8_@babel+core@7.19.1: - resolution: {integrity: 
sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js-linux-arm-gnueabihf@2.1.0: + resolution: {integrity: sha512-8F0ugeAaYGNNZhSCYt+X4YgyKyKcFiH0tqfJmN69+Gqqmu/lmZcn78JVLyTGD/OGHbYfCCYJbxwV+txIOdVNkQ==} + engines: {node: '>= 10'} + cpu: [arm] + os: [linux] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-parameters/7.18.8_@babel+core@7.9.0: - resolution: {integrity: sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js-linux-arm64-gnu@2.1.0: + resolution: {integrity: sha512-RveUS3sqvUp5eoBzz1QlPv7yBUNOjHtcWtbFo55gQrzBGT4XtnCaQzuXkN0q0j2o2ufxlmXmFI3g3e/0EWjNMg==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-property-literals/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-6MTCR/mZ1MQS+AwZLplX4cEySjCpnIF26ToWo942nqn8hXSm7McaHQNeGx/pt7suI1TWOWMfa/NgBhiqSnX0cQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-linux-arm64-musl@2.1.0: + resolution: {integrity: sha512-DzuRbZj5oVXYFAlo2PVbiaTSb14z/FDUlvgfzVFHiKEw3w6gT/soveLTIAvfeIlRYYkwYNHCiEPxFztyr7x/rw==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-property-literals/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-6MTCR/mZ1MQS+AwZLplX4cEySjCpnIF26ToWo942nqn8hXSm7McaHQNeGx/pt7suI1TWOWMfa/NgBhiqSnX0cQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-linux-x64-gnu@2.1.0: + resolution: {integrity: sha512-pa4MtKtAEXBj7tl3JXPMQLjgP+KghUYYoXMIX8tlf/xbfJJsOxHpWcwQe/bWPFO4K9hgt/yePkb3G4ydD0uT+g==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-property-literals/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js-linux-x64-musl@2.1.0: + resolution: {integrity: sha512-mkwGe4I9CmQ1GPSnFa22PHwKbE+TZnRk/ViCvO89UOwypW0I+X+KlQVzVbZn9ypvcrbvzotOvl3OkVRq5MgsBA==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-property-literals/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js-win32-arm64-msvc@2.1.0: + resolution: {integrity: sha512-DVloJcQsgd3rMAPemy5KjAA6R+RkRz2/xb7zP9px7lr+Gao+xVbNzRQrY7xwCZFM7O7hu9uHvLvkKCttPoL1aA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + requiresBuild: true dev: true 
+ optional: true - /@babel/plugin-transform-react-constant-elements/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-KOHd0tIRLoER+J+8f9DblZDa1fLGPwaaN1DI1TVHuQFOpjHV22C3CUB3obeC4fexHY9nx+fH0hQNvLFFfA1mxA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js-win32-ia32-msvc@2.1.0: + resolution: {integrity: sha512-RtRQ8loZA4zib8kzD1QjoScb6VAaZTbajB3WU/O6raP2/f2zIk9v4FU2E/hiC0vi5DGhJL5GTmSrsWShbLPjZw==} + engines: {node: '>= 10'} + cpu: [ia32] + os: [win32] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-react-display-name/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-cAzB+UzBIrekfYxyLlFqf/OagTvHLcVBb5vpouzkYkBclRPraiygVnafvAoipErZLI8ANv8Ecn6E/m5qPXD26w==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 + /@resvg/resvg-js-win32-x64-msvc@2.1.0: + resolution: {integrity: sha512-NVYuQn9Aj/ZmRufKON7a+1U1XS+jGKMcWO4J8ZH2xhSP3aNVgO7Nfl45DMgqxdCcn0ZzYhzP+mSQFbA/ENE/mg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + requiresBuild: true dev: true + optional: true - /@babel/plugin-transform-react-display-name/7.18.6_@babel+core@7.12.9: - resolution: {integrity: sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.19.0 + /@resvg/resvg-js@2.1.0: + resolution: {integrity: sha512-nR6uVR5ugXLT2jh7U141nhawzgUs4JBl8BpM4XH7/ughSsOA/+WRxVhMUfdtEsz7REpTMKe2Sat+1/eWAuQ04w==} + engines: {node: '>= 10'} + optionalDependencies: + '@resvg/resvg-js-android-arm-eabi': 2.1.0 + '@resvg/resvg-js-android-arm64': 2.1.0 + '@resvg/resvg-js-darwin-arm64': 2.1.0 + '@resvg/resvg-js-darwin-x64': 2.1.0 + '@resvg/resvg-js-linux-arm-gnueabihf': 2.1.0 + '@resvg/resvg-js-linux-arm64-gnu': 2.1.0 + '@resvg/resvg-js-linux-arm64-musl': 2.1.0 + '@resvg/resvg-js-linux-x64-gnu': 2.1.0 + '@resvg/resvg-js-linux-x64-musl': 2.1.0 + '@resvg/resvg-js-win32-arm64-msvc': 2.1.0 + '@resvg/resvg-js-win32-ia32-msvc': 2.1.0 + '@resvg/resvg-js-win32-x64-msvc': 2.1.0 dev: true - /@babel/plugin-transform-react-display-name/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==} - engines: {node: '>=6.9.0'} + /@rollup/plugin-alias@3.1.1(rollup@1.32.1): + resolution: {integrity: sha512-hNcQY4bpBUIvxekd26DBPgF7BT4mKVNDF5tBG4Zi+3IgwLxGYRY0itHs9D0oLVwXM5pvJDWJlBQro+au8WaUWw==} + engines: {node: '>=8.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + rollup: ^1.20.0||^2.0.0 dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 + rollup: 1.32.1 + slash: 3.0.0 dev: true - /@babel/plugin-transform-react-display-name/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==} - engines: {node: '>=6.9.0'} + /@rollup/plugin-commonjs@11.1.0(rollup@1.32.1): + resolution: {integrity: sha512-Ycr12N3ZPN96Fw2STurD21jMqzKwL9QuFhms3SD7KKRK7oaXUsBU9Zt0jL/rOPHiPYisI21/rXGO3jr9BnLHUA==} + engines: {node: '>= 8.0.0'} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + rollup: ^1.20.0||^2.0.0 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 + '@rollup/pluginutils': 3.1.0(rollup@1.32.1) + commondir: 1.0.1 + 
estree-walker: 1.0.1 + glob: 7.2.3 + is-reference: 1.2.1 + magic-string: 0.25.7 + resolve: 1.19.0 + rollup: 1.32.1 dev: true - /@babel/plugin-transform-react-display-name/7.8.3_@babel+core@7.9.0: - resolution: {integrity: sha512-3Jy/PCw8Fe6uBKtEgz3M82ljt+lTg+xJaM4og+eyu83qLT87ZUSckn0wy7r31jflURWLO83TW6Ylf7lyXj3m5A==} + /@rollup/plugin-json@4.1.0(rollup@1.32.1): + resolution: {integrity: sha512-yfLbTdNS6amI/2OpmbiBoW12vngr5NW2jCJVZSBEz+H5KfUJZ2M7sDjk0U6GOOdCWFVScShte29o9NezJ53TPw==} peerDependencies: - '@babel/core': ^7.0.0-0 || 7 + rollup: ^1.20.0 || ^2.0.0 dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.10.4 + '@rollup/pluginutils': 3.1.0(rollup@1.32.1) + rollup: 1.32.1 dev: true - /@babel/plugin-transform-react-jsx-development/7.12.7_@babel+core@7.9.0: - resolution: {integrity: sha512-Rs3ETtMtR3VLXFeYRChle5SsP/P9Jp/6dsewBQfokDSzKJThlsuFcnzLTDRALiUmTC48ej19YD9uN1mupEeEDg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-builder-react-jsx-experimental': 7.12.4 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.9.0 - dev: true - - /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.12.9: - resolution: {integrity: sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.12.9 - dev: true - - /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.1 - dev: true - - /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.9.0 - dev: true - - /@babel/plugin-transform-react-jsx-self/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-FbpL0ieNWiiBB5tCldX17EtXgmzeEZjFrix72rQYeq9X6nUK38HCaxexzVQrZWXanxKJPKVVIU37gFjEQYkPkA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-react-jsx-self/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-FbpL0ieNWiiBB5tCldX17EtXgmzeEZjFrix72rQYeq9X6nUK38HCaxexzVQrZWXanxKJPKVVIU37gFjEQYkPkA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-react-jsx-source/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-keQ5kBfjJNRc6zZN1/nVHCd6LLIHq4aUKcVnvE/2l+ZZROSbqoiGFRtT5t3Is89XJxBQaP7NLZX2jgGHdZvvFQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-react-jsx-source/7.12.1_@babel+core@7.9.0: - resolution: {integrity: 
sha512-keQ5kBfjJNRc6zZN1/nVHCd6LLIHq4aUKcVnvE/2l+ZZROSbqoiGFRtT5t3Is89XJxBQaP7NLZX2jgGHdZvvFQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-react-jsx/7.12.7_@babel+core@7.12.9: - resolution: {integrity: sha512-YFlTi6MEsclFAPIDNZYiCRbneg1MFGao9pPG9uD5htwE0vDbPaMUMeYd6itWjw7K4kro4UbdQf3ljmFl9y48dQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-builder-react-jsx': 7.10.4 - '@babel/helper-builder-react-jsx-experimental': 7.12.4 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.12.9 - dev: true - - /@babel/plugin-transform-react-jsx/7.12.7_@babel+core@7.9.0: - resolution: {integrity: sha512-YFlTi6MEsclFAPIDNZYiCRbneg1MFGao9pPG9uD5htwE0vDbPaMUMeYd6itWjw7K4kro4UbdQf3ljmFl9y48dQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-builder-react-jsx': 7.10.4 - '@babel/helper-builder-react-jsx-experimental': 7.12.4 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.9.0 - dev: true - - /@babel/plugin-transform-react-jsx/7.19.0_@babel+core@7.12.9: - resolution: {integrity: sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.12.9 - '@babel/types': 7.19.0 - dev: true - - /@babel/plugin-transform-react-jsx/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.19.1 - '@babel/types': 7.19.0 - dev: true - - /@babel/plugin-transform-react-jsx/7.19.0_@babel+core@7.9.0: - resolution: {integrity: sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.9.0 - '@babel/types': 7.19.0 - dev: true - - /@babel/plugin-transform-react-pure-annotations/7.18.6_@babel+core@7.12.9: - resolution: {integrity: sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-react-pure-annotations/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - 
'@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-react-pure-annotations/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-annotate-as-pure': 7.18.6 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-regenerator/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-gYrHqs5itw6i4PflFX3OdBPMQdPbF4bj2REIUxlMRUFk0/ZOAIpDFuViuxPjUL7YC8UPnf+XG7/utJvqXdPKng==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - regenerator-transform: 0.14.5 - dev: true - - /@babel/plugin-transform-regenerator/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-gYrHqs5itw6i4PflFX3OdBPMQdPbF4bj2REIUxlMRUFk0/ZOAIpDFuViuxPjUL7YC8UPnf+XG7/utJvqXdPKng==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - regenerator-transform: 0.14.5 - dev: true - - /@babel/plugin-transform-regenerator/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - regenerator-transform: 0.15.0 - dev: true - - /@babel/plugin-transform-regenerator/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - regenerator-transform: 0.15.0 - dev: true - - /@babel/plugin-transform-reserved-words/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-pOnUfhyPKvZpVyBHhSBoX8vfA09b7r00Pmm1sH+29ae2hMTKVmSp4Ztsr8KBKjLjx17H0eJqaRC3bR2iThM54A==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-reserved-words/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-pOnUfhyPKvZpVyBHhSBoX8vfA09b7r00Pmm1sH+29ae2hMTKVmSp4Ztsr8KBKjLjx17H0eJqaRC3bR2iThM54A==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-reserved-words/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-reserved-words/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-runtime/7.9.0_@babel+core@7.9.0: - resolution: {integrity: sha512-pUu9VSf3kI1OqbWINQ7MaugnitRss1z533436waNXp+0N3ur3zfut37sXiQMxkuCF4VUjwZucen/quskCh7NHw==} - peerDependencies: - 
'@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-module-imports': 7.12.5 - '@babel/helper-plugin-utils': 7.10.4 - resolve: 1.19.0 - semver: 5.7.1 - dev: true - - /@babel/plugin-transform-shorthand-properties/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-GFZS3c/MhX1OusqB1MZ1ct2xRzX5ppQh2JU1h2Pnfk88HtFTM+TWQqJNfwkmxtPQtb/s1tk87oENfXJlx7rSDw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-shorthand-properties/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-GFZS3c/MhX1OusqB1MZ1ct2xRzX5ppQh2JU1h2Pnfk88HtFTM+TWQqJNfwkmxtPQtb/s1tk87oENfXJlx7rSDw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-shorthand-properties/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-GFZS3c/MhX1OusqB1MZ1ct2xRzX5ppQh2JU1h2Pnfk88HtFTM+TWQqJNfwkmxtPQtb/s1tk87oENfXJlx7rSDw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-shorthand-properties/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-shorthand-properties/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-spread/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-vuLp8CP0BE18zVYjsEBZ5xoCecMK6LBMMxYzJnh01rxQRvhNhH1csMMmBfNo5tGpGO+NhdSNW2mzIvBu3K1fng==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 - dev: true - - /@babel/plugin-transform-spread/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-vuLp8CP0BE18zVYjsEBZ5xoCecMK6LBMMxYzJnh01rxQRvhNhH1csMMmBfNo5tGpGO+NhdSNW2mzIvBu3K1fng==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 - dev: true - - /@babel/plugin-transform-spread/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-vuLp8CP0BE18zVYjsEBZ5xoCecMK6LBMMxYzJnh01rxQRvhNhH1csMMmBfNo5tGpGO+NhdSNW2mzIvBu3K1fng==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.12.1 - dev: true - - /@babel/plugin-transform-spread/7.19.0_@babel+core@7.19.1: - resolution: {integrity: sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.18.9 - dev: true - - 
/@babel/plugin-transform-spread/7.19.0_@babel+core@7.9.0: - resolution: {integrity: sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-skip-transparent-expression-wrappers': 7.18.9 - dev: true - - /@babel/plugin-transform-sticky-regex/7.12.7_@babel+core@7.12.9: - resolution: {integrity: sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-sticky-regex/7.12.7_@babel+core@7.9.0: - resolution: {integrity: sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-sticky-regex/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-sticky-regex/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-template-literals/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-b4Zx3KHi+taXB1dVRBhVJtEPi9h1THCeKmae2qP0YdUHIFhVjtpqqNfxeVAa1xeHVhAy4SbHxEwx5cltAu5apw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-template-literals/7.12.1_@babel+core@7.19.1: - resolution: {integrity: sha512-b4Zx3KHi+taXB1dVRBhVJtEPi9h1THCeKmae2qP0YdUHIFhVjtpqqNfxeVAa1xeHVhAy4SbHxEwx5cltAu5apw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-template-literals/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-b4Zx3KHi+taXB1dVRBhVJtEPi9h1THCeKmae2qP0YdUHIFhVjtpqqNfxeVAa1xeHVhAy4SbHxEwx5cltAu5apw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-template-literals/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-template-literals/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - 
- /@babel/plugin-transform-typeof-symbol/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-EPGgpGy+O5Kg5pJFNDKuxt9RdmTgj5sgrus2XVeMp/ZIbOESadgILUbm50SNpghOh3/6yrbsH+NB5+WJTmsA7Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-typeof-symbol/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-EPGgpGy+O5Kg5pJFNDKuxt9RdmTgj5sgrus2XVeMp/ZIbOESadgILUbm50SNpghOh3/6yrbsH+NB5+WJTmsA7Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-typeof-symbol/7.18.9_@babel+core@7.19.1: - resolution: {integrity: sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-typeof-symbol/7.18.9_@babel+core@7.9.0: - resolution: {integrity: sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-typescript/7.18.4_@babel+core@7.9.0: - resolution: {integrity: sha512-l4vHuSLUajptpHNEOUDEGsnpl9pfRLsN1XUoDQDD/YBuXTM+v37SHGS+c6n4jdcZy96QtuUuSvZYMLSSsjH8Mw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-class-features-plugin': 7.18.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/plugin-syntax-typescript': 7.17.12_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-typescript/7.19.1_@babel+core@7.19.1: - resolution: {integrity: sha512-+ILcOU+6mWLlvCwnL920m2Ow3wWx3Wo8n2t5aROQmV55GZt+hOiLvBaa3DNzRjSEHa1aauRs4/YLmkCfFkhhRQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-class-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/plugin-syntax-typescript': 7.18.6_@babel+core@7.19.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/plugin-transform-unicode-escapes/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-I8gNHJLIc7GdApm7wkVnStWssPNbSRMPtgHdmH3sRM1zopz09UWPS4x5V4n1yz/MIWTVnJ9sp6IkuXdWM4w+2Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-unicode-escapes/7.18.10_@babel+core@7.19.1: - resolution: {integrity: sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-unicode-escapes/7.18.10_@babel+core@7.9.0: - resolution: {integrity: sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - 
- /@babel/plugin-transform-unicode-regex/7.12.1_@babel+core@7.12.9: - resolution: {integrity: sha512-SqH4ClNngh/zGwHZOOQMTD+e8FGWexILV+ePMyiDJttAWRh5dhDL8rcl5lSgU3Huiq6Zn6pWTMvdPAb21Dwdyg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.12.9 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-unicode-regex/7.12.1_@babel+core@7.9.0: - resolution: {integrity: sha512-SqH4ClNngh/zGwHZOOQMTD+e8FGWexILV+ePMyiDJttAWRh5dhDL8rcl5lSgU3Huiq6Zn6pWTMvdPAb21Dwdyg==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.12.7_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.18.6 - dev: true - - /@babel/plugin-transform-unicode-regex/7.18.6_@babel+core@7.19.1: - resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/plugin-transform-unicode-regex/7.18.6_@babel+core@7.9.0: - resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-create-regexp-features-plugin': 7.19.0_@babel+core@7.9.0 - '@babel/helper-plugin-utils': 7.19.0 - dev: true - - /@babel/preset-env/7.12.7_@babel+core@7.12.9: - resolution: {integrity: sha512-OnNdfAr1FUQg7ksb7bmbKoby4qFOHw6DKWWUNB9KqnnCldxhxJlP+21dpyaWFmf2h0rTbOkXJtAGevY3XW1eew==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/compat-data': 7.12.7 - '@babel/core': 7.12.9 - '@babel/helper-compilation-targets': 7.12.5_@babel+core@7.12.9 - '@babel/helper-module-imports': 7.18.6 - '@babel/helper-plugin-utils': 7.18.6 - '@babel/helper-validator-option': 7.12.1 - '@babel/plugin-proposal-async-generator-functions': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-class-properties': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-dynamic-import': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-export-namespace-from': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-json-strings': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-logical-assignment-operators': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-numeric-separator': 7.12.7_@babel+core@7.12.9 - '@babel/plugin-proposal-object-rest-spread': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-optional-catch-binding': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-optional-chaining': 7.12.7_@babel+core@7.12.9 - '@babel/plugin-proposal-private-methods': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-unicode-property-regex': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-syntax-async-generators': 7.8.4_@babel+core@7.12.9 - '@babel/plugin-syntax-class-properties': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-export-namespace-from': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-json-strings': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-logical-assignment-operators': 
7.10.4_@babel+core@7.12.9 - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-numeric-separator': 7.10.4_@babel+core@7.12.9 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-optional-catch-binding': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-optional-chaining': 7.8.3_@babel+core@7.12.9 - '@babel/plugin-syntax-top-level-await': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-arrow-functions': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-async-to-generator': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-block-scoped-functions': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-block-scoping': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-classes': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-computed-properties': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-destructuring': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-dotall-regex': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-duplicate-keys': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-exponentiation-operator': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-for-of': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-function-name': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-literals': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-member-expression-literals': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-modules-amd': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-modules-commonjs': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-modules-systemjs': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-modules-umd': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-named-capturing-groups-regex': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-new-target': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-object-super': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-parameters': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-property-literals': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-regenerator': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-reserved-words': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-shorthand-properties': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-spread': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-sticky-regex': 7.12.7_@babel+core@7.12.9 - '@babel/plugin-transform-template-literals': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-typeof-symbol': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-unicode-escapes': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-unicode-regex': 7.12.1_@babel+core@7.12.9 - '@babel/preset-modules': 0.1.4_@babel+core@7.12.9 - '@babel/types': 7.18.7 - core-js-compat: 3.7.0 - semver: 5.7.1 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/preset-env/7.19.1_@babel+core@7.19.1: - resolution: {integrity: sha512-c8B2c6D16Lp+Nt6HcD+nHl0VbPKVnNPTpszahuxJJnurfMtKeZ80A+qUv48Y7wqvS+dTFuLuaM9oYxyNHbCLWA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/compat-data': 7.19.1 - '@babel/core': 7.19.1 - '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1 - '@babel/helper-plugin-utils': 7.19.0 - '@babel/helper-validator-option': 7.18.6 - '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6_@babel+core@7.19.1 - '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 
[pnpm lockfile deletion hunk elided: this portion of the diff removes several hundred stale lockfile entries, each consisting of a package/version key, a `resolution: {integrity: sha512-…}` hash, optional `engines`, `peerDependencies`, and `peerDependenciesMeta` blocks, a resolved dependency list, and a `dev: true` flag. The removed entries cover the @babel/* toolchain (preset-env 7.9.0/7.19.1, preset-flow, preset-modules, preset-react, preset-typescript, register, runtime, template, traverse, types), @base2/pretty-print-object, @bcoe/v8-coverage, @cnakazawa/watch, @colors/colors, @cspotcode/source-map-support, @dabh/diagnostics, @design-systems/utils, @devtools-ds/*, @discoveryjs/json-ext, @emotion/*, @floating-ui/*, @gar/promisify, @istanbuljs/*, @jest/* (the 24.9.0 core packages plus 26.x/27.x transform and types), @jridgewell/*, @mdx-js/*, @mrmlnc/readdir-enhanced, @mui/* 5.9.0, @nodelib/*, @npmcli/*, @pmmmwh/react-refresh-webpack-plugin, @popperjs/core, the @resvg/resvg-js 2.1.0 platform binaries, @rollup/plugin-* (alias, commonjs, json, node-resolve, pluginutils), and the @storybook/* 6.5.12 addon, builder, and channel packages.]
ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - dev: true - - /@storybook/client-api/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-+JiRSgiU829KPc25nG/k0+Ao2nUelHUe8Y/9cRoKWbCAGzi4xd0JLhHAOr9Oi2szWx/OI1L08lxVv1+WTveAeA==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/channel-postmessage': 6.5.12 - '@storybook/channels': 6.5.12 - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@types/qs': 6.9.7 - '@types/webpack-env': 1.18.0 - core-js: 3.23.1 - fast-deep-equal: 3.1.3 - global: 4.4.0 - lodash: 4.17.21 - memoizerific: 1.11.3 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - store2: 2.14.2 - synchronous-promise: 2.0.16 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - dev: true - - /@storybook/client-logger/6.5.12: - resolution: {integrity: sha512-IrkMr5KZcudX935/C2balFbxLHhkvQnJ78rbVThHDVckQ7l3oIXTh66IMzldeOabVFDZEMiW8AWuGEYof+JtLw==} - dependencies: - core-js: 3.23.1 - global: 4.4.0 - dev: true - - /@storybook/components/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-NAAGl5PDXaHdVLd6hA+ttmLwH3zAVGXeUmEubzKZ9bJzb+duhFKxDa9blM4YEkI+palumvgAMm0UgS7ou680Ig==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/client-logger': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/theming': 6.5.12_biqbaboplfbrettd7655fr4n2y - core-js: 3.23.1 - memoizerific: 1.11.3 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - util-deprecate: 1.0.2 - dev: true - - /@storybook/core-client/6.5.12_c3hoyc4loabfhtyuh36vjkyyai: - resolution: {integrity: sha512-jyAd0ud6zO+flpLv0lEHbbt1Bv9Ms225M6WTQLrfe7kN/7j1pVKZEoeVCLZwkJUtSKcNiWQxZbS15h31pcYwqg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - webpack: '*' - peerDependenciesMeta: - typescript: - optional: true - webpack: - optional: true - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/channel-postmessage': 6.5.12 - '@storybook/channel-websocket': 6.5.12 - '@storybook/client-api': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/preview-web': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/ui': 6.5.12_biqbaboplfbrettd7655fr4n2y - airbnb-js-shims: 2.2.1 - ansi-to-html: 0.6.15 - core-js: 3.23.1 - global: 4.4.0 - lodash: 4.17.21 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - ts-dedent: 2.2.0 - typescript: 4.7.4 - unfetch: 4.2.0 - util-deprecate: 1.0.2 - webpack: 4.46.0 - dev: true - - /@storybook/core-client/6.5.12_qyk6idcafspi7uz7vy6757spbm: - resolution: {integrity: sha512-jyAd0ud6zO+flpLv0lEHbbt1Bv9Ms225M6WTQLrfe7kN/7j1pVKZEoeVCLZwkJUtSKcNiWQxZbS15h31pcYwqg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - webpack: '*' - peerDependenciesMeta: - typescript: - optional: true - webpack: - optional: true - dependencies: - '@storybook/addons': 
6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/channel-postmessage': 6.5.12 - '@storybook/channel-websocket': 6.5.12 - '@storybook/client-api': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/preview-web': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/ui': 6.5.12_biqbaboplfbrettd7655fr4n2y - airbnb-js-shims: 2.2.1 - ansi-to-html: 0.6.15 - core-js: 3.23.1 - global: 4.4.0 - lodash: 4.17.21 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - ts-dedent: 2.2.0 - typescript: 4.7.4 - unfetch: 4.2.0 - util-deprecate: 1.0.2 - webpack: 5.74.0_@swc+core@1.2.203 - dev: true - - /@storybook/core-common/6.5.12_fur7q2nuabhivyjrvrzihm4lnq: - resolution: {integrity: sha512-gG20+eYdIhwQNu6Xs805FLrOCWtkoc8Rt8gJiRt8yXzZh9EZkU4xgCRoCxrrJ03ys/gTiCFbBOfRi749uM3z4w==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@babel/core': 7.19.1 - '@babel/plugin-proposal-class-properties': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-proposal-decorators': 7.19.1_@babel+core@7.19.1 - '@babel/plugin-proposal-export-default-from': 7.18.10_@babel+core@7.19.1 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-proposal-object-rest-spread': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-proposal-optional-chaining': 7.12.7_@babel+core@7.19.1 - '@babel/plugin-proposal-private-methods': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-proposal-private-property-in-object': 7.18.6_@babel+core@7.19.1 - '@babel/plugin-syntax-dynamic-import': 7.8.3_@babel+core@7.19.1 - '@babel/plugin-transform-arrow-functions': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-block-scoping': 7.18.9_@babel+core@7.19.1 - '@babel/plugin-transform-classes': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-destructuring': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-for-of': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-parameters': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-shorthand-properties': 7.12.1_@babel+core@7.19.1 - '@babel/plugin-transform-spread': 7.12.1_@babel+core@7.19.1 - '@babel/preset-env': 7.19.1_@babel+core@7.19.1 - '@babel/preset-react': 7.18.6_@babel+core@7.19.1 - '@babel/preset-typescript': 7.18.6_@babel+core@7.19.1 - '@babel/register': 7.18.9_@babel+core@7.19.1 - '@storybook/node-logger': 6.5.12 - '@storybook/semver': 7.3.2 - '@types/node': 16.11.59 - '@types/pretty-hrtime': 1.0.1 - babel-loader: 8.2.5_7k5t74zmen3ocxyd32avkcyrwe - babel-plugin-macros: 3.1.0 - babel-plugin-polyfill-corejs3: 0.1.7_@babel+core@7.19.1 - chalk: 4.1.2 - core-js: 3.23.1 - express: 4.18.1 - file-system-cache: 1.1.0 - find-up: 5.0.0 - fork-ts-checker-webpack-plugin: 6.5.2_b3umd4nqfgagbb4ysmdohlejqu - fs-extra: 9.1.0 - glob: 7.2.3 - handlebars: 4.7.7 - interpret: 2.2.0 - json5: 2.1.3 - lazy-universal-dotenv: 3.0.1 - picomatch: 2.3.1 - pkg-dir: 5.0.0 - pretty-hrtime: 1.0.3 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - resolve-from: 5.0.0 - slash: 3.0.0 - telejson: 6.0.8 - ts-dedent: 2.2.0 - typescript: 4.7.4 - util-deprecate: 1.0.2 - webpack: 4.46.0 - transitivePeerDependencies: - - eslint - - supports-color - - vue-template-compiler - - webpack-cli - - webpack-command - dev: true - - /@storybook/core-events/6.5.12: - 
resolution: {integrity: sha512-0AMyMM19R/lHsYRfWqM8zZTXthasTAK2ExkSRzYi2GkIaVMxRKtM33YRwxKIpJ6KmIKIs8Ru3QCXu1mfCmGzNg==} - dependencies: - core-js: 3.23.1 - dev: true - - /@storybook/core-server/6.5.12_fur7q2nuabhivyjrvrzihm4lnq: - resolution: {integrity: sha512-q1b/XKwoLUcCoCQ+8ndPD5THkEwXZYJ9ROv16i2VGUjjjAuSqpEYBq5GMGQUgxlWp1bkxtdGL2Jz+6pZfvldzA==} - peerDependencies: - '@storybook/builder-webpack5': '*' - '@storybook/manager-webpack5': '*' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - peerDependenciesMeta: - '@storybook/builder-webpack5': - optional: true - '@storybook/manager-webpack5': - optional: true - typescript: - optional: true - dependencies: - '@discoveryjs/json-ext': 0.5.7 - '@storybook/builder-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/core-client': 6.5.12_c3hoyc4loabfhtyuh36vjkyyai - '@storybook/core-common': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/csf-tools': 6.5.12 - '@storybook/manager-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/node-logger': 6.5.12 - '@storybook/semver': 7.3.2 - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/telemetry': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@types/node': 16.11.59 - '@types/node-fetch': 2.6.2 - '@types/pretty-hrtime': 1.0.1 - '@types/webpack': 4.41.32 - better-opn: 2.1.1 - boxen: 5.1.2 - chalk: 4.1.2 - cli-table3: 0.6.3 - commander: 6.2.1 - compression: 1.7.4 - core-js: 3.23.1 - cpy: 8.1.2 - detect-port: 1.3.0 - express: 4.18.1 - fs-extra: 9.1.0 - global: 4.4.0 - globby: 11.1.0 - ip: 2.0.0 - lodash: 4.17.21 - node-fetch: 2.6.7 - open: 8.4.0 - pretty-hrtime: 1.0.3 - prompts: 2.4.2 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - serve-favicon: 2.5.0 - slash: 3.0.0 - telejson: 6.0.8 - ts-dedent: 2.2.0 - typescript: 4.7.4 - util-deprecate: 1.0.2 - watchpack: 2.4.0 - webpack: 4.46.0 - ws: 8.8.1 - x-default-browser: 0.4.0 - transitivePeerDependencies: - - '@storybook/mdx2-csf' - - bluebird - - bufferutil - - encoding - - eslint - - supports-color - - utf-8-validate - - vue-template-compiler - - webpack-cli - - webpack-command - dev: true - - /@storybook/core/6.5.12_sujs6dc3berks4hwyskywndbou: - resolution: {integrity: sha512-+o3psAVWL+5LSwyJmEbvhgxKO1Et5uOX8ujNVt/f1fgwJBIf6BypxyPKu9YGQDRzcRssESQQZWNrZCCAZlFeuQ==} - peerDependencies: - '@storybook/builder-webpack5': '*' - '@storybook/manager-webpack5': '*' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - webpack: '*' - peerDependenciesMeta: - '@storybook/builder-webpack5': - optional: true - '@storybook/manager-webpack5': - optional: true - typescript: - optional: true - webpack: - optional: true - dependencies: - '@storybook/core-client': 6.5.12_qyk6idcafspi7uz7vy6757spbm - '@storybook/core-server': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - typescript: 4.7.4 - webpack: 5.74.0_@swc+core@1.2.203 - transitivePeerDependencies: - - '@storybook/mdx2-csf' - - bluebird - - bufferutil - - encoding - - eslint - - supports-color - - utf-8-validate - - vue-template-compiler - - webpack-cli - - webpack-command - dev: true - - /@storybook/csf-tools/6.5.12: - resolution: {integrity: sha512-BPhnB1xJtBVOzXuCURzQRdXcstE27ht4qoTgQkbwUTy4MEtUZ/f1AnHSYRdzrgukXdUFWseNIK4RkNdJpfOfNQ==} - peerDependencies: - '@storybook/mdx2-csf': ^0.0.3 - peerDependenciesMeta: - 
'@storybook/mdx2-csf': - optional: true - dependencies: - '@babel/core': 7.19.1 - '@babel/generator': 7.19.0 - '@babel/parser': 7.19.1 - '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.1 - '@babel/preset-env': 7.19.1_@babel+core@7.19.1 - '@babel/traverse': 7.19.1 - '@babel/types': 7.19.0 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/mdx1-csf': 0.0.1_@babel+core@7.19.1 - core-js: 3.23.1 - fs-extra: 9.1.0 - global: 4.4.0 - regenerator-runtime: 0.13.9 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/csf/0.0.2--canary.4566f4d.1: - resolution: {integrity: sha512-9OVvMVh3t9znYZwb0Svf/YQoxX2gVOeQTGe2bses2yj+a3+OJnCrUF3/hGv6Em7KujtOdL2LL+JnG49oMVGFgQ==} - dependencies: - lodash: 4.17.21 - dev: true - - /@storybook/docs-tools/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-8brf8W89KVk95flVqW0sYEqkL+FBwb5W9CnwI+Ggd6r2cqXe9jyg+0vDZFdYp6kYNQKrPr4fbXGrGVXQG18/QQ==} - dependencies: - '@babel/core': 7.19.1 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - core-js: 3.23.1 - doctrine: 3.0.0 - lodash: 4.17.21 - regenerator-runtime: 0.13.9 - transitivePeerDependencies: - - react - - react-dom - - supports-color - dev: true - - /@storybook/instrumenter/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-VMl+Yg6ab79A1VqlhC69sBFI6ySw4Lganqdf9JJf6Y7uNRcZwsjfGpGfYiL4WGH1q72tGiq/25XKGIJV0jgi1Q==} - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - core-js: 3.23.1 - global: 4.4.0 - transitivePeerDependencies: - - react - - react-dom - dev: true - - /@storybook/manager-webpack4/6.5.12_fur7q2nuabhivyjrvrzihm4lnq: - resolution: {integrity: sha512-LH3e6qfvq2znEdxe2kaWtmdDPTnvSkufzoC9iwOgNvo3YrTGrYNyUTDegvW293TOTVfUn7j6TBcsOxIgRnt28g==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@babel/core': 7.19.1 - '@babel/plugin-transform-template-literals': 7.12.1_@babel+core@7.19.1 - '@babel/preset-react': 7.18.6_@babel+core@7.19.1 - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/core-client': 6.5.12_c3hoyc4loabfhtyuh36vjkyyai - '@storybook/core-common': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/node-logger': 6.5.12 - '@storybook/theming': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/ui': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@types/node': 16.11.59 - '@types/webpack': 4.41.32 - babel-loader: 8.2.5_7k5t74zmen3ocxyd32avkcyrwe - case-sensitive-paths-webpack-plugin: 2.4.0 - chalk: 4.1.2 - core-js: 3.23.1 - css-loader: 3.6.0_webpack@4.46.0 - express: 4.18.1 - file-loader: 6.2.0_webpack@4.46.0 - find-up: 5.0.0 - fs-extra: 9.1.0 - html-webpack-plugin: 4.5.2_webpack@4.46.0 - node-fetch: 2.6.7 - pnp-webpack-plugin: 1.6.4_typescript@4.7.4 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - read-pkg-up: 7.0.1 - regenerator-runtime: 0.13.9 - resolve-from: 5.0.0 - style-loader: 1.3.0_webpack@4.46.0 - telejson: 6.0.8 - terser-webpack-plugin: 4.2.3_webpack@4.46.0 - ts-dedent: 2.2.0 - typescript: 4.7.4 - url-loader: 4.1.1_lit45vopotvaqup7lrvlnvtxwy - util-deprecate: 1.0.2 - webpack: 4.46.0 - webpack-dev-middleware: 3.7.3_webpack@4.46.0 - webpack-virtual-modules: 0.2.2 - transitivePeerDependencies: - - bluebird - - encoding - - eslint - - supports-color - - vue-template-compiler - - webpack-cli - - 
webpack-command - dev: true - - /@storybook/mdx1-csf/0.0.1_@babel+core@7.19.1: - resolution: {integrity: sha512-4biZIWWzoWlCarMZmTpqcJNgo/RBesYZwGFbQeXiGYsswuvfWARZnW9RE9aUEMZ4XPn7B1N3EKkWcdcWe/K2tg==} - dependencies: - '@babel/generator': 7.19.0 - '@babel/parser': 7.19.1 - '@babel/preset-env': 7.19.1_@babel+core@7.19.1 - '@babel/types': 7.19.0 - '@mdx-js/mdx': 1.6.22 - '@types/lodash': 4.14.185 - js-string-escape: 1.0.1 - loader-utils: 2.0.2 - lodash: 4.17.21 - prettier: 2.3.0 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - '@babel/core' - - supports-color - dev: true - - /@storybook/mdx1-csf/0.0.1_@babel+core@7.9.0: - resolution: {integrity: sha512-4biZIWWzoWlCarMZmTpqcJNgo/RBesYZwGFbQeXiGYsswuvfWARZnW9RE9aUEMZ4XPn7B1N3EKkWcdcWe/K2tg==} - dependencies: - '@babel/generator': 7.19.0 - '@babel/parser': 7.19.1 - '@babel/preset-env': 7.19.1_@babel+core@7.9.0 - '@babel/types': 7.19.0 - '@mdx-js/mdx': 1.6.22 - '@types/lodash': 4.14.185 - js-string-escape: 1.0.1 - loader-utils: 2.0.2 - lodash: 4.17.21 - prettier: 2.3.0 - ts-dedent: 2.2.0 - transitivePeerDependencies: - - '@babel/core' - - supports-color - dev: true - - /@storybook/node-logger/6.5.12: - resolution: {integrity: sha512-jdLtT3mX5GQKa+0LuX0q0sprKxtCGf6HdXlKZGD5FEuz4MgJUGaaiN0Hgi+U7Z4tVNOtSoIbYBYXHqfUgJrVZw==} - dependencies: - '@types/npmlog': 4.1.4 - chalk: 4.1.2 - core-js: 3.23.1 - npmlog: 5.0.1 - pretty-hrtime: 1.0.3 - dev: true - - /@storybook/postinstall/6.5.12: - resolution: {integrity: sha512-6K73f9c2UO+w4Wtyo2BxEpEsnhPvMgqHSaJ9Yt6Tc90LaDGUbcVgy6PNibsRyuJ/KQ543WeiRO5rSZfm2uJU9A==} - dependencies: - core-js: 3.23.1 - dev: true - - /@storybook/preview-web/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-Q5mduCJsY9zhmlsrhHvtOBA3Jt2n45bhfVkiUEqtj8fDit45/GW+eLoffv8GaVTGjV96/Y1JFwDZUwU6mEfgGQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/channel-postmessage': 6.5.12 - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - ansi-to-html: 0.6.15 - core-js: 3.23.1 - global: 4.4.0 - lodash: 4.17.21 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - synchronous-promise: 2.0.16 - ts-dedent: 2.2.0 - unfetch: 4.2.0 - util-deprecate: 1.0.2 - dev: true - - /@storybook/react-docgen-typescript-plugin/1.0.2-canary.6.9d540b91e815f8fc2f8829189deb00553559ff63.0_xnp4kzegbjokq62cajex2ovgkm: - resolution: {integrity: sha512-eVg3BxlOm2P+chijHBTByr90IZVUtgRW56qEOLX7xlww2NBuKrcavBlcmn+HH7GIUktquWkMPtvy6e0W0NgA5w==} - peerDependencies: - typescript: '>= 3.x || 4' - webpack: '>= 4' - peerDependenciesMeta: - webpack: - optional: true - dependencies: - debug: 4.3.3 - endent: 2.1.0 - find-cache-dir: 3.3.1 - flat-cache: 3.0.4 - micromatch: 4.0.4 - react-docgen-typescript: 2.2.2_typescript@4.7.4 - tslib: 2.4.0 - typescript: 4.7.4 - webpack: 5.74.0_@swc+core@1.2.203 - transitivePeerDependencies: - - supports-color - dev: true - - /@storybook/react/6.5.12_bwa4o22mlyfulxfxxeljxljceq: - resolution: {integrity: sha512-1tG8EdSfp+OZAKAWPT2UrexF4o007jEMwQFFXw1atIQrQOADzSnZ7lTYJ08o5TyJwksswtr18tH3oJJ9sG3KPw==} - engines: {node: '>=10.13.0'} - hasBin: true - peerDependencies: - '@babel/core': ^7.11.5 || 7 - '@storybook/builder-webpack4': '*' - '@storybook/builder-webpack5': '*' - '@storybook/manager-webpack4': '*' - 
'@storybook/manager-webpack5': '*' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - require-from-string: ^2.0.2 - typescript: '*' - peerDependenciesMeta: - '@babel/core': - optional: true - '@storybook/builder-webpack4': - optional: true - '@storybook/builder-webpack5': - optional: true - '@storybook/manager-webpack4': - optional: true - '@storybook/manager-webpack5': - optional: true - typescript: - optional: true - dependencies: - '@babel/core': 7.9.0 - '@babel/preset-flow': 7.12.1_@babel+core@7.9.0 - '@babel/preset-react': 7.18.6_@babel+core@7.9.0 - '@pmmmwh/react-refresh-webpack-plugin': 0.5.7_metx475lqcp4j5c75za4zf7xbi - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/builder-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/client-logger': 6.5.12 - '@storybook/core': 6.5.12_sujs6dc3berks4hwyskywndbou - '@storybook/core-common': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/csf': 0.0.2--canary.4566f4d.1 - '@storybook/docs-tools': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/manager-webpack4': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - '@storybook/node-logger': 6.5.12 - '@storybook/react-docgen-typescript-plugin': 1.0.2-canary.6.9d540b91e815f8fc2f8829189deb00553559ff63.0_xnp4kzegbjokq62cajex2ovgkm - '@storybook/semver': 7.3.2 - '@storybook/store': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@types/estree': 0.0.51 - '@types/node': 16.11.59 - '@types/webpack-env': 1.18.0 - acorn: 7.4.1 - acorn-jsx: 5.3.1_acorn@7.4.1 - acorn-walk: 7.2.0 - babel-plugin-add-react-displayname: 0.0.5 - babel-plugin-react-docgen: 4.2.1 - core-js: 3.23.1 - escodegen: 2.0.0 - fs-extra: 9.1.0 - global: 4.4.0 - html-tags: 3.1.0 - lodash: 4.17.21 - prop-types: 15.8.1 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - react-element-to-jsx-string: 14.3.4_biqbaboplfbrettd7655fr4n2y - react-refresh: 0.11.0 - read-pkg-up: 7.0.1 - regenerator-runtime: 0.13.9 - require-from-string: 2.0.2 - ts-dedent: 2.2.0 - typescript: 4.7.4 - util-deprecate: 1.0.2 - webpack: 5.74.0_@swc+core@1.2.203 - transitivePeerDependencies: - - '@storybook/mdx2-csf' - - '@swc/core' - - '@types/webpack' - - bluebird - - bufferutil - - encoding - - esbuild - - eslint - - sockjs-client - - supports-color - - type-fest - - uglify-js - - utf-8-validate - - vue-template-compiler - - webpack-cli - - webpack-command - - webpack-dev-server - - webpack-hot-middleware - - webpack-plugin-serve - dev: true - - /@storybook/router/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-xHubde9YnBbpkDY5+zGO4Pr6VPxP8H9J2v4OTF3H82uaxCIKR0PKG0utS9pFKIsEiP3aM62Hb9qB8nU+v1nj3w==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/client-logger': 6.5.12 - core-js: 3.23.1 - memoizerific: 1.11.3 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - dev: true - - /@storybook/semver/7.3.2: - resolution: {integrity: sha512-SWeszlsiPsMI0Ps0jVNtH64cI5c0UF3f7KgjVKJoNP30crQ6wUSddY2hsdeczZXEKVJGEn50Q60flcGsQGIcrg==} - engines: {node: '>=10'} - hasBin: true - dependencies: - core-js: 3.23.1 - find-up: 4.1.0 - dev: true - - /@storybook/source-loader/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-4iuILFsKNV70sEyjzIkOqgzgQx7CJ8kTEFz590vkmWXQNKz7YQzjgISIwL7GBw/myJgeb04bl5psVgY0cbG5vg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/addons': 
6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/client-logger': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - core-js: 3.23.1 - estraverse: 5.3.0 - global: 4.4.0 - loader-utils: 2.0.2 - lodash: 4.17.21 - prettier: 2.3.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - dev: true - - /@storybook/store/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-SMQOr0XvV0mhTuqj3XOwGGc4kTPVjh3xqrG1fqkj9RGs+2jRdmO6mnwzda5gPwUmWNTorZ7FxZ1iEoyfYNtuiQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/client-logger': 6.5.12 - '@storybook/core-events': 6.5.12 - '@storybook/csf': 0.0.2--canary.4566f4d.1 - core-js: 3.23.1 - fast-deep-equal: 3.1.3 - global: 4.4.0 - lodash: 4.17.21 - memoizerific: 1.11.3 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - slash: 3.0.0 - stable: 0.1.8 - synchronous-promise: 2.0.16 - ts-dedent: 2.2.0 - util-deprecate: 1.0.2 - dev: true - - /@storybook/telemetry/6.5.12_fur7q2nuabhivyjrvrzihm4lnq: - resolution: {integrity: sha512-mCHxx7NmQ3n7gx0nmblNlZE5ZgrjQm6B08mYeWg6Y7r4GZnqS6wZbvAwVhZZ3Gg/9fdqaBApHsdAXp0d5BrlxA==} - dependencies: - '@storybook/client-logger': 6.5.12 - '@storybook/core-common': 6.5.12_fur7q2nuabhivyjrvrzihm4lnq - chalk: 4.1.2 - core-js: 3.23.1 - detect-package-manager: 2.0.1 - fetch-retry: 5.0.3 - fs-extra: 9.1.0 - global: 4.4.0 - isomorphic-unfetch: 3.1.0 - nanoid: 3.3.1 - read-pkg-up: 7.0.1 - regenerator-runtime: 0.13.9 - transitivePeerDependencies: - - encoding - - eslint - - react - - react-dom - - supports-color - - typescript - - vue-template-compiler - - webpack-cli - - webpack-command - dev: true - - /@storybook/testing-library/0.0.13_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-vRMeIGer4EjJkTgI8sQyK9W431ekPWYCWL//OmSDJ64IT3h7FnW7Xg6p+eqM3oII98/O5pcya5049GxnjaPtxw==} - dependencies: - '@storybook/client-logger': 6.5.12 - '@storybook/instrumenter': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@testing-library/dom': 8.17.1 - '@testing-library/user-event': 13.5.0_wl4iynrlixafokvgqnhzlvigei - ts-dedent: 2.2.0 - transitivePeerDependencies: - - react - - react-dom - dev: true - - /@storybook/theming/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-uWOo84qMQ2R6c1C0faZ4Q0nY01uNaX7nXoJKieoiJ6ZqY9PSYxJl1kZLi3uPYnrxLZjzjVyXX8MgdxzbppYItA==} + /@rollup/plugin-node-resolve@6.1.0(rollup@1.32.1): + resolution: {integrity: sha512-Cv7PDIvxdE40SWilY5WgZpqfIUEaDxFxs89zCAHjqyRwlTSuql4M5hjIuc5QYJkOH0/vyiyNXKD72O+LhRipGA==} + engines: {node: '>= 8.0.0'} peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 + rollup: ^1.20.0 dependencies: - '@storybook/client-logger': 6.5.12 - core-js: 3.23.1 - memoizerific: 1.11.3 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 + '@rollup/pluginutils': 3.1.0(rollup@1.32.1) + '@types/resolve': 0.0.8 + builtin-modules: 3.1.0 + is-module: 1.0.0 + resolve: 1.19.0 + rollup: 1.32.1 dev: true - /@storybook/ui/6.5.12_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-P7+ARI5NvaEYkrbIciT/UMgy3kxMt4WCtHMXss2T01UMCIWh1Ws4BJaDNqtQSpKuwjjS4eqZL3aQWhlUpYAUEg==} + /@rollup/pluginutils@3.1.0(rollup@1.32.1): + resolution: {integrity: sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==} + engines: {node: '>= 8.0.0'} peerDependencies: - react: ^16.8.0 || 
^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/api': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/channels': 6.5.12 - '@storybook/client-logger': 6.5.12 - '@storybook/components': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/core-events': 6.5.12 - '@storybook/router': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/semver': 7.3.2 - '@storybook/theming': 6.5.12_biqbaboplfbrettd7655fr4n2y - core-js: 3.23.1 - memoizerific: 1.11.3 - qs: 6.11.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - regenerator-runtime: 0.13.9 - resolve-from: 5.0.0 + rollup: ^1.20.0||^2.0.0 + dependencies: + '@types/estree': 0.0.39 + estree-walker: 1.0.1 + picomatch: 2.3.1 + rollup: 1.32.1 dev: true - /@svgr/babel-plugin-add-jsx-attribute/5.4.0: + /@svgr/babel-plugin-add-jsx-attribute@5.4.0: resolution: {integrity: sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-remove-jsx-attribute/5.4.0: + /@svgr/babel-plugin-remove-jsx-attribute@5.4.0: resolution: {integrity: sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-remove-jsx-empty-expression/5.0.1: + /@svgr/babel-plugin-remove-jsx-empty-expression@5.0.1: resolution: {integrity: sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-replace-jsx-attribute-value/5.0.1: + /@svgr/babel-plugin-replace-jsx-attribute-value@5.0.1: resolution: {integrity: sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-svg-dynamic-title/5.4.0: + /@svgr/babel-plugin-svg-dynamic-title@5.4.0: resolution: {integrity: sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-svg-em-dimensions/5.4.0: + /@svgr/babel-plugin-svg-em-dimensions@5.4.0: resolution: {integrity: sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-transform-react-native-svg/5.4.0: + /@svgr/babel-plugin-transform-react-native-svg@5.4.0: resolution: {integrity: sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q==} engines: {node: '>=10'} dev: true - /@svgr/babel-plugin-transform-svg-component/5.5.0: + /@svgr/babel-plugin-transform-svg-component@5.5.0: resolution: {integrity: sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ==} engines: {node: '>=10'} dev: true - /@svgr/babel-preset/5.5.0: + /@svgr/babel-preset@5.5.0: resolution: {integrity: sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig==} engines: {node: '>=10'} dependencies: @@ -7070,7 +3410,7 @@ packages: '@svgr/babel-plugin-transform-svg-component': 5.5.0 dev: true - /@svgr/core/5.5.0: + /@svgr/core@5.5.0: resolution: {integrity: sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ==} engines: {node: '>=10'} dependencies: @@ -7081,14 +3421,14 @@ packages: - supports-color dev: true - /@svgr/hast-util-to-babel-ast/5.5.0: + /@svgr/hast-util-to-babel-ast@5.5.0: resolution: {integrity: 
sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ==} engines: {node: '>=10'} dependencies: '@babel/types': 7.19.0 dev: true - /@svgr/plugin-jsx/5.5.0: + /@svgr/plugin-jsx@5.5.0: resolution: {integrity: sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA==} engines: {node: '>=10'} dependencies: @@ -7100,7 +3440,7 @@ packages: - supports-color dev: true - /@svgr/plugin-svgo/5.5.0: + /@svgr/plugin-svgo@5.5.0: resolution: {integrity: sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ==} engines: {node: '>=10'} dependencies: @@ -7109,14 +3449,14 @@ packages: svgo: 1.3.2 dev: true - /@svgr/rollup/5.5.0: + /@svgr/rollup@5.5.0: resolution: {integrity: sha512-EiZmH2VTr+Xzyb6Ga8XtGa9MEbiU3WQnB5vHmqhwAUqibU3uwuwr7MN+QwIh/gtBk1ucMim8BCfcRTlLVREM8A==} engines: {node: '>=10'} dependencies: '@babel/core': 7.12.9 - '@babel/plugin-transform-react-constant-elements': 7.12.1_@babel+core@7.12.9 - '@babel/preset-env': 7.12.7_@babel+core@7.12.9 - '@babel/preset-react': 7.12.7_@babel+core@7.12.9 + '@babel/plugin-transform-react-constant-elements': 7.12.1(@babel/core@7.12.9) + '@babel/preset-env': 7.12.7(@babel/core@7.12.9) + '@babel/preset-react': 7.12.7(@babel/core@7.12.9) '@svgr/core': 5.5.0 '@svgr/plugin-jsx': 5.5.0 '@svgr/plugin-svgo': 5.5.0 @@ -7125,7 +3465,7 @@ packages: - supports-color dev: true - /@swc/cli/0.1.57_@swc+core@1.2.203: + /@swc/cli@0.1.57(@swc/core@1.2.203): resolution: {integrity: sha512-HxM8TqYHhAg+zp7+RdTU69bnkl4MWdt1ygyp6BDIPjTiaJVH6Dizn2ezbgDS8mnFZI1FyhKvxU/bbaUs8XhzQg==} engines: {node: '>= 12.13'} hasBin: true @@ -7143,7 +3483,7 @@ packages: source-map: 0.7.3 dev: true - /@swc/core-android-arm-eabi/1.2.203: + /@swc/core-android-arm-eabi@1.2.203: resolution: {integrity: sha512-maKYooa0+h66Y/t81lJblimJYWAON1onMwczxe+uQs1FkcnGa/ixhnmRDXIM0wpivMu93EIq3teKR43nr2K/Yg==} engines: {node: '>=10'} cpu: [arm] @@ -7152,7 +3492,7 @@ packages: dev: true optional: true - /@swc/core-android-arm64/1.2.203: + /@swc/core-android-arm64@1.2.203: resolution: {integrity: sha512-Zg57EuQa06cTNk2enort0/djXyEaYI0ectydZLPv4oj0ubjLGTZMDkuxPaYWSs9eHT1A6Ge8bwQCA7t/GLYGGA==} engines: {node: '>=10'} cpu: [arm64] @@ -7161,7 +3501,7 @@ packages: dev: true optional: true - /@swc/core-darwin-arm64/1.2.203: + /@swc/core-darwin-arm64@1.2.203: resolution: {integrity: sha512-BVwIAhkMz58V6I+xLsVMeOKSORe8iaYnCHUZbgI0NfAqvUYBUqmwzt+Fww44wv3Ibxb4my1zk7BG02d7Ku94+A==} engines: {node: '>=10'} cpu: [arm64] @@ -7170,7 +3510,7 @@ packages: dev: true optional: true - /@swc/core-darwin-x64/1.2.203: + /@swc/core-darwin-x64@1.2.203: resolution: {integrity: sha512-Z9gwtHwv3jEntjVANYmhzVvIVkgbkWAsLGP2UBez2D8CgScx+5Gnb0C5qT4nwX0Q+YD42rdHp7M551ZqVOo2FQ==} engines: {node: '>=10'} cpu: [x64] @@ -7179,7 +3519,7 @@ packages: dev: true optional: true - /@swc/core-freebsd-x64/1.2.203: + /@swc/core-freebsd-x64@1.2.203: resolution: {integrity: sha512-9aCC80BvU+IGqrmyY2r/3NRveOQg9BSCT+6N4esBKMLlTaDmuARSBON1TXjUF7HPUqzNB4ahri9HIx52wImXqQ==} engines: {node: '>=10'} cpu: [x64] @@ -7188,7 +3528,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm-gnueabihf/1.2.203: + /@swc/core-linux-arm-gnueabihf@1.2.203: resolution: {integrity: sha512-SoeXRqawk5aufUArS1s58prCAT24+p3lITh5Jv4dYk2PwGZpOHC7ADcVKq/55XayTxSafwXD9jObNTJzQ6moqw==} engines: {node: '>=10'} cpu: [arm] @@ -7197,7 +3537,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm64-gnu/1.2.203: + /@swc/core-linux-arm64-gnu@1.2.203: resolution: {integrity: 
sha512-bF8t8fd8MSx6qWgi1mYlyj1XYPWeGtGRVei1C1AcyXzcD34H0H37D6z2YBXfQrMhFED/s0oCPB2qvPh0j1jbjw==} engines: {node: '>=10'} cpu: [arm64] @@ -7206,7 +3546,7 @@ packages: dev: true optional: true - /@swc/core-linux-arm64-musl/1.2.203: + /@swc/core-linux-arm64-musl@1.2.203: resolution: {integrity: sha512-lFfPFgbEGhxsgL3PWRp4exzIlI3MuJWFFkiYqKMeDdHSUOdhtcQUCGw9D6Iat/1mCNxuTrDxQOBQBUhc9g6DoA==} engines: {node: '>=10'} cpu: [arm64] @@ -7215,7 +3555,7 @@ packages: dev: true optional: true - /@swc/core-linux-x64-gnu/1.2.203: + /@swc/core-linux-x64-gnu@1.2.203: resolution: {integrity: sha512-46ykzctv5W4PxeRE/brZyxWRSfdhJllCFUySRubhMLCuhs6VLtZzmWBefxPHTUDpBzmhX8kcaiKwwY2tqV0A9g==} engines: {node: '>=10'} cpu: [x64] @@ -7224,7 +3564,7 @@ packages: dev: true optional: true - /@swc/core-linux-x64-musl/1.2.203: + /@swc/core-linux-x64-musl@1.2.203: resolution: {integrity: sha512-LXPlxppioO9d1kpqu8qJiLvyDYJmXO7vcbmtOuM3nCPQPdVDii7sx4JtbunOMs/sY2ilFUfF7f6oNf2RkRPu1Q==} engines: {node: '>=10'} cpu: [x64] @@ -7233,7 +3573,7 @@ packages: dev: true optional: true - /@swc/core-win32-arm64-msvc/1.2.203: + /@swc/core-win32-arm64-msvc@1.2.203: resolution: {integrity: sha512-De9btHHbi6nTKSMaujAdpvM40XaEH1dTkKPK0H4JX+6WZYhOFYl0silvd6CIFewdnkKLdSVvTnfPubV+c0S8eA==} engines: {node: '>=10'} cpu: [arm64] @@ -7242,7 +3582,7 @@ packages: dev: true optional: true - /@swc/core-win32-ia32-msvc/1.2.203: + /@swc/core-win32-ia32-msvc@1.2.203: resolution: {integrity: sha512-YwGOD22qbDZ+ByiPnLqQdbGVE8k61R/mx3bZOpQnK0hkg/W5ysUBOYwr9aflLcNMRJuKxzVrCmSGBHMJN5AjfA==} engines: {node: '>=10'} cpu: [ia32] @@ -7251,7 +3591,7 @@ packages: dev: true optional: true - /@swc/core-win32-x64-msvc/1.2.203: + /@swc/core-win32-x64-msvc@1.2.203: resolution: {integrity: sha512-LAlXKK7rl+sLAgyXxuzCkaYQdoG797O/sRFC6eMyb4/eDtSctmVSCQl5xefuH+cofuZCTSk4OgzqmdJ2Ue/Jmw==} engines: {node: '>=10'} cpu: [x64] @@ -7260,7 +3600,7 @@ packages: dev: true optional: true - /@swc/core/1.2.203: + /@swc/core@1.2.203: resolution: {integrity: sha512-GZXeITqg3YuXFPaSMYk3g9h9j+pIc5sjt4jS5VvFHk8wXUfk/tvP5GwOPmEyXmVJkvEDJPXLip6lqfeKlvNceA==} engines: {node: '>=10'} hasBin: true @@ -7280,56 +3620,28 @@ packages: '@swc/core-win32-x64-msvc': 1.2.203 dev: true - /@testing-library/dom/8.17.1: - resolution: {integrity: sha512-KnH2MnJUzmFNPW6RIKfd+zf2Wue8mEKX0M3cpX6aKl5ZXrJM1/c/Pc8c2xDNYQCnJO48Sm5ITbMXgqTr3h4jxQ==} - engines: {node: '>=12'} - dependencies: - '@babel/code-frame': 7.16.7 - '@babel/runtime': 7.18.6 - '@types/aria-query': 4.2.2 - aria-query: 5.0.2 - chalk: 4.1.2 - dom-accessibility-api: 0.5.14 - lz-string: 1.4.4 - pretty-format: 27.5.1 - dev: true - - /@testing-library/user-event/13.5.0_wl4iynrlixafokvgqnhzlvigei: - resolution: {integrity: sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==} - engines: {node: '>=10', npm: '>=6'} - peerDependencies: - '@testing-library/dom': '>=7.21.4' - dependencies: - '@babel/runtime': 7.18.6 - '@testing-library/dom': 8.17.1 - dev: true - - /@trysound/sax/0.2.0: + /@trysound/sax@0.2.0: resolution: {integrity: sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==} engines: {node: '>=10.13.0'} dev: true - /@tsconfig/node10/1.0.9: + /@tsconfig/node10@1.0.9: resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} dev: true - /@tsconfig/node12/1.0.11: + /@tsconfig/node12@1.0.11: resolution: {integrity: 
sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} dev: true - /@tsconfig/node14/1.0.3: + /@tsconfig/node14@1.0.3: resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} dev: true - /@tsconfig/node16/1.0.3: + /@tsconfig/node16@1.0.3: resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} dev: true - /@types/aria-query/4.2.2: - resolution: {integrity: sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig==} - dev: true - - /@types/babel__core/7.1.12: + /@types/babel__core@7.1.12: resolution: {integrity: sha512-wMTHiiTiBAAPebqaPiPDLFA4LYPKr6Ph0Xq/6rq1Ur3v66HXyG+clfR9CNETkD7MQS8ZHvpQOtA53DLws5WAEQ==} dependencies: '@babel/parser': 7.12.7 @@ -7339,288 +3651,152 @@ packages: '@types/babel__traverse': 7.0.15 dev: true - /@types/babel__generator/7.6.2: + /@types/babel__generator@7.6.2: resolution: {integrity: sha512-MdSJnBjl+bdwkLskZ3NGFp9YcXGx5ggLpQQPqtgakVhsWK0hTtNYhjpZLlWQTviGTvF8at+Bvli3jV7faPdgeQ==} dependencies: '@babel/types': 7.18.7 dev: true - /@types/babel__template/7.4.0: + /@types/babel__template@7.4.0: resolution: {integrity: sha512-NTPErx4/FiPCGScH7foPyr+/1Dkzkni+rHiYHHoTjvwou7AQzJkNeD60A9CXRy+ZEN2B1bggmkTMCDb+Mv5k+A==} dependencies: '@babel/parser': 7.12.7 '@babel/types': 7.18.7 dev: true - /@types/babel__traverse/7.0.15: + /@types/babel__traverse@7.0.15: resolution: {integrity: sha512-Pzh9O3sTK8V6I1olsXpCfj2k/ygO2q1X0vhhnDrEQyYLHZesWz+zMZMVcwXLCYf0U36EtmyYaFGPfXlTtDHe3A==} dependencies: '@babel/types': 7.18.7 dev: true - /@types/eslint-scope/3.7.4: - resolution: {integrity: sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==} - dependencies: - '@types/eslint': 8.4.6 - '@types/estree': 0.0.51 - dev: true - - /@types/eslint/8.4.6: - resolution: {integrity: sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g==} - dependencies: - '@types/estree': 0.0.51 - '@types/json-schema': 7.0.9 - dev: true - - /@types/estree/0.0.39: + /@types/estree@0.0.39: resolution: {integrity: sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==} dev: true - /@types/estree/0.0.51: + /@types/estree@0.0.51: resolution: {integrity: sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==} dev: true - /@types/glob/7.2.0: - resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==} - dependencies: - '@types/minimatch': 5.1.2 - '@types/node': 18.11.9 - dev: true - - /@types/glob/8.0.0: - resolution: {integrity: sha512-l6NQsDDyQUVeoTynNpC9uRvCUint/gSUXQA2euwmTuWGvPY5LSDUu6tkCtJB2SvGQlJQzLaKqcGZP4//7EDveA==} - dependencies: - '@types/minimatch': 5.1.2 - '@types/node': 18.11.9 - dev: true - - /@types/graceful-fs/4.1.5: - resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} - dependencies: - '@types/node': 18.11.9 - dev: true - - /@types/hast/2.3.4: + /@types/hast@2.3.4: resolution: {integrity: sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==} dependencies: '@types/unist': 2.0.6 + dev: false - /@types/html-minifier-terser/5.1.2: - resolution: {integrity: sha512-h4lTMgMJctJybDp8CQrxTUiiYmedihHWkjnF/8Pxseu2S6Nlfcy8kwboQ8yejh456rP2yWoEVm1sS/FVsfM48w==} - dev: true - - 
/@types/is-function/1.0.1: - resolution: {integrity: sha512-A79HEEiwXTFtfY+Bcbo58M2GRYzCr9itHWzbzHVFNEYCcoU/MMGwYYf721gBrnhpj1s6RGVVha/IgNFnR0Iw/Q==} - dev: true - - /@types/istanbul-lib-coverage/2.0.3: + /@types/istanbul-lib-coverage@2.0.3: resolution: {integrity: sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw==} dev: true - /@types/istanbul-lib-report/3.0.0: + /@types/istanbul-lib-report@3.0.0: resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} dependencies: '@types/istanbul-lib-coverage': 2.0.3 dev: true - /@types/istanbul-reports/1.1.2: + /@types/istanbul-reports@1.1.2: resolution: {integrity: sha512-P/W9yOX/3oPZSpaYOCQzGqgCQRXn0FFO/V8bWrCQs+wLmvVVxk6CRBXALEvNs9OHIatlnlFokfhuDo2ug01ciw==} dependencies: '@types/istanbul-lib-coverage': 2.0.3 '@types/istanbul-lib-report': 3.0.0 dev: true - /@types/istanbul-reports/3.0.1: - resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} - dependencies: - '@types/istanbul-lib-report': 3.0.0 - dev: true - - /@types/json-schema/7.0.9: + /@types/json-schema@7.0.9: resolution: {integrity: sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==} dev: true - /@types/lodash/4.14.185: - resolution: {integrity: sha512-evMDG1bC4rgQg4ku9tKpuMh5iBNEwNa3tf9zRHdP1qlv+1WUg44xat4IxCE14gIpZRGUUWAx2VhItCZc25NfMA==} - dev: true - - /@types/mdast/3.0.10: - resolution: {integrity: sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==} - dependencies: - '@types/unist': 2.0.6 - dev: true - - /@types/minimatch/5.1.2: - resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} - dev: true - - /@types/minimist/1.2.2: + /@types/minimist@1.2.2: resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} dev: true - /@types/node-fetch/2.6.2: - resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==} - dependencies: - '@types/node': 18.11.9 - form-data: 3.0.1 - dev: true - - /@types/node/16.11.59: - resolution: {integrity: sha512-6u+36Dj3aDzhfBVUf/mfmc92OEdzQ2kx2jcXGdigfl70E/neV21ZHE6UCz4MDzTRcVqGAM27fk+DLXvyDsn3Jw==} - dev: true - - /@types/node/18.11.9: + /@types/node@18.11.9: resolution: {integrity: sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==} dev: true - /@types/normalize-package-data/2.4.1: + /@types/normalize-package-data@2.4.1: resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} dev: true - /@types/npmlog/4.1.4: - resolution: {integrity: sha512-WKG4gTr8przEZBiJ5r3s8ZIAoMXNbOgQ+j/d5O4X3x6kZJRLNvyUJuUK/KoG3+8BaOHPhp2m7WC6JKKeovDSzQ==} - dev: true - - /@types/parse-json/4.0.0: + /@types/parse-json@4.0.0: resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} dev: true - /@types/parse5/5.0.3: - resolution: {integrity: sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==} - dev: true - - /@types/pretty-hrtime/1.0.1: - resolution: {integrity: sha512-VjID5MJb1eGKthz2qUerWT8+R4b9N+CHvGCzg9fn4kWZgaF9AhdYikQio3R7wV8YY1NsQKPaCwKz1Yff+aHNUQ==} - dev: true - - /@types/prop-types/15.7.5: + /@types/prop-types@15.7.5: 
resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==} - /@types/q/1.5.4: + /@types/q@1.5.4: resolution: {integrity: sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug==} dev: true - /@types/qs/6.9.7: - resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} - dev: true - - /@types/react-dom/18.0.6: + /@types/react-dom@18.0.6: resolution: {integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==} dependencies: '@types/react': 18.0.25 dev: true - /@types/react-intl/3.0.0_5z6wstateidnxll3plz2xpyagu: + /@types/react-intl@3.0.0(prop-types@15.5.10)(react@18.2.0): resolution: {integrity: sha512-k8F3d05XQGEqSWIfK97bBjZe4z9RruXU9Wa7OZ2iUC5pdeIpzuQDZe/9C2J3Xir5//ZtAkhcv08Wfx3n5TBTQg==} deprecated: This is a stub types definition. react-intl provides its own type definitions, so you do not need this installed. dependencies: - react-intl: 2.9.0_5z6wstateidnxll3plz2xpyagu + react-intl: 2.9.0(prop-types@15.5.10)(react@18.2.0) transitivePeerDependencies: - prop-types - react dev: true - /@types/react-is/17.0.3: + /@types/react-is@17.0.3: resolution: {integrity: sha512-aBTIWg1emtu95bLTLx0cpkxwGW3ueZv71nE2YFBpL8k/z5czEW8yYpOo8Dp+UUAFAtKwNaOsh/ioSeQnWlZcfw==} dependencies: '@types/react': 18.0.25 dev: true - /@types/react-syntax-highlighter/15.5.2: + /@types/react-syntax-highlighter@15.5.2: resolution: {integrity: sha512-cJJvwU8lQv/efGSo/LmPoaOqWi/B0AG4CNKKCn7HPUL25SqiPn1Vl+fV1JiUigJv97ruTZ8mo08+b8/0zoYufA==} dependencies: '@types/react': 18.0.25 dev: true - /@types/react-transition-group/4.4.5: + /@types/react-transition-group@4.4.5: resolution: {integrity: sha512-juKD/eiSM3/xZYzjuzH6ZwpP+/lejltmiS3QEzV/vmb/Q8+HfDmxu+Baga8UEMGBqV88Nbg4l2hY/K2DkyaLLA==} dependencies: '@types/react': 18.0.25 dev: true - /@types/react/18.0.25: + /@types/react@18.0.25: resolution: {integrity: sha512-xD6c0KDT4m7n9uD4ZHi02lzskaiqcBxf4zi+tXZY98a04wvc0hi/TcCPC2FOESZi51Nd7tlUeOJY8RofL799/g==} dependencies: '@types/prop-types': 15.7.5 '@types/scheduler': 0.16.1 csstype: 3.1.0 - /@types/resolve/0.0.8: + /@types/resolve@0.0.8: resolution: {integrity: sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==} dependencies: '@types/node': 18.11.9 dev: true - /@types/scheduler/0.16.1: + /@types/scheduler@0.16.1: resolution: {integrity: sha512-EaCxbanVeyxDRTQBkdLb3Bvl/HK7PBK6UJjsSixB0iHKoWxE5uu2Q/DgtpOhPIojN0Zl1whvOd7PoHs2P0s5eA==} - /@types/source-list-map/0.1.2: - resolution: {integrity: sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA==} - dev: true - - /@types/stack-utils/1.0.1: + /@types/stack-utils@1.0.1: resolution: {integrity: sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==} dev: true - /@types/tapable/1.0.8: - resolution: {integrity: sha512-ipixuVrh2OdNmauvtT51o3d8z12p6LtFW9in7U79der/kwejjdNchQC5UMn5u/KxNoM7VHHOs/l8KS8uHxhODQ==} - dev: true - - /@types/uglify-js/3.17.0: - resolution: {integrity: sha512-3HO6rm0y+/cqvOyA8xcYLweF0TKXlAxmQASjbOi49Co51A1N4nR4bEwBgRoD9kNM+rqFGArjKr654SLp2CoGmQ==} - dependencies: - source-map: 0.6.1 - dev: true - - /@types/unist/2.0.6: + /@types/unist@2.0.6: resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==} + dev: false - /@types/webpack-env/1.18.0: - resolution: {integrity: 
sha512-56/MAlX5WMsPVbOg7tAxnYvNYMMWr/QJiIp6BxVSW3JJXUVzzOn64qW8TzQyMSqSUFM2+PVI4aUHcHOzIz/1tg==} - dev: true - - /@types/webpack-sources/3.2.0: - resolution: {integrity: sha512-Ft7YH3lEVRQ6ls8k4Ff1oB4jN6oy/XmU6tQISKdhfh+1mR+viZFphS6WL0IrtDOzvefmJg5a0s7ZQoRXwqTEFg==} - dependencies: - '@types/node': 18.11.9 - '@types/source-list-map': 0.1.2 - source-map: 0.7.3 - dev: true - - /@types/webpack/4.41.32: - resolution: {integrity: sha512-cb+0ioil/7oz5//7tZUSwbrSAN/NWHrQylz5cW8G0dWTcF/g+/dSdMlKVZspBYuMAN1+WnwHrkxiRrLcwd0Heg==} - dependencies: - '@types/node': 18.11.9 - '@types/tapable': 1.0.8 - '@types/uglify-js': 3.17.0 - '@types/webpack-sources': 3.2.0 - anymatch: 3.1.1 - source-map: 0.6.1 - dev: true - - /@types/yargs-parser/15.0.0: + /@types/yargs-parser@15.0.0: resolution: {integrity: sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw==} dev: true - /@types/yargs/13.0.11: + /@types/yargs@13.0.11: resolution: {integrity: sha512-NRqD6T4gktUrDi1o1wLH3EKC1o2caCr7/wR87ODcbVITQF106OM3sFN92ysZ++wqelOd1CTzatnOBRDYYG6wGQ==} dependencies: '@types/yargs-parser': 15.0.0 dev: true - /@types/yargs/15.0.14: - resolution: {integrity: sha512-yEJzHoxf6SyQGhBhIYGXQDSCkJjB6HohDShto7m8vaKg9Yp0Yn8+71J9eakh2bnPg6BfsH9PRMhiRTZnd4eXGQ==} - dependencies: - '@types/yargs-parser': 15.0.0 - dev: true - - /@types/yargs/16.0.4: - resolution: {integrity: sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==} - dependencies: - '@types/yargs-parser': 15.0.0 - dev: true - - /@typescript-eslint/eslint-plugin/4.33.0_jzkccvb4lenajp6msx3prptnxi: + /@typescript-eslint/eslint-plugin@4.33.0(@typescript-eslint/parser@4.33.0)(eslint@6.8.0)(typescript@4.7.4): resolution: {integrity: sha512-aINiAxGVdOl1eJyVjaWn/YcVAq4Gi/Yo35qHGCnqbWVz61g39D0h23veY/MA0rFFGfxK7TySg2uwDeNv+JgVpg==} engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: @@ -7631,8 +3807,8 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/experimental-utils': 4.33.0_nbcwli3tdjzkqybobvobagytby - '@typescript-eslint/parser': 4.33.0_nbcwli3tdjzkqybobvobagytby + '@typescript-eslint/experimental-utils': 4.33.0(eslint@6.8.0)(typescript@4.7.4) + '@typescript-eslint/parser': 4.33.0(eslint@6.8.0)(typescript@4.7.4) '@typescript-eslint/scope-manager': 4.33.0 debug: 4.3.3 eslint: 6.8.0 @@ -7640,13 +3816,13 @@ packages: ignore: 5.2.0 regexpp: 3.2.0 semver: 7.3.5 - tsutils: 3.21.0_typescript@4.7.4 + tsutils: 3.21.0(typescript@4.7.4) typescript: 4.7.4 transitivePeerDependencies: - supports-color dev: true - /@typescript-eslint/experimental-utils/4.33.0_nbcwli3tdjzkqybobvobagytby: + /@typescript-eslint/experimental-utils@4.33.0(eslint@6.8.0)(typescript@4.7.4): resolution: {integrity: sha512-zeQjOoES5JFjTnAhI5QY7ZviczMzDptls15GFsI6jyUOq0kOf9+WonkhtlIhh0RgHRnqj5gdNxW5j1EvAyYg6Q==} engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: @@ -7655,16 +3831,16 @@ packages: '@types/json-schema': 7.0.9 '@typescript-eslint/scope-manager': 4.33.0 '@typescript-eslint/types': 4.33.0 - '@typescript-eslint/typescript-estree': 4.33.0_typescript@4.7.4 + '@typescript-eslint/typescript-estree': 4.33.0(typescript@4.7.4) eslint: 6.8.0 eslint-scope: 5.1.1 - eslint-utils: 3.0.0_eslint@6.8.0 + eslint-utils: 3.0.0(eslint@6.8.0) transitivePeerDependencies: - supports-color - typescript dev: true - /@typescript-eslint/parser/4.33.0_nbcwli3tdjzkqybobvobagytby: + /@typescript-eslint/parser@4.33.0(eslint@6.8.0)(typescript@4.7.4): resolution: {integrity: 
sha512-ZohdsbXadjGBSK0/r+d87X0SBmKzOq4/S5nzK6SBgJspFo9/CUDJ7hjayuze+JK7CZQLDMroqytp7pOcFKTxZA==} engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: @@ -7676,7 +3852,7 @@ packages: dependencies: '@typescript-eslint/scope-manager': 4.33.0 '@typescript-eslint/types': 4.33.0 - '@typescript-eslint/typescript-estree': 4.33.0_typescript@4.7.4 + '@typescript-eslint/typescript-estree': 4.33.0(typescript@4.7.4) debug: 4.3.3 eslint: 6.8.0 typescript: 4.7.4 @@ -7684,7 +3860,7 @@ packages: - supports-color dev: true - /@typescript-eslint/scope-manager/4.33.0: + /@typescript-eslint/scope-manager@4.33.0: resolution: {integrity: sha512-5IfJHpgTsTZuONKbODctL4kKuQje/bzBRkwHE8UOZ4f89Zeddg+EGZs8PD8NcN4LdM3ygHWYB3ukPAYjvl/qbQ==} engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dependencies: @@ -7692,12 +3868,12 @@ packages: '@typescript-eslint/visitor-keys': 4.33.0 dev: true - /@typescript-eslint/types/4.33.0: + /@typescript-eslint/types@4.33.0: resolution: {integrity: sha512-zKp7CjQzLQImXEpLt2BUw1tvOMPfNoTAfb8l51evhYbOEEzdWyQNmHWWGPR6hwKJDAi+1VXSBmnhL9kyVTTOuQ==} engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dev: true - /@typescript-eslint/typescript-estree/4.33.0_typescript@4.7.4: + /@typescript-eslint/typescript-estree@4.33.0(typescript@4.7.4): resolution: {integrity: sha512-rkWRY1MPFzjwnEVHsxGemDzqqddw2QbTJlICPD9p9I9LfsO8fdmfQPOX3uKfUaGRDFJbfrtm/sXhVXN4E+bzCA==} engines: {node: ^10.12.0 || >=12.0.0} peerDependencies: @@ -7711,14 +3887,14 @@ packages: debug: 4.3.3 globby: 11.1.0 is-glob: 4.0.1 - semver: 7.3.5 - tsutils: 3.21.0_typescript@4.7.4 + semver: 7.3.8 + tsutils: 3.21.0(typescript@4.7.4) typescript: 4.7.4 transitivePeerDependencies: - supports-color dev: true - /@typescript-eslint/visitor-keys/4.33.0: + /@typescript-eslint/visitor-keys@4.33.0: resolution: {integrity: sha512-uqi/2aSz9g2ftcHWf8uLPJA70rUv6yuMW5Bohw+bwcuzaxQIHaKFZCKGoGXIrc9vkTJ3+0txM73K0Hq3d5wgIg==} engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} dependencies: @@ -7726,258 +3902,25 @@ packages: eslint-visitor-keys: 2.1.0 dev: true - /@webassemblyjs/ast/1.11.1: - resolution: {integrity: sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==} - dependencies: - '@webassemblyjs/helper-numbers': 1.11.1 - '@webassemblyjs/helper-wasm-bytecode': 1.11.1 - dev: true - - /@webassemblyjs/ast/1.9.0: - resolution: {integrity: sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA==} - dependencies: - '@webassemblyjs/helper-module-context': 1.9.0 - '@webassemblyjs/helper-wasm-bytecode': 1.9.0 - '@webassemblyjs/wast-parser': 1.9.0 - dev: true - - /@webassemblyjs/floating-point-hex-parser/1.11.1: - resolution: {integrity: sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==} - dev: true - - /@webassemblyjs/floating-point-hex-parser/1.9.0: - resolution: {integrity: sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA==} - dev: true - - /@webassemblyjs/helper-api-error/1.11.1: - resolution: {integrity: sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==} - dev: true - - /@webassemblyjs/helper-api-error/1.9.0: - resolution: {integrity: sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw==} - dev: true - - /@webassemblyjs/helper-buffer/1.11.1: - resolution: {integrity: sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==} - dev: true - - 
/@webassemblyjs/helper-buffer/1.9.0: - resolution: {integrity: sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA==} - dev: true - - /@webassemblyjs/helper-code-frame/1.9.0: - resolution: {integrity: sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA==} - dependencies: - '@webassemblyjs/wast-printer': 1.9.0 - dev: true - - /@webassemblyjs/helper-fsm/1.9.0: - resolution: {integrity: sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw==} - dev: true - - /@webassemblyjs/helper-module-context/1.9.0: - resolution: {integrity: sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - dev: true - - /@webassemblyjs/helper-numbers/1.11.1: - resolution: {integrity: sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==} - dependencies: - '@webassemblyjs/floating-point-hex-parser': 1.11.1 - '@webassemblyjs/helper-api-error': 1.11.1 - '@xtuc/long': 4.2.2 - dev: true - - /@webassemblyjs/helper-wasm-bytecode/1.11.1: - resolution: {integrity: sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==} - dev: true - - /@webassemblyjs/helper-wasm-bytecode/1.9.0: - resolution: {integrity: sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw==} - dev: true - - /@webassemblyjs/helper-wasm-section/1.11.1: - resolution: {integrity: sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==} - dependencies: - '@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/helper-buffer': 1.11.1 - '@webassemblyjs/helper-wasm-bytecode': 1.11.1 - '@webassemblyjs/wasm-gen': 1.11.1 - dev: true - - /@webassemblyjs/helper-wasm-section/1.9.0: - resolution: {integrity: sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-buffer': 1.9.0 - '@webassemblyjs/helper-wasm-bytecode': 1.9.0 - '@webassemblyjs/wasm-gen': 1.9.0 - dev: true - - /@webassemblyjs/ieee754/1.11.1: - resolution: {integrity: sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==} - dependencies: - '@xtuc/ieee754': 1.2.0 - dev: true - - /@webassemblyjs/ieee754/1.9.0: - resolution: {integrity: sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg==} - dependencies: - '@xtuc/ieee754': 1.2.0 - dev: true - - /@webassemblyjs/leb128/1.11.1: - resolution: {integrity: sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==} - dependencies: - '@xtuc/long': 4.2.2 - dev: true - - /@webassemblyjs/leb128/1.9.0: - resolution: {integrity: sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw==} - dependencies: - '@xtuc/long': 4.2.2 - dev: true - - /@webassemblyjs/utf8/1.11.1: - resolution: {integrity: sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==} - dev: true - - /@webassemblyjs/utf8/1.9.0: - resolution: {integrity: sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w==} - dev: true - - /@webassemblyjs/wasm-edit/1.11.1: - resolution: {integrity: sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==} - dependencies: - 
'@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/helper-buffer': 1.11.1 - '@webassemblyjs/helper-wasm-bytecode': 1.11.1 - '@webassemblyjs/helper-wasm-section': 1.11.1 - '@webassemblyjs/wasm-gen': 1.11.1 - '@webassemblyjs/wasm-opt': 1.11.1 - '@webassemblyjs/wasm-parser': 1.11.1 - '@webassemblyjs/wast-printer': 1.11.1 - dev: true - - /@webassemblyjs/wasm-edit/1.9.0: - resolution: {integrity: sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-buffer': 1.9.0 - '@webassemblyjs/helper-wasm-bytecode': 1.9.0 - '@webassemblyjs/helper-wasm-section': 1.9.0 - '@webassemblyjs/wasm-gen': 1.9.0 - '@webassemblyjs/wasm-opt': 1.9.0 - '@webassemblyjs/wasm-parser': 1.9.0 - '@webassemblyjs/wast-printer': 1.9.0 - dev: true - - /@webassemblyjs/wasm-gen/1.11.1: - resolution: {integrity: sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==} - dependencies: - '@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/helper-wasm-bytecode': 1.11.1 - '@webassemblyjs/ieee754': 1.11.1 - '@webassemblyjs/leb128': 1.11.1 - '@webassemblyjs/utf8': 1.11.1 - dev: true - - /@webassemblyjs/wasm-gen/1.9.0: - resolution: {integrity: sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-wasm-bytecode': 1.9.0 - '@webassemblyjs/ieee754': 1.9.0 - '@webassemblyjs/leb128': 1.9.0 - '@webassemblyjs/utf8': 1.9.0 - dev: true - - /@webassemblyjs/wasm-opt/1.11.1: - resolution: {integrity: sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==} - dependencies: - '@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/helper-buffer': 1.11.1 - '@webassemblyjs/wasm-gen': 1.11.1 - '@webassemblyjs/wasm-parser': 1.11.1 - dev: true - - /@webassemblyjs/wasm-opt/1.9.0: - resolution: {integrity: sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-buffer': 1.9.0 - '@webassemblyjs/wasm-gen': 1.9.0 - '@webassemblyjs/wasm-parser': 1.9.0 - dev: true - - /@webassemblyjs/wasm-parser/1.11.1: - resolution: {integrity: sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==} - dependencies: - '@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/helper-api-error': 1.11.1 - '@webassemblyjs/helper-wasm-bytecode': 1.11.1 - '@webassemblyjs/ieee754': 1.11.1 - '@webassemblyjs/leb128': 1.11.1 - '@webassemblyjs/utf8': 1.11.1 - dev: true - - /@webassemblyjs/wasm-parser/1.9.0: - resolution: {integrity: sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-api-error': 1.9.0 - '@webassemblyjs/helper-wasm-bytecode': 1.9.0 - '@webassemblyjs/ieee754': 1.9.0 - '@webassemblyjs/leb128': 1.9.0 - '@webassemblyjs/utf8': 1.9.0 - dev: true - - /@webassemblyjs/wast-parser/1.9.0: - resolution: {integrity: sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/floating-point-hex-parser': 1.9.0 - '@webassemblyjs/helper-api-error': 1.9.0 - '@webassemblyjs/helper-code-frame': 1.9.0 - '@webassemblyjs/helper-fsm': 1.9.0 - '@xtuc/long': 4.2.2 - dev: true - - /@webassemblyjs/wast-printer/1.11.1: - resolution: {integrity: 
sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==} - dependencies: - '@webassemblyjs/ast': 1.11.1 - '@xtuc/long': 4.2.2 - dev: true - - /@webassemblyjs/wast-printer/1.9.0: - resolution: {integrity: sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA==} - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/wast-parser': 1.9.0 - '@xtuc/long': 4.2.2 - dev: true - - /@wojtekmaj/enzyme-adapter-react-17/0.6.7_todk22eekuihjg65rlnudp4qdi: + /@wojtekmaj/enzyme-adapter-react-17@0.6.7(enzyme@3.11.0)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-B+byiwi/T1bx5hcj9wc0fUL5Hlb5giSXJzcnEfJVl2j6dGV2NJfcxDBYX0WWwIxlzNiFz8kAvlkFWI2y/nscZQ==} peerDependencies: enzyme: ^3.0.0 react: ^17.0.0-0 || 18 react-dom: ^17.0.0-0 || 18 dependencies: - '@wojtekmaj/enzyme-adapter-utils': 0.1.4_react@18.2.0 + '@wojtekmaj/enzyme-adapter-utils': 0.1.4(react@18.2.0) enzyme: 3.11.0 enzyme-shallow-equal: 1.0.4 has: 1.0.3 prop-types: 15.8.1 react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 + react-dom: 18.2.0(react@18.2.0) react-is: 17.0.2 - react-test-renderer: 17.0.2_react@18.2.0 + react-test-renderer: 17.0.2(react@18.2.0) dev: true - /@wojtekmaj/enzyme-adapter-utils/0.1.4_react@18.2.0: + /@wojtekmaj/enzyme-adapter-utils@0.1.4(react@18.2.0): resolution: {integrity: sha512-ARGIQSIIv3oBia1m5Ihn1VU0FGmft6KPe39SBKTb8p7LSXO23YI4kNtc4M/cKoIY7P+IYdrZcgMObvedyjoSQA==} peerDependencies: react: ^17.0.0-0 || 18 @@ -7989,47 +3932,23 @@ packages: react: 18.2.0 dev: true - /@xmldom/xmldom/0.8.3: + /@xmldom/xmldom@0.8.3: resolution: {integrity: sha512-Lv2vySXypg4nfa51LY1nU8yDAGo/5YwF+EY/rUZgIbfvwVARcd67ttCM8SMsTeJy51YhHYavEq+FS6R0hW9PFQ==} engines: {node: '>=10.0.0'} dev: true - /@xtuc/ieee754/1.2.0: - resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==} - dev: true - - /@xtuc/long/4.2.2: - resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==} - dev: true - - /abab/2.0.5: + /abab@2.0.5: resolution: {integrity: sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==} dev: true - /accepts/1.3.8: - resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} - engines: {node: '>= 0.6'} - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - dev: true - - /acorn-globals/4.3.4: + /acorn-globals@4.3.4: resolution: {integrity: sha512-clfQEh21R+D0leSbUdWf3OcfqyaCSAQ8Ryq00bofSekfr9W8u1jyYZo6ir0xu9Gtcf7BjcHJpnbZH7JOCpP60A==} dependencies: acorn: 6.4.2 acorn-walk: 6.2.0 dev: true - /acorn-import-assertions/1.8.0_acorn@8.7.1: - resolution: {integrity: sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==} - peerDependencies: - acorn: ^8 - dependencies: - acorn: 8.7.1 - dev: true - - /acorn-jsx/5.3.1_acorn@7.4.1: + /acorn-jsx@5.3.1(acorn@7.4.1): resolution: {integrity: sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 @@ -8037,89 +3956,41 @@ packages: acorn: 7.4.1 dev: true - /acorn-walk/6.2.0: + /acorn-walk@6.2.0: resolution: {integrity: sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA==} engines: {node: '>=0.4.0'} dev: true - /acorn-walk/7.2.0: - resolution: {integrity: 
sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} - engines: {node: '>=0.4.0'} - dev: true - - /acorn-walk/8.2.0: + /acorn-walk@8.2.0: resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} engines: {node: '>=0.4.0'} dev: true - /acorn/5.7.4: + /acorn@5.7.4: resolution: {integrity: sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==} engines: {node: '>=0.4.0'} hasBin: true dev: true - /acorn/6.4.2: + /acorn@6.4.2: resolution: {integrity: sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==} engines: {node: '>=0.4.0'} hasBin: true dev: true - /acorn/7.4.1: + /acorn@7.4.1: resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} engines: {node: '>=0.4.0'} hasBin: true dev: true - /acorn/8.7.1: + /acorn@8.7.1: resolution: {integrity: sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==} engines: {node: '>=0.4.0'} hasBin: true dev: true - /address/1.2.1: - resolution: {integrity: sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA==} - engines: {node: '>= 10.0.0'} - dev: true - - /aggregate-error/3.1.0: - resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} - engines: {node: '>=8'} - dependencies: - clean-stack: 2.2.0 - indent-string: 4.0.0 - dev: true - - /airbnb-js-shims/2.2.1: - resolution: {integrity: sha512-wJNXPH66U2xjgo1Zwyjf9EydvJ2Si94+vSdk6EERcBfB2VZkeltpqIats0cqIZMLCXP3zcyaUKGYQeIBT6XjsQ==} - dependencies: - array-includes: 3.1.4 - array.prototype.flat: 1.2.4 - array.prototype.flatmap: 1.2.5 - es5-shim: 4.6.7 - es6-shim: 0.35.6 - function.prototype.name: 1.1.2 - globalthis: 1.0.3 - object.entries: 1.1.5 - object.fromentries: 2.0.5 - object.getownpropertydescriptors: 2.1.0 - object.values: 1.1.5 - promise.allsettled: 1.0.5 - promise.prototype.finally: 3.1.3 - string.prototype.matchall: 4.0.6 - string.prototype.padend: 3.1.1 - string.prototype.padstart: 3.1.3 - symbol.prototype.description: 1.0.5 - dev: true - - /ajv-errors/1.0.1_ajv@6.12.6: - resolution: {integrity: sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==} - peerDependencies: - ajv: '>=5.0.0' - dependencies: - ajv: 6.12.6 - dev: true - - /ajv-keywords/3.5.2_ajv@6.12.6: + /ajv-keywords@3.5.2(ajv@6.12.6): resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} peerDependencies: ajv: ^6.9.1 @@ -8127,7 +3998,7 @@ packages: ajv: 6.12.6 dev: true - /ajv/6.12.6: + /ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} dependencies: fast-deep-equal: 3.1.3 @@ -8136,7 +4007,7 @@ packages: uri-js: 4.4.0 dev: true - /ajv/8.10.0: + /ajv@8.10.0: resolution: {integrity: sha512-bzqAEZOjkrUMl2afH8dknrq5KEk2SrwdBROR+vH1EKVQTqaUbJVPdc/gEdggTMM0Se+s+Ja4ju4TlNcStKl2Hw==} dependencies: fast-deep-equal: 3.1.3 @@ -8145,92 +4016,62 @@ packages: uri-js: 4.4.0 dev: true - /alphanum-sort/1.0.2: - resolution: {integrity: sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=} - dev: true - - /ansi-align/3.0.1: - resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} - dependencies: - string-width: 4.2.3 - dev: 
true - - /ansi-colors/3.2.4: - resolution: {integrity: sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==} - engines: {node: '>=6'} + /alphanum-sort@1.0.2: + resolution: {integrity: sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=} dev: true - /ansi-escapes/3.2.0: + /ansi-escapes@3.2.0: resolution: {integrity: sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==} engines: {node: '>=4'} dev: true - /ansi-escapes/4.3.1: + /ansi-escapes@4.3.1: resolution: {integrity: sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==} engines: {node: '>=8'} dependencies: type-fest: 0.11.0 dev: true - /ansi-html-community/0.0.8: - resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==} - engines: {'0': node >= 0.8.0} - hasBin: true - dev: true - - /ansi-regex/2.1.1: + /ansi-regex@2.1.1: resolution: {integrity: sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==} engines: {node: '>=0.10.0'} dev: true - /ansi-regex/3.0.0: + /ansi-regex@3.0.0: resolution: {integrity: sha512-wFUFA5bg5dviipbQQ32yOQhl6gcJaJXiHE7dvR8VYPG97+J/GNC5FKGepKdEDUFeXRzDxPF1X/Btc8L+v7oqIQ==} engines: {node: '>=4'} dev: true - /ansi-regex/4.1.0: + /ansi-regex@4.1.0: resolution: {integrity: sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==} engines: {node: '>=6'} dev: true - /ansi-regex/5.0.1: + /ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} dev: true - /ansi-styles/2.2.1: + /ansi-styles@2.2.1: resolution: {integrity: sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==} engines: {node: '>=0.10.0'} dev: true - /ansi-styles/3.2.1: + /ansi-styles@3.2.1: resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} engines: {node: '>=4'} dependencies: color-convert: 1.9.3 dev: true - /ansi-styles/4.3.0: + /ansi-styles@4.3.0: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} dependencies: color-convert: 2.0.1 dev: true - /ansi-styles/5.2.0: - resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} - engines: {node: '>=10'} - dev: true - - /ansi-to-html/0.6.15: - resolution: {integrity: sha512-28ijx2aHJGdzbs+O5SNQF65r6rrKYnkuwTYm8lZlChuoJ9P1vVzIpWO20sQTqTPDXYp6NFwk326vApTtLVFXpQ==} - engines: {node: '>=8.0.0'} - hasBin: true - dependencies: - entities: 2.2.0 - dev: true - - /anymatch/2.0.0: + /anymatch@2.0.0: resolution: {integrity: sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==} dependencies: micromatch: 3.1.10 @@ -8239,7 +4080,7 @@ packages: - supports-color dev: true - /anymatch/3.1.1: + /anymatch@3.1.1: resolution: {integrity: sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==} engines: {node: '>= 8'} dependencies: @@ -8247,41 +4088,21 @@ packages: picomatch: 2.3.1 dev: true - /app-root-dir/1.0.2: - resolution: {integrity: sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g==} - dev: true - - /aproba/1.2.0: - resolution: {integrity: 
sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==} - dev: true - - /aproba/2.0.0: - resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} - dev: true - - /are-we-there-yet/2.0.0: - resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} - engines: {node: '>=10'} - dependencies: - delegates: 1.0.0 - readable-stream: 3.6.0 - dev: true - - /arg/4.1.3: + /arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} dev: true - /argparse/1.0.10: + /argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} dependencies: sprintf-js: 1.0.3 dev: true - /argparse/2.0.1: + /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} dev: true - /aria-hidden/1.2.0_fan5qbzahqtxlm5dzefqlqx5ia: + /aria-hidden@1.2.0(@types/react@18.0.25)(react@18.2.0): resolution: {integrity: sha512-gk7QBfz7M9dMK6xZmlCZkR0wqGe9ojBmYHCAZUhdvdYpfY1BLnnLDxdNGzxXhPAtbr09FZS3exsZhX9ELnJJ0w==} engines: {node: '>=10'} peerDependencies: @@ -8296,45 +4117,30 @@ packages: tslib: 2.4.0 dev: false - /aria-query/5.0.2: - resolution: {integrity: sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q==} - engines: {node: '>=6.0'} - dev: true - - /arr-diff/4.0.0: + /arr-diff@4.0.0: resolution: {integrity: sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==} engines: {node: '>=0.10.0'} dev: true - /arr-flatten/1.1.0: + /arr-flatten@1.1.0: resolution: {integrity: sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==} engines: {node: '>=0.10.0'} dev: true - /arr-union/3.1.0: + /arr-union@3.1.0: resolution: {integrity: sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==} engines: {node: '>=0.10.0'} dev: true - /array-equal/1.0.0: + /array-equal@1.0.0: resolution: {integrity: sha512-H3LU5RLiSsGXPhN+Nipar0iR0IofH+8r89G2y1tBKxQ/agagKyAjhkAFDRBfodP2caPrNKHpAWNIM/c9yeL7uA==} dev: true - /array-filter/1.0.0: + /array-filter@1.0.0: resolution: {integrity: sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=} dev: true - /array-find-index/1.0.2: - resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /array-flatten/1.1.1: - resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} - dev: true - - /array-includes/3.1.4: + /array-includes@3.1.4: resolution: {integrity: sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw==} engines: {node: '>= 0.4'} dependencies: @@ -8345,29 +4151,29 @@ packages: is-string: 1.0.7 dev: true - /array-union/1.0.2: + /array-union@1.0.2: resolution: {integrity: sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=} engines: {node: '>=0.10.0'} dependencies: array-uniq: 1.0.3 dev: true - /array-union/2.1.0: + /array-union@2.1.0: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} dev: true - /array-uniq/1.0.3: + /array-uniq@1.0.3: resolution: {integrity: 
sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=} engines: {node: '>=0.10.0'} dev: true - /array-unique/0.3.2: + /array-unique@0.3.2: resolution: {integrity: sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==} engines: {node: '>=0.10.0'} dev: true - /array.prototype.flat/1.2.4: + /array.prototype.flat@1.2.4: resolution: {integrity: sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==} engines: {node: '>= 0.4'} dependencies: @@ -8376,7 +4182,7 @@ packages: es-abstract: 1.19.1 dev: true - /array.prototype.flatmap/1.2.5: + /array.prototype.flatmap@1.2.5: resolution: {integrity: sha512-08u6rVyi1Lj7oqWbS9nUxliETrtIROT4XGTA4D/LWGten6E3ocm7cy9SIrmNHOL5XVbVuckUp3X6Xyg8/zpvHA==} engines: {node: '>= 0.4'} dependencies: @@ -8385,130 +4191,70 @@ packages: es-abstract: 1.19.1 dev: true - /array.prototype.map/1.0.4: - resolution: {integrity: sha512-Qds9QnX7A0qISY7JT5WuJO0NJPE9CMlC6JzHQfhpqAAQQzufVRoeH7EzUY5GcPTx72voG8LV/5eo+b8Qi8hmhA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.3 - es-abstract: 1.19.1 - es-array-method-boxes-properly: 1.0.0 - is-string: 1.0.7 - dev: true - - /array.prototype.reduce/1.0.4: - resolution: {integrity: sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - es-array-method-boxes-properly: 1.0.0 - is-string: 1.0.7 - dev: true - - /arrify/1.0.1: + /arrify@1.0.1: resolution: {integrity: sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=} engines: {node: '>=0.10.0'} dev: true - /arrify/2.0.1: - resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} - engines: {node: '>=8'} - dev: true - - /asap/2.0.6: + /asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} dev: true - /asn1.js/5.4.1: - resolution: {integrity: sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==} - dependencies: - bn.js: 4.12.0 - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - safer-buffer: 2.1.2 - dev: true - - /asn1/0.2.4: + /asn1@0.2.4: resolution: {integrity: sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==} dependencies: safer-buffer: 2.1.2 dev: true - /assert-plus/1.0.0: + /assert-plus@1.0.0: resolution: {integrity: sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==} engines: {node: '>=0.8'} dev: true - /assert/1.5.0: - resolution: {integrity: sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==} - dependencies: - object-assign: 4.1.1 - util: 0.10.3 - dev: true - - /assign-symbols/1.0.0: + /assign-symbols@1.0.0: resolution: {integrity: sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==} engines: {node: '>=0.10.0'} dev: true - /ast-types/0.14.2: - resolution: {integrity: sha512-O0yuUDnZeQDL+ncNGlJ78BiO4jnYI3bvMsD5prT0/nsgijG/LpNBIr63gTjVTNsiGkgQhiyCShTgxt8oXOrklA==} - engines: {node: '>=4'} - dependencies: - tslib: 2.4.0 - dev: true - - /astral-regex/1.0.0: + /astral-regex@1.0.0: resolution: {integrity: sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==} engines: {node: '>=4'} dev: true - /astral-regex/2.0.0: + /astral-regex@2.0.0: resolution: 
{integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} engines: {node: '>=8'} dev: true - /async-each/1.0.3: - resolution: {integrity: sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==} - dev: true - optional: true - - /async-limiter/1.0.1: + /async-limiter@1.0.1: resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} dev: true - /async/2.6.3: + /async@2.6.3: resolution: {integrity: sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==} dependencies: lodash: 4.17.21 dev: true - /async/3.2.4: + /async@3.2.4: resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} dev: true - /asynckit/0.4.0: + /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} dev: true - /asyncro/3.0.0: + /asyncro@3.0.0: resolution: {integrity: sha512-nEnWYfrBmA3taTiuiOoZYmgJ/CNrSoQLeLs29SeLcPu60yaw/mHDBHV0iOZ051fTvsTHxpCY+gXibqT9wbQYfg==} dev: true - /at-least-node/1.0.0: - resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} - engines: {node: '>= 4.0.0'} - dev: true - - /atob/2.1.2: + /atob@2.1.2: resolution: {integrity: sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==} engines: {node: '>= 4.5.0'} hasBin: true dev: true - /autoprefixer/9.8.6: + /autoprefixer@9.8.6: resolution: {integrity: sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg==} hasBin: true dependencies: @@ -8517,19 +4263,19 @@ packages: colorette: 1.2.1 normalize-range: 0.1.2 num2fraction: 1.2.2 - postcss: 7.0.35 + postcss: 7.0.39 postcss-value-parser: 4.2.0 dev: true - /aws-sign2/0.7.0: + /aws-sign2@0.7.0: resolution: {integrity: sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==} dev: true - /aws4/1.11.0: + /aws4@1.11.0: resolution: {integrity: sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==} dev: true - /babel-jest/24.9.0_@babel+core@7.19.1: + /babel-jest@24.9.0(@babel/core@7.19.1): resolution: {integrity: sha512-ntuddfyiN+EhMw58PTNL1ph4C9rECiQXjI4nMMBKBaNjXvqLdkXpPRcMSr4iyBrJg/+wz9brFUD6RhOAT6r4Iw==} engines: {node: '>= 6'} peerDependencies: @@ -8540,14 +4286,14 @@ packages: '@jest/types': 24.9.0 '@types/babel__core': 7.1.12 babel-plugin-istanbul: 5.2.0 - babel-preset-jest: 24.9.0_@babel+core@7.19.1 + babel-preset-jest: 24.9.0(@babel/core@7.19.1) chalk: 2.4.2 slash: 2.0.0 transitivePeerDependencies: - supports-color dev: true - /babel-jest/24.9.0_@babel+core@7.9.0: + /babel-jest@24.9.0(@babel/core@7.9.0): resolution: {integrity: sha512-ntuddfyiN+EhMw58PTNL1ph4C9rECiQXjI4nMMBKBaNjXvqLdkXpPRcMSr4iyBrJg/+wz9brFUD6RhOAT6r4Iw==} engines: {node: '>= 6'} peerDependencies: @@ -8558,32 +4304,14 @@ packages: '@jest/types': 24.9.0 '@types/babel__core': 7.1.12 babel-plugin-istanbul: 5.2.0 - babel-preset-jest: 24.9.0_@babel+core@7.9.0 + babel-preset-jest: 24.9.0(@babel/core@7.9.0) chalk: 2.4.2 slash: 2.0.0 transitivePeerDependencies: - supports-color dev: true - /babel-loader/8.2.5_7k5t74zmen3ocxyd32avkcyrwe: - resolution: {integrity: sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==} - engines: {node: '>= 8.9'} - 
peerDependencies: - '@babel/core': ^7.0.0 || 7 - webpack: '>=2' - peerDependenciesMeta: - webpack: - optional: true - dependencies: - '@babel/core': 7.19.1 - find-cache-dir: 3.3.1 - loader-utils: 2.0.2 - make-dir: 3.1.0 - schema-utils: 2.7.1 - webpack: 4.46.0 - dev: true - - /babel-loader/8.2.5_@babel+core@7.9.0: + /babel-loader@8.2.5(@babel/core@7.9.0): resolution: {integrity: sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==} engines: {node: '>= 8.9'} peerDependencies: @@ -8600,33 +4328,13 @@ packages: schema-utils: 2.7.1 dev: true - /babel-plugin-add-react-displayname/0.0.5: - resolution: {integrity: sha512-LY3+Y0XVDYcShHHorshrDbt4KFWL4bSeniCtl4SYZbask+Syngk1uMPCeN9+nSiZo6zX5s0RTq/J9Pnaaf/KHw==} - dev: true - - /babel-plugin-apply-mdx-type-prop/1.6.22_@babel+core@7.12.9: - resolution: {integrity: sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==} - peerDependencies: - '@babel/core': ^7.11.6 || 7 - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.10.4 - '@mdx-js/util': 1.6.22 - dev: true - - /babel-plugin-dynamic-import-node/2.3.3: + /babel-plugin-dynamic-import-node@2.3.3: resolution: {integrity: sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==} dependencies: object.assign: 4.1.2 dev: true - /babel-plugin-extract-import-names/1.6.22: - resolution: {integrity: sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==} - dependencies: - '@babel/helper-plugin-utils': 7.10.4 - dev: true - - /babel-plugin-istanbul/5.2.0: + /babel-plugin-istanbul@5.2.0: resolution: {integrity: sha512-5LphC0USA8t4i1zCtjbbNb6jJj/9+X6P37Qfirc/70EQ34xKlMW+a1RHGwxGI+SwWpNwZ27HqvzAobeqaXwiZw==} engines: {node: '>=6'} dependencies: @@ -8638,27 +4346,14 @@ packages: - supports-color dev: true - /babel-plugin-istanbul/6.1.1: - resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} - engines: {node: '>=8'} - dependencies: - '@babel/helper-plugin-utils': 7.19.0 - '@istanbuljs/load-nyc-config': 1.1.0 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-instrument: 5.2.0 - test-exclude: 6.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-jest-hoist/24.9.0: + /babel-plugin-jest-hoist@24.9.0: resolution: {integrity: sha512-2EMA2P8Vp7lG0RAzr4HXqtYwacfMErOuv1U3wrvxHX6rD1sV6xS3WXG3r8TRQ2r6w8OhvSdWt+z41hQNwNm3Xw==} engines: {node: '>= 6'} dependencies: '@types/babel__traverse': 7.0.15 dev: true - /babel-plugin-macros/2.8.0: + /babel-plugin-macros@2.8.0: resolution: {integrity: sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg==} dependencies: '@babel/runtime': 7.18.6 @@ -8666,118 +4361,15 @@ packages: resolve: 1.19.0 dev: true - /babel-plugin-macros/3.1.0: - resolution: {integrity: sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==} - engines: {node: '>=10', npm: '>=6'} - dependencies: - '@babel/runtime': 7.18.6 - cosmiconfig: 7.0.1 - resolve: 1.19.0 - dev: true - - /babel-plugin-polyfill-corejs2/0.3.3_@babel+core@7.19.1: - resolution: {integrity: sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/compat-data': 7.19.1 - '@babel/core': 7.19.1 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.19.1 - semver: 6.3.0 - 
transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-corejs2/0.3.3_@babel+core@7.9.0: - resolution: {integrity: sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/compat-data': 7.19.1 - '@babel/core': 7.9.0 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.9.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-corejs3/0.1.7_@babel+core@7.19.1: - resolution: {integrity: sha512-u+gbS9bbPhZWEeyy1oR/YaaSpod/KDT07arZHb80aTpl8H5ZBq+uN1nN9/xtX7jQyfLdPfoqI4Rue/MQSWJquw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-define-polyfill-provider': 0.1.5_@babel+core@7.19.1 - core-js-compat: 3.25.1 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-corejs3/0.6.0_@babel+core@7.19.1: - resolution: {integrity: sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.19.1 - core-js-compat: 3.25.1 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-corejs3/0.6.0_@babel+core@7.9.0: - resolution: {integrity: sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.9.0 - core-js-compat: 3.25.1 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-regenerator/0.4.1_@babel+core@7.19.1: - resolution: {integrity: sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.19.1 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.19.1 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-polyfill-regenerator/0.4.1_@babel+core@7.9.0: - resolution: {integrity: sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==} - peerDependencies: - '@babel/core': ^7.0.0-0 || 7 - dependencies: - '@babel/core': 7.9.0 - '@babel/helper-define-polyfill-provider': 0.3.3_@babel+core@7.9.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-react-docgen/4.2.1: - resolution: {integrity: sha512-UQ0NmGHj/HAqi5Bew8WvNfCk8wSsmdgNd8ZdMjBCICtyCJCq9LiqgqvjCYe570/Wg7AQArSq1VQ60Dd/CHN7mQ==} - dependencies: - ast-types: 0.14.2 - lodash: 4.17.21 - react-docgen: 5.4.3 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-transform-async-to-promises/0.8.15: + /babel-plugin-transform-async-to-promises@0.8.15: resolution: {integrity: sha512-fDXP68ZqcinZO2WCiimCL9zhGjGXOnn3D33zvbh+yheZ/qOrNVVDDIBtAaM3Faz8TRvQzHiRKsu3hfrBAhEncQ==} dev: true - /babel-plugin-transform-react-remove-prop-types/0.4.24: + /babel-plugin-transform-react-remove-prop-types@0.4.24: resolution: {integrity: sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==} dev: true - /babel-plugin-transform-replace-expressions/0.2.0_@babel+core@7.12.9: + /babel-plugin-transform-replace-expressions@0.2.0(@babel/core@7.12.9): resolution: 
{integrity: sha512-Eh1rRd9hWEYgkgoA3D0kGp7xJ/wgVshgsqmq60iC4HVWD+Lux+fNHSHBa2v1Hsv+dHflShC71qKhiH40OiPtDA==} peerDependencies: '@babel/core': ^7.0.0-0 || 7 @@ -8786,43 +4378,43 @@ packages: '@babel/parser': 7.12.7 dev: true - /babel-preset-jest/24.9.0_@babel+core@7.19.1: + /babel-preset-jest@24.9.0(@babel/core@7.19.1): resolution: {integrity: sha512-izTUuhE4TMfTRPF92fFwD2QfdXaZW08qvWTFCI51V8rW5x00UuPgc3ajRoWofXOuxjfcOM5zzSYsQS3H8KGCAg==} engines: {node: '>= 6'} peerDependencies: '@babel/core': ^7.0.0 || 7 dependencies: '@babel/core': 7.19.1 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.19.1 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.19.1) babel-plugin-jest-hoist: 24.9.0 dev: true - /babel-preset-jest/24.9.0_@babel+core@7.9.0: + /babel-preset-jest@24.9.0(@babel/core@7.9.0): resolution: {integrity: sha512-izTUuhE4TMfTRPF92fFwD2QfdXaZW08qvWTFCI51V8rW5x00UuPgc3ajRoWofXOuxjfcOM5zzSYsQS3H8KGCAg==} engines: {node: '>= 6'} peerDependencies: '@babel/core': ^7.0.0 || 7 dependencies: '@babel/core': 7.9.0 - '@babel/plugin-syntax-object-rest-spread': 7.8.3_@babel+core@7.9.0 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.9.0) babel-plugin-jest-hoist: 24.9.0 dev: true - /babel-preset-react-app/9.1.2: + /babel-preset-react-app@9.1.2: resolution: {integrity: sha512-k58RtQOKH21NyKtzptoAvtAODuAJJs3ZhqBMl456/GnXEQ/0La92pNmwgWoMn5pBTrsvk3YYXdY7zpY4e3UIxA==} dependencies: '@babel/core': 7.9.0 - '@babel/plugin-proposal-class-properties': 7.8.3_@babel+core@7.9.0 - '@babel/plugin-proposal-decorators': 7.8.3_@babel+core@7.9.0 - '@babel/plugin-proposal-nullish-coalescing-operator': 7.8.3_@babel+core@7.9.0 - '@babel/plugin-proposal-numeric-separator': 7.8.3_@babel+core@7.9.0 - '@babel/plugin-proposal-optional-chaining': 7.9.0_@babel+core@7.9.0 - '@babel/plugin-transform-flow-strip-types': 7.9.0_@babel+core@7.9.0 - '@babel/plugin-transform-react-display-name': 7.8.3_@babel+core@7.9.0 - '@babel/plugin-transform-runtime': 7.9.0_@babel+core@7.9.0 - '@babel/preset-env': 7.9.0_@babel+core@7.9.0 - '@babel/preset-react': 7.9.1_@babel+core@7.9.0 - '@babel/preset-typescript': 7.9.0_@babel+core@7.9.0 + '@babel/plugin-proposal-class-properties': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-proposal-decorators': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-proposal-numeric-separator': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-proposal-optional-chaining': 7.9.0(@babel/core@7.9.0) + '@babel/plugin-transform-flow-strip-types': 7.9.0(@babel/core@7.9.0) + '@babel/plugin-transform-react-display-name': 7.8.3(@babel/core@7.9.0) + '@babel/plugin-transform-runtime': 7.9.0(@babel/core@7.9.0) + '@babel/preset-env': 7.9.0(@babel/core@7.9.0) + '@babel/preset-react': 7.9.1(@babel/core@7.9.0) + '@babel/preset-typescript': 7.9.0(@babel/core@7.9.0) '@babel/runtime': 7.9.0 babel-plugin-macros: 2.8.0 babel-plugin-transform-react-remove-prop-types: 0.4.24 @@ -8830,19 +4422,15 @@ packages: - supports-color dev: true - /bail/1.0.5: - resolution: {integrity: sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==} - dev: true - - /balanced-match/1.0.0: + /balanced-match@1.0.0: resolution: {integrity: sha1-ibTRmasr7kneFk6gK4nORi1xt2c=} dev: true - /balanced-match/2.0.0: + /balanced-match@2.0.0: resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==} dev: true - /base/0.11.2: + /base@0.11.2: resolution: {integrity: 
sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==} engines: {node: '>=0.10.0'} dependencies: @@ -8855,116 +4443,40 @@ packages: pascalcase: 0.1.1 dev: true - /base64-js/1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: true - - /bcrypt-pbkdf/1.0.2: + /bcrypt-pbkdf@1.0.2: resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} dependencies: tweetnacl: 0.14.5 dev: true - /better-opn/2.1.1: - resolution: {integrity: sha512-kIPXZS5qwyKiX/HcRvDYfmBQUa8XP17I0mYZZ0y4UhpYOSvtsLHDYqmomS+Mj20aDvD3knEiQ0ecQy2nhio3yA==} - engines: {node: '>8.0.0'} - dependencies: - open: 7.4.2 - dev: true - - /big-integer/1.6.51: - resolution: {integrity: sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==} - engines: {node: '>=0.6'} - dev: true - optional: true - - /big.js/5.2.2: + /big.js@5.2.2: resolution: {integrity: sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==} dev: true - /binary-extensions/1.13.1: - resolution: {integrity: sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /binary-extensions/2.1.0: + /binary-extensions@2.1.0: resolution: {integrity: sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==} engines: {node: '>=8'} dev: true - /bindings/1.5.0: + /bindings@1.5.0: resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} dependencies: file-uri-to-path: 1.0.0 dev: true optional: true - /bluebird/3.7.2: - resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==} - dev: true - - /bn.js/4.12.0: - resolution: {integrity: sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==} - dev: true - - /bn.js/5.2.1: - resolution: {integrity: sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==} - dev: true - - /body-parser/1.20.0: - resolution: {integrity: sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - dependencies: - bytes: 3.1.2 - content-type: 1.0.4 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.10.3 - raw-body: 2.5.1 - type-is: 1.6.18 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /boolbase/1.0.0: + /boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} dev: true - /boxen/5.1.2: - resolution: {integrity: sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==} - engines: {node: '>=10'} - dependencies: - ansi-align: 3.0.1 - camelcase: 6.2.0 - chalk: 4.1.2 - cli-boxes: 2.2.1 - string-width: 4.2.3 - type-fest: 0.20.2 - widest-line: 3.1.0 - wrap-ansi: 7.0.0 - dev: true - - /bplist-parser/0.1.1: - resolution: {integrity: sha512-2AEM0FXy8ZxVLBuqX0hqt1gDwcnz2zygEkQ6zaD5Wko/sB9paUNwlpawrFtKeHUAQUOzjVy9AO4oeonqIHKA9Q==} - dependencies: - big-integer: 1.6.51 - dev: true - optional: true - - /brace-expansion/1.1.11: + /brace-expansion@1.1.11: resolution: 
{integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} dependencies: balanced-match: 1.0.0 concat-map: 0.0.1 dev: true - /braces/2.3.2: + /braces@2.3.2: resolution: {integrity: sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==} engines: {node: '>=0.10.0'} dependencies: @@ -8973,99 +4485,40 @@ packages: extend-shallow: 2.0.1 fill-range: 4.0.0 isobject: 3.0.1 - repeat-element: 1.1.3 - snapdragon: 0.8.2 - snapdragon-node: 2.1.1 - split-string: 3.1.0 - to-regex: 3.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /braces/3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} - dependencies: - fill-range: 7.0.1 - dev: true - - /brorand/1.1.0: - resolution: {integrity: sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==} - dev: true - - /brotli-size/4.0.0: - resolution: {integrity: sha512-uA9fOtlTRC0iqKfzff1W34DXUA3GyVqbUaeo3Rw3d4gd1eavKVCETXrn3NzO74W+UVkG3UHu8WxUi+XvKI/huA==} - engines: {node: '>= 10.16.0'} - dependencies: - duplexer: 0.1.1 - dev: true - - /browser-process-hrtime/1.0.0: - resolution: {integrity: sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==} - dev: true - - /browser-resolve/1.11.3: - resolution: {integrity: sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==} - dependencies: - resolve: 1.1.7 - dev: true - - /browserify-aes/1.2.0: - resolution: {integrity: sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==} - dependencies: - buffer-xor: 1.0.3 - cipher-base: 1.0.4 - create-hash: 1.2.0 - evp_bytestokey: 1.0.3 - inherits: 2.0.4 - safe-buffer: 5.2.1 - dev: true - - /browserify-cipher/1.0.1: - resolution: {integrity: sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==} - dependencies: - browserify-aes: 1.2.0 - browserify-des: 1.0.2 - evp_bytestokey: 1.0.3 + repeat-element: 1.1.3 + snapdragon: 0.8.2 + snapdragon-node: 2.1.1 + split-string: 3.1.0 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true - /browserify-des/1.0.2: - resolution: {integrity: sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==} + /braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} dependencies: - cipher-base: 1.0.4 - des.js: 1.0.1 - inherits: 2.0.4 - safe-buffer: 5.2.1 + fill-range: 7.0.1 dev: true - /browserify-rsa/4.1.0: - resolution: {integrity: sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==} + /brotli-size@4.0.0: + resolution: {integrity: sha512-uA9fOtlTRC0iqKfzff1W34DXUA3GyVqbUaeo3Rw3d4gd1eavKVCETXrn3NzO74W+UVkG3UHu8WxUi+XvKI/huA==} + engines: {node: '>= 10.16.0'} dependencies: - bn.js: 5.2.1 - randombytes: 2.1.0 + duplexer: 0.1.1 dev: true - /browserify-sign/4.2.1: - resolution: {integrity: sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==} - dependencies: - bn.js: 5.2.1 - browserify-rsa: 4.1.0 - create-hash: 1.2.0 - create-hmac: 1.1.7 - elliptic: 6.5.4 - inherits: 2.0.4 - parse-asn1: 5.1.6 - readable-stream: 3.6.0 - safe-buffer: 5.2.1 + /browser-process-hrtime@1.0.0: + resolution: {integrity: 
sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==} dev: true - /browserify-zlib/0.2.0: - resolution: {integrity: sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==} + /browser-resolve@1.11.3: + resolution: {integrity: sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==} dependencies: - pako: 1.0.11 + resolve: 1.1.7 dev: true - /browserslist/4.20.4: + /browserslist@4.20.4: resolution: {integrity: sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -9077,7 +4530,7 @@ packages: picocolors: 1.0.0 dev: true - /browserslist/4.21.4: + /browserslist@4.21.4: resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -9085,116 +4538,25 @@ packages: caniuse-lite: 1.0.30001401 electron-to-chromium: 1.4.253 node-releases: 2.0.6 - update-browserslist-db: 1.0.9_browserslist@4.21.4 + update-browserslist-db: 1.0.9(browserslist@4.21.4) dev: true - /bser/2.1.1: + /bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} dependencies: node-int64: 0.4.0 dev: true - /buffer-from/1.1.1: + /buffer-from@1.1.1: resolution: {integrity: sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==} dev: true - /buffer-xor/1.0.3: - resolution: {integrity: sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==} - dev: true - - /buffer/4.9.2: - resolution: {integrity: sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==} - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - isarray: 1.0.0 - dev: true - - /builtin-modules/3.1.0: + /builtin-modules@3.1.0: resolution: {integrity: sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==} engines: {node: '>=6'} dev: true - /builtin-status-codes/3.0.0: - resolution: {integrity: sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==} - dev: true - - /bytes/3.0.0: - resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} - engines: {node: '>= 0.8'} - dev: true - - /bytes/3.1.2: - resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} - engines: {node: '>= 0.8'} - dev: true - - /c8/7.12.0: - resolution: {integrity: sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==} - engines: {node: '>=10.12.0'} - hasBin: true - dependencies: - '@bcoe/v8-coverage': 0.2.3 - '@istanbuljs/schema': 0.1.3 - find-up: 5.0.0 - foreground-child: 2.0.0 - istanbul-lib-coverage: 3.2.0 - istanbul-lib-report: 3.0.0 - istanbul-reports: 3.1.5 - rimraf: 3.0.2 - test-exclude: 6.0.0 - v8-to-istanbul: 9.0.1 - yargs: 16.2.0 - yargs-parser: 20.2.9 - dev: true - - /cacache/12.0.4: - resolution: {integrity: sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ==} - dependencies: - bluebird: 3.7.2 - chownr: 1.1.4 - figgy-pudding: 3.5.2 - glob: 7.2.3 - graceful-fs: 4.2.10 - infer-owner: 1.0.4 - lru-cache: 5.1.1 - mississippi: 3.0.0 - 
mkdirp: 0.5.5 - move-concurrently: 1.0.1 - promise-inflight: 1.0.1_bluebird@3.7.2 - rimraf: 2.7.1 - ssri: 6.0.2 - unique-filename: 1.1.1 - y18n: 4.0.0 - dev: true - - /cacache/15.3.0: - resolution: {integrity: sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==} - engines: {node: '>= 10'} - dependencies: - '@npmcli/fs': 1.1.1 - '@npmcli/move-file': 1.1.2 - chownr: 2.0.0 - fs-minipass: 2.1.0 - glob: 7.2.3 - infer-owner: 1.0.4 - lru-cache: 6.0.0 - minipass: 3.3.4 - minipass-collect: 1.0.2 - minipass-flush: 1.0.5 - minipass-pipeline: 1.2.4 - mkdirp: 1.0.4 - p-map: 4.0.0 - promise-inflight: 1.0.1 - rimraf: 3.0.2 - ssri: 8.0.1 - tar: 6.1.11 - unique-filename: 1.1.1 - transitivePeerDependencies: - - bluebird - dev: true - - /cache-base/1.0.1: + /cache-base@1.0.1: resolution: {integrity: sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==} engines: {node: '>=0.10.0'} dependencies: @@ -9209,63 +4571,38 @@ packages: unset-value: 1.0.0 dev: true - /call-bind/1.0.2: + /call-bind@1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: function-bind: 1.1.1 get-intrinsic: 1.1.1 dev: true - /call-me-maybe/1.0.1: - resolution: {integrity: sha512-wCyFsDQkKPwwF8BDwOiWNx/9K45L/hvggQiDbve+viMNMQnWhrlYIuBk09offfwCRtCO9P6XwUttufzU11WCVw==} - dev: true - - /caller-callsite/2.0.0: + /caller-callsite@2.0.0: resolution: {integrity: sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=} engines: {node: '>=4'} dependencies: callsites: 2.0.0 dev: true - /caller-path/2.0.0: + /caller-path@2.0.0: resolution: {integrity: sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=} engines: {node: '>=4'} dependencies: caller-callsite: 2.0.0 dev: true - /callsites/2.0.0: + /callsites@2.0.0: resolution: {integrity: sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==} engines: {node: '>=4'} dev: true - /callsites/3.1.0: + /callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} dev: true - /camel-case/4.1.2: - resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} - dependencies: - pascal-case: 3.1.2 - tslib: 2.4.0 - dev: true - - /camelcase-css/2.0.1: - resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} - engines: {node: '>= 6'} - dev: true - - /camelcase-keys/2.1.0: - resolution: {integrity: sha512-bA/Z/DERHKqoEOrp+qeGKw1QlvEQkGZSc0XaY6VnTxZr+Kv1G5zFwttpjv8qxZ/sBPT4nthwZaAcsAZTJlSKXQ==} - engines: {node: '>=0.10.0'} - dependencies: - camelcase: 2.1.1 - map-obj: 1.0.1 - dev: true - optional: true - - /camelcase-keys/6.2.2: + /camelcase-keys@6.2.2: resolution: {integrity: sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==} engines: {node: '>=8'} dependencies: @@ -9274,23 +4611,17 @@ packages: quick-lru: 4.0.1 dev: true - /camelcase/2.1.1: - resolution: {integrity: sha512-DLIsRzJVBQu72meAKPkWQOLcujdXT32hwdfnkI1frSiSRMK1MofjKHf+MEx0SB6fjEFXL8fBDv1dKymBlOp4Qw==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /camelcase/5.3.1: + /camelcase@5.3.1: resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} engines: {node: '>=6'} dev: true - /camelcase/6.2.0: + /camelcase@6.2.0: resolution: {integrity: 
sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==} engines: {node: '>=10'} dev: true - /caniuse-api/3.0.0: + /caniuse-api@3.0.0: resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} dependencies: browserslist: 4.21.4 @@ -9299,35 +4630,26 @@ packages: lodash.uniq: 4.5.0 dev: true - /caniuse-lite/1.0.30001355: + /caniuse-lite@1.0.30001355: resolution: {integrity: sha512-Sd6pjJHF27LzCB7pT7qs+kuX2ndurzCzkpJl6Qct7LPSZ9jn0bkOA8mdgMgmqnQAWLVOOGjLpc+66V57eLtb1g==} dev: true - /caniuse-lite/1.0.30001401: + /caniuse-lite@1.0.30001401: resolution: {integrity: sha512-fmC/D1YCOvs5uWFP3FNVJGTdE0QFQLs1dJ7W94wP0p46lXrDl3BNgZArKPm6+XdIVtczMN1dPNVOFsJd/HRnGQ==} dev: true - /capture-exit/2.0.0: + /capture-exit@2.0.0: resolution: {integrity: sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g==} engines: {node: 6.* || 8.* || >= 10.*} dependencies: rsvp: 4.8.5 dev: true - /case-sensitive-paths-webpack-plugin/2.4.0: - resolution: {integrity: sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw==} - engines: {node: '>=4'} - dev: true - - /caseless/0.12.0: + /caseless@0.12.0: resolution: {integrity: sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==} dev: true - /ccount/1.1.0: - resolution: {integrity: sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==} - dev: true - - /chalk/1.1.3: + /chalk@1.1.3: resolution: {integrity: sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==} engines: {node: '>=0.10.0'} dependencies: @@ -9338,7 +4660,7 @@ packages: supports-color: 2.0.0 dev: true - /chalk/2.4.2: + /chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} dependencies: @@ -9347,7 +4669,7 @@ packages: supports-color: 5.5.0 dev: true - /chalk/4.1.2: + /chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} dependencies: @@ -9355,20 +4677,23 @@ packages: supports-color: 7.2.0 dev: true - /character-entities-legacy/1.1.4: + /character-entities-legacy@1.1.4: resolution: {integrity: sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==} + dev: false - /character-entities/1.2.4: + /character-entities@1.2.4: resolution: {integrity: sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==} + dev: false - /character-reference-invalid/1.1.4: + /character-reference-invalid@1.1.4: resolution: {integrity: sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==} + dev: false - /chardet/0.7.0: + /chardet@0.7.0: resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} dev: true - /cheerio/1.0.0-rc.3: + /cheerio@1.0.0-rc.3: resolution: {integrity: sha512-0td5ijfUPuubwLUu0OBoe98gZj8C/AA+RW3v67GPlGOrvxWjZmBXiBCRU+I8VEiNyJzjth40POfHiz2RB3gImA==} engines: {node: '>= 0.6'} dependencies: @@ -9380,29 +4705,7 @@ packages: parse5: 3.0.3 dev: true - /chokidar/2.1.8: - resolution: {integrity: sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==} - deprecated: Chokidar 2 does not receive security 
updates since 2019. Upgrade to chokidar 3 with 15x fewer dependencies - dependencies: - anymatch: 2.0.0 - async-each: 1.0.3 - braces: 2.3.2 - glob-parent: 3.1.0 - inherits: 2.0.4 - is-binary-path: 1.0.1 - is-glob: 4.0.1 - normalize-path: 3.0.0 - path-is-absolute: 1.0.1 - readdirp: 2.2.1 - upath: 1.2.0 - optionalDependencies: - fsevents: 1.2.13 - transitivePeerDependencies: - - supports-color - dev: true - optional: true - - /chokidar/3.4.3: + /chokidar@3.4.3: resolution: {integrity: sha512-DtM3g7juCXQxFVSNPNByEC2+NImtBuxQQvWlHunpJIS5Ocr0lG306cC7FCi7cEA0fzmybPUIl4txBIobk1gGOQ==} engines: {node: '>= 8.10.0'} dependencies: @@ -9417,32 +4720,11 @@ packages: fsevents: 2.1.3 dev: true - /chownr/1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} - dev: true - - /chownr/2.0.0: - resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} - engines: {node: '>=10'} - dev: true - - /chrome-trace-event/1.0.3: - resolution: {integrity: sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==} - engines: {node: '>=6.0'} - dev: true - - /ci-info/2.0.0: + /ci-info@2.0.0: resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} dev: true - /cipher-base/1.0.4: - resolution: {integrity: sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==} - dependencies: - inherits: 2.0.4 - safe-buffer: 5.2.1 - dev: true - - /class-utils/0.3.6: + /class-utils@0.3.6: resolution: {integrity: sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==} engines: {node: '>=0.10.0'} dependencies: @@ -9452,45 +4734,19 @@ packages: static-extend: 0.1.2 dev: true - /clean-css/4.2.4: - resolution: {integrity: sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A==} - engines: {node: '>= 4.0'} - dependencies: - source-map: 0.6.1 - dev: true - - /clean-stack/2.2.0: - resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} - engines: {node: '>=6'} - dev: true - - /cli-boxes/2.2.1: - resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} - engines: {node: '>=6'} - dev: true - - /cli-cursor/3.1.0: + /cli-cursor@3.1.0: resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} engines: {node: '>=8'} dependencies: restore-cursor: 3.1.0 dev: true - /cli-table3/0.6.3: - resolution: {integrity: sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==} - engines: {node: 10.* || >= 12.*} - dependencies: - string-width: 4.2.3 - optionalDependencies: - '@colors/colors': 1.5.0 - dev: true - - /cli-width/3.0.0: + /cli-width@3.0.0: resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} engines: {node: '>= 10'} dev: true - /cliui/5.0.0: + /cliui@5.0.0: resolution: {integrity: sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==} dependencies: string-width: 3.1.0 @@ -9498,15 +4754,7 @@ packages: wrap-ansi: 5.1.0 dev: true - /cliui/7.0.4: - resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} - 
dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - dev: true - - /cliui/8.0.1: + /cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} dependencies: @@ -9515,37 +4763,28 @@ packages: wrap-ansi: 7.0.0 dev: true - /clone-buffer/1.0.0: + /clone-buffer@1.0.0: resolution: {integrity: sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g==} engines: {node: '>= 0.10'} dev: true - /clone-deep/4.0.1: - resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==} - engines: {node: '>=6'} - dependencies: - is-plain-object: 2.0.4 - kind-of: 6.0.3 - shallow-clone: 3.0.1 - dev: true - - /clone-regexp/2.2.0: + /clone-regexp@2.2.0: resolution: {integrity: sha512-beMpP7BOtTipFuW8hrJvREQ2DrRu3BE7by0ZpibtfBA+qfHYvMGTc2Yb1JMYPKg/JUw0CHYvpg796aNTSW9z7Q==} engines: {node: '>=6'} dependencies: is-regexp: 2.1.0 dev: true - /clone-stats/1.0.0: + /clone-stats@1.0.0: resolution: {integrity: sha512-au6ydSpg6nsrigcZ4m8Bc9hxjeW+GJ8xh5G3BJCMt4WXe1H10UNaVOamqQTmrx1kjVuxAHIQSNU6hY4Nsn9/ag==} dev: true - /clone/2.1.2: + /clone@2.1.2: resolution: {integrity: sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==} engines: {node: '>=0.8'} dev: true - /cloneable-readable/1.1.3: + /cloneable-readable@1.1.3: resolution: {integrity: sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ==} dependencies: inherits: 2.0.4 @@ -9553,27 +4792,21 @@ packages: readable-stream: 2.3.7 dev: true - /clsx/1.1.0: - resolution: {integrity: sha512-3avwM37fSK5oP6M5rQ9CNe99lwxhXDOeSWVPAOYF6OazUTgZCMb0yWlJpmdD74REy1gkEaFiub2ULv4fq9GUhA==} - engines: {node: '>=6'} - dev: true - - /clsx/1.1.1: + /clsx@1.1.1: resolution: {integrity: sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA==} engines: {node: '>=6'} dev: false - /clsx/1.2.1: + /clsx@1.2.1: resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} engines: {node: '>=6'} - dev: true - /co/4.6.0: + /co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} dev: true - /coa/2.0.2: + /coa@2.0.2: resolution: {integrity: sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==} engines: {node: '>= 4.0'} dependencies: @@ -9582,11 +4815,7 @@ packages: q: 1.5.1 dev: true - /collapse-white-space/1.0.6: - resolution: {integrity: sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==} - dev: true - - /collection-visit/1.0.0: + /collection-visit@1.0.0: resolution: {integrity: sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==} engines: {node: '>=0.10.0'} dependencies: @@ -9594,239 +4823,138 @@ packages: object-visit: 1.0.1 dev: true - /color-convert/1.9.3: + /color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} dependencies: color-name: 1.1.3 dev: true - /color-convert/2.0.1: + /color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} dependencies: 
color-name: 1.1.4 dev: true - /color-name/1.1.3: + /color-name@1.1.3: resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} dev: true - /color-name/1.1.4: + /color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} dev: true - /color-string/1.5.4: + /color-string@1.5.4: resolution: {integrity: sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==} dependencies: color-name: 1.1.4 simple-swizzle: 0.2.2 dev: true - /color-support/1.1.3: - resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} - hasBin: true - dev: true - - /color/3.1.3: + /color@3.1.3: resolution: {integrity: sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==} dependencies: color-convert: 1.9.3 color-string: 1.5.4 dev: true - /colord/2.9.2: + /colord@2.9.2: resolution: {integrity: sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ==} dev: true - /colorette/1.2.1: + /colorette@1.2.1: resolution: {integrity: sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw==} dev: true - /colorspace/1.1.4: + /colorspace@1.1.4: resolution: {integrity: sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==} dependencies: color: 3.1.3 text-hex: 1.0.0 dev: true - /combined-stream/1.0.8: + /combined-stream@1.0.8: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 dev: true - /comma-separated-tokens/1.0.8: + /comma-separated-tokens@1.0.8: resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==} + dev: false - /commander/2.20.3: + /commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} dev: true - /commander/4.1.1: - resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} - engines: {node: '>= 6'} - dev: true - - /commander/6.2.1: - resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==} - engines: {node: '>= 6'} - dev: true - - /commander/7.2.0: + /commander@7.2.0: resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} engines: {node: '>= 10'} dev: true - /common-path-prefix/3.0.0: - resolution: {integrity: sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==} - dev: true - - /commondir/1.0.1: + /commondir@1.0.1: resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} dev: true - /component-emitter/1.3.0: + /component-emitter@1.3.0: resolution: {integrity: sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==} dev: true - /compressible/2.0.18: - resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - dev: true - - /compression/1.7.4: - resolution: {integrity: 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==} - engines: {node: '>= 0.8.0'} - dependencies: - accepts: 1.3.8 - bytes: 3.0.0 - compressible: 2.0.18 - debug: 2.6.9 - on-headers: 1.0.2 - safe-buffer: 5.1.2 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - dev: true + /compute-scroll-into-view@2.0.4: + resolution: {integrity: sha512-y/ZA3BGnxoM/QHHQ2Uy49CLtnWPbt4tTPpEEZiEmmiWBFKjej7nEyH8Ryz54jH0MLXflUYA3Er2zUxPSJu5R+g==} + dev: false - /concat-map/0.0.1: + /concat-map@0.0.1: resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=} dev: true - /concat-stream/1.6.2: - resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==} - engines: {'0': node >= 0.8} - dependencies: - buffer-from: 1.1.1 - inherits: 2.0.4 - readable-stream: 2.3.7 - typedarray: 0.0.6 - dev: true - - /concat-with-sourcemaps/1.1.0: + /concat-with-sourcemaps@1.1.0: resolution: {integrity: sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==} dependencies: source-map: 0.6.1 dev: true - /console-browserify/1.2.0: - resolution: {integrity: sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==} - dev: true - - /console-control-strings/1.1.0: - resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} - dev: true - - /constants-browserify/1.0.0: - resolution: {integrity: sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==} - dev: true - - /content-disposition/0.5.4: - resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} - engines: {node: '>= 0.6'} - dependencies: - safe-buffer: 5.2.1 - dev: true - - /content-type/1.0.4: - resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==} - engines: {node: '>= 0.6'} - dev: true - - /convert-source-map/1.7.0: + /convert-source-map@1.7.0: resolution: {integrity: sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==} dependencies: safe-buffer: 5.1.2 dev: true - /cookie-signature/1.0.6: - resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} - dev: true - - /cookie/0.5.0: - resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} - engines: {node: '>= 0.6'} - dev: true - - /copy-concurrently/1.0.5: - resolution: {integrity: sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A==} - dependencies: - aproba: 1.2.0 - fs-write-stream-atomic: 1.0.10 - iferr: 0.1.5 - mkdirp: 0.5.5 - rimraf: 2.7.1 - run-queue: 1.0.3 - dev: true - - /copy-descriptor/0.1.1: + /copy-descriptor@0.1.1: resolution: {integrity: sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==} engines: {node: '>=0.10.0'} dev: true - /copy-to-clipboard/3.3.1: + /copy-to-clipboard@3.3.1: resolution: {integrity: sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==} dependencies: toggle-selection: 1.0.6 dev: false - /core-js-compat/3.25.1: - resolution: {integrity: sha512-pOHS7O0i8Qt4zlPW/eIFjwp+NrTPx+wTL0ctgI2fHn31sZOq89rDsmtc/A2vAX7r6shl+bmVI+678He46jgBlw==} - dependencies: - browserslist: 4.21.4 - dev: 
true - - /core-js-compat/3.7.0: + /core-js-compat@3.7.0: resolution: {integrity: sha512-V8yBI3+ZLDVomoWICO6kq/CD28Y4r1M7CWeO4AGpMdMfseu8bkSubBmUPySMGKRTS+su4XQ07zUkAsiu9FCWTg==} dependencies: browserslist: 4.20.4 semver: 7.0.0 dev: true - /core-js-pure/3.25.1: - resolution: {integrity: sha512-7Fr74bliUDdeJCBMxkkIuQ4xfxn/SwrVg+HkJUAoNEXVqYLv55l6Af0dJ5Lq2YBUW9yKqSkLXaS5SYPK6MGa/A==} - requiresBuild: true - dev: true - - /core-js/1.2.7: + /core-js@1.2.7: resolution: {integrity: sha512-ZiPp9pZlgxpWRu0M+YWbm6+aQ84XEfH1JRXvfOc/fILWI0VKhLC2LX13X1NYq4fULzLMq7Hfh43CSo2/aIaUPA==} deprecated: core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js. dev: true - /core-js/3.23.1: + /core-js@3.23.1: resolution: {integrity: sha512-wfMYHWi1WQjpgZNC9kAlN4ut04TM9fUTdi7CqIoTVM7yaiOUQTklOzfb+oWH3r9edQcT3F887swuVmxrV+CC8w==} deprecated: core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js. requiresBuild: true dev: true - /core-util-is/1.0.2: + /core-util-is@1.0.2: resolution: {integrity: sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==} dev: true - /cosmiconfig/5.2.1: + /cosmiconfig@5.2.1: resolution: {integrity: sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==} engines: {node: '>=4'} dependencies: @@ -9836,7 +4964,7 @@ packages: parse-json: 4.0.0 dev: true - /cosmiconfig/6.0.0: + /cosmiconfig@6.0.0: resolution: {integrity: sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==} engines: {node: '>=8'} dependencies: @@ -9847,7 +4975,7 @@ packages: yaml: 1.10.0 dev: true - /cosmiconfig/7.0.1: + /cosmiconfig@7.0.1: resolution: {integrity: sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==} engines: {node: '>=10'} dependencies: @@ -9858,66 +4986,11 @@ packages: yaml: 1.10.0 dev: true - /cp-file/7.0.0: - resolution: {integrity: sha512-0Cbj7gyvFVApzpK/uhCtQ/9kE9UnYpxMzaq5nQQC/Dh4iaj5fxp7iEFIullrYwzj8nf0qnsI1Qsx34hAeAebvw==} - engines: {node: '>=8'} - dependencies: - graceful-fs: 4.2.10 - make-dir: 3.1.0 - nested-error-stacks: 2.1.1 - p-event: 4.2.0 - dev: true - - /cpy/8.1.2: - resolution: {integrity: sha512-dmC4mUesv0OYH2kNFEidtf/skUwv4zePmGeepjyyJ0qTo5+8KhA1o99oIAwVVLzQMAeDJml74d6wPPKb6EZUTg==} - engines: {node: '>=8'} - dependencies: - arrify: 2.0.1 - cp-file: 7.0.0 - globby: 9.2.0 - has-glob: 1.0.0 - junk: 3.1.0 - nested-error-stacks: 2.1.1 - p-all: 2.1.0 - p-filter: 2.1.0 - p-map: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /create-ecdh/4.0.4: - resolution: {integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==} - dependencies: - bn.js: 4.12.0 - elliptic: 6.5.4 - dev: true - - /create-hash/1.2.0: - resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==} - dependencies: - cipher-base: 1.0.4 - inherits: 2.0.4 - 
md5.js: 1.3.5 - ripemd160: 2.0.2 - sha.js: 2.4.11 - dev: true - - /create-hmac/1.1.7: - resolution: {integrity: sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==} - dependencies: - cipher-base: 1.0.4 - create-hash: 1.2.0 - inherits: 2.0.4 - ripemd160: 2.0.2 - safe-buffer: 5.2.1 - sha.js: 2.4.11 - dev: true - - /create-require/1.1.1: + /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} dev: true - /cross-env/7.0.2: + /cross-env@7.0.2: resolution: {integrity: sha512-KZP/bMEOJEDCkDQAyRhu3RL2ZO/SUVrxQVI0G3YEQ+OLbRA3c6zgixe8Mq8a/z7+HKlNEjo8oiLUs8iRijY2Rw==} engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} hasBin: true @@ -9925,7 +4998,7 @@ packages: cross-spawn: 7.0.3 dev: true - /cross-spawn/6.0.5: + /cross-spawn@6.0.5: resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==} engines: {node: '>=4.8'} dependencies: @@ -9936,7 +5009,7 @@ packages: which: 1.3.1 dev: true - /cross-spawn/7.0.3: + /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} dependencies: @@ -9945,77 +5018,36 @@ packages: which: 2.0.2 dev: true - /crypto-browserify/3.12.0: - resolution: {integrity: sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==} - dependencies: - browserify-cipher: 1.0.1 - browserify-sign: 4.2.1 - create-ecdh: 4.0.4 - create-hash: 1.2.0 - create-hmac: 1.1.7 - diffie-hellman: 5.0.3 - inherits: 2.0.4 - pbkdf2: 3.1.2 - public-encrypt: 4.0.3 - randombytes: 2.1.0 - randomfill: 1.0.4 - dev: true - - /css-color-names/0.0.4: + /css-color-names@0.0.4: resolution: {integrity: sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=} dev: true - /css-declaration-sorter/4.0.1: + /css-declaration-sorter@4.0.1: resolution: {integrity: sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA==} - engines: {node: '>4'} - dependencies: - postcss: 7.0.39 - timsort: 0.3.0 - dev: true - - /css-functions-list/3.0.1: - resolution: {integrity: sha512-PriDuifDt4u4rkDgnqRCLnjfMatufLmWNfQnGCq34xZwpY3oabwhB9SqRBmuvWUgndbemCFlKqg+nO7C2q0SBw==} - engines: {node: '>=12.22'} - dev: true - - /css-loader/3.6.0: - resolution: {integrity: sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ==} - engines: {node: '>= 8.9.0'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true + engines: {node: '>4'} dependencies: - camelcase: 5.3.1 - cssesc: 3.0.0 - icss-utils: 4.1.1 - loader-utils: 1.4.0 - normalize-path: 3.0.0 postcss: 7.0.39 - postcss-modules-extract-imports: 2.0.0 - postcss-modules-local-by-default: 3.0.3 - postcss-modules-scope: 2.2.0 - postcss-modules-values: 3.0.0 - postcss-value-parser: 4.2.0 - schema-utils: 2.7.1 - semver: 6.3.0 + timsort: 0.3.0 dev: true - /css-loader/3.6.0_webpack@4.46.0: - resolution: {integrity: sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ==} - engines: {node: '>= 8.9.0'} + /css-functions-list@3.0.1: + resolution: {integrity: sha512-PriDuifDt4u4rkDgnqRCLnjfMatufLmWNfQnGCq34xZwpY3oabwhB9SqRBmuvWUgndbemCFlKqg+nO7C2q0SBw==} + engines: {node: '>=12.22'} + dev: true + + /css-loader@4.3.0: + resolution: {integrity: sha512-rdezjCjScIrsL8BSYszgT4s476IcNKt6yX69t0pHjJVnPUTDpn4WfIpDQTN3wCJvUvfsz/mFjuGOekf3PY3NUg==} + 
engines: {node: '>= 10.13.0'} peerDependencies: - webpack: ^4.0.0 || ^5.0.0 + webpack: ^4.27.0 || ^5.0.0 peerDependenciesMeta: webpack: optional: true dependencies: - camelcase: 5.3.1 + camelcase: 6.2.0 cssesc: 3.0.0 icss-utils: 4.1.1 - loader-utils: 1.4.0 - normalize-path: 3.0.0 + loader-utils: 2.0.2 postcss: 7.0.39 postcss-modules-extract-imports: 2.0.0 postcss-modules-local-by-default: 3.0.3 @@ -10023,11 +5055,10 @@ packages: postcss-modules-values: 3.0.0 postcss-value-parser: 4.2.0 schema-utils: 2.7.1 - semver: 6.3.0 - webpack: 4.46.0 + semver: 7.3.8 dev: true - /css-modules-loader-core/1.1.0: + /css-modules-loader-core@1.1.0: resolution: {integrity: sha1-WQhmgpShvs0mGuCkziGwtVHyHRY=} dependencies: icss-replace-symbols: 1.1.0 @@ -10038,11 +5069,11 @@ packages: postcss-modules-values: 1.3.0 dev: true - /css-select-base-adapter/0.1.1: + /css-select-base-adapter@0.1.1: resolution: {integrity: sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==} dev: true - /css-select/1.2.0: + /css-select@1.2.0: resolution: {integrity: sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=} dependencies: boolbase: 1.0.0 @@ -10051,7 +5082,7 @@ packages: nth-check: 1.0.2 dev: true - /css-select/2.1.0: + /css-select@2.1.0: resolution: {integrity: sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==} dependencies: boolbase: 1.0.0 @@ -10060,7 +5091,7 @@ packages: nth-check: 1.0.2 dev: true - /css-select/4.3.0: + /css-select@4.3.0: resolution: {integrity: sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==} dependencies: boolbase: 1.0.0 @@ -10070,18 +5101,18 @@ packages: nth-check: 2.1.1 dev: true - /css-selector-parser/1.4.1: + /css-selector-parser@1.4.1: resolution: {integrity: sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g==} dev: true - /css-selector-tokenizer/0.7.3: + /css-selector-tokenizer@0.7.3: resolution: {integrity: sha512-jWQv3oCEL5kMErj4wRnK/OPoBi0D+P1FR2cDCKYPaMeD2eW3/mttav8HT4hT1CKopiJI/psEULjkClhvJo4Lvg==} dependencies: cssesc: 3.0.0 fastparse: 1.1.2 dev: true - /css-tree/1.0.0-alpha.37: + /css-tree@1.0.0-alpha.37: resolution: {integrity: sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==} engines: {node: '>=8.0.0'} dependencies: @@ -10089,7 +5120,7 @@ packages: source-map: 0.6.1 dev: true - /css-tree/1.1.3: + /css-tree@1.1.3: resolution: {integrity: sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==} engines: {node: '>=8.0.0'} dependencies: @@ -10097,27 +5128,27 @@ packages: source-map: 0.6.1 dev: true - /css-what/2.1.3: + /css-what@2.1.3: resolution: {integrity: sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg==} dev: true - /css-what/3.4.2: + /css-what@3.4.2: resolution: {integrity: sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==} engines: {node: '>= 6'} dev: true - /css-what/6.1.0: + /css-what@6.1.0: resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} engines: {node: '>= 6'} dev: true - /cssesc/3.0.0: + /cssesc@3.0.0: resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} engines: {node: '>=4'} hasBin: true dev: true - /cssnano-preset-default/4.0.7: + /cssnano-preset-default@4.0.7: resolution: {integrity: 
sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA==} engines: {node: '>=6.9.0'} dependencies: @@ -10153,82 +5184,70 @@ packages: postcss-unique-selectors: 4.0.1 dev: true - /cssnano-util-get-arguments/4.0.0: + /cssnano-util-get-arguments@4.0.0: resolution: {integrity: sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8=} engines: {node: '>=6.9.0'} dev: true - /cssnano-util-get-match/4.0.0: + /cssnano-util-get-match@4.0.0: resolution: {integrity: sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0=} engines: {node: '>=6.9.0'} dev: true - /cssnano-util-raw-cache/4.0.1: + /cssnano-util-raw-cache@4.0.1: resolution: {integrity: sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /cssnano-util-same-parent/4.0.1: + /cssnano-util-same-parent@4.0.1: resolution: {integrity: sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==} engines: {node: '>=6.9.0'} dev: true - /cssnano/4.1.10: + /cssnano@4.1.10: resolution: {integrity: sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ==} engines: {node: '>=6.9.0'} dependencies: cosmiconfig: 5.2.1 cssnano-preset-default: 4.0.7 is-resolvable: 1.1.0 - postcss: 7.0.35 + postcss: 7.0.39 dev: true - /csso/4.2.0: + /csso@4.2.0: resolution: {integrity: sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==} engines: {node: '>=8.0.0'} dependencies: css-tree: 1.1.3 dev: true - /cssom/0.3.8: + /cssom@0.3.8: resolution: {integrity: sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==} dev: true - /cssom/0.5.0: + /cssom@0.5.0: resolution: {integrity: sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==} dev: true - /cssstyle/1.4.0: + /cssstyle@1.4.0: resolution: {integrity: sha512-GBrLZYZ4X4x6/QEoBnIrqb8B/f5l4+8me2dkom/j1Gtbxy0kBv6OGzKuAsGM75bkGwGAFkt56Iwg28S3XTZgSA==} dependencies: cssom: 0.3.8 dev: true - /csstype/3.1.0: + /csstype@3.1.0: resolution: {integrity: sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==} - /currently-unhandled/0.4.1: - resolution: {integrity: sha512-/fITjgjGU50vjQ4FH6eUoYu+iUoUKIXws2hL15JJpIR+BbTxaXQsMuuyjtNh2WqsSBS5nsaZHFsFecyw5CCAng==} - engines: {node: '>=0.10.0'} - dependencies: - array-find-index: 1.0.2 - dev: true - optional: true - - /cyclist/1.0.1: - resolution: {integrity: sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==} - dev: true - - /dashdash/1.14.1: + /dashdash@1.14.1: resolution: {integrity: sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==} engines: {node: '>=0.10'} dependencies: assert-plus: 1.0.0 dev: true - /data-urls/1.1.0: + /data-urls@1.1.0: resolution: {integrity: sha512-YTWYI9se1P55u58gL5GkQHW4P6VJBJ5iBT+B5a7i2Tjadhv52paJG0qHX4A0OR6/t52odI64KP2YvFpkDOi3eQ==} dependencies: abab: 2.0.5 @@ -10236,7 +5255,7 @@ packages: whatwg-url: 7.1.0 dev: true - /debug/2.6.9: + /debug@2.6.9: resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} peerDependencies: supports-color: '*' @@ -10247,18 +5266,7 @@ packages: ms: 2.0.0 dev: true - /debug/3.2.7: - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: 
'*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.3 - dev: true - - /debug/4.3.3: + /debug@4.3.3: resolution: {integrity: sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==} engines: {node: '>=6.0'} peerDependencies: @@ -10270,7 +5278,7 @@ packages: ms: 2.1.2 dev: true - /decamelize-keys/1.1.0: + /decamelize-keys@1.1.0: resolution: {integrity: sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=} engines: {node: '>=0.10.0'} dependencies: @@ -10278,81 +5286,52 @@ packages: map-obj: 1.0.1 dev: true - /decamelize/1.2.0: + /decamelize@1.2.0: resolution: {integrity: sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=} engines: {node: '>=0.10.0'} dev: true - /decode-uri-component/0.2.0: + /decode-uri-component@0.2.0: resolution: {integrity: sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og==} engines: {node: '>=0.10'} dev: true - /dedent/0.7.0: - resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} - dev: true - - /deep-is/0.1.3: + /deep-is@0.1.3: resolution: {integrity: sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=} dev: true - /deepmerge/2.2.1: + /deepmerge@2.2.1: resolution: {integrity: sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==} engines: {node: '>=0.10.0'} dev: true - /deepmerge/4.2.2: + /deepmerge@4.2.2: resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} engines: {node: '>=0.10.0'} dev: true - /default-browser-id/1.0.4: - resolution: {integrity: sha512-qPy925qewwul9Hifs+3sx1ZYn14obHxpkX+mPD369w4Rzg+YkJBgi3SOvwUq81nWSjqGUegIgEPwD8u+HUnxlw==} - engines: {node: '>=0.10.0'} - hasBin: true - requiresBuild: true - dependencies: - bplist-parser: 0.1.1 - meow: 3.7.0 - untildify: 2.1.0 - dev: true - optional: true - - /define-lazy-prop/2.0.0: - resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} - engines: {node: '>=8'} - dev: true - - /define-properties/1.1.3: + /define-properties@1.1.3: resolution: {integrity: sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==} engines: {node: '>= 0.4'} dependencies: object-keys: 1.1.1 dev: true - /define-properties/1.1.4: - resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} - engines: {node: '>= 0.4'} - dependencies: - has-property-descriptors: 1.0.0 - object-keys: 1.1.1 - dev: true - - /define-property/0.2.5: + /define-property@0.2.5: resolution: {integrity: sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==} engines: {node: '>=0.10.0'} dependencies: is-descriptor: 0.1.6 dev: true - /define-property/1.0.0: + /define-property@1.0.0: resolution: {integrity: sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==} engines: {node: '>=0.10.0'} dependencies: is-descriptor: 1.0.2 dev: true - /define-property/2.0.2: + /define-property@2.0.2: resolution: {integrity: sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==} engines: {node: '>=0.10.0'} dependencies: @@ -10360,139 +5339,69 @@ packages: isobject: 3.0.1 dev: true - /delayed-stream/1.0.0: + /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: 
{node: '>=0.4.0'} dev: true - /delegates/1.0.0: - resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} - dev: true - - /depd/2.0.0: - resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} - engines: {node: '>= 0.8'} - dev: true - - /des.js/1.0.1: - resolution: {integrity: sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==} - dependencies: - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - dev: true - - /destroy/1.2.0: - resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - dev: true - - /detab/2.0.4: - resolution: {integrity: sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==} - dependencies: - repeat-string: 1.6.1 - dev: true - - /detect-newline/2.1.0: + /detect-newline@2.1.0: resolution: {integrity: sha512-CwffZFvlJffUg9zZA0uqrjQayUTC8ob94pnr5sFwaVv3IOmkfUHcWH+jXaQK3askE51Cqe8/9Ql/0uXNwqZ8Zg==} engines: {node: '>=0.10.0'} dev: true - /detect-package-manager/2.0.1: - resolution: {integrity: sha512-j/lJHyoLlWi6G1LDdLgvUtz60Zo5GEj+sVYtTVXnYLDPuzgC3llMxonXym9zIwhhUII8vjdw0LXxavpLqTbl1A==} - engines: {node: '>=12'} - dependencies: - execa: 5.1.1 - dev: true - - /detect-port/1.3.0: - resolution: {integrity: sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ==} - engines: {node: '>= 4.2.1'} - hasBin: true - dependencies: - address: 1.2.1 - debug: 2.6.9 - transitivePeerDependencies: - - supports-color - dev: true - - /dialog-polyfill/0.5.6: + /dialog-polyfill@0.5.6: resolution: {integrity: sha512-ZbVDJI9uvxPAKze6z146rmfUZjBqNEwcnFTVamQzXH+svluiV7swmVIGr7miwADgfgt1G2JQIytypM9fbyhX4w==} dev: false - /diff-sequences/24.9.0: + /diff-sequences@24.9.0: resolution: {integrity: sha512-Dj6Wk3tWyTE+Fo1rW8v0Xhwk80um6yFYKbuAxc9c3EZxIHFDYwbi34Uk42u1CdnIiVorvt4RmlSDjIPyzGC2ew==} engines: {node: '>= 6'} dev: true - /diff/4.0.2: + /diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} dev: true - /diffie-hellman/5.0.3: - resolution: {integrity: sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==} - dependencies: - bn.js: 4.12.0 - miller-rabin: 4.0.1 - randombytes: 2.1.0 - dev: true - - /dir-glob/2.2.2: - resolution: {integrity: sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw==} - engines: {node: '>=4'} - dependencies: - path-type: 3.0.0 - dev: true - - /dir-glob/3.0.1: + /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} dependencies: path-type: 4.0.0 dev: true - /discontinuous-range/1.0.0: + /discontinuous-range@1.0.0: resolution: {integrity: sha1-44Mx8IRLukm5qctxx3FYWqsbxlo=} dev: true - /doctrine/2.1.0: + /doctrine@2.1.0: resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} engines: {node: '>=0.10.0'} dependencies: esutils: 2.0.3 dev: true - /doctrine/3.0.0: + /doctrine@3.0.0: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} dependencies: esutils: 2.0.3 dev: 
true - /dom-accessibility-api/0.5.14: - resolution: {integrity: sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg==} - dev: true - - /dom-converter/0.2.0: - resolution: {integrity: sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==} - dependencies: - utila: 0.4.0 - dev: true - - /dom-helpers/5.2.0: + /dom-helpers@5.2.0: resolution: {integrity: sha512-Ru5o9+V8CpunKnz5LGgWXkmrH/20cGKwcHwS4m73zIvs54CN9epEmT/HLqFJW3kXpakAFkEdzgy1hzlJe3E4OQ==} dependencies: '@babel/runtime': 7.18.6 csstype: 3.1.0 - /dom-serializer/0.1.1: + /dom-serializer@0.1.1: resolution: {integrity: sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA==} dependencies: domelementtype: 1.3.1 entities: 1.1.2 dev: true - /dom-serializer/1.4.1: + /dom-serializer@1.4.1: resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==} dependencies: domelementtype: 2.3.0 @@ -10500,57 +5409,48 @@ packages: entities: 2.2.0 dev: true - /dom-walk/0.1.2: - resolution: {integrity: sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==} - dev: true - - /domain-browser/1.2.0: - resolution: {integrity: sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==} - engines: {node: '>=0.4', npm: '>=1.2'} - dev: true - - /domelementtype/1.3.1: + /domelementtype@1.3.1: resolution: {integrity: sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==} dev: true - /domelementtype/2.3.0: + /domelementtype@2.3.0: resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} dev: true - /domexception/1.0.1: + /domexception@1.0.1: resolution: {integrity: sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug==} dependencies: webidl-conversions: 4.0.2 dev: true - /domhandler/2.4.2: + /domhandler@2.4.2: resolution: {integrity: sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==} dependencies: domelementtype: 1.3.1 dev: true - /domhandler/4.3.1: + /domhandler@4.3.1: resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==} engines: {node: '>= 4'} dependencies: domelementtype: 2.3.0 dev: true - /domutils/1.5.1: + /domutils@1.5.1: resolution: {integrity: sha512-gSu5Oi/I+3wDENBsOWBiRK1eoGxcywYSqg3rR960/+EfY0CF4EX1VPkgHOZ3WiS/Jg2DtliF6BhWcHlfpYUcGw==} dependencies: dom-serializer: 0.1.1 domelementtype: 1.3.1 dev: true - /domutils/1.7.0: + /domutils@1.7.0: resolution: {integrity: sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==} dependencies: dom-serializer: 0.1.1 domelementtype: 1.3.1 dev: true - /domutils/2.8.0: + /domutils@2.8.0: resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} dependencies: dom-serializer: 1.4.1 @@ -10558,156 +5458,98 @@ packages: domhandler: 4.3.1 dev: true - /dot-case/3.0.4: - resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} - dependencies: - no-case: 3.0.4 - tslib: 2.4.0 - dev: true - - /dot-prop/5.3.0: + /dot-prop@5.3.0: resolution: {integrity: sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==} engines: 
{node: '>=8'} dependencies: is-obj: 2.0.0 dev: true - /dotenv-expand/5.1.0: - resolution: {integrity: sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==} - dev: true - - /dotenv/8.6.0: - resolution: {integrity: sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==} - engines: {node: '>=10'} - dev: true + /downshift@7.2.0(react@18.2.0): + resolution: {integrity: sha512-dEn1Sshe7iTelUhmdbmiJhtIiwIBxBV8p15PuvEBh0qZcHXZnEt0geuCIIkCL4+ooaKRuLE0Wc+Fz9SwWuBIyg==} + peerDependencies: + react: '>=16.12.0 || 18' + dependencies: + '@babel/runtime': 7.18.6 + compute-scroll-into-view: 2.0.4 + prop-types: 15.8.1 + react: 18.2.0 + react-is: 17.0.2 + tslib: 2.4.0 + dev: false - /duplexer/0.1.1: + /duplexer@0.1.1: resolution: {integrity: sha512-sxNZ+ljy+RA1maXoUReeqBBpBC6RLKmg5ewzV+x+mSETmWNoKdZN6vcQjpFROemza23hGFskJtFNoUWUaQ+R4Q==} dev: true - /duplexer/0.1.2: + /duplexer@0.1.2: resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} dev: true - /duplexify/3.7.1: - resolution: {integrity: sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==} - dependencies: - end-of-stream: 1.4.4 - inherits: 2.0.4 - readable-stream: 2.3.7 - stream-shift: 1.0.1 - dev: true - - /ecc-jsbn/0.1.2: + /ecc-jsbn@0.1.2: resolution: {integrity: sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==} dependencies: jsbn: 0.1.1 safer-buffer: 2.1.2 dev: true - /ee-first/1.1.1: - resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - dev: true - - /electron-to-chromium/1.4.158: + /electron-to-chromium@1.4.158: resolution: {integrity: sha512-gppO3/+Y6sP432HtvwvuU8S+YYYLH4PmAYvQwqUtt9HDOmEsBwQfLnK9T8+1NIKwAS1BEygIjTaATC4H5EzvxQ==} dev: true - /electron-to-chromium/1.4.253: + /electron-to-chromium@1.4.253: resolution: {integrity: sha512-1pezJ2E1UyBTGbA7fUlHdPSXQw1k+82VhTFLG5G0AUqLGvsZqFzleOblceqegZzxYX4kC7hGEEdzIQI9RZ1Cuw==} dev: true - /elliptic/6.5.4: - resolution: {integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==} - dependencies: - bn.js: 4.12.0 - brorand: 1.1.0 - hash.js: 1.1.7 - hmac-drbg: 1.0.1 - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - minimalistic-crypto-utils: 1.0.1 - dev: true - - /email-addresses/3.1.0: + /email-addresses@3.1.0: resolution: {integrity: sha512-k0/r7GrWVL32kZlGwfPNgB2Y/mMXVTq/decgLczm/j34whdaspNrZO8CnXPf1laaHxI6ptUlsnAxN+UAPw+fzg==} dev: true - /emoji-regex/7.0.3: + /emoji-regex@7.0.3: resolution: {integrity: sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==} dev: true - /emoji-regex/8.0.0: + /emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} dev: true - /emojis-list/3.0.0: + /emojis-list@3.0.0: resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==} engines: {node: '>= 4'} dev: true - /enabled/2.0.0: + /enabled@2.0.0: resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==} dev: true - /encodeurl/1.0.2: - resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} - engines: {node: '>= 0.8'} - dev: true - - /encoding/0.1.13: + 
/encoding@0.1.13: resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} dependencies: iconv-lite: 0.6.2 dev: true - /end-of-stream/1.4.4: + /end-of-stream@1.4.4: resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} dependencies: once: 1.4.0 dev: true - /endent/2.1.0: - resolution: {integrity: sha512-r8VyPX7XL8U01Xgnb1CjZ3XV+z90cXIJ9JPE/R9SEC9vpw2P6CfsRPJmp20DppC5N7ZAMCmjYkJIa744Iyg96w==} - dependencies: - dedent: 0.7.0 - fast-json-parse: 1.0.3 - objectorarray: 1.0.5 - dev: true - - /enhanced-resolve/4.5.0: - resolution: {integrity: sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg==} - engines: {node: '>=6.9.0'} - dependencies: - graceful-fs: 4.2.10 - memory-fs: 0.5.0 - tapable: 1.1.3 - dev: true - - /enhanced-resolve/5.10.0: - resolution: {integrity: sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==} - engines: {node: '>=10.13.0'} - dependencies: - graceful-fs: 4.2.10 - tapable: 2.2.1 - dev: true - - /entities/1.1.2: + /entities@1.1.2: resolution: {integrity: sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==} dev: true - /entities/2.2.0: + /entities@2.2.0: resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} dev: true - /enzyme-shallow-equal/1.0.4: + /enzyme-shallow-equal@1.0.4: resolution: {integrity: sha512-MttIwB8kKxypwHvRynuC3ahyNc+cFbR8mjVIltnmzQ0uKGqmsfO4bfBuLxb0beLNPhjblUEYvEbsg+VSygvF1Q==} dependencies: has: 1.0.3 object-is: 1.1.3 dev: true - /enzyme/3.11.0: + /enzyme@3.11.0: resolution: {integrity: sha512-Dw8/Gs4vRjxY6/6i9wU0V+utmQO9kvh9XLnz3LIudviOnVYDEe2ec+0k+NQoMamn1VrjKgCUOWj5jG/5M5M0Qw==} dependencies: array.prototype.flat: 1.2.4 @@ -10734,26 +5576,13 @@ packages: string.prototype.trim: 1.2.3 dev: true - /errno/0.1.8: - resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==} - hasBin: true - dependencies: - prr: 1.0.1 - dev: true - - /error-ex/1.3.2: + /error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} dependencies: is-arrayish: 0.2.1 dev: true - /error-stack-parser/2.1.4: - resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==} - dependencies: - stackframe: 1.3.4 - dev: true - - /es-abstract/1.19.1: + /es-abstract@1.19.1: resolution: {integrity: sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==} engines: {node: '>= 0.4'} dependencies: @@ -10779,57 +5608,7 @@ packages: unbox-primitive: 1.0.1 dev: true - /es-abstract/1.20.2: - resolution: {integrity: sha512-XxXQuVNrySBNlEkTYJoDNFe5+s2yIOpzq80sUHEdPdQr0S5nTLz4ZPPPswNIpKseDDUS5yghX1gfLIHQZ1iNuQ==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - es-to-primitive: 1.2.1 - function-bind: 1.1.1 - function.prototype.name: 1.1.5 - get-intrinsic: 1.1.3 - get-symbol-description: 1.0.0 - has: 1.0.3 - has-property-descriptors: 1.0.0 - has-symbols: 1.0.3 - internal-slot: 1.0.3 - is-callable: 1.2.4 - is-negative-zero: 2.0.2 - is-regex: 1.1.4 - is-shared-array-buffer: 1.0.2 - is-string: 1.0.7 - is-weakref: 1.0.2 - object-inspect: 1.12.2 - object-keys: 1.1.1 - object.assign: 4.1.4 - regexp.prototype.flags: 1.4.3 - 
string.prototype.trimend: 1.0.5 - string.prototype.trimstart: 1.0.5 - unbox-primitive: 1.0.2 - dev: true - - /es-array-method-boxes-properly/1.0.0: - resolution: {integrity: sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==} - dev: true - - /es-get-iterator/1.1.2: - resolution: {integrity: sha512-+DTO8GYwbMCwbywjimwZMHp8AuYXOS2JZFWoi2AlPOS3ebnII9w/NLpNZtA7A0YLaVDw+O7KFCeoIV7OPvM7hQ==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.1 - has-symbols: 1.0.3 - is-arguments: 1.1.1 - is-map: 2.0.2 - is-set: 2.0.2 - is-string: 1.0.7 - isarray: 2.0.5 - dev: true - - /es-module-lexer/0.9.3: - resolution: {integrity: sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==} - dev: true - - /es-to-primitive/1.2.1: + /es-to-primitive@1.2.1: resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} engines: {node: '>= 0.4'} dependencies: @@ -10838,44 +5617,31 @@ packages: is-symbol: 1.0.4 dev: true - /es5-shim/4.6.7: - resolution: {integrity: sha512-jg21/dmlrNQI7JyyA2w7n+yifSxBng0ZralnSfVZjoCawgNTCnS+yBCyVM9DL5itm7SUnDGgv7hcq2XCZX4iRQ==} - engines: {node: '>=0.4.0'} - dev: true - - /es6-promisify/6.1.1: + /es6-promisify@6.1.1: resolution: {integrity: sha512-HBL8I3mIki5C1Cc9QjKUenHtnG0A5/xA8Q/AllRcfiwl2CZFXGK7ddBiCoRwAix4i2KxcQfjtIVcrVbB3vbmwg==} dev: true - /es6-shim/0.35.6: - resolution: {integrity: sha512-EmTr31wppcaIAgblChZiuN/l9Y7DPyw8Xtbg7fIVngn6zMW+IEBJDJngeKC3x6wr0V/vcA2wqeFnaw1bFJbDdA==} - dev: true - - /escalade/3.1.1: + /escalade@3.1.1: resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} engines: {node: '>=6'} dev: true - /escape-html/1.0.3: - resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} - dev: true - - /escape-string-regexp/1.0.5: + /escape-string-regexp@1.0.5: resolution: {integrity: sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=} engines: {node: '>=0.8.0'} dev: true - /escape-string-regexp/2.0.0: + /escape-string-regexp@2.0.0: resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} engines: {node: '>=8'} dev: true - /escape-string-regexp/4.0.0: + /escape-string-regexp@4.0.0: resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} engines: {node: '>=10'} dev: true - /escodegen/1.14.3: + /escodegen@1.14.3: resolution: {integrity: sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==} engines: {node: '>=4.0'} hasBin: true @@ -10888,20 +5654,7 @@ packages: source-map: 0.6.1 dev: true - /escodegen/2.0.0: - resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} - engines: {node: '>=6.0'} - hasBin: true - dependencies: - esprima: 4.0.1 - estraverse: 5.3.0 - esutils: 2.0.3 - optionator: 0.8.3 - optionalDependencies: - source-map: 0.6.1 - dev: true - - /eslint-config-prettier/6.15.0_eslint@6.8.0: + /eslint-config-prettier@6.15.0(eslint@6.8.0): resolution: {integrity: sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw==} hasBin: true peerDependencies: @@ -10911,7 +5664,7 @@ packages: get-stdin: 6.0.0 dev: true - /eslint-plugin-react-hooks/4.3.0_eslint@6.8.0: + /eslint-plugin-react-hooks@4.3.0(eslint@6.8.0): resolution: {integrity: 
sha512-XslZy0LnMn+84NEG9jSGR6eGqaZB3133L8xewQo3fQagbQuGt7a63gf+P1NGKZavEYEC3UXaWEAA/AqDkuN6xA==} engines: {node: '>=10'} peerDependencies: @@ -10920,7 +5673,7 @@ packages: eslint: 6.8.0 dev: true - /eslint-plugin-react/7.29.4_eslint@6.8.0: + /eslint-plugin-react@7.29.4(eslint@6.8.0): resolution: {integrity: sha512-CVCXajliVh509PcZYRFyu/BoUEz452+jtQJq2b3Bae4v3xBUWPLCmtmBM+ZinG4MzwmxJgJ2M5rMqhqLVn7MtQ==} engines: {node: '>=4'} peerDependencies: @@ -10943,15 +5696,7 @@ packages: string.prototype.matchall: 4.0.6 dev: true - /eslint-scope/4.0.3: - resolution: {integrity: sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==} - engines: {node: '>=4.0.0'} - dependencies: - esrecurse: 4.3.0 - estraverse: 4.3.0 - dev: true - - /eslint-scope/5.1.1: + /eslint-scope@5.1.1: resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} engines: {node: '>=8.0.0'} dependencies: @@ -10959,14 +5704,14 @@ packages: estraverse: 4.3.0 dev: true - /eslint-utils/1.4.3: + /eslint-utils@1.4.3: resolution: {integrity: sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==} engines: {node: '>=6'} dependencies: eslint-visitor-keys: 1.3.0 dev: true - /eslint-utils/3.0.0_eslint@6.8.0: + /eslint-utils@3.0.0(eslint@6.8.0): resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==} engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0} peerDependencies: @@ -10976,17 +5721,17 @@ packages: eslint-visitor-keys: 2.1.0 dev: true - /eslint-visitor-keys/1.3.0: + /eslint-visitor-keys@1.3.0: resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} engines: {node: '>=4'} dev: true - /eslint-visitor-keys/2.1.0: + /eslint-visitor-keys@2.1.0: resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} engines: {node: '>=10'} dev: true - /eslint/6.8.0: + /eslint@6.8.0: resolution: {integrity: sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==} engines: {node: ^8.10.0 || ^10.13.0 || >=11.10.1} hasBin: true @@ -11032,95 +5777,67 @@ packages: - supports-color dev: true - /espree/6.2.1: + /espree@6.2.1: resolution: {integrity: sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==} engines: {node: '>=6.0.0'} dependencies: acorn: 7.4.1 - acorn-jsx: 5.3.1_acorn@7.4.1 + acorn-jsx: 5.3.1(acorn@7.4.1) eslint-visitor-keys: 1.3.0 dev: true - /esprima/4.0.1: + /esprima@4.0.1: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} hasBin: true dev: true - /esquery/1.3.1: + /esquery@1.3.1: resolution: {integrity: sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==} engines: {node: '>=0.10'} dependencies: estraverse: 5.3.0 dev: true - /esrecurse/4.3.0: + /esrecurse@4.3.0: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} dependencies: estraverse: 5.3.0 dev: true - /estraverse/4.3.0: + /estraverse@4.3.0: resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} engines: {node: '>=4.0'} dev: true - /estraverse/5.3.0: + /estraverse@5.3.0: resolution: {integrity: 
sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} dev: true - /estree-to-babel/3.2.1: - resolution: {integrity: sha512-YNF+mZ/Wu2FU/gvmzuWtYc8rloubL7wfXCTgouFrnjGVXPA/EeYYA7pupXWrb3Iv1cTBeSSxxJIbK23l4MRNqg==} - engines: {node: '>=8.3.0'} - dependencies: - '@babel/traverse': 7.19.1 - '@babel/types': 7.19.0 - c8: 7.12.0 - transitivePeerDependencies: - - supports-color - dev: true - - /estree-walker/0.6.1: + /estree-walker@0.6.1: resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==} dev: true - /estree-walker/1.0.1: + /estree-walker@1.0.1: resolution: {integrity: sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==} dev: true - /esutils/2.0.3: + /esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} dev: true - /etag/1.8.1: - resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} - engines: {node: '>= 0.6'} - dev: true - - /eventemitter3/4.0.7: + /eventemitter3@4.0.7: resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} dev: true - /events/3.3.0: - resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} - engines: {node: '>=0.8.x'} - dev: true - - /evp_bytestokey/1.0.3: - resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==} - dependencies: - md5.js: 1.3.5 - safe-buffer: 5.2.1 - dev: true - - /exec-sh/0.3.4: + /exec-sh@0.3.4: resolution: {integrity: sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A==} dev: true - /execa/1.0.0: + /execa@1.0.0: resolution: {integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==} engines: {node: '>=6'} dependencies: @@ -11133,34 +5850,19 @@ packages: strip-eof: 1.0.0 dev: true - /execa/5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - dev: true - - /execall/2.0.0: + /execall@2.0.0: resolution: {integrity: sha512-0FU2hZ5Hh6iQnarpRtQurM/aAvp3RIbfvgLHrcqJYzhXyV2KFruhuChf9NC6waAhiUR7FFtlugkI4p7f2Fqlow==} engines: {node: '>=8'} dependencies: clone-regexp: 2.2.0 dev: true - /exit/0.1.2: + /exit@0.1.2: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} engines: {node: '>= 0.8.0'} dev: true - /expand-brackets/2.1.4: + /expand-brackets@2.1.4: resolution: {integrity: sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==} engines: {node: '>=0.10.0'} dependencies: @@ -11175,7 +5877,7 @@ packages: - supports-color dev: true - /expect/24.9.0: + /expect@24.9.0: resolution: {integrity: sha512-wvVAx8XIol3Z5m9zvZXiyZOQ+sRJqNTIm6sGjdWlaZIeupQGO3WbYI+15D/AmEwZywL6wtJkbAbJtzkOfBuR0Q==} engines: {node: '>= 6'} dependencies: @@ -11189,53 +5891,14 @@ packages: - supports-color dev: true - /express/4.18.1: - resolution: 
{integrity: sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==} - engines: {node: '>= 0.10.0'} - dependencies: - accepts: 1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.0 - content-disposition: 0.5.4 - content-type: 1.0.4 - cookie: 0.5.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.4.1 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.10.3 - range-parser: 1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - dev: true - - /extend-shallow/2.0.1: + /extend-shallow@2.0.1: resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} engines: {node: '>=0.10.0'} dependencies: is-extendable: 0.1.1 dev: true - /extend-shallow/3.0.2: + /extend-shallow@3.0.2: resolution: {integrity: sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==} engines: {node: '>=0.10.0'} dependencies: @@ -11243,11 +5906,11 @@ packages: is-extendable: 1.0.1 dev: true - /extend/3.0.2: + /extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} dev: true - /external-editor/3.1.0: + /external-editor@3.1.0: resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} engines: {node: '>=4'} dependencies: @@ -11256,7 +5919,7 @@ packages: tmp: 0.0.33 dev: true - /extglob/2.0.4: + /extglob@2.0.4: resolution: {integrity: sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==} engines: {node: '>=0.10.0'} dependencies: @@ -11272,30 +5935,16 @@ packages: - supports-color dev: true - /extsprintf/1.3.0: + /extsprintf@1.3.0: resolution: {integrity: sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==} engines: {'0': node >=0.6.0} dev: true - /fast-deep-equal/3.1.3: + /fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} dev: true - /fast-glob/2.2.7: - resolution: {integrity: sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==} - engines: {node: '>=4.0.0'} - dependencies: - '@mrmlnc/readdir-enhanced': 2.2.1 - '@nodelib/fs.stat': 1.1.3 - glob-parent: 3.1.0 - is-glob: 4.0.1 - merge2: 1.4.1 - micromatch: 3.1.10 - transitivePeerDependencies: - - supports-color - dev: true - - /fast-glob/3.2.11: + /fast-glob@3.2.11: resolution: {integrity: sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==} engines: {node: '>=8.6.0'} dependencies: @@ -11306,45 +5955,41 @@ packages: micromatch: 4.0.4 dev: true - /fast-json-parse/1.0.3: - resolution: {integrity: sha512-FRWsaZRWEJ1ESVNbDWmsAlqDk96gPQezzLghafp5J4GUKjbCz3OkAHuZs5TuPEtkbVQERysLp9xv6c24fBm8Aw==} - dev: true - - /fast-json-stable-stringify/2.1.0: + /fast-json-stable-stringify@2.1.0: resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} dev: true - /fast-levenshtein/2.0.6: + /fast-levenshtein@2.0.6: resolution: {integrity: 
sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=} dev: true - /fastest-levenshtein/1.0.12: + /fastest-levenshtein@1.0.12: resolution: {integrity: sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==} dev: true - /fastparse/1.1.2: + /fastparse@1.1.2: resolution: {integrity: sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==} dev: true - /fastq/1.13.0: + /fastq@1.13.0: resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} dependencies: reusify: 1.0.4 dev: true - /fault/1.0.4: + /fault@1.0.4: resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==} dependencies: format: 0.2.2 dev: false - /fb-watchman/2.0.1: + /fb-watchman@2.0.1: resolution: {integrity: sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==} dependencies: bser: 2.1.1 dev: true - /fbjs/0.8.17: + /fbjs@0.8.17: resolution: {integrity: sha1-xNWY6taUkRJlPWWIsBpc3Nn5D90=} dependencies: core-js: 1.2.7 @@ -11356,19 +6001,11 @@ packages: ua-parser-js: 0.7.28 dev: true - /fecha/4.2.3: + /fecha@4.2.3: resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==} dev: true - /fetch-retry/5.0.3: - resolution: {integrity: sha512-uJQyMrX5IJZkhoEUBQ3EjxkeiZkppBd5jS/fMTJmfZxLSiaQjv2zD0kTvuvkSH89uFvgSlB6ueGpjD3HWN7Bxw==} - dev: true - - /figgy-pudding/3.5.2: - resolution: {integrity: sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==} - dev: true - - /figures/1.7.0: + /figures@1.7.0: resolution: {integrity: sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=} engines: {node: '>=0.10.0'} dependencies: @@ -11376,59 +6013,38 @@ packages: object-assign: 4.1.1 dev: true - /figures/3.2.0: + /figures@3.2.0: resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} engines: {node: '>=8'} dependencies: escape-string-regexp: 1.0.5 dev: true - /file-entry-cache/5.0.1: + /file-entry-cache@5.0.1: resolution: {integrity: sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==} engines: {node: '>=4'} dependencies: flat-cache: 2.0.1 dev: true - /file-entry-cache/6.0.1: + /file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} engines: {node: ^10.12.0 || >=12.0.0} dependencies: flat-cache: 3.0.4 dev: true - /file-loader/6.2.0_webpack@4.46.0: - resolution: {integrity: sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==} - engines: {node: '>= 10.13.0'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - loader-utils: 2.0.2 - schema-utils: 3.1.1 - webpack: 4.46.0 - dev: true - - /file-system-cache/1.1.0: - resolution: {integrity: sha512-IzF5MBq+5CR0jXx5RxPe4BICl/oEhBSXKaL9fLhAXrIfIUS77Hr4vzrYyqYMHN6uTt+BOqi3fDCTjjEBCjERKw==} - dependencies: - fs-extra: 10.1.0 - ramda: 0.28.0 - dev: true - - /file-uri-to-path/1.0.0: + /file-uri-to-path@1.0.0: resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} dev: true optional: true - /filename-reserved-regex/1.0.0: + /filename-reserved-regex@1.0.0: resolution: {integrity: sha1-5hz4BfDeHJhFZ9A4bcXfUO5a9+Q=} engines: {node: '>=0.10.0'} dev: 
true - /filenamify-url/1.0.0: + /filenamify-url@1.0.0: resolution: {integrity: sha1-syvYExnvWGO3MHi+1Q9GpPeXX1A=} engines: {node: '>=0.10.0'} dependencies: @@ -11436,7 +6052,7 @@ packages: humanize-url: 1.0.1 dev: true - /filenamify/1.2.1: + /filenamify@1.2.1: resolution: {integrity: sha1-qfL/0RxQO+0wABUCknI3jx8TZaU=} engines: {node: '>=0.10.0'} dependencies: @@ -11445,12 +6061,12 @@ packages: trim-repeated: 1.0.0 dev: true - /filesize/6.1.0: + /filesize@6.1.0: resolution: {integrity: sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg==} engines: {node: '>= 0.4.0'} dev: true - /fill-range/4.0.0: + /fill-range@4.0.0: resolution: {integrity: sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==} engines: {node: '>=0.10.0'} dependencies: @@ -11460,38 +6076,14 @@ packages: to-regex-range: 2.1.1 dev: true - /fill-range/7.0.1: + /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} engines: {node: '>=8'} dependencies: to-regex-range: 5.0.1 dev: true - /finalhandler/1.2.0: - resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==} - engines: {node: '>= 0.8'} - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.4.1 - parseurl: 1.3.3 - statuses: 2.0.1 - unpipe: 1.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /find-cache-dir/2.1.0: - resolution: {integrity: sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==} - engines: {node: '>=6'} - dependencies: - commondir: 1.0.1 - make-dir: 2.1.0 - pkg-dir: 3.0.0 - dev: true - - /find-cache-dir/3.3.1: + /find-cache-dir@3.3.1: resolution: {integrity: sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==} engines: {node: '>=8'} dependencies: @@ -11500,27 +6092,18 @@ packages: pkg-dir: 4.2.0 dev: true - /find-root/1.1.0: + /find-root@1.1.0: resolution: {integrity: sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==} dev: true - /find-up/1.1.2: - resolution: {integrity: sha512-jvElSjyuo4EMQGoTwo1uJU5pQMwTW5lS1x05zzfJuTIyLR3zwO27LYrxNg+dlvKpGOuGy/MzBdXh80g0ve5+HA==} - engines: {node: '>=0.10.0'} - dependencies: - path-exists: 2.1.0 - pinkie-promise: 2.0.1 - dev: true - optional: true - - /find-up/3.0.0: + /find-up@3.0.0: resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==} engines: {node: '>=6'} dependencies: locate-path: 3.0.0 dev: true - /find-up/4.1.0: + /find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} dependencies: @@ -11528,15 +6111,7 @@ packages: path-exists: 4.0.0 dev: true - /find-up/5.0.0: - resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} - engines: {node: '>=10'} - dependencies: - locate-path: 6.0.0 - path-exists: 4.0.0 - dev: true - - /flat-cache/2.0.1: + /flat-cache@2.0.1: resolution: {integrity: sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==} engines: {node: '>=4'} dependencies: @@ -11545,7 +6120,7 @@ packages: write: 1.0.3 dev: true - /flat-cache/3.0.4: + /flat-cache@3.0.4: resolution: {integrity: 
sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} engines: {node: ^10.12.0 || >=12.0.0} dependencies: @@ -11553,114 +6128,28 @@ packages: rimraf: 3.0.2 dev: true - /flatted/2.0.2: + /flatted@2.0.2: resolution: {integrity: sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==} dev: true - /flatted/3.2.5: + /flatted@3.2.5: resolution: {integrity: sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==} dev: true - /flush-write-stream/1.1.1: - resolution: {integrity: sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==} - dependencies: - inherits: 2.0.4 - readable-stream: 2.3.7 - dev: true - - /fn.name/1.1.0: + /fn.name@1.1.0: resolution: {integrity: sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==} dev: true - /focus-lock/0.8.1: - resolution: {integrity: sha512-/LFZOIo82WDsyyv7h7oc0MJF9ACOvDRdx9rWPZ2pgMfNWu/z8hQDBtOchuB/0BVLmuFOZjV02YwUVzNsWx/EzA==} - engines: {node: '>=10'} - dependencies: - tslib: 1.14.1 - dev: true - - /for-in/1.0.2: + /for-in@1.0.2: resolution: {integrity: sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==} engines: {node: '>=0.10.0'} dev: true - /foreground-child/2.0.0: - resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==} - engines: {node: '>=8.0.0'} - dependencies: - cross-spawn: 7.0.3 - signal-exit: 3.0.7 - dev: true - - /forever-agent/0.6.1: + /forever-agent@0.6.1: resolution: {integrity: sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==} dev: true - /fork-ts-checker-webpack-plugin/4.1.6_b3umd4nqfgagbb4ysmdohlejqu: - resolution: {integrity: sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw==} - engines: {node: '>=6.11.5', yarn: '>=1.0.0'} - peerDependencies: - eslint: '>= 6' - typescript: '>= 2.7 || 4' - vue-template-compiler: '*' - webpack: '>= 4' - peerDependenciesMeta: - eslint: - optional: true - vue-template-compiler: - optional: true - webpack: - optional: true - dependencies: - '@babel/code-frame': 7.18.6 - chalk: 2.4.2 - eslint: 6.8.0 - micromatch: 3.1.10 - minimatch: 3.1.2 - semver: 5.7.1 - tapable: 1.1.3 - typescript: 4.7.4 - webpack: 4.46.0 - worker-rpc: 0.1.1 - transitivePeerDependencies: - - supports-color - dev: true - - /fork-ts-checker-webpack-plugin/6.5.2_b3umd4nqfgagbb4ysmdohlejqu: - resolution: {integrity: sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==} - engines: {node: '>=10', yarn: '>=1.0.0'} - peerDependencies: - eslint: '>= 6' - typescript: '>= 2.7 || 4' - vue-template-compiler: '*' - webpack: '>= 4' - peerDependenciesMeta: - eslint: - optional: true - vue-template-compiler: - optional: true - webpack: - optional: true - dependencies: - '@babel/code-frame': 7.18.6 - '@types/json-schema': 7.0.9 - chalk: 4.1.2 - chokidar: 3.4.3 - cosmiconfig: 6.0.0 - deepmerge: 4.2.2 - eslint: 6.8.0 - fs-extra: 9.1.0 - glob: 7.2.3 - memfs: 3.4.7 - minimatch: 3.1.2 - schema-utils: 2.7.0 - semver: 7.3.5 - tapable: 1.1.3 - typescript: 4.7.4 - webpack: 4.46.0 - dev: true - - /form-data/2.3.3: + /form-data@2.3.3: resolution: {integrity: sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==} engines: {node: '>= 0.12'} dependencies: @@ -11669,21 +6158,12 @@ 
packages: mime-types: 2.1.35 dev: true - /form-data/3.0.1: - resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - dev: true - - /format/0.2.2: + /format@0.2.2: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} dev: false - /formik/2.2.6_react@18.2.0: + /formik@2.2.6(react@18.2.0): resolution: {integrity: sha512-Kxk2zQRafy56zhLmrzcbryUpMBvT0tal5IvcifK5+4YNGelKsnrODFJ0sZQRMQboblWNym4lAW3bt+tf2vApSA==} peerDependencies: react: '>=16.8.0 || 18' @@ -11698,31 +6178,14 @@ packages: tslib: 1.14.1 dev: true - /forwarded/0.2.0: - resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} - engines: {node: '>= 0.6'} - dev: true - - /fragment-cache/0.2.1: + /fragment-cache@0.2.1: resolution: {integrity: sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==} engines: {node: '>=0.10.0'} dependencies: map-cache: 0.2.2 dev: true - /fresh/0.5.2: - resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} - engines: {node: '>= 0.6'} - dev: true - - /from2/2.3.0: - resolution: {integrity: sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==} - dependencies: - inherits: 2.0.4 - readable-stream: 2.3.7 - dev: true - - /fs-extra/10.1.0: + /fs-extra@10.1.0: resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} engines: {node: '>=12'} dependencies: @@ -11731,7 +6194,7 @@ packages: universalify: 2.0.0 dev: true - /fs-extra/8.1.0: + /fs-extra@8.1.0: resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} engines: {node: '>=6 <7 || >=8'} dependencies: @@ -11740,41 +6203,11 @@ packages: universalify: 0.1.2 dev: true - /fs-extra/9.1.0: - resolution: {integrity: sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==} - engines: {node: '>=10'} - dependencies: - at-least-node: 1.0.0 - graceful-fs: 4.2.10 - jsonfile: 6.1.0 - universalify: 2.0.0 - dev: true - - /fs-minipass/2.1.0: - resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.4 - dev: true - - /fs-monkey/1.0.3: - resolution: {integrity: sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==} - dev: true - - /fs-write-stream-atomic/1.0.10: - resolution: {integrity: sha512-gehEzmPn2nAwr39eay+x3X34Ra+M2QlVUTLhkXPjWdeO8RF9kszk116avgBJM3ZyNHgHXBNx+VmPaFC36k0PzA==} - dependencies: - graceful-fs: 4.2.10 - iferr: 0.1.5 - imurmurhash: 0.1.4 - readable-stream: 2.3.7 - dev: true - - /fs.realpath/1.0.0: + /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true - /fsevents/1.2.13: + /fsevents@1.2.13: resolution: {integrity: sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==} engines: {node: '>= 4.0'} os: [darwin] @@ -11786,7 +6219,7 @@ packages: dev: true optional: true - /fsevents/2.1.3: + /fsevents@2.1.3: resolution: {integrity: 
sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] @@ -11795,11 +6228,11 @@ packages: dev: true optional: true - /function-bind/1.1.1: + /function-bind@1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} dev: true - /function.prototype.name/1.1.2: + /function.prototype.name@1.1.2: resolution: {integrity: sha512-C8A+LlHBJjB2AdcRPorc5JvJ5VUoWlXdEHLOJdCI7kjHEtGTpHQUiqMvCIKUwIsGwZX2jZJy761AXsn356bJQg==} engines: {node: '>= 0.4'} dependencies: @@ -11808,60 +6241,31 @@ packages: functions-have-names: 1.2.1 dev: true - /function.prototype.name/1.1.5: - resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - functions-have-names: 1.2.3 - dev: true - - /functional-red-black-tree/1.0.1: + /functional-red-black-tree@1.0.1: resolution: {integrity: sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=} dev: true - /functions-have-names/1.2.1: + /functions-have-names@1.2.1: resolution: {integrity: sha512-j48B/ZI7VKs3sgeI2cZp7WXWmZXu7Iq5pl5/vptV5N2mq+DGFuS/ulaDjtaoLpYzuD6u8UgrUKHfgo7fDTSiBA==} dev: true - /functions-have-names/1.2.3: - resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} - dev: true - - /gauge/3.0.2: - resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} - engines: {node: '>=10'} - dependencies: - aproba: 2.0.0 - color-support: 1.1.3 - console-control-strings: 1.1.0 - has-unicode: 2.0.1 - object-assign: 4.1.1 - signal-exit: 3.0.7 - string-width: 4.2.3 - strip-ansi: 6.0.1 - wide-align: 1.1.5 - dev: true - - /generic-names/2.0.1: + /generic-names@2.0.1: resolution: {integrity: sha512-kPCHWa1m9wGG/OwQpeweTwM/PYiQLrUIxXbt/P4Nic3LbGjCP0YwrALHW1uNLKZ0LIMg+RF+XRlj2ekT9ZlZAQ==} dependencies: loader-utils: 1.4.0 dev: true - /gensync/1.0.0-beta.2: + /gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} dev: true - /get-caller-file/2.0.5: + /get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} dev: true - /get-intrinsic/1.1.1: + /get-intrinsic@1.1.1: resolution: {integrity: sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==} dependencies: function-bind: 1.1.1 @@ -11869,48 +6273,24 @@ packages: has-symbols: 1.0.3 dev: true - /get-intrinsic/1.1.3: - resolution: {integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==} - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: true - - /get-package-type/0.1.0: - resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} - engines: {node: '>=8.0.0'} - dev: true - - /get-stdin/4.0.1: - resolution: {integrity: sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /get-stdin/6.0.0: + /get-stdin@6.0.0: resolution: {integrity: 
sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==} engines: {node: '>=4'} dev: true - /get-stdin/8.0.0: + /get-stdin@8.0.0: resolution: {integrity: sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==} engines: {node: '>=10'} dev: true - /get-stream/4.1.0: + /get-stream@4.1.0: resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} engines: {node: '>=6'} dependencies: pump: 3.0.0 dev: true - /get-stream/6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - dev: true - - /get-symbol-description/1.0.0: + /get-symbol-description@1.0.0: resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} engines: {node: '>= 0.4'} dependencies: @@ -11918,18 +6298,18 @@ packages: get-intrinsic: 1.1.1 dev: true - /get-value/2.0.6: + /get-value@2.0.6: resolution: {integrity: sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==} engines: {node: '>=0.10.0'} dev: true - /getpass/0.1.7: + /getpass@0.1.7: resolution: {integrity: sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==} dependencies: assert-plus: 1.0.0 dev: true - /gh-pages/2.2.0: + /gh-pages@2.2.0: resolution: {integrity: sha512-c+yPkNOPMFGNisYg9r4qvsMIjVYikJv7ImFOhPIVPt0+AcRUamZ7zkGRLHz7FKB0xrlZ+ddSOJsZv9XAFVXLmA==} engines: {node: '>=6'} hasBin: true @@ -11942,43 +6322,14 @@ packages: globby: 6.1.0 dev: true - /github-slugger/1.4.0: - resolution: {integrity: sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==} - dev: true - - /glob-parent/3.1.0: - resolution: {integrity: sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==} - dependencies: - is-glob: 3.1.0 - path-dirname: 1.0.2 - dev: true - - /glob-parent/5.1.2: + /glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} dependencies: is-glob: 4.0.1 dev: true - /glob-promise/3.4.0_glob@7.2.0: - resolution: {integrity: sha512-q08RJ6O+eJn+dVanerAndJwIcumgbDdYiUT7zFQl3Wm1xD6fBKtah7H8ZJChj4wP+8C+QfeVy8xautR7rdmKEw==} - engines: {node: '>=4'} - peerDependencies: - glob: '*' - dependencies: - '@types/glob': 8.0.0 - glob: 7.2.0 - dev: true - - /glob-to-regexp/0.3.0: - resolution: {integrity: sha512-Iozmtbqv0noj0uDDqoL0zNq0VBEfK2YFoMAZoxJe4cwphvLR+JskfF30QhXHOR4m3KrE6NLRYw+U9MRXvifyig==} - dev: true - - /glob-to-regexp/0.4.1: - resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} - dev: true - - /glob/7.2.0: + /glob@7.2.0: resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==} dependencies: fs.realpath: 1.0.0 @@ -11989,7 +6340,7 @@ packages: path-is-absolute: 1.0.1 dev: true - /glob/7.2.3: + /glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} dependencies: fs.realpath: 1.0.0 @@ -12000,14 +6351,14 @@ packages: path-is-absolute: 1.0.1 dev: true - /global-modules/2.0.0: + /global-modules@2.0.0: resolution: {integrity: 
sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==} engines: {node: '>=6'} dependencies: global-prefix: 3.0.0 dev: true - /global-prefix/3.0.0: + /global-prefix@3.0.0: resolution: {integrity: sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==} engines: {node: '>=6'} dependencies: @@ -12016,37 +6367,23 @@ packages: which: 1.3.1 dev: true - /global/4.4.0: - resolution: {integrity: sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==} - dependencies: - min-document: 2.19.0 - process: 0.11.10 - dev: true - - /globals/11.12.0: + /globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} engines: {node: '>=4'} dev: true - /globals/12.4.0: + /globals@12.4.0: resolution: {integrity: sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==} engines: {node: '>=8'} dependencies: type-fest: 0.8.1 dev: true - /globalthis/1.0.3: - resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} - engines: {node: '>= 0.4'} - dependencies: - define-properties: 1.1.3 - dev: true - - /globalyzer/0.1.0: + /globalyzer@0.1.0: resolution: {integrity: sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==} dev: true - /globby/11.1.0: + /globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} dependencies: @@ -12058,7 +6395,7 @@ packages: slash: 3.0.0 dev: true - /globby/6.1.0: + /globby@6.1.0: resolution: {integrity: sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=} engines: {node: '>=0.10.0'} dependencies: @@ -12069,46 +6406,30 @@ packages: pinkie-promise: 2.0.1 dev: true - /globby/9.2.0: - resolution: {integrity: sha512-ollPHROa5mcxDEkwg6bPt3QbEf4pDQSNtd6JPL1YvOvAo/7/0VAm9TccUeoTmarjPw4pfUthSCqcyfNB1I3ZSg==} - engines: {node: '>=6'} - dependencies: - '@types/glob': 7.2.0 - array-union: 1.0.2 - dir-glob: 2.2.2 - fast-glob: 2.2.7 - glob: 7.2.3 - ignore: 4.0.6 - pify: 4.0.1 - slash: 2.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /globjoin/0.1.4: + /globjoin@0.1.4: resolution: {integrity: sha1-L0SUrIkZ43Z8XLtpHp9GMyQoXUM=} dev: true - /globrex/0.1.2: + /globrex@0.1.2: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} dev: true - /graceful-fs/4.2.10: + /graceful-fs@4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} dev: true - /growly/1.3.0: + /growly@1.3.0: resolution: {integrity: sha512-+xGQY0YyAWCnqy7Cd++hc2JqMYzlm0dG30Jd0beaA64sROr8C4nt8Yc9V5Ro3avlSUDTN0ulqP/VBKi1/lLygw==} dev: true - /gzip-size/3.0.0: + /gzip-size@3.0.0: resolution: {integrity: sha512-6s8trQiK+OMzSaCSVXX+iqIcLV9tC+E73jrJrJTyS4h/AJhlxHvzFKqM1YLDJWRGgHX8uLkBeXkA0njNj39L4w==} engines: {node: '>=0.12.0'} dependencies: duplexer: 0.1.2 dev: true - /gzip-size/5.1.1: + /gzip-size@5.1.1: resolution: {integrity: sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==} engines: {node: '>=6'} dependencies: @@ -12116,25 +6437,12 @@ packages: pify: 4.0.1 dev: true - /handlebars/4.7.7: - resolution: {integrity: sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==} - engines: 
{node: '>=0.4.7'} - hasBin: true - dependencies: - minimist: 1.2.5 - neo-async: 2.6.2 - source-map: 0.6.1 - wordwrap: 1.0.0 - optionalDependencies: - uglify-js: 3.17.0 - dev: true - - /har-schema/2.0.0: + /har-schema@2.0.0: resolution: {integrity: sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==} engines: {node: '>=4'} dev: true - /har-validator/5.1.5: + /har-validator@5.1.5: resolution: {integrity: sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==} engines: {node: '>=6'} deprecated: this library is no longer supported @@ -12143,71 +6451,50 @@ packages: har-schema: 2.0.0 dev: true - /hard-rejection/2.1.0: + /hard-rejection@2.1.0: resolution: {integrity: sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==} engines: {node: '>=6'} dev: true - /has-ansi/2.0.0: + /has-ansi@2.0.0: resolution: {integrity: sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=} engines: {node: '>=0.10.0'} dependencies: ansi-regex: 2.1.1 dev: true - /has-bigints/1.0.1: + /has-bigints@1.0.1: resolution: {integrity: sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==} dev: true - /has-bigints/1.0.2: - resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} - dev: true - - /has-flag/1.0.0: + /has-flag@1.0.0: resolution: {integrity: sha512-DyYHfIYwAJmjAjSSPKANxI8bFY9YtFrgkAfinBojQ8YJTOuOuav64tMUJv584SES4xl74PmuaevIyaLESHdTAA==} engines: {node: '>=0.10.0'} dev: true - /has-flag/3.0.0: + /has-flag@3.0.0: resolution: {integrity: sha1-tdRU3CGZriJWmfNGfloH87lVuv0=} engines: {node: '>=4'} dev: true - /has-flag/4.0.0: + /has-flag@4.0.0: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} dev: true - /has-glob/1.0.0: - resolution: {integrity: sha512-D+8A457fBShSEI3tFCj65PAbT++5sKiFtdCdOam0gnfBgw9D277OERk+HM9qYJXmdVLZ/znez10SqHN0BBQ50g==} - engines: {node: '>=0.10.0'} - dependencies: - is-glob: 3.1.0 - dev: true - - /has-property-descriptors/1.0.0: - resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} - dependencies: - get-intrinsic: 1.1.3 - dev: true - - /has-symbols/1.0.3: + /has-symbols@1.0.3: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} dev: true - /has-tostringtag/1.0.0: + /has-tostringtag@1.0.0: resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} engines: {node: '>= 0.4'} dependencies: has-symbols: 1.0.3 dev: true - /has-unicode/2.0.1: - resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} - dev: true - - /has-value/0.3.1: + /has-value@0.3.1: resolution: {integrity: sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==} engines: {node: '>=0.10.0'} dependencies: @@ -12216,103 +6503,40 @@ packages: isobject: 2.1.0 dev: true - /has-value/1.0.0: - resolution: {integrity: sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==} - engines: {node: '>=0.10.0'} - dependencies: - get-value: 2.0.6 - has-values: 1.0.0 - isobject: 3.0.1 - dev: true - - /has-values/0.1.4: - resolution: {integrity: 
sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==} - engines: {node: '>=0.10.0'} - dev: true - - /has-values/1.0.0: - resolution: {integrity: sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==} - engines: {node: '>=0.10.0'} - dependencies: - is-number: 3.0.0 - kind-of: 4.0.0 - dev: true - - /has/1.0.3: - resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} - engines: {node: '>= 0.4.0'} - dependencies: - function-bind: 1.1.1 - dev: true - - /hash-base/3.1.0: - resolution: {integrity: sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==} - engines: {node: '>=4'} - dependencies: - inherits: 2.0.4 - readable-stream: 3.6.0 - safe-buffer: 5.2.1 - dev: true - - /hash.js/1.1.7: - resolution: {integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==} - dependencies: - inherits: 2.0.4 - minimalistic-assert: 1.0.1 - dev: true - - /hast-to-hyperscript/9.0.1: - resolution: {integrity: sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==} - dependencies: - '@types/unist': 2.0.6 - comma-separated-tokens: 1.0.8 - property-information: 5.6.0 - space-separated-tokens: 1.1.5 - style-to-object: 0.3.0 - unist-util-is: 4.1.0 - web-namespaces: 1.1.4 - dev: true - - /hast-util-from-parse5/6.0.1: - resolution: {integrity: sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==} + /has-value@1.0.0: + resolution: {integrity: sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==} + engines: {node: '>=0.10.0'} dependencies: - '@types/parse5': 5.0.3 - hastscript: 6.0.0 - property-information: 5.6.0 - vfile: 4.2.1 - vfile-location: 3.2.0 - web-namespaces: 1.1.4 + get-value: 2.0.6 + has-values: 1.0.0 + isobject: 3.0.1 dev: true - /hast-util-parse-selector/2.2.5: - resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==} + /has-values@0.1.4: + resolution: {integrity: sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==} + engines: {node: '>=0.10.0'} + dev: true - /hast-util-raw/6.0.1: - resolution: {integrity: sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==} + /has-values@1.0.0: + resolution: {integrity: sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==} + engines: {node: '>=0.10.0'} dependencies: - '@types/hast': 2.3.4 - hast-util-from-parse5: 6.0.1 - hast-util-to-parse5: 6.0.0 - html-void-elements: 1.0.5 - parse5: 6.0.1 - unist-util-position: 3.1.0 - vfile: 4.2.1 - web-namespaces: 1.1.4 - xtend: 4.0.2 - zwitch: 1.0.5 + is-number: 3.0.0 + kind-of: 4.0.0 dev: true - /hast-util-to-parse5/6.0.0: - resolution: {integrity: sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==} + /has@1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} dependencies: - hast-to-hyperscript: 9.0.1 - property-information: 5.6.0 - web-namespaces: 1.1.4 - xtend: 4.0.2 - zwitch: 1.0.5 + function-bind: 1.1.1 dev: true - /hastscript/6.0.0: + /hast-util-parse-selector@2.2.5: + resolution: {integrity: 
sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==} + dev: false + + /hastscript@6.0.0: resolution: {integrity: sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==} dependencies: '@types/hast': 2.3.4 @@ -12320,122 +6544,67 @@ packages: hast-util-parse-selector: 2.2.5 property-information: 5.6.0 space-separated-tokens: 1.1.5 + dev: false - /he/1.2.0: - resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==} - hasBin: true - dev: true - - /hex-color-regex/1.1.0: + /hex-color-regex@1.1.0: resolution: {integrity: sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==} dev: true - /highlight.js/10.7.3: + /highlight.js@10.7.3: resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==} dev: false - /hmac-drbg/1.0.1: - resolution: {integrity: sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==} - dependencies: - hash.js: 1.1.7 - minimalistic-assert: 1.0.1 - minimalistic-crypto-utils: 1.0.1 - dev: true - - /hoist-non-react-statics/3.3.2: + /hoist-non-react-statics@3.3.2: resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} dependencies: react-is: 16.13.1 dev: true - /hosted-git-info/2.8.8: + /hosted-git-info@2.8.8: resolution: {integrity: sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==} dev: true - /hosted-git-info/4.1.0: + /hosted-git-info@4.1.0: resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==} engines: {node: '>=10'} dependencies: lru-cache: 6.0.0 dev: true - /hsl-regex/1.0.0: + /hsl-regex@1.0.0: resolution: {integrity: sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=} dev: true - /hsla-regex/1.0.0: + /hsla-regex@1.0.0: resolution: {integrity: sha1-wc56MWjIxmFAM6S194d/OyJfnDg=} dev: true - /html-comment-regex/1.1.2: + /html-comment-regex@1.1.2: resolution: {integrity: sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==} dev: true - /html-element-map/1.2.0: + /html-element-map@1.2.0: resolution: {integrity: sha512-0uXq8HsuG1v2TmQ8QkIhzbrqeskE4kn52Q18QJ9iAA/SnHoEKXWiUxHQtclRsCFWEUD2So34X+0+pZZu862nnw==} dependencies: array-filter: 1.0.0 dev: true - /html-encoding-sniffer/1.0.2: + /html-encoding-sniffer@1.0.2: resolution: {integrity: sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw==} dependencies: whatwg-encoding: 1.0.5 dev: true - /html-entities/2.3.3: - resolution: {integrity: sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==} - dev: true - - /html-escaper/2.0.2: + /html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} dev: true - /html-minifier-terser/5.1.1: - resolution: {integrity: sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg==} - engines: {node: '>=6'} - hasBin: true - dependencies: - camel-case: 4.1.2 - clean-css: 4.2.4 - commander: 4.1.1 - he: 1.2.0 - param-case: 3.0.4 - relateurl: 0.2.7 - terser: 4.8.0 - dev: true - - /html-tags/3.1.0: + /html-tags@3.1.0: resolution: {integrity: 
sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==} engines: {node: '>=8'} dev: true - /html-void-elements/1.0.5: - resolution: {integrity: sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==} - dev: true - - /html-webpack-plugin/4.5.2_webpack@4.46.0: - resolution: {integrity: sha512-q5oYdzjKUIPQVjOosjgvCHQOv9Ett9CYYHlgvJeXG0qQvdSojnBq4vAdQBwn1+yGveAwHCoe/rMR86ozX3+c2A==} - engines: {node: '>=6.9'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - '@types/html-minifier-terser': 5.1.2 - '@types/tapable': 1.0.8 - '@types/webpack': 4.41.32 - html-minifier-terser: 5.1.1 - loader-utils: 1.4.0 - lodash: 4.17.21 - pretty-error: 2.1.2 - tapable: 1.1.3 - util.promisify: 1.0.0 - webpack: 4.46.0 - dev: true - - /htmlparser2/3.10.1: + /htmlparser2@3.10.1: resolution: {integrity: sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==} dependencies: domelementtype: 1.3.1 @@ -12446,27 +6615,7 @@ packages: readable-stream: 3.6.0 dev: true - /htmlparser2/6.1.0: - resolution: {integrity: sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==} - dependencies: - domelementtype: 2.3.0 - domhandler: 4.3.1 - domutils: 2.8.0 - entities: 2.2.0 - dev: true - - /http-errors/2.0.0: - resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} - engines: {node: '>= 0.8'} - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - dev: true - - /http-signature/1.2.0: + /http-signature@1.2.0: resolution: {integrity: sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==} engines: {node: '>=0.8', npm: '>=1.3.7'} dependencies: @@ -12475,16 +6624,7 @@ packages: sshpk: 1.16.1 dev: true - /https-browserify/1.0.0: - resolution: {integrity: sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==} - dev: true - - /human-signals/2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - dev: true - - /humanize-url/1.0.1: + /humanize-url@1.0.1: resolution: {integrity: sha1-9KuZ4NKIF0yk4eUEB8VfuuRk7/8=} engines: {node: '>=0.10.0'} dependencies: @@ -12492,68 +6632,60 @@ packages: strip-url-auth: 1.0.1 dev: true - /iconv-lite/0.4.24: + /iconv-lite@0.4.24: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} dependencies: safer-buffer: 2.1.2 dev: true - /iconv-lite/0.6.2: + /iconv-lite@0.6.2: resolution: {integrity: sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==} engines: {node: '>=0.10.0'} dependencies: safer-buffer: 2.1.2 dev: true - /icss-replace-symbols/1.1.0: + /icss-replace-symbols@1.1.0: resolution: {integrity: sha1-Bupvg2ead0njhs/h/oEq5dsiPe0=} dev: true - /icss-utils/4.1.1: + /icss-utils@4.1.1: resolution: {integrity: sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==} engines: {node: '>= 6'} dependencies: postcss: 7.0.39 dev: true - /ieee754/1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true - - /iferr/0.1.5: - 
resolution: {integrity: sha512-DUNFN5j7Tln0D+TxzloUjKB+CtVu6myn0JEFak6dG18mNt9YkQ6lzGCdafwofISZ1lLF3xRHJ98VKy9ynkcFaA==} - dev: true - - /ignore/4.0.6: + /ignore@4.0.6: resolution: {integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==} engines: {node: '>= 4'} dev: true - /ignore/5.2.0: + /ignore@5.2.0: resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==} engines: {node: '>= 4'} dev: true - /immutable/4.0.0: + /immutable@4.0.0: resolution: {integrity: sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw==} dev: true - /import-cwd/2.1.0: + /import-cwd@2.1.0: resolution: {integrity: sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk=} engines: {node: '>=4'} dependencies: import-from: 2.1.0 dev: true - /import-cwd/3.0.0: + /import-cwd@3.0.0: resolution: {integrity: sha512-4pnzH16plW+hgvRECbDWpQl3cqtvSofHWh44met7ESfZ8UZOWWddm8hEyDTqREJ9RbYHY8gi8DqmaelApoOGMg==} engines: {node: '>=8'} dependencies: import-from: 3.0.0 dev: true - /import-fresh/2.0.0: + /import-fresh@2.0.0: resolution: {integrity: sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==} engines: {node: '>=4'} dependencies: @@ -12561,7 +6693,7 @@ packages: resolve-from: 3.0.0 dev: true - /import-fresh/3.2.2: + /import-fresh@3.2.2: resolution: {integrity: sha512-cTPNrlvJT6twpYy+YmKUKrTSjWFs3bjYjAhCwm+z4EOCubZxAuO+hHpRN64TqjEaYSHs7tJAE0w1CKMGmsG/lw==} engines: {node: '>=6'} dependencies: @@ -12569,26 +6701,26 @@ packages: resolve-from: 4.0.0 dev: true - /import-from/2.1.0: + /import-from@2.1.0: resolution: {integrity: sha1-M1238qev/VOqpHHUuAId7ja387E=} engines: {node: '>=4'} dependencies: resolve-from: 3.0.0 dev: true - /import-from/3.0.0: + /import-from@3.0.0: resolution: {integrity: sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ==} engines: {node: '>=8'} dependencies: resolve-from: 5.0.0 dev: true - /import-lazy/4.0.0: + /import-lazy@4.0.0: resolution: {integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} engines: {node: '>=8'} dev: true - /import-local/2.0.0: + /import-local@2.0.0: resolution: {integrity: sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==} engines: {node: '>=6'} hasBin: true @@ -12597,61 +6729,37 @@ packages: resolve-cwd: 2.0.0 dev: true - /imurmurhash/0.1.4: + /imurmurhash@0.1.4: resolution: {integrity: sha1-khi5srkoojixPcT7a21XbyMUU+o=} engines: {node: '>=0.8.19'} dev: true - /indent-string/2.1.0: - resolution: {integrity: sha512-aqwDFWSgSgfRaEwao5lg5KEcVd/2a+D1rvoG7NdilmYz0NwRk6StWpWdz/Hpk34MKPpx7s8XxUqimfcQK6gGlg==} - engines: {node: '>=0.10.0'} - dependencies: - repeating: 2.0.1 - dev: true - optional: true - - /indent-string/4.0.0: + /indent-string@4.0.0: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} dev: true - /indexes-of/1.0.1: + /indexes-of@1.0.1: resolution: {integrity: sha1-8w9xbI4r00bHtn0985FVZqfAVgc=} dev: true - /infer-owner/1.0.4: - resolution: {integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==} - dev: true - - /inflight/1.0.6: + /inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} dependencies: once: 1.4.0 wrappy: 1.0.2 dev: true - 
/inherits/2.0.1: - resolution: {integrity: sha512-8nWq2nLTAwd02jTqJExUYFSD/fKq6VH9Y/oG2accc/kdI0V98Bag8d5a4gi3XHz73rDWa2PvTtvcWYquKqSENA==} - dev: true - - /inherits/2.0.3: - resolution: {integrity: sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==} - dev: true - - /inherits/2.0.4: + /inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} dev: true - /ini/1.3.5: + /ini@1.3.5: resolution: {integrity: sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==} deprecated: Please update to ini >=1.3.6 to avoid a prototype pollution issue dev: true - /inline-style-parser/0.1.1: - resolution: {integrity: sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==} - dev: true - - /inquirer/7.3.3: + /inquirer@7.3.3: resolution: {integrity: sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==} engines: {node: '>=8.0.0'} dependencies: @@ -12670,7 +6778,7 @@ packages: through: 2.3.8 dev: true - /internal-slot/1.0.3: + /internal-slot@1.0.3: resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} engines: {node: '>= 0.4'} dependencies: @@ -12679,119 +6787,85 @@ packages: side-channel: 1.0.4 dev: true - /interpret/2.2.0: - resolution: {integrity: sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==} - engines: {node: '>= 0.10'} - dev: true - - /intl-format-cache/2.2.9: + /intl-format-cache@2.2.9: resolution: {integrity: sha512-Zv/u8wRpekckv0cLkwpVdABYST4hZNTDaX7reFetrYTJwxExR2VyTqQm+l0WmL0Qo8Mjb9Tf33qnfj0T7pjxdQ==} dev: true - /intl-messageformat-parser/1.4.0: + /intl-messageformat-parser@1.4.0: resolution: {integrity: sha1-tD1FqXRoytvkQzHXS7Ho3qRPwHU=} deprecated: We've written a new parser that's 6x faster and is backwards compatible. 
Please use @formatjs/icu-messageformat-parser dev: true - /intl-messageformat/2.2.0: + /intl-messageformat@2.2.0: resolution: {integrity: sha1-NFvNRt5jC3aDMwwuUhd/9eq0hPw=} dependencies: intl-messageformat-parser: 1.4.0 dev: true - /intl-relativeformat/2.2.0: + /intl-relativeformat@2.2.0: resolution: {integrity: sha512-4bV/7kSKaPEmu6ArxXf9xjv1ny74Zkwuey8Pm01NH4zggPP7JHwg2STk8Y3JdspCKRDriwIyLRfEXnj2ZLr4Bw==} deprecated: This package has been deprecated, please see migration guide at 'https://github.com/formatjs/formatjs/tree/master/packages/intl-relativeformat#migration-guide' dependencies: intl-messageformat: 2.2.0 dev: true - /invariant/2.2.4: + /invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} dependencies: loose-envify: 1.4.0 - dev: true - - /ip/2.0.0: - resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==} - dev: true - /ipaddr.js/1.9.1: - resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} - engines: {node: '>= 0.10'} - dev: true - - /is-absolute-url/2.1.0: + /is-absolute-url@2.1.0: resolution: {integrity: sha512-vOx7VprsKyllwjSkLV79NIhpyLfr3jAp7VaTCMXOJHu4m0Ew1CZ2fcjASwmV1jI3BWuWHB013M48eyeldk9gYg==} engines: {node: '>=0.10.0'} dev: true - /is-absolute-url/3.0.3: - resolution: {integrity: sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==} - engines: {node: '>=8'} - dev: true - - /is-accessor-descriptor/0.1.6: + /is-accessor-descriptor@0.1.6: resolution: {integrity: sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==} engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 dev: true - /is-accessor-descriptor/1.0.0: + /is-accessor-descriptor@1.0.0: resolution: {integrity: sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==} engines: {node: '>=0.10.0'} dependencies: kind-of: 6.0.3 dev: true - /is-alphabetical/1.0.4: + /is-alphabetical@1.0.4: resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==} + dev: false - /is-alphanumerical/1.0.4: + /is-alphanumerical@1.0.4: resolution: {integrity: sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==} dependencies: is-alphabetical: 1.0.4 is-decimal: 1.0.4 + dev: false - /is-arguments/1.1.1: - resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - has-tostringtag: 1.0.0 - dev: true - - /is-arrayish/0.2.1: + /is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} dev: true - /is-arrayish/0.3.2: + /is-arrayish@0.3.2: resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} dev: true - /is-bigint/1.0.4: + /is-bigint@1.0.4: resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} dependencies: has-bigints: 1.0.1 dev: true - /is-binary-path/1.0.1: - resolution: {integrity: sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==} - engines: {node: '>=0.10.0'} - dependencies: - binary-extensions: 1.13.1 - dev: true - 
optional: true - - /is-binary-path/2.1.0: + /is-binary-path@2.1.0: resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} engines: {node: '>=8'} dependencies: binary-extensions: 2.1.0 dev: true - /is-boolean-object/1.1.2: + /is-boolean-object@1.1.2: resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} engines: {node: '>= 0.4'} dependencies: @@ -12799,28 +6873,23 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-buffer/1.1.6: + /is-buffer@1.1.6: resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} dev: true - /is-buffer/2.0.5: - resolution: {integrity: sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==} - engines: {node: '>=4'} - dev: true - - /is-callable/1.2.4: + /is-callable@1.2.4: resolution: {integrity: sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==} engines: {node: '>= 0.4'} dev: true - /is-ci/2.0.0: + /is-ci@2.0.0: resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} hasBin: true dependencies: ci-info: 2.0.0 dev: true - /is-color-stop/1.1.0: + /is-color-stop@1.1.0: resolution: {integrity: sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=} dependencies: css-color-names: 0.0.4 @@ -12831,35 +6900,36 @@ packages: rgba-regex: 1.0.0 dev: true - /is-core-module/2.8.1: + /is-core-module@2.8.1: resolution: {integrity: sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==} dependencies: has: 1.0.3 dev: true - /is-data-descriptor/0.1.4: + /is-data-descriptor@0.1.4: resolution: {integrity: sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==} engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 dev: true - /is-data-descriptor/1.0.0: + /is-data-descriptor@1.0.0: resolution: {integrity: sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==} engines: {node: '>=0.10.0'} dependencies: kind-of: 6.0.3 dev: true - /is-date-object/1.0.2: + /is-date-object@1.0.2: resolution: {integrity: sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==} engines: {node: '>= 0.4'} dev: true - /is-decimal/1.0.4: + /is-decimal@1.0.4: resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==} + dev: false - /is-descriptor/0.1.6: + /is-descriptor@0.1.6: resolution: {integrity: sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==} engines: {node: '>=0.10.0'} dependencies: @@ -12868,7 +6938,7 @@ packages: kind-of: 5.1.0 dev: true - /is-descriptor/1.0.2: + /is-descriptor@1.0.2: resolution: {integrity: sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==} engines: {node: '>=0.10.0'} dependencies: @@ -12877,151 +6947,109 @@ packages: kind-of: 6.0.3 dev: true - /is-directory/0.3.1: + /is-directory@0.3.1: resolution: {integrity: sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=} engines: {node: '>=0.10.0'} dev: true - /is-docker/2.2.1: - resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} - engines: {node: '>=8'} - hasBin: true - dev: true - - /is-dom/1.1.0: - resolution: {integrity: 
sha512-u82f6mvhYxRPKpw8V1N0W8ce1xXwOrQtgGcxl6UCL5zBmZu3is/18K0rR7uFCnMDuAsS/3W54mGL4vsaFUQlEQ==} - dependencies: - is-object: 1.0.2 - is-window: 1.0.2 - dev: true - - /is-extendable/0.1.1: + /is-extendable@0.1.1: resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} engines: {node: '>=0.10.0'} dev: true - /is-extendable/1.0.1: + /is-extendable@1.0.1: resolution: {integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==} engines: {node: '>=0.10.0'} dependencies: is-plain-object: 2.0.4 dev: true - /is-extglob/2.1.1: + /is-extglob@2.1.1: resolution: {integrity: sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=} engines: {node: '>=0.10.0'} dev: true - /is-finite/1.1.0: - resolution: {integrity: sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /is-fullwidth-code-point/2.0.0: + /is-fullwidth-code-point@2.0.0: resolution: {integrity: sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==} engines: {node: '>=4'} dev: true - /is-fullwidth-code-point/3.0.0: + /is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} dev: true - /is-function/1.0.2: - resolution: {integrity: sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==} - dev: true - - /is-generator-fn/2.1.0: + /is-generator-fn@2.1.0: resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} engines: {node: '>=6'} dev: true - /is-glob/3.1.0: - resolution: {integrity: sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==} - engines: {node: '>=0.10.0'} - dependencies: - is-extglob: 2.1.1 - dev: true - - /is-glob/4.0.1: + /is-glob@4.0.1: resolution: {integrity: sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==} engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 dev: true - /is-hexadecimal/1.0.4: + /is-hexadecimal@1.0.4: resolution: {integrity: sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==} + dev: false - /is-map/2.0.2: - resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} - dev: true - - /is-module/1.0.0: + /is-module@1.0.0: resolution: {integrity: sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=} dev: true - /is-negative-zero/2.0.2: + /is-negative-zero@2.0.2: resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} engines: {node: '>= 0.4'} dev: true - /is-number-object/1.0.4: + /is-number-object@1.0.4: resolution: {integrity: sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==} engines: {node: '>= 0.4'} dev: true - /is-number/3.0.0: + /is-number@3.0.0: resolution: {integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==} engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 dev: true - /is-number/7.0.0: + /is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} dev: true - /is-obj/2.0.0: + /is-obj@2.0.0: 
resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} engines: {node: '>=8'} dev: true - /is-object/1.0.2: - resolution: {integrity: sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==} - dev: true - - /is-plain-obj/1.1.0: + /is-plain-obj@1.1.0: resolution: {integrity: sha1-caUMhCnfync8kqOQpKA7OfzVHT4=} engines: {node: '>=0.10.0'} dev: true - /is-plain-obj/2.1.0: - resolution: {integrity: sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==} - engines: {node: '>=8'} - dev: true - - /is-plain-object/2.0.4: + /is-plain-object@2.0.4: resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 dev: true - /is-plain-object/5.0.0: + /is-plain-object@5.0.0: resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} engines: {node: '>=0.10.0'} dev: true - /is-reference/1.2.1: + /is-reference@1.2.1: resolution: {integrity: sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==} dependencies: '@types/estree': 0.0.51 dev: true - /is-regex/1.1.4: + /is-regex@1.1.4: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} engines: {node: '>= 0.4'} dependencies: @@ -13029,168 +7057,111 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-regexp/2.1.0: + /is-regexp@2.1.0: resolution: {integrity: sha512-OZ4IlER3zmRIoB9AqNhEggVxqIH4ofDns5nRrPS6yQxXE1TPCUpFznBfRQmQa8uC+pXqjMnukiJBxCisIxiLGA==} engines: {node: '>=6'} dev: true - /is-resolvable/1.1.0: + /is-resolvable@1.1.0: resolution: {integrity: sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==} dev: true - /is-set/2.0.2: - resolution: {integrity: sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==} - dev: true - - /is-shared-array-buffer/1.0.1: + /is-shared-array-buffer@1.0.1: resolution: {integrity: sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==} dev: true - /is-shared-array-buffer/1.0.2: - resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} - dependencies: - call-bind: 1.0.2 - dev: true - - /is-stream/1.1.0: + /is-stream@1.1.0: resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==} engines: {node: '>=0.10.0'} dev: true - /is-stream/2.0.1: + /is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} dev: true - /is-string/1.0.7: + /is-string@1.0.7: resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} engines: {node: '>= 0.4'} dependencies: has-tostringtag: 1.0.0 dev: true - /is-subset/0.1.1: + /is-subset@0.1.1: resolution: {integrity: sha1-ilkRfZMt4d4A8kX83TnOQ/HpOaY=} dev: true - /is-svg/3.0.0: + /is-svg@3.0.0: resolution: {integrity: sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ==} engines: {node: '>=4'} dependencies: html-comment-regex: 1.1.2 dev: true - /is-symbol/1.0.4: + /is-symbol@1.0.4: resolution: {integrity: 
sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} engines: {node: '>= 0.4'} dependencies: has-symbols: 1.0.3 dev: true - /is-typedarray/1.0.0: + /is-typedarray@1.0.0: resolution: {integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==} dev: true - /is-utf8/0.2.1: - resolution: {integrity: sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q==} - dev: true - optional: true - - /is-weakref/1.0.2: + /is-weakref@1.0.2: resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} dependencies: call-bind: 1.0.2 dev: true - /is-whitespace-character/1.0.4: - resolution: {integrity: sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==} - dev: true - - /is-window/1.0.2: - resolution: {integrity: sha512-uj00kdXyZb9t9RcAUAwMZAnkBUwdYGhYlt7djMXhfyhUCzwNba50tIiBKR7q0l7tdoBtFVw/3JmLY6fI3rmZmg==} - dev: true - - /is-windows/1.0.2: + /is-windows@1.0.2: resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} engines: {node: '>=0.10.0'} dev: true - /is-word-character/1.0.4: - resolution: {integrity: sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==} - dev: true - - /is-wsl/1.1.0: + /is-wsl@1.1.0: resolution: {integrity: sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==} engines: {node: '>=4'} dev: true - /is-wsl/2.2.0: - resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} - engines: {node: '>=8'} - dependencies: - is-docker: 2.2.1 - dev: true - - /isarray/1.0.0: + /isarray@1.0.0: resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} dev: true - /isarray/2.0.5: - resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} - dev: true - - /isexe/2.0.0: + /isexe@2.0.0: resolution: {integrity: sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=} dev: true - /isobject/2.1.0: + /isobject@2.1.0: resolution: {integrity: sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==} engines: {node: '>=0.10.0'} dependencies: isarray: 1.0.0 dev: true - /isobject/3.0.1: + /isobject@3.0.1: resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} engines: {node: '>=0.10.0'} dev: true - /isobject/4.0.0: - resolution: {integrity: sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==} - engines: {node: '>=0.10.0'} - dev: true - - /isomorphic-fetch/2.2.1: + /isomorphic-fetch@2.2.1: resolution: {integrity: sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=} dependencies: node-fetch: 1.7.3 whatwg-fetch: 3.6.2 dev: true - /isomorphic-unfetch/3.1.0: - resolution: {integrity: sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==} - dependencies: - node-fetch: 2.6.7 - unfetch: 4.2.0 - transitivePeerDependencies: - - encoding - dev: true - - /isstream/0.1.2: + /isstream@0.1.2: resolution: {integrity: sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==} dev: true - /istanbul-lib-coverage/2.0.5: + /istanbul-lib-coverage@2.0.5: resolution: {integrity: 
sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==} engines: {node: '>=6'} dev: true - /istanbul-lib-coverage/3.2.0: - resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} - engines: {node: '>=8'} - dev: true - - /istanbul-lib-instrument/3.3.0: + /istanbul-lib-instrument@3.3.0: resolution: {integrity: sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==} engines: {node: '>=6'} dependencies: @@ -13205,20 +7176,7 @@ packages: - supports-color dev: true - /istanbul-lib-instrument/5.2.0: - resolution: {integrity: sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A==} - engines: {node: '>=8'} - dependencies: - '@babel/core': 7.19.1 - '@babel/parser': 7.19.1 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-coverage: 3.2.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-lib-report/2.0.8: + /istanbul-lib-report@2.0.8: resolution: {integrity: sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==} engines: {node: '>=6'} dependencies: @@ -13227,16 +7185,7 @@ packages: supports-color: 6.1.0 dev: true - /istanbul-lib-report/3.0.0: - resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} - engines: {node: '>=8'} - dependencies: - istanbul-lib-coverage: 3.2.0 - make-dir: 3.1.0 - supports-color: 7.2.0 - dev: true - - /istanbul-lib-source-maps/3.0.6: + /istanbul-lib-source-maps@3.0.6: resolution: {integrity: sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==} engines: {node: '>=6'} dependencies: @@ -13249,33 +7198,14 @@ packages: - supports-color dev: true - /istanbul-reports/2.2.7: + /istanbul-reports@2.2.7: resolution: {integrity: sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==} engines: {node: '>=6'} dependencies: html-escaper: 2.0.2 dev: true - /istanbul-reports/3.1.5: - resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} - engines: {node: '>=8'} - dependencies: - html-escaper: 2.0.2 - istanbul-lib-report: 3.0.0 - dev: true - - /iterate-iterator/1.0.2: - resolution: {integrity: sha512-t91HubM4ZDQ70M9wqp+pcNpu8OyJ9UAtXntT/Bcsvp5tZMnz9vRa+IunKXeI8AnfZMTv0jNuVEmGeLSMjVvfPw==} - dev: true - - /iterate-value/1.0.2: - resolution: {integrity: sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==} - dependencies: - es-get-iterator: 1.1.2 - iterate-iterator: 1.0.2 - dev: true - - /jest-changed-files/24.9.0: + /jest-changed-files@24.9.0: resolution: {integrity: sha512-6aTWpe2mHF0DhL28WjdkO8LyGjs3zItPET4bMSeXU6T3ub4FPMw+mcOcbdGXQOAfmLcxofD23/5Bl9Z4AkFwqg==} engines: {node: '>= 6'} dependencies: @@ -13284,7 +7214,7 @@ packages: throat: 4.1.0 dev: true - /jest-cli/24.9.0: + /jest-cli@24.9.0: resolution: {integrity: sha512-+VLRKyitT3BWoMeSUIHRxV/2g8y9gw91Jh5z2UmXZzkZKpbC08CSehVxgHUwTpy+HwGcns/tqafQDJW7imYvGg==} engines: {node: '>= 6'} hasBin: true @@ -13308,14 +7238,14 @@ packages: - utf-8-validate dev: true - /jest-config/24.9.0: + /jest-config@24.9.0: resolution: {integrity: sha512-RATtQJtVYQrp7fvWg6f5y3pEFj9I+H8sWw4aKxnDZ96mob5i5SD6ZEGWgMLXQ4LE8UurrjbdlLWdUeo+28QpfQ==} engines: {node: '>= 6'} dependencies: '@babel/core': 7.19.1 '@jest/test-sequencer': 24.9.0 '@jest/types': 
24.9.0 - babel-jest: 24.9.0_@babel+core@7.19.1 + babel-jest: 24.9.0(@babel/core@7.19.1) chalk: 2.4.2 glob: 7.2.3 jest-environment-jsdom: 24.9.0 @@ -13335,7 +7265,7 @@ packages: - utf-8-validate dev: true - /jest-diff/24.9.0: + /jest-diff@24.9.0: resolution: {integrity: sha512-qMfrTs8AdJE2iqrTp0hzh7kTd2PQWrsFyj9tORoKmu32xjPjeE4NyjVRDz8ybYwqS2ik8N4hsIpiVTyFeo2lBQ==} engines: {node: '>= 6'} dependencies: @@ -13345,14 +7275,14 @@ packages: pretty-format: 24.9.0 dev: true - /jest-docblock/24.9.0: + /jest-docblock@24.9.0: resolution: {integrity: sha512-F1DjdpDMJMA1cN6He0FNYNZlo3yYmOtRUnktrT9Q37njYzC5WEaDdmbynIgy0L/IvXvvgsG8OsqhLPXTpfmZAA==} engines: {node: '>= 6'} dependencies: detect-newline: 2.1.0 dev: true - /jest-each/24.9.0: + /jest-each@24.9.0: resolution: {integrity: sha512-ONi0R4BvW45cw8s2Lrx8YgbeXL1oCQ/wIDwmsM3CqM/nlblNCPmnC3IPQlMbRFZu3wKdQ2U8BqM6lh3LJ5Bsog==} engines: {node: '>= 6'} dependencies: @@ -13365,7 +7295,7 @@ packages: - supports-color dev: true - /jest-environment-jsdom-fourteen/1.0.1: + /jest-environment-jsdom-fourteen@1.0.1: resolution: {integrity: sha512-DojMX1sY+at5Ep+O9yME34CdidZnO3/zfPh8UW+918C5fIZET5vCjfkegixmsi7AtdYfkr4bPlIzmWnlvQkP7Q==} dependencies: '@jest/environment': 24.9.0 @@ -13380,7 +7310,7 @@ packages: - utf-8-validate dev: true - /jest-environment-jsdom/24.9.0: + /jest-environment-jsdom@24.9.0: resolution: {integrity: sha512-Zv9FV9NBRzLuALXjvRijO2351DRQeLYXtpD4xNvfoVFw21IOKNhZAEUKcbiEtjTkm2GsJ3boMVgkaR7rN8qetA==} engines: {node: '>= 6'} dependencies: @@ -13396,7 +7326,7 @@ packages: - utf-8-validate dev: true - /jest-environment-node/24.9.0: + /jest-environment-node@24.9.0: resolution: {integrity: sha512-6d4V2f4nxzIzwendo27Tr0aFm+IXWa0XEUnaH6nU0FMaozxovt+sfRvh4J47wL1OvF83I3SSTu0XK+i4Bqe7uA==} engines: {node: '>= 6'} dependencies: @@ -13409,12 +7339,12 @@ packages: - supports-color dev: true - /jest-get-type/24.9.0: + /jest-get-type@24.9.0: resolution: {integrity: sha512-lUseMzAley4LhIcpSP9Jf+fTrQ4a1yHQwLNeeVa2cEmbCGeoZAtYPOIv8JaxLD/sUpKxetKGP+gsHl8f8TSj8Q==} engines: {node: '>= 6'} dev: true - /jest-haste-map/24.9.0: + /jest-haste-map@24.9.0: resolution: {integrity: sha512-kfVFmsuWui2Sj1Rp1AJ4D9HqJwE4uwTlS/vO+eRUaMmd54BFpli2XhMQnPC2k4cHFVbB2Q2C+jtI1AGLgEnCjQ==} engines: {node: '>= 6'} dependencies: @@ -13435,30 +7365,7 @@ packages: - supports-color dev: true - /jest-haste-map/26.6.2: - resolution: {integrity: sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w==} - engines: {node: '>= 10.14.2'} - dependencies: - '@jest/types': 26.6.2 - '@types/graceful-fs': 4.1.5 - '@types/node': 18.11.9 - anymatch: 3.1.1 - fb-watchman: 2.0.1 - graceful-fs: 4.2.10 - jest-regex-util: 26.0.0 - jest-serializer: 26.6.2 - jest-util: 26.6.2 - jest-worker: 26.6.2 - micromatch: 4.0.4 - sane: 4.1.0 - walker: 1.0.7 - optionalDependencies: - fsevents: 2.1.3 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-jasmine2/24.9.0: + /jest-jasmine2@24.9.0: resolution: {integrity: sha512-Cq7vkAgaYKp+PsX+2/JbTarrk0DmNhsEtqBXNwUHkdlbrTBLtMJINADf2mf5FkowNsq8evbPc07/qFO0AdKTzw==} engines: {node: '>= 6'} dependencies: @@ -13482,7 +7389,7 @@ packages: - supports-color dev: true - /jest-leak-detector/24.9.0: + /jest-leak-detector@24.9.0: resolution: {integrity: sha512-tYkFIDsiKTGwb2FG1w8hX9V0aUb2ot8zY/2nFg087dUageonw1zrLMP4W6zsRO59dPkTSKie+D4rhMuP9nRmrA==} engines: {node: '>= 6'} dependencies: @@ -13490,7 +7397,7 @@ packages: pretty-format: 24.9.0 dev: true - /jest-matcher-utils/24.9.0: + /jest-matcher-utils@24.9.0: resolution: {integrity: 
sha512-OZz2IXsu6eaiMAwe67c1T+5tUAtQyQx27/EMEkbFAGiw52tB9em+uGbzpcgYVpA8wl0hlxKPZxrly4CXU/GjHA==} engines: {node: '>= 6'} dependencies: @@ -13500,7 +7407,7 @@ packages: pretty-format: 24.9.0 dev: true - /jest-message-util/24.9.0: + /jest-message-util@24.9.0: resolution: {integrity: sha512-oCj8FiZ3U0hTP4aSui87P4L4jC37BtQwUMqk+zk/b11FR19BJDeZsZAvIHutWnmtw7r85UmR3CEWZ0HWU2mAlw==} engines: {node: '>= 6'} dependencies: @@ -13516,22 +7423,14 @@ packages: - supports-color dev: true - /jest-mock/24.9.0: + /jest-mock@24.9.0: resolution: {integrity: sha512-3BEYN5WbSq9wd+SyLDES7AHnjH9A/ROBwmz7l2y+ol+NtSFO8DYiEBzoO1CeFc9a8DYy10EO4dDFVv/wN3zl1w==} engines: {node: '>= 6'} dependencies: '@jest/types': 24.9.0 dev: true - /jest-mock/27.5.1: - resolution: {integrity: sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - '@jest/types': 27.5.1 - '@types/node': 18.11.9 - dev: true - - /jest-pnp-resolver/1.2.2_jest-resolve@24.9.0: + /jest-pnp-resolver@1.2.2(jest-resolve@24.9.0): resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} engines: {node: '>=6'} peerDependencies: @@ -13543,17 +7442,12 @@ packages: jest-resolve: 24.9.0 dev: true - /jest-regex-util/24.9.0: + /jest-regex-util@24.9.0: resolution: {integrity: sha512-05Cmb6CuxaA+Ys6fjr3PhvV3bGQmO+2p2La4hFbU+W5uOc479f7FdLXUWXw4pYMAhhSZIuKHwSXSu6CsSBAXQA==} engines: {node: '>= 6'} dev: true - /jest-regex-util/26.0.0: - resolution: {integrity: sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A==} - engines: {node: '>= 10.14.2'} - dev: true - - /jest-resolve-dependencies/24.9.0: + /jest-resolve-dependencies@24.9.0: resolution: {integrity: sha512-Fm7b6AlWnYhT0BXy4hXpactHIqER7erNgIsIozDXWl5dVm+k8XdGVe1oTg1JyaFnOxarMEbax3wyRJqGP2Pq+g==} engines: {node: '>= 6'} dependencies: @@ -13564,18 +7458,18 @@ packages: - supports-color dev: true - /jest-resolve/24.9.0: + /jest-resolve@24.9.0: resolution: {integrity: sha512-TaLeLVL1l08YFZAt3zaPtjiVvyy4oSA6CRe+0AFPPVX3Q/VI0giIWWoAvoS5L96vj9Dqxj4fB5p2qrHCmTU/MQ==} engines: {node: '>= 6'} dependencies: '@jest/types': 24.9.0 browser-resolve: 1.11.3 chalk: 2.4.2 - jest-pnp-resolver: 1.2.2_jest-resolve@24.9.0 + jest-pnp-resolver: 1.2.2(jest-resolve@24.9.0) realpath-native: 1.1.0 dev: true - /jest-runner/24.9.0: + /jest-runner@24.9.0: resolution: {integrity: sha512-KksJQyI3/0mhcfspnxxEOBueGrd5E4vV7ADQLT9ESaCzz02WnbdbKWIf5Mkaucoaj7obQckYPVX6JJhgUcoWWg==} engines: {node: '>= 6'} dependencies: @@ -13604,7 +7498,7 @@ packages: - utf-8-validate dev: true - /jest-runtime/24.9.0: + /jest-runtime@24.9.0: resolution: {integrity: sha512-8oNqgnmF3v2J6PVRM2Jfuj8oX3syKmaynlDMMKQ4iyzbQzIG6th5ub/lM2bCMTmoTKM3ykcUYI2Pw9xwNtjMnw==} engines: {node: '>= 6'} hasBin: true @@ -13638,20 +7532,12 @@ packages: - utf-8-validate dev: true - /jest-serializer/24.9.0: + /jest-serializer@24.9.0: resolution: {integrity: sha512-DxYipDr8OvfrKH3Kel6NdED3OXxjvxXZ1uIY2I9OFbGg+vUkkg7AGvi65qbhbWNPvDckXmzMPbK3u3HaDO49bQ==} engines: {node: '>= 6'} dev: true - /jest-serializer/26.6.2: - resolution: {integrity: sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g==} - engines: {node: '>= 10.14.2'} - dependencies: - '@types/node': 18.11.9 - graceful-fs: 4.2.10 - dev: true - - /jest-snapshot/24.9.0: + /jest-snapshot@24.9.0: resolution: {integrity: 
sha512-uI/rszGSs73xCM0l+up7O7a40o90cnrk429LOiK3aeTvfC0HHmldbd81/B7Ix81KSFe1lwkbl7GnBGG4UfuDew==} engines: {node: '>= 6'} dependencies: @@ -13672,7 +7558,7 @@ packages: - supports-color dev: true - /jest-util/24.9.0: + /jest-util@24.9.0: resolution: {integrity: sha512-x+cZU8VRmOJxbA1K5oDBdxQmdq0OIdADarLxk0Mq+3XS4jgvhG/oKGWcIDCtPG0HgjxOYvF+ilPJQsAyXfbNOg==} engines: {node: '>= 6'} dependencies: @@ -13692,19 +7578,7 @@ packages: - supports-color dev: true - /jest-util/26.6.2: - resolution: {integrity: sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q==} - engines: {node: '>= 10.14.2'} - dependencies: - '@jest/types': 26.6.2 - '@types/node': 18.11.9 - chalk: 4.1.2 - graceful-fs: 4.2.10 - is-ci: 2.0.0 - micromatch: 4.0.4 - dev: true - - /jest-validate/24.9.0: + /jest-validate@24.9.0: resolution: {integrity: sha512-HPIt6C5ACwiqSiwi+OfSSHbK8sG7akG8eATl+IPKaeIjtPOeBUd/g3J7DghugzxrGjI93qS/+RPKe1H6PqvhRQ==} engines: {node: '>= 6'} dependencies: @@ -13716,7 +7590,7 @@ packages: pretty-format: 24.9.0 dev: true - /jest-watch-typeahead/0.4.2: + /jest-watch-typeahead@0.4.2: resolution: {integrity: sha512-f7VpLebTdaXs81rg/oj4Vg/ObZy2QtGzAmGLNsqUS5G5KtSN68tFcIsbvNODfNyQxU78g7D8x77o3bgfBTR+2Q==} dependencies: ansi-escapes: 4.3.1 @@ -13730,7 +7604,7 @@ packages: - supports-color dev: true - /jest-watcher/24.9.0: + /jest-watcher@24.9.0: resolution: {integrity: sha512-+/fLOfKPXXYJDYlks62/4R4GoT+GU1tYZed99JSCOsmzkkF7727RqKrjNAxtfO4YpGv11wybgRvCjR73lK2GZw==} engines: {node: '>= 6'} dependencies: @@ -13745,7 +7619,7 @@ packages: - supports-color dev: true - /jest-worker/24.9.0: + /jest-worker@24.9.0: resolution: {integrity: sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw==} engines: {node: '>= 6'} dependencies: @@ -13753,25 +7627,7 @@ packages: supports-color: 6.1.0 dev: true - /jest-worker/26.6.2: - resolution: {integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==} - engines: {node: '>= 10.13.0'} - dependencies: - '@types/node': 18.11.9 - merge-stream: 2.0.0 - supports-color: 7.2.0 - dev: true - - /jest-worker/27.5.1: - resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} - engines: {node: '>= 10.13.0'} - dependencies: - '@types/node': 18.11.9 - merge-stream: 2.0.0 - supports-color: 8.1.1 - dev: true - - /jest/24.9.0: + /jest@24.9.0: resolution: {integrity: sha512-YvkBL1Zm7d2B1+h5fHEOdyjCG+sGMz4f8D86/0HiqJ6MB4MnDc8FgP5vdWsGnemOQro7lnYo8UakZ3+5A0jxGw==} engines: {node: '>= 6'} hasBin: true @@ -13784,15 +7640,10 @@ packages: - utf-8-validate dev: true - /js-string-escape/1.0.1: - resolution: {integrity: sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==} - engines: {node: '>= 0.8'} - dev: true - - /js-tokens/4.0.0: + /js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - /js-yaml/3.14.0: + /js-yaml@3.14.0: resolution: {integrity: sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==} hasBin: true dependencies: @@ -13800,18 +7651,18 @@ packages: esprima: 4.0.1 dev: true - /js-yaml/4.1.0: + /js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true dependencies: argparse: 2.0.1 dev: true - /jsbn/0.1.1: + /jsbn@0.1.1: resolution: {integrity: 
sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==} dev: true - /jsdom/11.12.0: + /jsdom@11.12.0: resolution: {integrity: sha512-y8Px43oyiBM13Zc1z780FrfNLJCXTL40EWlty/LXUtcjykRBNgLlCjWXpfSPBl2iv+N7koQN+dvqszHZgT/Fjw==} dependencies: abab: 2.0.5 @@ -13829,7 +7680,7 @@ packages: parse5: 4.0.0 pn: 1.1.0 request: 2.88.2 - request-promise-native: 1.0.9_request@2.88.2 + request-promise-native: 1.0.9(request@2.88.2) sax: 1.2.4 symbol-tree: 3.2.4 tough-cookie: 2.5.0 @@ -13845,7 +7696,7 @@ packages: - utf-8-validate dev: true - /jsdom/14.1.0: + /jsdom@14.1.0: resolution: {integrity: sha512-O901mfJSuTdwU2w3Sn+74T+RnDVP+FuV5fH8tcPWyqrseRAb0s5xOtPgCFiPOtLcyK7CLIJwPyD83ZqQWvA5ng==} engines: {node: '>=8'} dependencies: @@ -13863,7 +7714,7 @@ packages: parse5: 5.1.0 pn: 1.1.0 request: 2.88.2 - request-promise-native: 1.0.9_request@2.88.2 + request-promise-native: 1.0.9(request@2.88.2) saxes: 3.1.11 symbol-tree: 3.2.4 tough-cookie: 2.5.0 @@ -13880,53 +7731,53 @@ packages: - utf-8-validate dev: true - /jsesc/0.5.0: + /jsesc@0.5.0: resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} hasBin: true dev: true - /jsesc/2.5.2: + /jsesc@2.5.2: resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} engines: {node: '>=4'} hasBin: true dev: true - /json-parse-better-errors/1.0.2: + /json-parse-better-errors@1.0.2: resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} dev: true - /json-parse-even-better-errors/2.3.1: + /json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true - /json-schema-traverse/0.4.1: + /json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} dev: true - /json-schema-traverse/1.0.0: + /json-schema-traverse@1.0.0: resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} dev: true - /json-schema/0.2.3: + /json-schema@0.2.3: resolution: {integrity: sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=} dev: true - /json-stable-stringify-without-jsonify/1.0.1: + /json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=} dev: true - /json-stringify-safe/5.0.1: + /json-stringify-safe@5.0.1: resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==} dev: true - /json5/1.0.1: + /json5@1.0.1: resolution: {integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==} hasBin: true dependencies: minimist: 1.2.5 dev: true - /json5/2.1.3: + /json5@2.1.3: resolution: {integrity: sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA==} engines: {node: '>=6'} hasBin: true @@ -13934,19 +7785,19 @@ packages: minimist: 1.2.5 dev: true - /json5/2.2.1: + /json5@2.2.1: resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} engines: {node: '>=6'} hasBin: true dev: true - /jsonfile/4.0.0: + /jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} optionalDependencies: graceful-fs: 4.2.10 dev: true - 
/jsonfile/6.1.0: + /jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} dependencies: universalify: 2.0.0 @@ -13954,7 +7805,7 @@ packages: graceful-fs: 4.2.10 dev: true - /jsprim/1.4.1: + /jsprim@1.4.1: resolution: {integrity: sha512-4Dj8Rf+fQ+/Pn7C5qeEX02op1WfOss3PKTE9Nsop3Dx+6UPxlm1dr/og7o2cRa5hNN07CACr4NFzRLtj/rjWog==} engines: {'0': node >=0.6.0} dependencies: @@ -13964,7 +7815,7 @@ packages: verror: 1.10.0 dev: true - /jsx-ast-utils/3.1.0: + /jsx-ast-utils@3.1.0: resolution: {integrity: sha512-d4/UOjg+mxAWxCiF0c5UTSwyqbchkbqCvK87aBovhnh8GtysTjWmgC63tY0cJx/HzGgm9qnA147jVBdpOiQ2RA==} engines: {node: '>=4.0'} dependencies: @@ -13972,82 +7823,76 @@ packages: object.assign: 4.1.2 dev: true - /junk/3.1.0: - resolution: {integrity: sha512-pBxcB3LFc8QVgdggvZWyeys+hnrNWg4OcZIU/1X59k5jQdLBlCsYGRQaz234SqoRLTCgMH00fY0xRJH+F9METQ==} - engines: {node: '>=8'} - dev: true - - /kind-of/3.2.2: + /kind-of@3.2.2: resolution: {integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==} engines: {node: '>=0.10.0'} dependencies: is-buffer: 1.1.6 dev: true - /kind-of/4.0.0: + /kind-of@4.0.0: resolution: {integrity: sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==} engines: {node: '>=0.10.0'} dependencies: is-buffer: 1.1.6 dev: true - /kind-of/5.1.0: + /kind-of@5.1.0: resolution: {integrity: sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==} engines: {node: '>=0.10.0'} dev: true - /kind-of/6.0.3: + /kind-of@6.0.3: resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} engines: {node: '>=0.10.0'} dev: true - /kleur/3.0.3: + /kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} dev: true - /klona/2.0.5: + /klona@2.0.5: resolution: {integrity: sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==} engines: {node: '>= 8'} dev: true - /known-css-properties/0.24.0: + /known-css-properties@0.24.0: resolution: {integrity: sha512-RTSoaUAfLvpR357vWzAz/50Q/BmHfmE6ETSWfutT0AJiw10e6CmcdYRQJlLRd95B53D0Y2aD1jSxD3V3ySF+PA==} dev: true - /kuler/2.0.0: + /kuler@2.0.0: resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} dev: true - /lazy-universal-dotenv/3.0.1: - resolution: {integrity: sha512-prXSYk799h3GY3iOWnC6ZigYzMPjxN2svgjJ9shk7oMadSNX3wXy0B6F32PMJv7qtMnrIbUxoEHzbutvxR2LBQ==} - engines: {node: '>=6.0.0', npm: '>=6.0.0', yarn: '>=1.0.0'} + /leantable@0.4.12(react@18.2.0): + resolution: {integrity: sha512-ilBvzlHIDFle9xpTHQV2zzXhsLc9UDbn/eLOxRM3J8CuiJ5M/orY0tJ7fuu5+jr9oUIRPwotA/3Mo6CqK5RgNQ==} + peerDependencies: + react: '>= 16.8.0 || 18' dependencies: - '@babel/runtime': 7.18.6 - app-root-dir: 1.0.2 - core-js: 3.23.1 - dotenv: 8.6.0 - dotenv-expand: 5.1.0 - dev: true + clsx: 1.2.1 + react: 18.2.0 + rxjs: 7.8.0 + dev: false - /left-pad/1.3.0: + /left-pad@1.3.0: resolution: {integrity: sha512-XI5MPzVNApjAyhQzphX8BkmKsKUxD4LdyK24iZeQGinBN9yTQT3bFlCBy/aVx2HrNcqQGsdot8ghrjyrvMCoEA==} deprecated: use String.prototype.padStart() dev: true - /leven/3.1.0: + /leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} dev: true - 
/levenary/1.1.1: + /levenary@1.1.1: resolution: {integrity: sha512-mkAdOIt79FD6irqjYSs4rdbnlT5vRonMEvBVPVb3XmevfS8kgRXwfes0dhPdEtzTWD/1eNE/Bm/G1iRt6DcnQQ==} engines: {node: '>= 6'} dependencies: leven: 3.1.0 dev: true - /levn/0.3.0: + /levn@0.3.0: resolution: {integrity: sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=} engines: {node: '>= 0.8.0'} dependencies: @@ -14055,23 +7900,11 @@ packages: type-check: 0.3.2 dev: true - /lines-and-columns/1.1.6: + /lines-and-columns@1.1.6: resolution: {integrity: sha512-8ZmlJFVK9iCmtLz19HpSsR8HaAMWBT284VMNednLwlIMDP2hJDCIhUp0IZ2xUcZ+Ob6BM0VvCSJwzASDM45NLQ==} dev: true - /load-json-file/1.1.0: - resolution: {integrity: sha512-cy7ZdNRXdablkXYNI049pthVeXFurRyb9+hA/dZzerZ0pGTx42z+y+ssxBaVV2l70t1muq5IdKhn4UtcoGUY9A==} - engines: {node: '>=0.10.0'} - dependencies: - graceful-fs: 4.2.10 - parse-json: 2.2.0 - pify: 2.3.0 - pinkie-promise: 2.0.1 - strip-bom: 2.0.0 - dev: true - optional: true - - /load-json-file/4.0.0: + /load-json-file@4.0.0: resolution: {integrity: sha1-L19Fq5HjMhYjT9U62rZo607AmTs=} engines: {node: '>=4'} dependencies: @@ -14081,17 +7914,7 @@ packages: strip-bom: 3.0.0 dev: true - /loader-runner/2.4.0: - resolution: {integrity: sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==} - engines: {node: '>=4.3.0 <5.0.0 || >=5.10'} - dev: true - - /loader-runner/4.3.0: - resolution: {integrity: sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==} - engines: {node: '>=6.11.5'} - dev: true - - /loader-utils/1.4.0: + /loader-utils@1.4.0: resolution: {integrity: sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==} engines: {node: '>=4.0.0'} dependencies: @@ -14100,16 +7923,16 @@ packages: json5: 1.0.1 dev: true - /loader-utils/2.0.2: + /loader-utils@2.0.2: resolution: {integrity: sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==} engines: {node: '>=8.9.0'} dependencies: big.js: 5.2.2 emojis-list: 3.0.0 - json5: 2.1.3 + json5: 2.2.1 dev: true - /locate-path/3.0.0: + /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} engines: {node: '>=6'} dependencies: @@ -14117,76 +7940,65 @@ packages: path-exists: 3.0.0 dev: true - /locate-path/5.0.0: + /locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} engines: {node: '>=8'} dependencies: p-locate: 4.1.0 dev: true - /locate-path/6.0.0: - resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} - engines: {node: '>=10'} - dependencies: - p-locate: 5.0.0 - dev: true - - /lodash-es/4.17.21: + /lodash-es@4.17.21: resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} dev: true - /lodash.camelcase/4.3.0: + /lodash.camelcase@4.3.0: resolution: {integrity: sha1-soqmKIorn8ZRA1x3EfZathkDMaY=} dev: true - /lodash.debounce/4.0.8: - resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} - dev: true - - /lodash.escape/4.0.1: + /lodash.escape@4.0.1: resolution: {integrity: sha512-nXEOnb/jK9g0DYMr1/Xvq6l5xMD7GDG55+GSYIYmS0G4tBk/hURD4JR9WCavs04t33WmJx9kCyp9vJ+mr4BOUw==} dev: true - /lodash.flattendeep/4.4.0: + /lodash.flattendeep@4.4.0: resolution: {integrity: sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=} dev: true 
- /lodash.isequal/4.5.0: + /lodash.isequal@4.5.0: resolution: {integrity: sha1-QVxEePK8wwEgwizhDtMib30+GOA=} dev: true - /lodash.memoize/4.1.2: + /lodash.memoize@4.1.2: resolution: {integrity: sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=} dev: true - /lodash.merge/4.6.2: + /lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} dev: true - /lodash.sortby/4.7.0: + /lodash.sortby@4.7.0: resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} dev: true - /lodash.trim/4.5.1: + /lodash.trim@4.5.1: resolution: {integrity: sha512-nJAlRl/K+eiOehWKDzoBVrSMhK0K3A3YQsUNXHQa5yIrKBAhsZgSu3KoAFoFT+mEgiyBHddZ0pRk1ITpIp90Wg==} dev: true - /lodash.trimstart/4.5.1: + /lodash.trimstart@4.5.1: resolution: {integrity: sha512-b/+D6La8tU76L/61/aN0jULWHkT0EeJCmVstPBn/K9MtD2qBW83AsBNrr63dKuWYwVMO7ucv13QNO/Ek/2RKaQ==} dev: true - /lodash.truncate/4.4.2: + /lodash.truncate@4.4.2: resolution: {integrity: sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=} dev: true - /lodash.uniq/4.5.0: + /lodash.uniq@4.5.0: resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} dev: true - /lodash/4.17.21: + /lodash@4.17.21: resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - /logform/2.4.2: + /logform@2.4.2: resolution: {integrity: sha512-W4c9himeAwXEdZ05dQNerhFz2XG80P9Oj0loPUMV23VC2it0orMHQhJm4hdnnor3rd1HsGf6a2lPwBM1zeXHGw==} dependencies: '@colors/colors': 1.5.0 @@ -14196,65 +8008,39 @@ packages: triple-beam: 1.3.0 dev: true - /loose-envify/1.4.0: + /loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true dependencies: js-tokens: 4.0.0 - /loud-rejection/1.6.0: - resolution: {integrity: sha512-RPNliZOFkqFumDhvYqOaNY4Uz9oJM2K9tC6JWsJJsNdhuONW4LQHRBpb0qf4pJApVffI5N39SwzWZJuEhfd7eQ==} - engines: {node: '>=0.10.0'} - dependencies: - currently-unhandled: 0.4.1 - signal-exit: 3.0.7 - dev: true - optional: true - - /lower-case/2.0.2: - resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} - dependencies: - tslib: 2.4.0 - dev: true - - /lowlight/1.20.0: + /lowlight@1.20.0: resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==} dependencies: fault: 1.0.4 highlight.js: 10.7.3 dev: false - /lru-cache/5.1.1: - resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} - dependencies: - yallist: 3.1.1 - dev: true - - /lru-cache/6.0.0: + /lru-cache@6.0.0: resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} engines: {node: '>=10'} dependencies: yallist: 4.0.0 dev: true - /lz-string/1.4.4: - resolution: {integrity: sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ==} - hasBin: true - dev: true - - /magic-string/0.22.5: + /magic-string@0.22.5: resolution: {integrity: sha512-oreip9rJZkzvA8Qzk9HFs8fZGF/u7H/gtrE8EN6RjKJ9kh2HlC+yQ2QezifqTZfGyiuAV0dRv5a+y/8gBb1m9w==} dependencies: vlq: 0.2.3 dev: true - /magic-string/0.25.7: + /magic-string@0.25.7: resolution: {integrity: sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==} dependencies: sourcemap-codec: 
1.4.8 dev: true - /make-dir/2.1.0: + /make-dir@2.1.0: resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} engines: {node: '>=6'} dependencies: @@ -14262,58 +8048,50 @@ packages: semver: 5.7.1 dev: true - /make-dir/3.1.0: + /make-dir@3.1.0: resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} engines: {node: '>=8'} dependencies: semver: 6.3.0 dev: true - /make-error/1.3.6: + /make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} dev: true - /makeerror/1.0.11: + /makeerror@1.0.11: resolution: {integrity: sha512-M/XvMZ6oK4edXjvg/ZYyzByg8kjpVrF/m0x3wbhOlzJfsQgFkqP1rJnLnJExOcslmLSSeLiN6NmF+cBoKJHGTg==} dependencies: tmpl: 1.0.4 dev: true - /map-cache/0.2.2: + /map-cache@0.2.2: resolution: {integrity: sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==} engines: {node: '>=0.10.0'} dev: true - /map-obj/1.0.1: + /map-obj@1.0.1: resolution: {integrity: sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=} engines: {node: '>=0.10.0'} dev: true - /map-obj/4.3.0: + /map-obj@4.3.0: resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} engines: {node: '>=8'} dev: true - /map-or-similar/1.5.0: - resolution: {integrity: sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==} - dev: true - - /map-visit/1.0.0: + /map-visit@1.0.0: resolution: {integrity: sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==} engines: {node: '>=0.10.0'} dependencies: object-visit: 1.0.1 dev: true - /markdown-escapes/1.0.4: - resolution: {integrity: sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==} - dev: true - - /mathml-tag-names/2.1.3: + /mathml-tag-names@2.1.3: resolution: {integrity: sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==} dev: true - /maxmin/2.1.0: + /maxmin@2.1.0: resolution: {integrity: sha1-TTsiCQPZXu5+t6x/qGTnLcCaMWY=} engines: {node: '>=0.12'} dependencies: @@ -14323,111 +8101,20 @@ packages: pretty-bytes: 3.0.1 dev: true - /md5.js/1.3.5: - resolution: {integrity: sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==} - dependencies: - hash-base: 3.1.0 - inherits: 2.0.4 - safe-buffer: 5.2.1 - dev: true - - /mdast-squeeze-paragraphs/4.0.0: - resolution: {integrity: sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==} - dependencies: - unist-util-remove: 2.1.0 - dev: true - - /mdast-util-definitions/4.0.0: - resolution: {integrity: sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==} - dependencies: - unist-util-visit: 2.0.3 - dev: true - - /mdast-util-to-hast/10.0.1: - resolution: {integrity: sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - mdast-util-definitions: 4.0.0 - mdurl: 1.0.1 - unist-builder: 2.0.3 - unist-util-generated: 1.1.6 - unist-util-position: 3.1.0 - unist-util-visit: 2.0.3 - dev: true - - /mdast-util-to-string/1.1.0: - resolution: {integrity: sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A==} - dev: true - - 
/mdn-data/2.0.14: + /mdn-data@2.0.14: resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} dev: true - /mdn-data/2.0.4: + /mdn-data@2.0.4: resolution: {integrity: sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==} dev: true - /mdurl/1.0.1: - resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} - dev: true - - /media-typer/0.3.0: - resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} - engines: {node: '>= 0.6'} - dev: true - - /memfs/3.4.7: - resolution: {integrity: sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==} - engines: {node: '>= 4.0.0'} - dependencies: - fs-monkey: 1.0.3 - dev: true - - /memoizerific/1.11.3: - resolution: {integrity: sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==} - dependencies: - map-or-similar: 1.5.0 - dev: true - - /memory-fs/0.4.1: - resolution: {integrity: sha512-cda4JKCxReDXFXRqOHPQscuIYg1PvxbE2S2GP45rnwfEK+vZaXC8C1OFvdHIbgw0DLzowXGVoxLaAmlgRy14GQ==} - dependencies: - errno: 0.1.8 - readable-stream: 2.3.7 - dev: true - - /memory-fs/0.5.0: - resolution: {integrity: sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==} - engines: {node: '>=4.3.0 <5.0.0 || >=5.10'} - dependencies: - errno: 0.1.8 - readable-stream: 2.3.7 - dev: true - - /memorystream/0.3.1: + /memorystream@0.3.1: resolution: {integrity: sha1-htcJCzDORV1j+64S3aUaR93K+bI=} engines: {node: '>= 0.10.0'} dev: true - /meow/3.7.0: - resolution: {integrity: sha512-TNdwZs0skRlpPpCUK25StC4VH+tP5GgeY1HQOOGP+lQ2xtdkN2VtT/5tiX9k3IWpkBPV9b3LsAWXn4GGi/PrSA==} - engines: {node: '>=0.10.0'} - dependencies: - camelcase-keys: 2.1.0 - decamelize: 1.2.0 - loud-rejection: 1.6.0 - map-obj: 1.0.1 - minimist: 1.2.5 - normalize-package-data: 2.5.0 - object-assign: 4.1.1 - read-pkg-up: 1.0.1 - redent: 1.0.0 - trim-newlines: 1.0.0 - dev: true - optional: true - - /meow/9.0.0: + /meow@9.0.0: resolution: {integrity: sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==} engines: {node: '>=10'} dependencies: @@ -14445,47 +8132,38 @@ packages: yargs-parser: 20.2.9 dev: true - /merge-descriptors/1.0.1: - resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==} - dev: true - - /merge-stream/2.0.0: + /merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} dev: true - /merge2/1.4.1: + /merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} dev: true - /methods/1.1.2: - resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} - engines: {node: '>= 0.6'} - dev: true - - /microbundle-crl/0.13.11: + /microbundle-crl@0.13.11: resolution: {integrity: sha512-3roSSgtJdIvvIOJdhefWD8BoEjHkbInkJzUoSE+ER6NGcpTuPWXNFyAbXamCL9Qg8rTU4urbYtXusk/d1ZpteA==} hasBin: true dependencies: '@babel/core': 7.12.9 - '@babel/plugin-proposal-class-properties': 7.7.4_@babel+core@7.12.9 - '@babel/plugin-proposal-decorators': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-proposal-optional-chaining': 7.12.7_@babel+core@7.12.9 - 
'@babel/plugin-syntax-jsx': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-flow-strip-types': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-transform-react-jsx': 7.12.7_@babel+core@7.12.9 - '@babel/preset-env': 7.12.7_@babel+core@7.12.9 - '@babel/preset-flow': 7.12.1_@babel+core@7.12.9 - '@rollup/plugin-alias': 3.1.1_rollup@1.32.1 - '@rollup/plugin-commonjs': 11.1.0_rollup@1.32.1 - '@rollup/plugin-json': 4.1.0_rollup@1.32.1 - '@rollup/plugin-node-resolve': 6.1.0_rollup@1.32.1 + '@babel/plugin-proposal-class-properties': 7.7.4(@babel/core@7.12.9) + '@babel/plugin-proposal-decorators': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-proposal-optional-chaining': 7.12.7(@babel/core@7.12.9) + '@babel/plugin-syntax-jsx': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-flow-strip-types': 7.12.1(@babel/core@7.12.9) + '@babel/plugin-transform-react-jsx': 7.12.7(@babel/core@7.12.9) + '@babel/preset-env': 7.12.7(@babel/core@7.12.9) + '@babel/preset-flow': 7.12.1(@babel/core@7.12.9) + '@rollup/plugin-alias': 3.1.1(rollup@1.32.1) + '@rollup/plugin-commonjs': 11.1.0(rollup@1.32.1) + '@rollup/plugin-json': 4.1.0(rollup@1.32.1) + '@rollup/plugin-node-resolve': 6.1.0(rollup@1.32.1) '@svgr/rollup': 5.5.0 asyncro: 3.0.0 autoprefixer: 9.8.6 babel-plugin-macros: 2.8.0 babel-plugin-transform-async-to-promises: 0.8.15 - babel-plugin-transform-replace-expressions: 0.2.0_@babel+core@7.12.9 + babel-plugin-transform-replace-expressions: 0.2.0(@babel/core@7.12.9) brotli-size: 4.0.0 camelcase: 5.3.1 cssnano: 4.1.10 @@ -14497,13 +8175,13 @@ packages: module-details-from-path: 1.0.3 pretty-bytes: 5.4.1 rollup: 1.32.1 - rollup-plugin-babel: 4.4.0_j5c2ibk5rlsxlnmc33p2gjc3da + rollup-plugin-babel: 4.4.0(@babel/core@7.12.9)(rollup@1.32.1) rollup-plugin-bundle-size: 1.0.3 rollup-plugin-es3: 1.1.0 rollup-plugin-postcss: 2.9.0 - rollup-plugin-smart-asset: 2.1.0_rollup@1.32.1 - rollup-plugin-terser: 5.3.1_rollup@1.32.1 - rollup-plugin-typescript2: 0.25.3_u7o3wpaaa4xpcuuxlpndi4hgqe + rollup-plugin-smart-asset: 2.1.0(rollup@1.32.1) + rollup-plugin-terser: 5.3.1(rollup@1.32.1) + rollup-plugin-typescript2: 0.25.3(rollup@1.32.1)(typescript@3.9.10) sade: 1.7.4 tiny-glob: 0.2.8 tslib: 1.14.1 @@ -14512,11 +8190,7 @@ packages: - supports-color dev: true - /microevent.ts/0.1.1: - resolution: {integrity: sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g==} - dev: true - - /micromatch/3.1.10: + /micromatch@3.1.10: resolution: {integrity: sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==} engines: {node: '>=0.10.0'} dependencies: @@ -14537,7 +8211,7 @@ packages: - supports-color dev: true - /micromatch/4.0.4: + /micromatch@4.0.4: resolution: {integrity: sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==} engines: {node: '>=8.6'} dependencies: @@ -14545,81 +8219,41 @@ packages: picomatch: 2.3.1 dev: true - /miller-rabin/4.0.1: - resolution: {integrity: sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==} - hasBin: true - dependencies: - bn.js: 4.12.0 - brorand: 1.1.0 - dev: true - - /mime-db/1.44.0: - resolution: {integrity: sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==} - engines: {node: '>= 0.6'} - dev: true - - /mime-db/1.52.0: + /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} dev: true - 
/mime-types/2.1.27: - resolution: {integrity: sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.44.0 - dev: true - - /mime-types/2.1.35: + /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 dev: true - /mime/1.6.0: - resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /mime/2.4.6: + /mime@2.4.6: resolution: {integrity: sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA==} engines: {node: '>=4.0.0'} hasBin: true dev: true - /mimic-fn/2.1.0: + /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} dev: true - /min-document/2.19.0: - resolution: {integrity: sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==} - dependencies: - dom-walk: 0.1.2 - dev: true - - /min-indent/1.0.1: + /min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} dev: true - /minimalistic-assert/1.0.1: - resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==} - dev: true - - /minimalistic-crypto-utils/1.0.1: - resolution: {integrity: sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==} - dev: true - - /minimatch/3.1.2: + /minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} dependencies: brace-expansion: 1.1.11 dev: true - /minimist-options/4.1.0: + /minimist-options@4.1.0: resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==} engines: {node: '>= 6'} dependencies: @@ -14628,63 +8262,11 @@ packages: kind-of: 6.0.3 dev: true - /minimist/1.2.5: + /minimist@1.2.5: resolution: {integrity: sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==} dev: true - /minipass-collect/1.0.2: - resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.4 - dev: true - - /minipass-flush/1.0.5: - resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.4 - dev: true - - /minipass-pipeline/1.2.4: - resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} - engines: {node: '>=8'} - dependencies: - minipass: 3.3.4 - dev: true - - /minipass/3.3.4: - resolution: {integrity: sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==} - engines: {node: '>=8'} - dependencies: - yallist: 4.0.0 - dev: true - - /minizlib/2.1.2: - resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.4 - yallist: 4.0.0 - dev: true - - 
/mississippi/3.0.0: - resolution: {integrity: sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA==} - engines: {node: '>=4.0.0'} - dependencies: - concat-stream: 1.6.2 - duplexify: 3.7.1 - end-of-stream: 1.4.4 - flush-write-stream: 1.1.1 - from2: 2.3.0 - parallel-transform: 1.2.0 - pump: 3.0.0 - pumpify: 1.5.1 - stream-each: 1.2.3 - through2: 2.0.5 - dev: true - - /mixin-deep/1.3.2: + /mixin-deep@1.3.2: resolution: {integrity: sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==} engines: {node: '>=0.10.0'} dependencies: @@ -14692,80 +8274,65 @@ packages: is-extendable: 1.0.1 dev: true - /mkdirp/0.5.5: + /mkdirp@0.5.5: resolution: {integrity: sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==} hasBin: true dependencies: minimist: 1.2.5 dev: true - /mkdirp/1.0.4: + /mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} engines: {node: '>=10'} hasBin: true dev: true - /module-details-from-path/1.0.3: + /module-details-from-path@1.0.3: resolution: {integrity: sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=} dev: true - /moo/0.5.1: + /moo@0.5.1: resolution: {integrity: sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w==} dev: true - /move-concurrently/1.0.1: - resolution: {integrity: sha512-hdrFxZOycD/g6A6SoI2bB5NA/5NEqD0569+S47WZhPvm46sD50ZHdYaFmnua5lndde9rCHGjmfK7Z8BuCt/PcQ==} - dependencies: - aproba: 1.2.0 - copy-concurrently: 1.0.5 - fs-write-stream-atomic: 1.0.10 - mkdirp: 0.5.5 - rimraf: 2.7.1 - run-queue: 1.0.3 - dev: true - - /mri/1.1.6: + /mri@1.1.6: resolution: {integrity: sha512-oi1b3MfbyGa7FJMP9GmLTttni5JoICpYBRlq+x5V16fZbLsnL9N3wFqqIm/nIG43FjUFkFh9Epzp/kzUGUnJxQ==} engines: {node: '>=4'} dev: true - /ms/2.0.0: + /ms@2.0.0: resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} dev: true - /ms/2.1.1: - resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==} - dev: true - - /ms/2.1.2: + /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} dev: true - /ms/2.1.3: + /ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} dev: true - /mustache/4.2.0: + /mustache@4.2.0: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true dev: true - /mute-stream/0.0.8: + /mute-stream@0.0.8: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} dev: true - /nan/2.14.2: + /nan@2.14.2: resolution: {integrity: sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==} dev: true optional: true - /nanoid/3.3.1: + /nanoid@3.3.1: resolution: {integrity: sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true dev: true - /nanomatch/1.2.13: + /nanomatch@1.2.13: resolution: {integrity: sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==} engines: {node: '>=0.10.0'} dependencies: @@ -14784,11 +8351,11 @@ packages: - supports-color dev: true - 
/natural-compare/1.4.0: + /natural-compare@1.4.0: resolution: {integrity: sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=} dev: true - /nearley/2.19.8: + /nearley@2.19.8: resolution: {integrity: sha512-te4JCrxbzLvVqUWfVOASgsbkWaFvJ6JlHTRQzfnU862bnyHGHEGX2s5OYvLAS4NDPmQvRtC2tBdV6THy6xHFyQ==} hasBin: true dependencies: @@ -14799,94 +8366,31 @@ packages: semver: 5.7.1 dev: true - /negotiator/0.6.3: - resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} - engines: {node: '>= 0.6'} - dev: true - - /neo-async/2.6.2: + /neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} dev: true - /nested-error-stacks/2.1.1: - resolution: {integrity: sha512-9iN1ka/9zmX1ZvLV9ewJYEk9h7RyRRtqdK0woXcqohu8EWIerfPUjYJPg0ULy0UqP7cslmdGc8xKDJcojlKiaw==} - dev: true - - /nice-try/1.0.5: + /nice-try@1.0.5: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true - /no-case/3.0.4: - resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} - dependencies: - lower-case: 2.0.2 - tslib: 2.4.0 - dev: true - - /node-dir/0.1.17: - resolution: {integrity: sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==} - engines: {node: '>= 0.10.5'} - dependencies: - minimatch: 3.1.2 - dev: true - - /node-fetch/1.7.3: + /node-fetch@1.7.3: resolution: {integrity: sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==} dependencies: encoding: 0.1.13 is-stream: 1.1.0 dev: true - /node-fetch/2.6.7: - resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: true - - /node-int64/0.4.0: + /node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true - /node-libs-browser/2.2.1: - resolution: {integrity: sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==} - dependencies: - assert: 1.5.0 - browserify-zlib: 0.2.0 - buffer: 4.9.2 - console-browserify: 1.2.0 - constants-browserify: 1.0.0 - crypto-browserify: 3.12.0 - domain-browser: 1.2.0 - events: 3.3.0 - https-browserify: 1.0.0 - os-browserify: 0.3.0 - path-browserify: 0.0.1 - process: 0.11.10 - punycode: 1.4.1 - querystring-es3: 0.2.1 - readable-stream: 2.3.7 - stream-browserify: 2.0.2 - stream-http: 2.8.3 - string_decoder: 1.3.0 - timers-browserify: 2.0.12 - tty-browserify: 0.0.0 - url: 0.11.0 - util: 0.11.1 - vm-browserify: 1.1.2 - dev: true - - /node-modules-regexp/1.0.0: + /node-modules-regexp@1.0.0: resolution: {integrity: sha512-JMaRS9L4wSRIR+6PTVEikTrq/lMGEZR43a48ETeilY0Q0iMwVnccMFrUM1k+tNzmYuIU0Vh710bCUqHX+/+ctQ==} engines: {node: '>=0.10.0'} dev: true - /node-notifier/5.4.3: + /node-notifier@5.4.3: resolution: {integrity: sha512-M4UBGcs4jeOK9CjTsYwkvH6/MzuUmGCyTW+kCY7uO+1ZVr0+FHGdPdIf5CCLqAaxnRrWidyoQlNkMIIVwbKB8Q==} dependencies: growly: 1.3.0 @@ -14896,15 +8400,15 @@ packages: which: 1.3.1 dev: true - /node-releases/2.0.5: + /node-releases@2.0.5: resolution: {integrity: sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q==} dev: true 
- /node-releases/2.0.6: + /node-releases@2.0.6: resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} dev: true - /normalize-package-data/2.5.0: + /normalize-package-data@2.5.0: resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} dependencies: hosted-git-info: 2.8.8 @@ -14913,38 +8417,38 @@ packages: validate-npm-package-license: 3.0.4 dev: true - /normalize-package-data/3.0.3: + /normalize-package-data@3.0.3: resolution: {integrity: sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==} engines: {node: '>=10'} dependencies: hosted-git-info: 4.1.0 is-core-module: 2.8.1 - semver: 7.3.5 + semver: 7.3.8 validate-npm-package-license: 3.0.4 dev: true - /normalize-path/2.1.1: + /normalize-path@2.1.1: resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==} engines: {node: '>=0.10.0'} dependencies: remove-trailing-separator: 1.1.0 dev: true - /normalize-path/3.0.0: + /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} dev: true - /normalize-range/0.1.2: + /normalize-range@0.1.2: resolution: {integrity: sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=} engines: {node: '>=0.10.0'} dev: true - /normalize-selector/0.2.0: + /normalize-selector@0.2.0: resolution: {integrity: sha1-0LFF62kRicY6eNIB3E/bEpPvDAM=} dev: true - /normalize-url/1.9.1: + /normalize-url@1.9.1: resolution: {integrity: sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=} engines: {node: '>=4'} dependencies: @@ -14954,12 +8458,12 @@ packages: sort-keys: 1.1.2 dev: true - /normalize-url/3.3.0: + /normalize-url@3.3.0: resolution: {integrity: sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==} engines: {node: '>=6'} dev: true - /npm-run-all/4.1.5: + /npm-run-all@4.1.5: resolution: {integrity: sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==} engines: {node: '>= 4'} hasBin: true @@ -14975,63 +8479,47 @@ packages: string.prototype.padend: 3.1.1 dev: true - /npm-run-path/2.0.2: + /npm-run-path@2.0.2: resolution: {integrity: sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==} engines: {node: '>=4'} dependencies: path-key: 2.0.1 dev: true - /npm-run-path/4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - dependencies: - path-key: 3.1.1 - dev: true - - /npmlog/5.0.1: - resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==} - dependencies: - are-we-there-yet: 2.0.0 - console-control-strings: 1.1.0 - gauge: 3.0.2 - set-blocking: 2.0.0 - dev: true - - /nth-check/1.0.2: + /nth-check@1.0.2: resolution: {integrity: sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==} dependencies: boolbase: 1.0.0 dev: true - /nth-check/2.1.1: + /nth-check@2.1.1: resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} dependencies: boolbase: 1.0.0 dev: true - /num2fraction/1.2.2: + /num2fraction@1.2.2: resolution: {integrity: sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=} dev: true - /number-is-nan/1.0.1: + /number-is-nan@1.0.1: resolution: 
{integrity: sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=} engines: {node: '>=0.10.0'} dev: true - /nwsapi/2.2.0: + /nwsapi@2.2.0: resolution: {integrity: sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==} dev: true - /oauth-sign/0.9.0: + /oauth-sign@0.9.0: resolution: {integrity: sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==} dev: true - /object-assign/4.1.1: + /object-assign@4.1.1: resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} engines: {node: '>=0.10.0'} - /object-copy/0.1.0: + /object-copy@0.1.0: resolution: {integrity: sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==} engines: {node: '>=0.10.0'} dependencies: @@ -15040,15 +8528,11 @@ packages: kind-of: 3.2.2 dev: true - /object-inspect/1.12.0: + /object-inspect@1.12.0: resolution: {integrity: sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==} dev: true - /object-inspect/1.12.2: - resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} - dev: true - - /object-is/1.1.3: + /object-is@1.1.3: resolution: {integrity: sha512-teyqLvFWzLkq5B9ki8FVWA902UER2qkxmdA4nLf+wjOLAWgxzCWZNCxpDq9MvE8MmhWNr+I8w3BN49Vx36Y6Xg==} engines: {node: '>= 0.4'} dependencies: @@ -15056,19 +8540,19 @@ packages: es-abstract: 1.19.1 dev: true - /object-keys/1.1.1: + /object-keys@1.1.1: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} dev: true - /object-visit/1.0.1: + /object-visit@1.0.1: resolution: {integrity: sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==} engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 dev: true - /object.assign/4.1.2: + /object.assign@4.1.2: resolution: {integrity: sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==} engines: {node: '>= 0.4'} dependencies: @@ -15078,17 +8562,7 @@ packages: object-keys: 1.1.1 dev: true - /object.assign/4.1.4: - resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - has-symbols: 1.0.3 - object-keys: 1.1.1 - dev: true - - /object.entries/1.1.5: + /object.entries@1.1.5: resolution: {integrity: sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==} engines: {node: '>= 0.4'} dependencies: @@ -15097,7 +8571,7 @@ packages: es-abstract: 1.19.1 dev: true - /object.fromentries/2.0.5: + /object.fromentries@2.0.5: resolution: {integrity: sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==} engines: {node: '>= 0.4'} dependencies: @@ -15106,7 +8580,7 @@ packages: es-abstract: 1.19.1 dev: true - /object.getownpropertydescriptors/2.1.0: + /object.getownpropertydescriptors@2.1.0: resolution: {integrity: sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==} engines: {node: '>= 0.8'} dependencies: @@ -15114,31 +8588,21 @@ packages: es-abstract: 1.19.1 dev: true - /object.getownpropertydescriptors/2.1.4: - resolution: {integrity: sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ==} - engines: {node: 
'>= 0.8'} - dependencies: - array.prototype.reduce: 1.0.4 - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: true - - /object.hasown/1.1.0: + /object.hasown@1.1.0: resolution: {integrity: sha512-MhjYRfj3GBlhSkDHo6QmvgjRLXQ2zndabdf3nX0yTyZK9rPfxb6uRpAac8HXNLy1GpqWtZ81Qh4v3uOls2sRAg==} dependencies: define-properties: 1.1.3 es-abstract: 1.19.1 dev: true - /object.pick/1.3.0: + /object.pick@1.3.0: resolution: {integrity: sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==} engines: {node: '>=0.10.0'} dependencies: isobject: 3.0.1 dev: true - /object.values/1.1.5: + /object.values@1.1.5: resolution: {integrity: sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==} engines: {node: '>= 0.4'} dependencies: @@ -15147,59 +8611,26 @@ packages: es-abstract: 1.19.1 dev: true - /objectorarray/1.0.5: - resolution: {integrity: sha512-eJJDYkhJFFbBBAxeh8xW+weHlkI28n2ZdQV/J/DNfWfSKlGEf2xcfAbZTv3riEXHAhL9SVOTs2pRmXiSTf78xg==} - dev: true - - /on-finished/2.4.1: - resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} - engines: {node: '>= 0.8'} - dependencies: - ee-first: 1.1.1 - dev: true - - /on-headers/1.0.2: - resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==} - engines: {node: '>= 0.8'} - dev: true - - /once/1.4.0: + /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} dependencies: wrappy: 1.0.2 dev: true - /one-time/1.0.0: + /one-time@1.0.0: resolution: {integrity: sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==} dependencies: fn.name: 1.1.0 dev: true - /onetime/5.1.2: + /onetime@5.1.2: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} dependencies: mimic-fn: 2.1.0 dev: true - /open/7.4.2: - resolution: {integrity: sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==} - engines: {node: '>=8'} - dependencies: - is-docker: 2.2.1 - is-wsl: 2.2.0 - dev: true - - /open/8.4.0: - resolution: {integrity: sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==} - engines: {node: '>=12'} - dependencies: - define-lazy-prop: 2.0.0 - is-docker: 2.2.1 - is-wsl: 2.2.0 - dev: true - - /optionator/0.8.3: + /optionator@0.8.3: resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==} engines: {node: '>= 0.8.0'} dependencies: @@ -15211,109 +8642,45 @@ packages: word-wrap: 1.2.3 dev: true - /os-browserify/0.3.0: - resolution: {integrity: sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==} - dev: true - - /os-homedir/1.0.2: - resolution: {integrity: sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /os-tmpdir/1.0.2: + /os-tmpdir@1.0.2: resolution: {integrity: sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=} engines: {node: '>=0.10.0'} dev: true - /p-all/2.1.0: - resolution: {integrity: sha512-HbZxz5FONzz/z2gJfk6bFca0BCiSRF8jU3yCsWOen/vR6lZjfPOu/e7L3uFzTW1i0H8TlC3vqQstEJPQL4/uLA==} - engines: {node: '>=6'} - dependencies: - p-map: 2.1.0 - dev: true - - /p-each-series/1.0.0: + 
/p-each-series@1.0.0: resolution: {integrity: sha512-J/e9xiZZQNrt+958FFzJ+auItsBGq+UrQ7nE89AUP7UOTtjHnkISANXLdayhVzh538UnLMCSlf13lFfRIAKQOA==} engines: {node: '>=4'} dependencies: p-reduce: 1.0.0 dev: true - /p-event/4.2.0: - resolution: {integrity: sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==} - engines: {node: '>=8'} - dependencies: - p-timeout: 3.2.0 - dev: true - - /p-filter/2.1.0: - resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} - engines: {node: '>=8'} - dependencies: - p-map: 2.1.0 - dev: true - - /p-finally/1.0.0: + /p-finally@1.0.0: resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} engines: {node: '>=4'} dev: true - /p-limit/2.3.0: + /p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} dependencies: p-try: 2.2.0 dev: true - /p-limit/3.1.0: - resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} - engines: {node: '>=10'} - dependencies: - yocto-queue: 0.1.0 - dev: true - - /p-locate/3.0.0: + /p-locate@3.0.0: resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==} engines: {node: '>=6'} dependencies: - p-limit: 2.3.0 - dev: true - - /p-locate/4.1.0: - resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} - engines: {node: '>=8'} - dependencies: - p-limit: 2.3.0 - dev: true - - /p-locate/5.0.0: - resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} - engines: {node: '>=10'} - dependencies: - p-limit: 3.1.0 - dev: true - - /p-map/2.1.0: - resolution: {integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==} - engines: {node: '>=6'} - dev: true - - /p-map/3.0.0: - resolution: {integrity: sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==} - engines: {node: '>=8'} - dependencies: - aggregate-error: 3.1.0 + p-limit: 2.3.0 dev: true - /p-map/4.0.0: - resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} - engines: {node: '>=10'} + /p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} dependencies: - aggregate-error: 3.1.0 + p-limit: 2.3.0 dev: true - /p-queue/6.6.2: + /p-queue@6.6.2: resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} engines: {node: '>=8'} dependencies: @@ -15321,60 +8688,31 @@ packages: p-timeout: 3.2.0 dev: true - /p-reduce/1.0.0: + /p-reduce@1.0.0: resolution: {integrity: sha512-3Tx1T3oM1xO/Y8Gj0sWyE78EIJZ+t+aEmXUdvQgvGmSMri7aPTHoovbXEreWKkL5j21Er60XAWLTzKbAKYOujQ==} engines: {node: '>=4'} dev: true - /p-timeout/3.2.0: + /p-timeout@3.2.0: resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} engines: {node: '>=8'} dependencies: p-finally: 1.0.0 dev: true - /p-try/2.2.0: + /p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} 
engines: {node: '>=6'} dev: true - /pako/1.0.11: - resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==} - dev: true - - /parallel-transform/1.2.0: - resolution: {integrity: sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg==} - dependencies: - cyclist: 1.0.1 - inherits: 2.0.4 - readable-stream: 2.3.7 - dev: true - - /param-case/3.0.4: - resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} - dependencies: - dot-case: 3.0.4 - tslib: 2.4.0 - dev: true - - /parent-module/1.0.1: + /parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} dependencies: callsites: 3.1.0 dev: true - /parse-asn1/5.1.6: - resolution: {integrity: sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==} - dependencies: - asn1.js: 5.4.1 - browserify-aes: 1.2.0 - evp_bytestokey: 1.0.3 - pbkdf2: 3.1.2 - safe-buffer: 5.2.1 - dev: true - - /parse-entities/2.0.0: + /parse-entities@2.0.0: resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==} dependencies: character-entities: 1.2.4 @@ -15383,16 +8721,9 @@ packages: is-alphanumerical: 1.0.4 is-decimal: 1.0.4 is-hexadecimal: 1.0.4 + dev: false - /parse-json/2.2.0: - resolution: {integrity: sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==} - engines: {node: '>=0.10.0'} - dependencies: - error-ex: 1.3.2 - dev: true - optional: true - - /parse-json/4.0.0: + /parse-json@4.0.0: resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} engines: {node: '>=4'} dependencies: @@ -15400,7 +8731,7 @@ packages: json-parse-better-errors: 1.0.2 dev: true - /parse-json/5.1.0: + /parse-json@5.1.0: resolution: {integrity: sha512-+mi/lmVVNKFNVyLXV31ERiy2CY5E1/F6QtJFEzoChPRwwngMNXRDQ9GJ5WdE2Z2P4AujsOi0/+2qHID68KwfIQ==} engines: {node: '>=8'} dependencies: @@ -15410,237 +8741,152 @@ packages: lines-and-columns: 1.1.6 dev: true - /parse5/3.0.3: + /parse5@3.0.3: resolution: {integrity: sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==} dependencies: '@types/node': 18.11.9 dev: true - /parse5/4.0.0: + /parse5@4.0.0: resolution: {integrity: sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA==} dev: true - /parse5/5.1.0: + /parse5@5.1.0: resolution: {integrity: sha512-fxNG2sQjHvlVAYmzBZS9YlDp6PTSSDwa98vkD4QgVDDCAo84z5X1t5XyJQ62ImdLXx5NdIIfihey6xpum9/gRQ==} dev: true - /parse5/6.0.1: - resolution: {integrity: sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==} - dev: true - - /parseurl/1.3.3: - resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} - engines: {node: '>= 0.8'} - dev: true - - /pascal-case/3.1.2: - resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} - dependencies: - no-case: 3.0.4 - tslib: 2.4.0 - dev: true - - /pascalcase/0.1.1: + /pascalcase@0.1.1: resolution: {integrity: sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==} engines: {node: '>=0.10.0'} dev: true - 
/path-browserify/0.0.1: - resolution: {integrity: sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==} - dev: true - - /path-dirname/1.0.2: - resolution: {integrity: sha512-ALzNPpyNq9AqXMBjeymIjFDAkAFH06mHJH/cSBHAgU0s4vfpBn6b2nf8tiRLvagKD8RbTpq2FKTBg7cl9l3c7Q==} - dev: true - - /path-exists/2.1.0: - resolution: {integrity: sha512-yTltuKuhtNeFJKa1PiRzfLAU5182q1y4Eb4XCJ3PBqyzEDkAZRzBrKKBct682ls9reBVHf9udYLN5Nd+K1B9BQ==} - engines: {node: '>=0.10.0'} - dependencies: - pinkie-promise: 2.0.1 - dev: true - optional: true - - /path-exists/3.0.0: + /path-exists@3.0.0: resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==} engines: {node: '>=4'} dev: true - /path-exists/4.0.0: + /path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} dev: true - /path-is-absolute/1.0.1: + /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} engines: {node: '>=0.10.0'} dev: true - /path-key/2.0.1: + /path-key@2.0.1: resolution: {integrity: sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=} engines: {node: '>=4'} dev: true - /path-key/3.1.1: + /path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} dev: true - /path-parse/1.0.6: + /path-parse@1.0.6: resolution: {integrity: sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==} dev: true - /path-to-regexp/0.1.7: - resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} - dev: true - - /path-type/1.1.0: - resolution: {integrity: sha512-S4eENJz1pkiQn9Znv33Q+deTOKmbl+jj1Fl+qiP/vYezj+S8x+J3Uo0ISrx/QoEvIlOaDWJhPaRd1flJ9HXZqg==} - engines: {node: '>=0.10.0'} - dependencies: - graceful-fs: 4.2.10 - pify: 2.3.0 - pinkie-promise: 2.0.1 - dev: true - optional: true - - /path-type/3.0.0: + /path-type@3.0.0: resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==} engines: {node: '>=4'} dependencies: pify: 3.0.0 dev: true - /path-type/4.0.0: + /path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} dev: true - /pbkdf2/3.1.2: - resolution: {integrity: sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==} - engines: {node: '>=0.12'} - dependencies: - create-hash: 1.2.0 - create-hmac: 1.1.7 - ripemd160: 2.0.2 - safe-buffer: 5.2.1 - sha.js: 2.4.11 - dev: true - - /performance-now/2.1.0: + /performance-now@2.1.0: resolution: {integrity: sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=} dev: true - /picocolors/0.2.1: + /picocolors@0.2.1: resolution: {integrity: sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==} dev: true - /picocolors/1.0.0: + /picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} dev: true - /picomatch/2.3.1: + /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} dev: true - /pidtree/0.3.1: + /pidtree@0.3.1: resolution: {integrity: 
sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} engines: {node: '>=0.10'} hasBin: true dev: true - /pify/2.3.0: + /pify@2.3.0: resolution: {integrity: sha1-7RQaasBDqEnqWISY59yosVMw6Qw=} engines: {node: '>=0.10.0'} dev: true - /pify/3.0.0: + /pify@3.0.0: resolution: {integrity: sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=} engines: {node: '>=4'} dev: true - /pify/4.0.1: + /pify@4.0.1: resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} engines: {node: '>=6'} dev: true - /pify/5.0.0: + /pify@5.0.0: resolution: {integrity: sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==} engines: {node: '>=10'} dev: true - /pinkie-promise/2.0.1: + /pinkie-promise@2.0.1: resolution: {integrity: sha1-ITXW36ejWMBprJsXh3YogihFD/o=} engines: {node: '>=0.10.0'} dependencies: pinkie: 2.0.4 dev: true - /pinkie/2.0.4: + /pinkie@2.0.4: resolution: {integrity: sha1-clVrgM+g1IqXToDnckjoDtT3+HA=} engines: {node: '>=0.10.0'} dev: true - /pirates/4.0.1: + /pirates@4.0.1: resolution: {integrity: sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==} engines: {node: '>= 6'} dependencies: node-modules-regexp: 1.0.0 dev: true - /pirates/4.0.5: - resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} - engines: {node: '>= 6'} - dev: true - - /pkg-dir/3.0.0: + /pkg-dir@3.0.0: resolution: {integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==} engines: {node: '>=6'} dependencies: find-up: 3.0.0 dev: true - /pkg-dir/4.2.0: + /pkg-dir@4.2.0: resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} engines: {node: '>=8'} dependencies: find-up: 4.1.0 dev: true - /pkg-dir/5.0.0: - resolution: {integrity: sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==} - engines: {node: '>=10'} - dependencies: - find-up: 5.0.0 - dev: true - - /pn/1.1.0: + /pn@1.1.0: resolution: {integrity: sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA==} dev: true - /pnp-webpack-plugin/1.6.4_typescript@4.7.4: - resolution: {integrity: sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==} - engines: {node: '>=6'} - dependencies: - ts-pnp: 1.2.0_typescript@4.7.4 - transitivePeerDependencies: - - typescript - dev: true - - /polished/4.2.2: - resolution: {integrity: sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==} - engines: {node: '>=10'} - dependencies: - '@babel/runtime': 7.18.6 - dev: true - - /posix-character-classes/0.1.1: + /posix-character-classes@0.1.1: resolution: {integrity: sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==} engines: {node: '>=0.10.0'} dev: true - /postcss-calc/7.0.5: + /postcss-calc@7.0.5: resolution: {integrity: sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg==} dependencies: postcss: 7.0.39 @@ -15648,7 +8894,7 @@ packages: postcss-value-parser: 4.2.0 dev: true - /postcss-colormin/4.0.3: + /postcss-colormin@4.0.3: resolution: {integrity: sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw==} engines: {node: '>=6.9.0'} dependencies: @@ -15659,7 +8905,7 @@ 
packages: postcss-value-parser: 3.3.1 dev: true - /postcss-convert-values/4.0.1: + /postcss-convert-values@4.0.1: resolution: {integrity: sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ==} engines: {node: '>=6.9.0'} dependencies: @@ -15667,41 +8913,35 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-discard-comments/4.0.2: + /postcss-discard-comments@4.0.2: resolution: {integrity: sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /postcss-discard-duplicates/4.0.2: + /postcss-discard-duplicates@4.0.2: resolution: {integrity: sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /postcss-discard-empty/4.0.1: + /postcss-discard-empty@4.0.1: resolution: {integrity: sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /postcss-discard-overridden/4.0.1: + /postcss-discard-overridden@4.0.1: resolution: {integrity: sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /postcss-flexbugs-fixes/4.2.1: - resolution: {integrity: sha512-9SiofaZ9CWpQWxOwRh1b/r85KD5y7GgvsNt1056k6OYLvWUun0czCvogfJgylC22uJTwW1KzY3Gz65NZRlvoiQ==} - dependencies: - postcss: 7.0.39 - dev: true - - /postcss-load-config/2.1.2: + /postcss-load-config@2.1.2: resolution: {integrity: sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw==} engines: {node: '>= 4'} dependencies: @@ -15709,48 +8949,11 @@ packages: import-cwd: 2.1.0 dev: true - /postcss-loader/4.3.0_gzaxsinx64nntyd3vmdqwl7coe: - resolution: {integrity: sha512-M/dSoIiNDOo8Rk0mUqoj4kpGq91gcxCfb9PoyZVdZ76/AuhxylHDYZblNE8o+EQ9AMSASeMFEKxZf5aU6wlx1Q==} - engines: {node: '>= 10.13.0'} - peerDependencies: - postcss: ^7.0.0 || ^8.0.1 - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - cosmiconfig: 7.0.1 - klona: 2.0.5 - loader-utils: 2.0.2 - postcss: 7.0.39 - schema-utils: 3.1.1 - semver: 7.3.5 - webpack: 4.46.0 - dev: true - - /postcss-loader/4.3.0_postcss@7.0.39: - resolution: {integrity: sha512-M/dSoIiNDOo8Rk0mUqoj4kpGq91gcxCfb9PoyZVdZ76/AuhxylHDYZblNE8o+EQ9AMSASeMFEKxZf5aU6wlx1Q==} - engines: {node: '>= 10.13.0'} - peerDependencies: - postcss: ^7.0.0 || ^8.0.1 - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - cosmiconfig: 7.0.1 - klona: 2.0.5 - loader-utils: 2.0.2 - postcss: 7.0.39 - schema-utils: 3.1.1 - semver: 7.3.5 - dev: true - - /postcss-media-query-parser/0.2.3: + /postcss-media-query-parser@0.2.3: resolution: {integrity: sha1-J7Ocb02U+Bsac7j3Y1HGCeXO8kQ=} dev: true - /postcss-merge-longhand/4.0.11: + /postcss-merge-longhand@4.0.11: resolution: {integrity: sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw==} engines: {node: '>=6.9.0'} dependencies: @@ -15760,7 +8963,7 @@ packages: stylehacks: 4.0.3 dev: true - /postcss-merge-rules/4.0.3: + /postcss-merge-rules@4.0.3: resolution: {integrity: sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ==} engines: {node: '>=6.9.0'} dependencies: @@ -15772,7 +8975,7 @@ packages: vendors: 1.0.4 dev: true - 
/postcss-minify-font-values/4.0.2: + /postcss-minify-font-values@4.0.2: resolution: {integrity: sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg==} engines: {node: '>=6.9.0'} dependencies: @@ -15780,7 +8983,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-minify-gradients/4.0.2: + /postcss-minify-gradients@4.0.2: resolution: {integrity: sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q==} engines: {node: '>=6.9.0'} dependencies: @@ -15790,7 +8993,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-minify-params/4.0.2: + /postcss-minify-params@4.0.2: resolution: {integrity: sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg==} engines: {node: '>=6.9.0'} dependencies: @@ -15802,7 +9005,7 @@ packages: uniqs: 2.0.0 dev: true - /postcss-minify-selectors/4.0.2: + /postcss-minify-selectors@4.0.2: resolution: {integrity: sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g==} engines: {node: '>=6.9.0'} dependencies: @@ -15812,27 +9015,27 @@ packages: postcss-selector-parser: 3.1.2 dev: true - /postcss-modules-extract-imports/1.1.0: + /postcss-modules-extract-imports@1.1.0: resolution: {integrity: sha512-zF9+UIEvtpeqMGxhpeT9XaIevQSrBBCz9fi7SwfkmjVacsSj8DY5eFVgn+wY8I9vvdDDwK5xC8Myq4UkoLFIkA==} dependencies: postcss: 6.0.1 dev: true - /postcss-modules-extract-imports/2.0.0: + /postcss-modules-extract-imports@2.0.0: resolution: {integrity: sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==} engines: {node: '>= 6'} dependencies: postcss: 7.0.39 dev: true - /postcss-modules-local-by-default/1.2.0: + /postcss-modules-local-by-default@1.2.0: resolution: {integrity: sha512-X4cquUPIaAd86raVrBwO8fwRfkIdbwFu7CTfEOjiZQHVQwlHRSkTgH5NLDmMm5+1hQO8u6dZ+TOOJDbay1hYpA==} dependencies: css-selector-tokenizer: 0.7.3 postcss: 6.0.1 dev: true - /postcss-modules-local-by-default/3.0.3: + /postcss-modules-local-by-default@3.0.3: resolution: {integrity: sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==} engines: {node: '>= 6'} dependencies: @@ -15842,14 +9045,14 @@ packages: postcss-value-parser: 4.2.0 dev: true - /postcss-modules-scope/1.1.0: + /postcss-modules-scope@1.1.0: resolution: {integrity: sha512-LTYwnA4C1He1BKZXIx1CYiHixdSe9LWYVKadq9lK5aCCMkoOkFyZ7aigt+srfjlRplJY3gIol6KUNefdMQJdlw==} dependencies: css-selector-tokenizer: 0.7.3 postcss: 6.0.1 dev: true - /postcss-modules-scope/2.2.0: + /postcss-modules-scope@2.2.0: resolution: {integrity: sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==} engines: {node: '>= 6'} dependencies: @@ -15857,21 +9060,21 @@ packages: postcss-selector-parser: 6.0.9 dev: true - /postcss-modules-values/1.3.0: + /postcss-modules-values@1.3.0: resolution: {integrity: sha512-i7IFaR9hlQ6/0UgFuqM6YWaCfA1Ej8WMg8A5DggnH1UGKJvTV/ugqq/KaULixzzOi3T/tF6ClBXcHGCzdd5unA==} dependencies: icss-replace-symbols: 1.1.0 postcss: 6.0.1 dev: true - /postcss-modules-values/3.0.0: + /postcss-modules-values@3.0.0: resolution: {integrity: sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==} dependencies: icss-utils: 4.1.1 postcss: 7.0.39 dev: true - /postcss-modules/2.0.0: + /postcss-modules@2.0.0: resolution: {integrity: sha512-eqp+Bva+U2cwQO7dECJ8/V+X+uH1HduNeITB0CPPFAu6d/8LKQ32/j+p9rQ2YL1QytVcrNU0X+fBqgGmQIA1Rw==} dependencies: 
css-modules-loader-core: 1.1.0 @@ -15881,14 +9084,14 @@ packages: string-hash: 1.1.3 dev: true - /postcss-normalize-charset/4.0.1: + /postcss-normalize-charset@4.0.1: resolution: {integrity: sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g==} engines: {node: '>=6.9.0'} dependencies: postcss: 7.0.39 dev: true - /postcss-normalize-display-values/4.0.2: + /postcss-normalize-display-values@4.0.2: resolution: {integrity: sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ==} engines: {node: '>=6.9.0'} dependencies: @@ -15897,7 +9100,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-positions/4.0.2: + /postcss-normalize-positions@4.0.2: resolution: {integrity: sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA==} engines: {node: '>=6.9.0'} dependencies: @@ -15907,7 +9110,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-repeat-style/4.0.2: + /postcss-normalize-repeat-style@4.0.2: resolution: {integrity: sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q==} engines: {node: '>=6.9.0'} dependencies: @@ -15917,7 +9120,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-string/4.0.2: + /postcss-normalize-string@4.0.2: resolution: {integrity: sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA==} engines: {node: '>=6.9.0'} dependencies: @@ -15926,7 +9129,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-timing-functions/4.0.2: + /postcss-normalize-timing-functions@4.0.2: resolution: {integrity: sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A==} engines: {node: '>=6.9.0'} dependencies: @@ -15935,7 +9138,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-unicode/4.0.1: + /postcss-normalize-unicode@4.0.1: resolution: {integrity: sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg==} engines: {node: '>=6.9.0'} dependencies: @@ -15944,7 +9147,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-url/4.0.1: + /postcss-normalize-url@4.0.1: resolution: {integrity: sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA==} engines: {node: '>=6.9.0'} dependencies: @@ -15954,7 +9157,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-normalize-whitespace/4.0.2: + /postcss-normalize-whitespace@4.0.2: resolution: {integrity: sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA==} engines: {node: '>=6.9.0'} dependencies: @@ -15962,7 +9165,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-ordered-values/4.1.2: + /postcss-ordered-values@4.1.2: resolution: {integrity: sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw==} engines: {node: '>=6.9.0'} dependencies: @@ -15971,7 +9174,7 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-reduce-initial/4.0.3: + /postcss-reduce-initial@4.0.3: resolution: {integrity: sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA==} engines: {node: '>=6.9.0'} dependencies: @@ -15981,7 +9184,7 @@ packages: postcss: 7.0.39 dev: true - /postcss-reduce-transforms/4.0.2: + /postcss-reduce-transforms@4.0.2: resolution: {integrity: 
sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg==} engines: {node: '>=6.9.0'} dependencies: @@ -15991,11 +9194,11 @@ packages: postcss-value-parser: 3.3.1 dev: true - /postcss-resolve-nested-selector/0.1.1: + /postcss-resolve-nested-selector@0.1.1: resolution: {integrity: sha1-Kcy8fDfe36wwTp//C/FZaz9qDk4=} dev: true - /postcss-safe-parser/6.0.0_postcss@8.4.8: + /postcss-safe-parser@6.0.0(postcss@8.4.8): resolution: {integrity: sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==} engines: {node: '>=12.0'} peerDependencies: @@ -16004,7 +9207,7 @@ packages: postcss: 8.4.8 dev: true - /postcss-scss/4.0.3_postcss@8.4.8: + /postcss-scss@4.0.3(postcss@8.4.8): resolution: {integrity: sha512-j4KxzWovfdHsyxwl1BxkUal/O4uirvHgdzMKS1aWJBAV0qh2qj5qAZqpeBfVUYGWv+4iK9Az7SPyZ4fyNju1uA==} engines: {node: '>=12.0'} peerDependencies: @@ -16013,7 +9216,7 @@ packages: postcss: 8.4.8 dev: true - /postcss-selector-parser/3.1.2: + /postcss-selector-parser@3.1.2: resolution: {integrity: sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==} engines: {node: '>=8'} dependencies: @@ -16022,7 +9225,7 @@ packages: uniq: 1.0.1 dev: true - /postcss-selector-parser/6.0.9: + /postcss-selector-parser@6.0.9: resolution: {integrity: sha512-UO3SgnZOVTwu4kyLR22UQ1xZh086RyNZppb7lLAKBFK8a32ttG5i87Y/P3+2bRSjZNyJ1B7hfFNo273tKe9YxQ==} engines: {node: '>=4'} dependencies: @@ -16030,7 +9233,7 @@ packages: util-deprecate: 1.0.2 dev: true - /postcss-svgo/4.0.2: + /postcss-svgo@4.0.2: resolution: {integrity: sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw==} engines: {node: '>=6.9.0'} dependencies: @@ -16040,7 +9243,7 @@ packages: svgo: 1.3.2 dev: true - /postcss-unique-selectors/4.0.1: + /postcss-unique-selectors@4.0.1: resolution: {integrity: sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg==} engines: {node: '>=6.9.0'} dependencies: @@ -16049,15 +9252,15 @@ packages: uniqs: 2.0.0 dev: true - /postcss-value-parser/3.3.1: + /postcss-value-parser@3.3.1: resolution: {integrity: sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==} dev: true - /postcss-value-parser/4.2.0: + /postcss-value-parser@4.2.0: resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} dev: true - /postcss/6.0.1: + /postcss@6.0.1: resolution: {integrity: sha512-VbGX1LQgQbf9l3cZ3qbUuC3hGqIEOGQFHAEHQ/Diaeo0yLgpgK5Rb8J+OcamIfQ9PbAU/fzBjVtQX3AhJHUvZw==} engines: {node: '>=4.0.0'} dependencies: @@ -16066,16 +9269,7 @@ packages: supports-color: 3.2.3 dev: true - /postcss/7.0.35: - resolution: {integrity: sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg==} - engines: {node: '>=6.0.0'} - dependencies: - chalk: 2.4.2 - source-map: 0.6.1 - supports-color: 6.1.0 - dev: true - - /postcss/7.0.39: + /postcss@7.0.39: resolution: {integrity: sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==} engines: {node: '>=6.0.0'} dependencies: @@ -16083,7 +9277,7 @@ packages: source-map: 0.6.1 dev: true - /postcss/8.4.8: + /postcss@8.4.8: resolution: {integrity: sha512-2tXEqGxrjvAO6U+CJzDL2Fk2kPHTv1jQsYkSoMeOis2SsYaXRO2COxTdQp99cYvif9JTXaAk9lYGc3VhJt7JPQ==} engines: {node: ^10 || ^12 || >=14} dependencies: @@ -16092,48 +9286,35 @@ packages: source-map-js: 1.0.2 dev: true - 
/prelude-ls/1.1.2: + /prelude-ls@1.1.2: resolution: {integrity: sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=} engines: {node: '>= 0.8.0'} dev: true - /prepend-http/1.0.4: + /prepend-http@1.0.4: resolution: {integrity: sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=} engines: {node: '>=0.10.0'} dev: true - /prettier/2.3.0: - resolution: {integrity: sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w==} - engines: {node: '>=10.13.0'} - hasBin: true - dev: true - - /prettier/2.5.1: + /prettier@2.5.1: resolution: {integrity: sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==} engines: {node: '>=10.13.0'} hasBin: true dev: true - /pretty-bytes/3.0.1: + /pretty-bytes@3.0.1: resolution: {integrity: sha1-J9AAjXeAY6C0gRuzXHnxvV1fvM8=} engines: {node: '>=0.10.0'} dependencies: number-is-nan: 1.0.1 dev: true - /pretty-bytes/5.4.1: + /pretty-bytes@5.4.1: resolution: {integrity: sha512-s1Iam6Gwz3JI5Hweaz4GoCD1WUNUIyzePFy5+Js2hjwGVt2Z79wNN+ZKOZ2vB6C+Xs6njyB84Z1IthQg8d9LxA==} engines: {node: '>=6'} dev: true - /pretty-error/2.1.2: - resolution: {integrity: sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw==} - dependencies: - lodash: 4.17.21 - renderkid: 2.0.7 - dev: true - - /pretty-format/24.9.0: + /pretty-format@24.9.0: resolution: {integrity: sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==} engines: {node: '>= 6'} dependencies: @@ -16143,107 +9324,47 @@ packages: react-is: 16.13.1 dev: true - /pretty-format/27.5.1: - resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - ansi-regex: 5.0.1 - ansi-styles: 5.2.0 - react-is: 17.0.2 - dev: true - - /pretty-hrtime/1.0.3: - resolution: {integrity: sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==} - engines: {node: '>= 0.8'} - dev: true - - /prettysize/2.0.0: + /prettysize@2.0.0: resolution: {integrity: sha512-VVtxR7sOh0VsG8o06Ttq5TrI1aiZKmC+ClSn4eBPaNf4SHr5lzbYW+kYGX3HocBL/MfpVrRfFZ9V3vCbLaiplg==} dev: true - /prismjs/1.27.0: + /prismjs@1.27.0: resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==} engines: {node: '>=6'} dev: false - /prismjs/1.28.0: + /prismjs@1.28.0: resolution: {integrity: sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw==} engines: {node: '>=6'} dev: false - /process-nextick-args/2.0.1: + /process-nextick-args@2.0.1: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} dev: true - /process/0.11.10: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} - engines: {node: '>= 0.6.0'} - dev: true - - /progress/2.0.3: + /progress@2.0.3: resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==} engines: {node: '>=0.4.0'} dev: true - /promise-inflight/1.0.1: - resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} - peerDependencies: - bluebird: '*' - peerDependenciesMeta: - bluebird: - optional: true - dev: true - - /promise-inflight/1.0.1_bluebird@3.7.2: - resolution: {integrity: 
sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} - peerDependencies: - bluebird: '*' - peerDependenciesMeta: - bluebird: - optional: true - dependencies: - bluebird: 3.7.2 - dev: true - - /promise.allsettled/1.0.5: - resolution: {integrity: sha512-tVDqeZPoBC0SlzJHzWGZ2NKAguVq2oiYj7gbggbiTvH2itHohijTp7njOUA0aQ/nl+0lr/r6egmhoYu63UZ/pQ==} - engines: {node: '>= 0.4'} - dependencies: - array.prototype.map: 1.0.4 - call-bind: 1.0.2 - define-properties: 1.1.3 - es-abstract: 1.19.1 - get-intrinsic: 1.1.1 - iterate-value: 1.0.2 - dev: true - - /promise.prototype.finally/3.1.3: - resolution: {integrity: sha512-EXRF3fC9/0gz4qkt/f5EP5iW4kj9oFpBICNpCNOb/52+8nlHIX07FPLbi/q4qYBQ1xZqivMzTpNQSnArVASolQ==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.3 - es-abstract: 1.19.1 - dev: true - - /promise.series/0.2.0: + /promise.series@0.2.0: resolution: {integrity: sha1-LMfr6Vn8OmYZwEq029yeRS2GS70=} engines: {node: '>=0.12'} dev: true - /promise/7.3.1: + /promise@7.3.1: resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==} dependencies: asap: 2.0.6 dev: true - /promise/8.1.0: + /promise@8.1.0: resolution: {integrity: sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==} dependencies: asap: 2.0.6 dev: true - /prompts/2.4.2: + /prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} dependencies: @@ -16251,112 +9372,53 @@ packages: sisteransi: 1.0.5 dev: true - /prop-types/15.5.10: + /prop-types@15.5.10: resolution: {integrity: sha1-J5ffwxJhguOpXj37suiT3ddFYVQ=} dependencies: fbjs: 0.8.17 loose-envify: 1.4.0 dev: true - /prop-types/15.8.1: + /prop-types@15.8.1: resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 react-is: 16.13.1 - /property-information/5.6.0: + /property-information@5.6.0: resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==} dependencies: xtend: 4.0.2 + dev: false - /proxy-addr/2.0.7: - resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} - engines: {node: '>= 0.10'} - dependencies: - forwarded: 0.2.0 - ipaddr.js: 1.9.1 - dev: true - - /prr/1.0.1: - resolution: {integrity: sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==} - dev: true - - /psl/1.8.0: + /psl@1.8.0: resolution: {integrity: sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==} dev: true - /public-encrypt/4.0.3: - resolution: {integrity: sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==} - dependencies: - bn.js: 4.12.0 - browserify-rsa: 4.1.0 - create-hash: 1.2.0 - parse-asn1: 5.1.6 - randombytes: 2.1.0 - safe-buffer: 5.2.1 - dev: true - - /pump/2.0.1: - resolution: {integrity: sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==} - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 - dev: true - - /pump/3.0.0: + /pump@3.0.0: resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} dependencies: end-of-stream: 1.4.4 once: 1.4.0 dev: true - 
/pumpify/1.5.1: - resolution: {integrity: sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==} - dependencies: - duplexify: 3.7.1 - inherits: 2.0.4 - pump: 2.0.1 - dev: true - - /punycode/1.3.2: - resolution: {integrity: sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==} - dev: true - - /punycode/1.4.1: - resolution: {integrity: sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==} - dev: true - - /punycode/2.1.1: + /punycode@2.1.1: resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==} engines: {node: '>=6'} dev: true - /q/1.5.1: + /q@1.5.1: resolution: {integrity: sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=} engines: {node: '>=0.6.0', teleport: '>=0.2.0'} dev: true - /qs/6.10.3: - resolution: {integrity: sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==} - engines: {node: '>=0.6'} - dependencies: - side-channel: 1.0.4 - dev: true - - /qs/6.11.0: - resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} - engines: {node: '>=0.6'} - dependencies: - side-channel: 1.0.4 - dev: true - - /qs/6.5.2: + /qs@6.5.2: resolution: {integrity: sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==} engines: {node: '>=0.6'} dev: true - /query-string/4.3.4: + /query-string@4.3.4: resolution: {integrity: sha1-u7aTucqRXCMlFbIosaArYJBD2+s=} engines: {node: '>=0.10.0'} dependencies: @@ -16364,41 +9426,26 @@ packages: strict-uri-encode: 1.1.0 dev: true - /querystring-es3/0.2.1: - resolution: {integrity: sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==} - engines: {node: '>=0.4.x'} - dev: true - - /querystring/0.2.0: - resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==} - engines: {node: '>=0.4.x'} - deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. 
- dev: true - - /queue-microtask/1.2.3: + /queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true - /quick-lru/4.0.1: + /quick-lru@4.0.1: resolution: {integrity: sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==} engines: {node: '>=8'} dev: true - /raf/3.4.1: + /raf@3.4.1: resolution: {integrity: sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==} dependencies: performance-now: 2.1.0 dev: true - /railroad-diagrams/1.0.0: + /railroad-diagrams@1.0.0: resolution: {integrity: sha1-635iZ1SN3t+4mcG5Dlc3RVnN234=} dev: true - /ramda/0.28.0: - resolution: {integrity: sha512-9QnLuG/kPVgWvMQ4aODhsBUFKOUmnbUnsSXACv+NCQZcHbeb+v8Lodp8OVxtRULN1/xOyYLLaL6npE6dMq5QTA==} - dev: true - - /randexp/0.4.6: + /randexp@0.4.6: resolution: {integrity: sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==} engines: {node: '>=0.12'} dependencies: @@ -16406,49 +9453,13 @@ packages: ret: 0.1.15 dev: true - /randombytes/2.1.0: + /randombytes@2.1.0: resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} dependencies: safe-buffer: 5.2.1 dev: true - /randomfill/1.0.4: - resolution: {integrity: sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==} - dependencies: - randombytes: 2.1.0 - safe-buffer: 5.2.1 - dev: true - - /range-parser/1.2.1: - resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} - engines: {node: '>= 0.6'} - dev: true - - /raw-body/2.5.1: - resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} - engines: {node: '>= 0.8'} - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - dev: true - - /raw-loader/4.0.2_webpack@4.46.0: - resolution: {integrity: sha512-ZnScIV3ag9A4wPX/ZayxL/jZH+euYb6FcUinPcgiQW0+UBtEv0O6Q3lGd3cqJ+GHH+rksEv3Pj99oxJ3u3VIKA==} - engines: {node: '>= 10.13.0'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - loader-utils: 2.0.2 - schema-utils: 3.1.1 - webpack: 4.46.0 - dev: true - - /react-app-polyfill/3.0.0: + /react-app-polyfill@3.0.0: resolution: {integrity: sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w==} engines: {node: '>=14'} dependencies: @@ -16460,44 +9471,17 @@ packages: whatwg-fetch: 3.6.2 dev: true - /react-copy-to-clipboard/5.0.3_react@18.2.0: + /react-copy-to-clipboard@5.0.3(react@18.2.0): resolution: {integrity: sha512-9S3j+m+UxDZOM0Qb8mhnT/rMR0NGSrj9A/073yz2DSxPMYhmYFBMYIdI2X4o8AjOjyFsSNxDRnCX6s/gRxpriw==} peerDependencies: - react: ^15.3.0 || ^16.0.0 || ^17.0.0 || 18 - dependencies: - copy-to-clipboard: 3.3.1 - prop-types: 15.8.1 - react: 18.2.0 - dev: false - - /react-docgen-typescript/2.2.2_typescript@4.7.4: - resolution: {integrity: sha512-tvg2ZtOpOi6QDwsb3GZhOjDkkX0h8Z2gipvTg6OVMUyoYoURhEiRNePT8NZItTVCDh39JJHnLdfCOkzoLbFnTg==} - peerDependencies: - typescript: '>= 4.3.x || 4' - dependencies: - typescript: 4.7.4 - dev: true - - /react-docgen/5.4.3: - resolution: {integrity: sha512-xlLJyOlnfr8lLEEeaDZ+X2J/KJoe6Nr9AzxnkdQWush5hz2ZSu66w6iLMOScMmxoSHWpWMn+k3v5ZiyCfcWsOA==} - engines: {node: '>=8.10.0'} - hasBin: true - dependencies: - '@babel/core': 7.19.1 - 
'@babel/generator': 7.19.0 - '@babel/runtime': 7.18.6 - ast-types: 0.14.2 - commander: 2.20.3 - doctrine: 3.0.0 - estree-to-babel: 3.2.1 - neo-async: 2.6.2 - node-dir: 0.1.17 - strip-indent: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true + react: ^15.3.0 || ^16.0.0 || ^17.0.0 || 18 + dependencies: + copy-to-clipboard: 3.3.1 + prop-types: 15.8.1 + react: 18.2.0 + dev: false - /react-dom/18.2.0_react@18.2.0: + /react-dom@18.2.0(react@18.2.0): resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==} peerDependencies: react: ^18.2.0 || 18 @@ -16506,35 +9490,11 @@ packages: react: 18.2.0 scheduler: 0.23.0 - /react-element-to-jsx-string/14.3.4_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-t4ZwvV6vwNxzujDQ+37bspnLwA4JlgUPWhLjBJWsNIDceAf6ZKUTCjdm08cN6WeZ5pTMKiCJkmAYnpmR4Bm+dg==} - peerDependencies: - react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || 18 - react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || 18 - dependencies: - '@base2/pretty-print-object': 1.0.1 - is-plain-object: 5.0.0 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - react-is: 17.0.2 - dev: true - - /react-fast-compare/2.0.4: + /react-fast-compare@2.0.4: resolution: {integrity: sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==} dev: true - /react-inspector/5.1.1_react@18.2.0: - resolution: {integrity: sha512-GURDaYzoLbW8pMGXwYPDBIv6nqei4kK7LPRZ9q9HCZF54wqXz/dnylBp/kfE9XmekBhHvLDdcYeyIwSrvtOiWg==} - peerDependencies: - react: ^16.8.4 || ^17.0.0 || 18 - dependencies: - '@babel/runtime': 7.18.6 - is-dom: 1.1.0 - prop-types: 15.8.1 - react: 18.2.0 - dev: true - - /react-intl/2.9.0_5z6wstateidnxll3plz2xpyagu: + /react-intl@2.9.0(prop-types@15.5.10)(react@18.2.0): resolution: {integrity: sha512-27jnDlb/d2A7mSJwrbOBnUgD+rPep+abmoJE511Tf8BnoONIAUehy/U1zZCHGO17mnOwMWxqN4qC0nW11cD6rA==} peerDependencies: prop-types: ^15.5.4 @@ -16549,26 +9509,21 @@ packages: react: 18.2.0 dev: true - /react-is/16.13.1: + /react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} - /react-is/17.0.2: + /react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - dev: true - /react-is/18.2.0: + /react-is@18.2.0: resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} dev: true - /react-merge-refs/1.1.0: + /react-merge-refs@1.1.0: resolution: {integrity: sha512-alTKsjEL0dKH/ru1Iyn7vliS2QRcBp9zZPGoWxUOvRGWPUYgjo+V01is7p04It6KhgrzhJGnIj9GgX8W4bZoCQ==} + dev: false - /react-refresh/0.11.0: - resolution: {integrity: sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A==} - engines: {node: '>=0.10.0'} - dev: true - - /react-shallow-renderer/16.15.0_react@18.2.0: + /react-shallow-renderer@16.15.0(react@18.2.0): resolution: {integrity: sha512-oScf2FqQ9LFVQgA73vr86xl2NaOIX73rh+YFqcOp68CWj56tSfgtGKrEbyhCj0rSijyG9M1CYprTh39fBi5hzA==} peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || 18 @@ -16578,7 +9533,7 @@ packages: react-is: 18.2.0 dev: true - /react-syntax-highlighter/15.5.0_react@18.2.0: + /react-syntax-highlighter@15.5.0(react@18.2.0): resolution: {integrity: sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg==} peerDependencies: react: '>= 0.14.0 || 18' @@ -16591,7 +9546,7 @@ packages: refractor: 3.6.0 
dev: false - /react-test-renderer/17.0.2_react@18.2.0: + /react-test-renderer@17.0.2(react@18.2.0): resolution: {integrity: sha512-yaQ9cB89c17PUb0x6UfWRs7kQCorVdHlutU1boVPEsB8IDZH6n9tHxMacc3y0JoXOJUsZb/t/Mb8FUWMKaM7iQ==} peerDependencies: react: 17.0.2 || 18 @@ -16599,11 +9554,11 @@ packages: object-assign: 4.1.1 react: 18.2.0 react-is: 17.0.2 - react-shallow-renderer: 16.15.0_react@18.2.0 + react-shallow-renderer: 16.15.0(react@18.2.0) scheduler: 0.20.2 dev: true - /react-transition-group/4.4.5_biqbaboplfbrettd7655fr4n2y: + /react-transition-group@4.4.5(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} peerDependencies: react: '>=16.6.0 || 18' @@ -16614,24 +9569,15 @@ packages: loose-envify: 1.4.0 prop-types: 15.8.1 react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 + react-dom: 18.2.0(react@18.2.0) - /react/18.2.0: + /react@18.2.0: resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} engines: {node: '>=0.10.0'} dependencies: loose-envify: 1.4.0 - /read-pkg-up/1.0.1: - resolution: {integrity: sha512-WD9MTlNtI55IwYUS27iHh9tK3YoIVhxis8yKhLpTqWtml739uXc9NWTpxoHkfZf3+DkCCsXox94/VWZniuZm6A==} - engines: {node: '>=0.10.0'} - dependencies: - find-up: 1.1.2 - read-pkg: 1.1.0 - dev: true - optional: true - - /read-pkg-up/4.0.0: + /read-pkg-up@4.0.0: resolution: {integrity: sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==} engines: {node: '>=6'} dependencies: @@ -16639,7 +9585,7 @@ packages: read-pkg: 3.0.0 dev: true - /read-pkg-up/7.0.1: + /read-pkg-up@7.0.1: resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} engines: {node: '>=8'} dependencies: @@ -16648,17 +9594,7 @@ packages: type-fest: 0.8.1 dev: true - /read-pkg/1.1.0: - resolution: {integrity: sha512-7BGwRHqt4s/uVbuyoeejRn4YmFnYZiFl4AuaeXHlgZf3sONF0SOGlxs2Pw8g6hCKupo08RafIO5YXFNOKTfwsQ==} - engines: {node: '>=0.10.0'} - dependencies: - load-json-file: 1.1.0 - normalize-package-data: 2.5.0 - path-type: 1.1.0 - dev: true - optional: true - - /read-pkg/3.0.0: + /read-pkg@3.0.0: resolution: {integrity: sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=} engines: {node: '>=4'} dependencies: @@ -16667,7 +9603,7 @@ packages: path-type: 3.0.0 dev: true - /read-pkg/5.2.0: + /read-pkg@5.2.0: resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} engines: {node: '>=8'} dependencies: @@ -16677,7 +9613,7 @@ packages: type-fest: 0.6.0 dev: true - /readable-stream/2.3.7: + /readable-stream@2.3.7: resolution: {integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==} dependencies: core-util-is: 1.0.2 @@ -16689,7 +9625,7 @@ packages: util-deprecate: 1.0.2 dev: true - /readable-stream/3.6.0: + /readable-stream@3.6.0: resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==} engines: {node: '>= 6'} dependencies: @@ -16698,42 +9634,21 @@ packages: util-deprecate: 1.0.2 dev: true - /readdirp/2.2.1: - resolution: {integrity: sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==} - engines: {node: '>=0.10'} - dependencies: - graceful-fs: 4.2.10 - micromatch: 3.1.10 - readable-stream: 2.3.7 - transitivePeerDependencies: - - supports-color - dev: true - optional: true - - 
/readdirp/3.5.0: + /readdirp@3.5.0: resolution: {integrity: sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==} engines: {node: '>=8.10.0'} dependencies: picomatch: 2.3.1 dev: true - /realpath-native/1.1.0: + /realpath-native@1.1.0: resolution: {integrity: sha512-wlgPA6cCIIg9gKz0fgAPjnzh4yR/LnXovwuo9hvyGvx3h8nX4+/iLZplfUWasXpqD8BdnGnP5njOFjkUwPzvjA==} engines: {node: '>=4'} dependencies: util.promisify: 1.0.0 dev: true - /redent/1.0.0: - resolution: {integrity: sha512-qtW5hKzGQZqKoh6JNSD+4lfitfPKGz42e6QwiRmPM5mmKtR0N41AbJRYu0xJi7nhOJ4WDgRkKvAk6tw4WIwR4g==} - engines: {node: '>=0.10.0'} - dependencies: - indent-string: 2.1.0 - strip-indent: 1.0.1 - dev: true - optional: true - - /redent/3.0.0: + /redent@3.0.0: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} dependencies: @@ -16741,7 +9656,7 @@ packages: strip-indent: 3.0.0 dev: true - /refractor/3.6.0: + /refractor@3.6.0: resolution: {integrity: sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==} dependencies: hastscript: 6.0.0 @@ -16749,40 +9664,27 @@ packages: prismjs: 1.27.0 dev: false - /regenerate-unicode-properties/10.1.0: - resolution: {integrity: sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==} - engines: {node: '>=4'} - dependencies: - regenerate: 1.4.2 - dev: true - - /regenerate-unicode-properties/8.2.0: + /regenerate-unicode-properties@8.2.0: resolution: {integrity: sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==} engines: {node: '>=4'} dependencies: regenerate: 1.4.2 dev: true - /regenerate/1.4.2: + /regenerate@1.4.2: resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==} dev: true - /regenerator-runtime/0.13.9: + /regenerator-runtime@0.13.9: resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==} - /regenerator-transform/0.14.5: + /regenerator-transform@0.14.5: resolution: {integrity: sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==} dependencies: '@babel/runtime': 7.18.6 dev: true - /regenerator-transform/0.15.0: - resolution: {integrity: sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==} - dependencies: - '@babel/runtime': 7.18.6 - dev: true - - /regex-not/1.0.2: + /regex-not@1.0.2: resolution: {integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==} engines: {node: '>=0.10.0'} dependencies: @@ -16790,7 +9692,7 @@ packages: safe-regex: 1.1.0 dev: true - /regexp.prototype.flags/1.4.1: + /regexp.prototype.flags@1.4.1: resolution: {integrity: sha512-pMR7hBVUUGI7PMA37m2ofIdQCsomVnas+Jn5UPGAHQ+/LlwKm/aTLJHdasmHRzlfeZwHiAOaRSo2rbBDm3nNUQ==} engines: {node: '>= 0.4'} dependencies: @@ -16798,26 +9700,17 @@ packages: define-properties: 1.1.3 dev: true - /regexp.prototype.flags/1.4.3: - resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - functions-have-names: 1.2.3 - dev: true - - /regexpp/2.0.1: + /regexpp@2.0.1: resolution: {integrity: 
sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==} engines: {node: '>=6.5.0'} dev: true - /regexpp/3.2.0: + /regexpp@3.2.0: resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} engines: {node: '>=8'} dev: true - /regexpu-core/4.7.1: + /regexpu-core@4.7.1: resolution: {integrity: sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ==} engines: {node: '>=4'} dependencies: @@ -16829,147 +9722,37 @@ packages: unicode-match-property-value-ecmascript: 1.2.0 dev: true - /regexpu-core/5.2.1: - resolution: {integrity: sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ==} - engines: {node: '>=4'} - dependencies: - regenerate: 1.4.2 - regenerate-unicode-properties: 10.1.0 - regjsgen: 0.7.1 - regjsparser: 0.9.1 - unicode-match-property-ecmascript: 2.0.0 - unicode-match-property-value-ecmascript: 2.0.0 - dev: true - - /regjsgen/0.5.2: + /regjsgen@0.5.2: resolution: {integrity: sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==} dev: true - /regjsgen/0.7.1: - resolution: {integrity: sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA==} - dev: true - - /regjsparser/0.6.4: + /regjsparser@0.6.4: resolution: {integrity: sha512-64O87/dPDgfk8/RQqC4gkZoGyyWFIEUTTh80CU6CWuK5vkCGyekIx+oKcEIYtP/RAxSQltCZHCNu/mdd7fqlJw==} hasBin: true dependencies: jsesc: 0.5.0 dev: true - /regjsparser/0.9.1: - resolution: {integrity: sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==} - hasBin: true - dependencies: - jsesc: 0.5.0 - dev: true - - /relateurl/0.2.7: - resolution: {integrity: sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==} - engines: {node: '>= 0.10'} - dev: true - - /remark-external-links/8.0.0: - resolution: {integrity: sha512-5vPSX0kHoSsqtdftSHhIYofVINC8qmp0nctkeU9YoJwV3YfiBRiI6cbFRJ0oI/1F9xS+bopXG0m2KS8VFscuKA==} - dependencies: - extend: 3.0.2 - is-absolute-url: 3.0.3 - mdast-util-definitions: 4.0.0 - space-separated-tokens: 1.1.5 - unist-util-visit: 2.0.3 - dev: true - - /remark-footnotes/2.0.0: - resolution: {integrity: sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==} - dev: true - - /remark-mdx/1.6.22: - resolution: {integrity: sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==} - dependencies: - '@babel/core': 7.12.9 - '@babel/helper-plugin-utils': 7.10.4 - '@babel/plugin-proposal-object-rest-spread': 7.12.1_@babel+core@7.12.9 - '@babel/plugin-syntax-jsx': 7.12.1_@babel+core@7.12.9 - '@mdx-js/util': 1.6.22 - is-alphabetical: 1.0.4 - remark-parse: 8.0.3 - unified: 9.2.0 - transitivePeerDependencies: - - supports-color - dev: true - - /remark-parse/8.0.3: - resolution: {integrity: sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==} - dependencies: - ccount: 1.1.0 - collapse-white-space: 1.0.6 - is-alphabetical: 1.0.4 - is-decimal: 1.0.4 - is-whitespace-character: 1.0.4 - is-word-character: 1.0.4 - markdown-escapes: 1.0.4 - parse-entities: 2.0.0 - repeat-string: 1.6.1 - state-toggle: 1.0.3 - trim: 0.0.1 - trim-trailing-lines: 1.1.4 - unherit: 1.1.3 - unist-util-remove-position: 2.0.1 - vfile-location: 3.2.0 - xtend: 4.0.2 - dev: true - - /remark-slug/6.1.0: - resolution: {integrity: 
sha512-oGCxDF9deA8phWvxFuyr3oSJsdyUAxMFbA0mZ7Y1Sas+emILtO+e5WutF9564gDsEN4IXaQXm5pFo6MLH+YmwQ==} - dependencies: - github-slugger: 1.4.0 - mdast-util-to-string: 1.1.0 - unist-util-visit: 2.0.3 - dev: true - - /remark-squeeze-paragraphs/4.0.0: - resolution: {integrity: sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==} - dependencies: - mdast-squeeze-paragraphs: 4.0.0 - dev: true - - /remove-trailing-separator/1.1.0: + /remove-trailing-separator@1.1.0: resolution: {integrity: sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==} dev: true - /renderkid/2.0.7: - resolution: {integrity: sha512-oCcFyxaMrKsKcTY59qnCAtmDVSLfPbrv6A3tVbPdFMMrv5jaK10V6m40cKsoPNhAqN6rmHW9sswW4o3ruSrwUQ==} - dependencies: - css-select: 4.3.0 - dom-converter: 0.2.0 - htmlparser2: 6.1.0 - lodash: 4.17.21 - strip-ansi: 3.0.1 - dev: true - - /repeat-element/1.1.3: + /repeat-element@1.1.3: resolution: {integrity: sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==} engines: {node: '>=0.10.0'} dev: true - /repeat-string/1.6.1: + /repeat-string@1.6.1: resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==} engines: {node: '>=0.10'} dev: true - /repeating/2.0.1: - resolution: {integrity: sha512-ZqtSMuVybkISo2OWvqvm7iHSWngvdaW3IpsT9/uP8v4gMi591LY6h35wdOfvQdWCKFWZWm2Y1Opp4kV7vQKT6A==} - engines: {node: '>=0.10.0'} - dependencies: - is-finite: 1.1.0 - dev: true - optional: true - - /replace-ext/1.0.1: + /replace-ext@1.0.1: resolution: {integrity: sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw==} engines: {node: '>= 0.10'} dev: true - /request-promise-core/1.1.4_request@2.88.2: + /request-promise-core@1.1.4(request@2.88.2): resolution: {integrity: sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==} engines: {node: '>=0.10.0'} peerDependencies: @@ -16979,7 +9762,7 @@ packages: request: 2.88.2 dev: true - /request-promise-native/1.0.9_request@2.88.2: + /request-promise-native@1.0.9(request@2.88.2): resolution: {integrity: sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==} engines: {node: '>=0.12.0'} deprecated: request-promise-native has been deprecated because it extends the now deprecated request package, see https://github.com/request/request/issues/3142 @@ -16987,12 +9770,12 @@ packages: request: ^2.34 dependencies: request: 2.88.2 - request-promise-core: 1.1.4_request@2.88.2 + request-promise-core: 1.1.4(request@2.88.2) stealthy-require: 1.1.1 tough-cookie: 2.5.0 dev: true - /request/2.88.2: + /request@2.88.2: resolution: {integrity: sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==} engines: {node: '>= 6'} deprecated: request has been deprecated, see https://github.com/request/request/issues/3142 @@ -17019,72 +9802,72 @@ packages: uuid: 3.4.0 dev: true - /require-directory/2.1.1: + /require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} dev: true - /require-from-string/2.0.2: + /require-from-string@2.0.2: resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} engines: {node: '>=0.10.0'} dev: true - /require-main-filename/2.0.0: + /require-main-filename@2.0.0: 
resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} dev: true - /resolve-cwd/2.0.0: + /resolve-cwd@2.0.0: resolution: {integrity: sha512-ccu8zQTrzVr954472aUVPLEcB3YpKSYR3cg/3lo1okzobPBM+1INXBbBZlDbnI/hbEocnf8j0QVo43hQKrbchg==} engines: {node: '>=4'} dependencies: resolve-from: 3.0.0 dev: true - /resolve-from/3.0.0: + /resolve-from@3.0.0: resolution: {integrity: sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==} engines: {node: '>=4'} dev: true - /resolve-from/4.0.0: + /resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} dev: true - /resolve-from/5.0.0: + /resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} dev: true - /resolve-url/0.2.1: + /resolve-url@0.2.1: resolution: {integrity: sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==} deprecated: https://github.com/lydell/resolve-url#deprecated dev: true - /resolve/1.1.7: + /resolve@1.1.7: resolution: {integrity: sha512-9znBF0vBcaSN3W2j7wKvdERPwqTxSpCq+if5C0WoTCyV9n24rua28jeuQ2pL/HOf+yUe/Mef+H/5p60K0Id3bg==} dev: true - /resolve/1.12.0: + /resolve@1.12.0: resolution: {integrity: sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==} dependencies: path-parse: 1.0.6 dev: true - /resolve/1.19.0: + /resolve@1.19.0: resolution: {integrity: sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==} dependencies: is-core-module: 2.8.1 path-parse: 1.0.6 dev: true - /resolve/2.0.0-next.3: + /resolve@2.0.0-next.3: resolution: {integrity: sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q==} dependencies: is-core-module: 2.8.1 path-parse: 1.0.6 dev: true - /restore-cursor/3.1.0: + /restore-cursor@3.1.0: resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} engines: {node: '>=8'} dependencies: @@ -17092,53 +9875,46 @@ packages: signal-exit: 3.0.7 dev: true - /ret/0.1.15: + /ret@0.1.15: resolution: {integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==} engines: {node: '>=0.12'} dev: true - /reusify/1.0.4: + /reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rgb-regex/1.0.1: + /rgb-regex@1.0.1: resolution: {integrity: sha1-wODWiC3w4jviVKR16O3UGRX+rrE=} dev: true - /rgba-regex/1.0.0: + /rgba-regex@1.0.0: resolution: {integrity: sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=} dev: true - /rimraf/2.6.3: + /rimraf@2.6.3: resolution: {integrity: sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==} hasBin: true dependencies: glob: 7.2.3 dev: true - /rimraf/2.7.1: + /rimraf@2.7.1: resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} hasBin: true dependencies: glob: 7.2.3 dev: true - /rimraf/3.0.2: + /rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} hasBin: true dependencies: glob: 7.2.3 dev: true - /ripemd160/2.0.2: - 
resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==} - dependencies: - hash-base: 3.1.0 - inherits: 2.0.4 - dev: true - - /rollup-plugin-babel/4.4.0_j5c2ibk5rlsxlnmc33p2gjc3da: + /rollup-plugin-babel@4.4.0(@babel/core@7.12.9)(rollup@1.32.1): resolution: {integrity: sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw==} deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-babel. peerDependencies: @@ -17151,20 +9927,20 @@ packages: rollup-pluginutils: 2.8.2 dev: true - /rollup-plugin-bundle-size/1.0.3: + /rollup-plugin-bundle-size@1.0.3: resolution: {integrity: sha512-aWj0Pvzq90fqbI5vN1IvUrlf4utOqy+AERYxwWjegH1G8PzheMnrRIgQ5tkwKVtQMDP0bHZEACW/zLDF+XgfXQ==} dependencies: chalk: 1.1.3 maxmin: 2.1.0 dev: true - /rollup-plugin-es3/1.1.0: + /rollup-plugin-es3@1.1.0: resolution: {integrity: sha512-jTMqQgMZ/tkjRW4scf4ln5c0OiTSi+Lx/IEyFd41ldgGoLvvg9AQxmVOl93+KaoyB7XRYToYjiHDvO40NPF/fA==} dependencies: magic-string: 0.22.5 dev: true - /rollup-plugin-postcss/2.9.0: + /rollup-plugin-postcss@2.9.0: resolution: {integrity: sha512-Y7qDwlqjZMBexbB1kRJf+jKIQL8HR6C+ay53YzN+nNJ64hn1PNZfBE3c61hFUhD//zrMwmm7uBW30RuTi+CD0w==} engines: {node: '>=10'} dependencies: @@ -17174,7 +9950,7 @@ packages: import-cwd: 3.0.0 p-queue: 6.6.2 pify: 5.0.0 - postcss: 7.0.35 + postcss: 7.0.39 postcss-load-config: 2.1.2 postcss-modules: 2.0.0 promise.series: 0.2.0 @@ -17184,7 +9960,7 @@ packages: style-inject: 0.3.0 dev: true - /rollup-plugin-smart-asset/2.1.0_rollup@1.32.1: + /rollup-plugin-smart-asset@2.1.0(rollup@1.32.1): resolution: {integrity: sha512-bv6gKl4GTwXRiMRrmlGU3nKMbC/qo0EQiRf+JQ644pm7BPx0cszWv2+SaHIqYze6aUhW/Q+Bo3gzNXVOSBAQGQ==} peerDependencies: rollup: '>0.60' @@ -17197,7 +9973,7 @@ packages: rollup-pluginutils: 2.8.2 dev: true - /rollup-plugin-terser/5.3.1_rollup@1.32.1: + /rollup-plugin-terser@5.3.1(rollup@1.32.1): resolution: {integrity: sha512-1pkwkervMJQGFYvM9nscrUoncPwiKR/K+bHdjv6PFgRo3cgPHoRT83y2Aa3GvINj4539S15t/tpFPb775TDs6w==} peerDependencies: rollup: '>=0.66.0 <3' @@ -17210,7 +9986,7 @@ packages: terser: 4.8.0 dev: true - /rollup-plugin-typescript2/0.25.3_u7o3wpaaa4xpcuuxlpndi4hgqe: + /rollup-plugin-typescript2@0.25.3(rollup@1.32.1)(typescript@3.9.10): resolution: {integrity: sha512-ADkSaidKBovJmf5VBnZBZe+WzaZwofuvYdzGAKTN/J4hN7QJCFYAq7IrH9caxlru6T5qhX41PNFS1S4HqhsGQg==} peerDependencies: rollup: '>=1.26.3' @@ -17225,19 +10001,19 @@ packages: typescript: 3.9.10 dev: true - /rollup-pluginutils/2.8.1: + /rollup-pluginutils@2.8.1: resolution: {integrity: sha512-J5oAoysWar6GuZo0s+3bZ6sVZAC0pfqKz68De7ZgDi5z63jOVZn1uJL/+z1jeKHNbGII8kAyHF5q8LnxSX5lQg==} dependencies: estree-walker: 0.6.1 dev: true - /rollup-pluginutils/2.8.2: + /rollup-pluginutils@2.8.2: resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==} dependencies: estree-walker: 0.6.1 dev: true - /rollup/1.32.1: + /rollup@1.32.1: resolution: {integrity: sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==} hasBin: true dependencies: @@ -17246,81 +10022,77 @@ packages: acorn: 7.4.1 dev: true - /rst-selector-parser/2.2.3: + /rst-selector-parser@2.2.3: resolution: {integrity: sha1-gbIw6i/MYGbInjRy3nlChdmwPZE=} dependencies: lodash.flattendeep: 4.4.0 nearley: 2.19.8 dev: true - /rsvp/4.8.5: + /rsvp@4.8.5: resolution: {integrity: 
sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==} engines: {node: 6.* || >= 7.*} dev: true - /run-async/2.4.1: + /run-async@2.4.1: resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} engines: {node: '>=0.12.0'} dev: true - /run-parallel/1.2.0: + /run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} dependencies: queue-microtask: 1.2.3 dev: true - /run-queue/1.0.3: - resolution: {integrity: sha512-ntymy489o0/QQplUDnpYAYUsO50K9SBrIVaKCWDOJzYJts0f9WH9RFJkyagebkw5+y1oi00R7ynNW/d12GBumg==} - dependencies: - aproba: 1.2.0 - dev: true - - /rxjs/6.6.3: + /rxjs@6.6.3: resolution: {integrity: sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ==} engines: {npm: '>=2.0.0'} dependencies: tslib: 1.14.1 dev: true - /sade/1.7.4: + /rxjs@7.8.0: + resolution: {integrity: sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==} + dependencies: + tslib: 2.4.0 + dev: false + + /sade@1.7.4: resolution: {integrity: sha512-y5yauMD93rX840MwUJr7C1ysLFBgMspsdTo4UVrDg3fXDvtwOyIqykhVAAm6fk/3au77773itJStObgK+LKaiA==} engines: {node: '>= 6'} dependencies: mri: 1.1.6 dev: true - /safe-buffer/5.1.1: - resolution: {integrity: sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==} - dev: true - - /safe-buffer/5.1.2: + /safe-buffer@5.1.2: resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} dev: true - /safe-buffer/5.2.1: + /safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} dev: true - /safe-identifier/0.4.2: + /safe-identifier@0.4.2: resolution: {integrity: sha512-6pNbSMW6OhAi9j+N8V+U715yBQsaWJ7eyEUaOrawX+isg5ZxhUlV1NipNtgaKHmFGiABwt+ZF04Ii+3Xjkg+8w==} dev: true - /safe-regex/1.1.0: + /safe-regex@1.1.0: resolution: {integrity: sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==} dependencies: ret: 0.1.15 dev: true - /safe-stable-stringify/2.4.1: + /safe-stable-stringify@2.4.1: resolution: {integrity: sha512-dVHE6bMtS/bnL2mwualjc6IxEv1F+OCUpA46pKUj6F8uDbUM0jCCulPqRNPSnWwGNKx5etqMjZYdXtrm5KJZGA==} engines: {node: '>=10'} dev: true - /safer-buffer/2.1.2: + /safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} dev: true - /sane/4.1.0: + /sane@4.1.0: resolution: {integrity: sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA==} engines: {node: 6.* || 8.* || >= 10.*} deprecated: some dependency vulnerabilities fixed, support for node < 10 dropped, and newer ECMAScript syntax/features added @@ -17339,7 +10111,7 @@ packages: - supports-color dev: true - /sass-loader/10.3.1_sass@1.49.9: + /sass-loader@10.3.1(sass@1.49.9): resolution: {integrity: sha512-y2aBdtYkbqorVavkC3fcJIUDGIegzDWPn3/LAFhsf3G+MzPKTJx37sROf5pXtUeggSVbNbmfj8TgRaSLMelXRA==} engines: {node: '>= 10.13.0'} peerDependencies: @@ -17362,10 +10134,10 @@ packages: neo-async: 2.6.2 sass: 1.49.9 schema-utils: 3.1.1 - semver: 7.3.5 + semver: 7.3.8 dev: true - /sass/1.49.9: + /sass@1.49.9: resolution: {integrity: sha512-YlYWkkHP9fbwaFRZQRXgDi3mXZShslVmmo+FVK3kHLUELHHEYrCmL1x6IUjC7wLS6VuJSAFXRQS/DxdsC4xL1A==} engines: {node: 
'>=12.0.0'} hasBin: true @@ -17375,81 +10147,63 @@ packages: source-map-js: 1.0.2 dev: true - /sax/1.2.4: + /sax@1.2.4: resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} dev: true - /saxes/3.1.11: + /saxes@3.1.11: resolution: {integrity: sha512-Ydydq3zC+WYDJK1+gRxRapLIED9PWeSuuS41wqyoRmzvhhh9nc+QQrVMKJYzJFULazeGhzSV0QleN2wD3boh2g==} engines: {node: '>=8'} dependencies: xmlchars: 2.2.0 dev: true - /scheduler/0.20.2: + /scheduler@0.20.2: resolution: {integrity: sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==} dependencies: loose-envify: 1.4.0 object-assign: 4.1.1 dev: true - /scheduler/0.23.0: + /scheduler@0.23.0: resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==} dependencies: loose-envify: 1.4.0 - /schema-utils/1.0.0: - resolution: {integrity: sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==} - engines: {node: '>= 4'} - dependencies: - ajv: 6.12.6 - ajv-errors: 1.0.1_ajv@6.12.6 - ajv-keywords: 3.5.2_ajv@6.12.6 - dev: true - - /schema-utils/2.7.0: - resolution: {integrity: sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==} - engines: {node: '>= 8.9.0'} - dependencies: - '@types/json-schema': 7.0.9 - ajv: 6.12.6 - ajv-keywords: 3.5.2_ajv@6.12.6 - dev: true - - /schema-utils/2.7.1: + /schema-utils@2.7.1: resolution: {integrity: sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==} engines: {node: '>= 8.9.0'} dependencies: '@types/json-schema': 7.0.9 ajv: 6.12.6 - ajv-keywords: 3.5.2_ajv@6.12.6 + ajv-keywords: 3.5.2(ajv@6.12.6) dev: true - /schema-utils/3.1.1: + /schema-utils@3.1.1: resolution: {integrity: sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==} engines: {node: '>= 10.13.0'} dependencies: '@types/json-schema': 7.0.9 ajv: 6.12.6 - ajv-keywords: 3.5.2_ajv@6.12.6 + ajv-keywords: 3.5.2(ajv@6.12.6) dev: true - /semver/5.7.1: + /semver@5.7.1: resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} hasBin: true dev: true - /semver/6.3.0: + /semver@6.3.0: resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} hasBin: true dev: true - /semver/7.0.0: + /semver@7.0.0: resolution: {integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==} hasBin: true dev: true - /semver/7.3.5: + /semver@7.3.5: resolution: {integrity: sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==} engines: {node: '>=10'} hasBin: true @@ -17457,73 +10211,25 @@ packages: lru-cache: 6.0.0 dev: true - /send/0.18.0: - resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==} - engines: {node: '>= 0.8.0'} + /semver@7.3.8: + resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} + engines: {node: '>=10'} + hasBin: true dependencies: - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 2.0.0 - mime: 1.6.0 - ms: 2.1.3 - on-finished: 2.4.1 - range-parser: 1.2.1 - statuses: 2.0.1 - transitivePeerDependencies: - - supports-color + lru-cache: 6.0.0 
dev: true - /serialize-javascript/4.0.0: + /serialize-javascript@4.0.0: resolution: {integrity: sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==} dependencies: randombytes: 2.1.0 dev: true - /serialize-javascript/5.0.1: - resolution: {integrity: sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==} - dependencies: - randombytes: 2.1.0 - dev: true - - /serialize-javascript/6.0.0: - resolution: {integrity: sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==} - dependencies: - randombytes: 2.1.0 - dev: true - - /serve-favicon/2.5.0: - resolution: {integrity: sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==} - engines: {node: '>= 0.8.0'} - dependencies: - etag: 1.8.1 - fresh: 0.5.2 - ms: 2.1.1 - parseurl: 1.3.3 - safe-buffer: 5.1.1 - dev: true - - /serve-static/1.15.0: - resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} - engines: {node: '>= 0.8.0'} - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.18.0 - transitivePeerDependencies: - - supports-color - dev: true - - /set-blocking/2.0.0: + /set-blocking@2.0.0: resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} dev: true - /set-value/2.0.1: + /set-value@2.0.1: resolution: {integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==} engines: {node: '>=0.10.0'} dependencies: @@ -17533,62 +10239,43 @@ packages: split-string: 3.1.0 dev: true - /setimmediate/1.0.5: + /setimmediate@1.0.5: resolution: {integrity: sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=} dev: true - /setprototypeof/1.2.0: - resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} - dev: true - - /sha.js/2.4.11: - resolution: {integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==} - hasBin: true - dependencies: - inherits: 2.0.4 - safe-buffer: 5.2.1 - dev: true - - /shallow-clone/3.0.1: - resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==} - engines: {node: '>=8'} - dependencies: - kind-of: 6.0.3 - dev: true - - /shebang-command/1.2.0: + /shebang-command@1.2.0: resolution: {integrity: sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=} engines: {node: '>=0.10.0'} dependencies: shebang-regex: 1.0.0 dev: true - /shebang-command/2.0.0: + /shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} dependencies: shebang-regex: 3.0.0 dev: true - /shebang-regex/1.0.0: + /shebang-regex@1.0.0: resolution: {integrity: sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=} engines: {node: '>=0.10.0'} dev: true - /shebang-regex/3.0.0: + /shebang-regex@3.0.0: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} dev: true - /shell-quote/1.7.2: + /shell-quote@1.7.2: resolution: {integrity: sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==} dev: true - /shellwords/0.1.1: + /shellwords@0.1.1: resolution: {integrity: sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==} dev: true - 
/side-channel/1.0.4: + /side-channel@1.0.4: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} dependencies: call-bind: 1.0.2 @@ -17596,31 +10283,31 @@ packages: object-inspect: 1.12.0 dev: true - /signal-exit/3.0.7: + /signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} dev: true - /simple-swizzle/0.2.2: + /simple-swizzle@0.2.2: resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} dependencies: is-arrayish: 0.3.2 dev: true - /sisteransi/1.0.5: + /sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} dev: true - /slash/2.0.0: + /slash@2.0.0: resolution: {integrity: sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==} engines: {node: '>=6'} dev: true - /slash/3.0.0: + /slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} dev: true - /slice-ansi/2.1.0: + /slice-ansi@2.1.0: resolution: {integrity: sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==} engines: {node: '>=6'} dependencies: @@ -17629,7 +10316,7 @@ packages: is-fullwidth-code-point: 2.0.0 dev: true - /slice-ansi/4.0.0: + /slice-ansi@4.0.0: resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} engines: {node: '>=10'} dependencies: @@ -17638,11 +10325,11 @@ packages: is-fullwidth-code-point: 3.0.0 dev: true - /smart-icon/1.4.3: + /smart-icon@1.4.3: resolution: {integrity: sha512-6wif0A3vpGHSEBPNwEtnyBP5c9LuUoWOL9rXgzLwiu9v4QQSJKvL3Ljepljhkl/lo2eWiRTN9XvST+PQqIjjIQ==} dev: false - /snapdragon-node/2.1.1: + /snapdragon-node@2.1.1: resolution: {integrity: sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==} engines: {node: '>=0.10.0'} dependencies: @@ -17651,14 +10338,14 @@ packages: snapdragon-util: 3.0.1 dev: true - /snapdragon-util/3.0.1: + /snapdragon-util@3.0.1: resolution: {integrity: sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==} engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 dev: true - /snapdragon/0.8.2: + /snapdragon@0.8.2: resolution: {integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==} engines: {node: '>=0.10.0'} dependencies: @@ -17674,23 +10361,19 @@ packages: - supports-color dev: true - /sort-keys/1.1.2: + /sort-keys@1.1.2: resolution: {integrity: sha1-RBttTTRnmPG05J6JIK37oOVD+a0=} engines: {node: '>=0.10.0'} dependencies: is-plain-obj: 1.1.0 dev: true - /source-list-map/2.0.1: - resolution: {integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==} - dev: true - - /source-map-js/1.0.2: + /source-map-js@1.0.2: resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} engines: {node: '>=0.10.0'} dev: true - /source-map-resolve/0.5.3: + /source-map-resolve@0.5.3: resolution: {integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==} deprecated: See https://github.com/lydell/source-map-resolve#deprecated dependencies: @@ -17701,86 +10384,87 @@ 
packages: urix: 0.1.0 dev: true - /source-map-support/0.5.19: + /source-map-support@0.5.19: resolution: {integrity: sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==} dependencies: buffer-from: 1.1.1 source-map: 0.6.1 dev: true - /source-map-support/0.5.21: + /source-map-support@0.5.21: resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} dependencies: buffer-from: 1.1.1 source-map: 0.6.1 dev: true - /source-map-url/0.4.0: + /source-map-url@0.4.0: resolution: {integrity: sha512-liJwHPI9x9d9w5WSIjM58MqGmmb7XzNqwdUA3kSBQ4lmDngexlKwawGzK3J1mKXi6+sysoMDlpVyZh9sv5vRfw==} deprecated: See https://github.com/lydell/source-map-url#deprecated dev: true - /source-map/0.5.7: + /source-map@0.5.7: resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==} engines: {node: '>=0.10.0'} dev: true - /source-map/0.6.1: + /source-map@0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} dev: true - /source-map/0.7.3: + /source-map@0.7.3: resolution: {integrity: sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==} engines: {node: '>= 8'} dev: true - /sourcemap-codec/1.4.8: + /sourcemap-codec@1.4.8: resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} dev: true - /space-separated-tokens/1.1.5: + /space-separated-tokens@1.1.5: resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==} + dev: false - /spdx-correct/3.1.1: + /spdx-correct@3.1.1: resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==} dependencies: spdx-expression-parse: 3.0.1 spdx-license-ids: 3.0.6 dev: true - /spdx-exceptions/2.3.0: + /spdx-exceptions@2.3.0: resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==} dev: true - /spdx-expression-parse/3.0.1: + /spdx-expression-parse@3.0.1: resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} dependencies: spdx-exceptions: 2.3.0 spdx-license-ids: 3.0.6 dev: true - /spdx-license-ids/3.0.6: + /spdx-license-ids@3.0.6: resolution: {integrity: sha512-+orQK83kyMva3WyPf59k1+Y525csj5JejicWut55zeTWANuN17qSiSLUXWtzHeNWORSvT7GLDJ/E/XiIWoXBTw==} dev: true - /specificity/0.4.1: + /specificity@0.4.1: resolution: {integrity: sha512-1klA3Gi5PD1Wv9Q0wUoOQN1IWAuPu0D1U03ThXTr0cJ20+/iq2tHSDnK7Kk/0LXJ1ztUB2/1Os0wKmfyNgUQfg==} hasBin: true dev: true - /split-string/3.1.0: + /split-string@3.1.0: resolution: {integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==} engines: {node: '>=0.10.0'} dependencies: extend-shallow: 3.0.2 dev: true - /sprintf-js/1.0.3: + /sprintf-js@1.0.3: resolution: {integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=} dev: true - /sshpk/1.16.1: + /sshpk@1.16.1: resolution: {integrity: sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==} engines: {node: '>=0.10.0'} hasBin: true @@ -17796,44 +10480,23 @@ packages: tweetnacl: 0.14.5 dev: true - /ssri/6.0.2: - resolution: {integrity: sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q==} - 
dependencies: - figgy-pudding: 3.5.2 - dev: true - - /ssri/8.0.1: - resolution: {integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==} - engines: {node: '>= 8'} - dependencies: - minipass: 3.3.4 - dev: true - - /stable/0.1.8: + /stable@0.1.8: resolution: {integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==} deprecated: 'Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility' dev: true - /stack-trace/0.0.10: + /stack-trace@0.0.10: resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} dev: true - /stack-utils/1.0.3: + /stack-utils@1.0.3: resolution: {integrity: sha512-WldO+YmqhEpjp23eHZRhOT1NQF51STsbxZ+/AdpFD+EhheFxAe5d0WoK4DQVJkSHacPrJJX3OqRAl9CgHf78pg==} engines: {node: '>=8'} dependencies: escape-string-regexp: 2.0.0 dev: true - /stackframe/1.3.4: - resolution: {integrity: sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==} - dev: true - - /state-toggle/1.0.3: - resolution: {integrity: sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==} - dev: true - - /static-extend/0.1.2: + /static-extend@0.1.2: resolution: {integrity: sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==} engines: {node: '>=0.10.0'} dependencies: @@ -17841,98 +10504,21 @@ packages: object-copy: 0.1.0 dev: true - /statuses/2.0.1: - resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} - engines: {node: '>= 0.8'} - dev: true - - /stealthy-require/1.1.1: + /stealthy-require@1.1.1: resolution: {integrity: sha512-ZnWpYnYugiOVEY5GkcuJK1io5V8QmNYChG62gSit9pQVGErXtrKuPC55ITaVSukmMta5qpMU7vqLt2Lnni4f/g==} engines: {node: '>=0.10.0'} dev: true - /store2/2.14.2: - resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==} - dev: true - - /storybook-addon-sass-postcss/0.1.3: - resolution: {integrity: sha512-Jwi7KSx661hrFGO4q3jvMuvgObL8UdBYtvnKzXovBWyZoKsaji+sXKm6IpqGwy44R0To4dmobzZoW9q9rW3sWQ==} - engines: {node: '>=10', yarn: ^1.17.0} - dependencies: - '@storybook/node-logger': 6.5.12 - css-loader: 3.6.0 - postcss: 7.0.39 - postcss-loader: 4.3.0_postcss@7.0.39 - sass: 1.49.9 - sass-loader: 10.3.1_sass@1.49.9 - style-loader: 1.3.0 - transitivePeerDependencies: - - fibers - - node-sass - - webpack - dev: true - - /storybook-dark-mode/1.1.2_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-L5QjJN49bl+ktprM6faMkTeW+LCvuMYWQaRo8/JGSMmzomIjLT7Yo20UiTsnMgMYyYWYF5O4EK/F3OvjDNp8tQ==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || 18 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - dependencies: - '@storybook/addons': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/api': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/components': 6.5.12_biqbaboplfbrettd7655fr4n2y - '@storybook/core-events': 6.5.12 - '@storybook/theming': 6.5.12_biqbaboplfbrettd7655fr4n2y - fast-deep-equal: 3.1.3 - global: 4.4.0 - memoizerific: 1.11.3 - react: 18.2.0 - react-dom: 18.2.0_react@18.2.0 - dev: true - - /stream-browserify/2.0.2: - resolution: 
{integrity: sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==} - dependencies: - inherits: 2.0.4 - readable-stream: 2.3.7 - dev: true - - /stream-each/1.2.3: - resolution: {integrity: sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw==} - dependencies: - end-of-stream: 1.4.4 - stream-shift: 1.0.1 - dev: true - - /stream-http/2.8.3: - resolution: {integrity: sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==} - dependencies: - builtin-status-codes: 3.0.0 - inherits: 2.0.4 - readable-stream: 2.3.7 - to-arraybuffer: 1.0.1 - xtend: 4.0.2 - dev: true - - /stream-shift/1.0.1: - resolution: {integrity: sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==} - dev: true - - /strict-uri-encode/1.1.0: + /strict-uri-encode@1.1.0: resolution: {integrity: sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=} engines: {node: '>=0.10.0'} dev: true - /string-hash/1.1.3: + /string-hash@1.1.3: resolution: {integrity: sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs=} dev: true - /string-length/2.0.0: + /string-length@2.0.0: resolution: {integrity: sha512-Qka42GGrS8Mm3SZ+7cH8UXiIWI867/b/Z/feQSpQx/rbfB8UGknGEZVaUQMOUVj+soY6NpWAxily63HI1OckVQ==} engines: {node: '>=4'} dependencies: @@ -17940,7 +10526,7 @@ packages: strip-ansi: 4.0.0 dev: true - /string-length/3.1.0: + /string-length@3.1.0: resolution: {integrity: sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA==} engines: {node: '>=8'} dependencies: @@ -17948,7 +10534,7 @@ packages: strip-ansi: 5.2.0 dev: true - /string-width/3.1.0: + /string-width@3.1.0: resolution: {integrity: sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==} engines: {node: '>=6'} dependencies: @@ -17957,7 +10543,7 @@ packages: strip-ansi: 5.2.0 dev: true - /string-width/4.2.3: + /string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} dependencies: @@ -17966,7 +10552,7 @@ packages: strip-ansi: 6.0.1 dev: true - /string.prototype.matchall/4.0.6: + /string.prototype.matchall@4.0.6: resolution: {integrity: sha512-6WgDX8HmQqvEd7J+G6VtAahhsQIssiZ8zl7zKh1VDMFyL3hRTJP4FTNA3RbIp2TOQ9AYNDcc7e3fH0Qbup+DBg==} dependencies: call-bind: 1.0.2 @@ -17979,7 +10565,7 @@ packages: side-channel: 1.0.4 dev: true - /string.prototype.padend/3.1.1: + /string.prototype.padend@3.1.1: resolution: {integrity: sha512-eCzTASPnoCr5Ht+Vn1YXgm8SB015hHKgEIMu9Nr9bQmLhRBxKRfmzSj/IQsxDFc8JInJDDFA0qXwK+xxI7wDkg==} engines: {node: '>= 0.4'} dependencies: @@ -17988,16 +10574,7 @@ packages: es-abstract: 1.19.1 dev: true - /string.prototype.padstart/3.1.3: - resolution: {integrity: sha512-NZydyOMtYxpTjGqp0VN5PYUF/tsU15yDMZnUdj16qRUIUiMJkHHSDElYyQFrMu+/WloTpA7MQSiADhBicDfaoA==} - engines: {node: '>= 0.4'} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.3 - es-abstract: 1.19.1 - dev: true - - /string.prototype.trim/1.2.3: + /string.prototype.trim@1.2.3: resolution: {integrity: sha512-16IL9pIBA5asNOSukPfxX2W68BaBvxyiRK16H3RA/lWW9BDosh+w7f+LhomPHpXJ82QEe7w7/rY/S1CV97raLg==} engines: {node: '>= 0.4'} dependencies: @@ -18006,152 +10583,101 @@ packages: es-abstract: 1.19.1 dev: true - /string.prototype.trimend/1.0.4: + /string.prototype.trimend@1.0.4: resolution: {integrity: sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==} dependencies: 
call-bind: 1.0.2 define-properties: 1.1.3 dev: true - /string.prototype.trimend/1.0.5: - resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: true - - /string.prototype.trimstart/1.0.4: + /string.prototype.trimstart@1.0.4: resolution: {integrity: sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==} dependencies: call-bind: 1.0.2 define-properties: 1.1.3 dev: true - /string.prototype.trimstart/1.0.5: - resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==} - dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.2 - dev: true - - /string_decoder/1.1.1: + /string_decoder@1.1.1: resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} dependencies: safe-buffer: 5.1.2 dev: true - /string_decoder/1.3.0: + /string_decoder@1.3.0: resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} dependencies: safe-buffer: 5.2.1 dev: true - /strip-ansi/3.0.1: + /strip-ansi@3.0.1: resolution: {integrity: sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==} engines: {node: '>=0.10.0'} dependencies: ansi-regex: 2.1.1 dev: true - /strip-ansi/4.0.0: + /strip-ansi@4.0.0: resolution: {integrity: sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==} engines: {node: '>=4'} dependencies: ansi-regex: 3.0.0 dev: true - /strip-ansi/5.2.0: + /strip-ansi@5.2.0: resolution: {integrity: sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==} engines: {node: '>=6'} dependencies: ansi-regex: 4.1.0 dev: true - /strip-ansi/6.0.1: + /strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} dependencies: ansi-regex: 5.0.1 dev: true - /strip-bom/2.0.0: - resolution: {integrity: sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==} - engines: {node: '>=0.10.0'} - dependencies: - is-utf8: 0.2.1 - dev: true - optional: true - - /strip-bom/3.0.0: + /strip-bom@3.0.0: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} dev: true - /strip-eof/1.0.0: + /strip-eof@1.0.0: resolution: {integrity: sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==} engines: {node: '>=0.10.0'} dev: true - /strip-final-newline/2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - dev: true - - /strip-indent/1.0.1: - resolution: {integrity: sha512-I5iQq6aFMM62fBEAIB/hXzwJD6EEZ0xEGCX2t7oXqaKPIRgt4WruAQ285BISgdkP+HLGWyeGmNJcpIwFeRYRUA==} - engines: {node: '>=0.10.0'} - hasBin: true - dependencies: - get-stdin: 4.0.1 - dev: true - optional: true - - /strip-indent/3.0.0: + /strip-indent@3.0.0: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} dependencies: min-indent: 1.0.1 dev: true - /strip-json-comments/3.1.1: + /strip-json-comments@3.1.1: resolution: 
{integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} dev: true - /strip-outer/1.0.1: + /strip-outer@1.0.1: resolution: {integrity: sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==} engines: {node: '>=0.10.0'} dependencies: escape-string-regexp: 1.0.5 dev: true - /strip-url-auth/1.0.1: + /strip-url-auth@1.0.1: resolution: {integrity: sha1-IrD6OkE4WzO+PzMVUbu4N/oM164=} engines: {node: '>=0.10.0'} dev: true - /style-inject/0.3.0: + /style-inject@0.3.0: resolution: {integrity: sha512-IezA2qp+vcdlhJaVm5SOdPPTUu0FCEqfNSli2vRuSIBbu5Nq5UvygTk/VzeCqfLz2Atj3dVII5QBKGZRZ0edzw==} dev: true - /style-loader/1.3.0: - resolution: {integrity: sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==} - engines: {node: '>= 8.9.0'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - loader-utils: 2.0.2 - schema-utils: 2.7.1 - dev: true - - /style-loader/1.3.0_webpack@4.46.0: - resolution: {integrity: sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==} - engines: {node: '>= 8.9.0'} + /style-loader@2.0.0: + resolution: {integrity: sha512-Z0gYUJmzZ6ZdRUqpg1r8GsaFKypE+3xAzuFeMuoHgjc9KZv3wMyCRjQIWEbhoFSq7+7yoHXySDJyyWQaPajeiQ==} + engines: {node: '>= 10.13.0'} peerDependencies: webpack: ^4.0.0 || ^5.0.0 peerDependenciesMeta: @@ -18159,21 +10685,14 @@ packages: optional: true dependencies: loader-utils: 2.0.2 - schema-utils: 2.7.1 - webpack: 4.46.0 + schema-utils: 3.1.1 dev: true - /style-search/0.1.0: + /style-search@0.1.0: resolution: {integrity: sha1-eVjHk+R+MuB9K1yv5cC/jhLneQI=} dev: true - /style-to-object/0.3.0: - resolution: {integrity: sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==} - dependencies: - inline-style-parser: 0.1.1 - dev: true - - /stylehacks/4.0.3: + /stylehacks@4.0.3: resolution: {integrity: sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g==} engines: {node: '>=6.9.0'} dependencies: @@ -18182,7 +10701,7 @@ packages: postcss-selector-parser: 3.1.2 dev: true - /stylelint-config-prettier/9.0.3_stylelint@14.5.3: + /stylelint-config-prettier@9.0.3(stylelint@14.5.3): resolution: {integrity: sha512-5n9gUDp/n5tTMCq1GLqSpA30w2sqWITSSEiAWQlpxkKGAUbjcemQ0nbkRvRUa0B1LgD3+hCvdL7B1eTxy1QHJg==} engines: {node: '>= 12'} hasBin: true @@ -18192,20 +10711,20 @@ packages: stylelint: 14.5.3 dev: true - /stylelint-config-recommended-scss/5.0.2_2vmivtitabo2vy5qmz3lkxyvvi: + /stylelint-config-recommended-scss@5.0.2(postcss@8.4.8)(stylelint@14.5.3): resolution: {integrity: sha512-b14BSZjcwW0hqbzm9b0S/ScN2+3CO3O4vcMNOw2KGf8lfVSwJ4p5TbNEXKwKl1+0FMtgRXZj6DqVUe/7nGnuBg==} peerDependencies: stylelint: ^14.0.0 dependencies: - postcss-scss: 4.0.3_postcss@8.4.8 + postcss-scss: 4.0.3(postcss@8.4.8) stylelint: 14.5.3 - stylelint-config-recommended: 6.0.0_stylelint@14.5.3 - stylelint-scss: 4.2.0_stylelint@14.5.3 + stylelint-config-recommended: 6.0.0(stylelint@14.5.3) + stylelint-scss: 4.2.0(stylelint@14.5.3) transitivePeerDependencies: - postcss dev: true - /stylelint-config-recommended/6.0.0_stylelint@14.5.3: + /stylelint-config-recommended@6.0.0(stylelint@14.5.3): resolution: {integrity: sha512-ZorSSdyMcxWpROYUvLEMm0vSZud2uB7tX1hzBZwvVY9SV/uly4AvvJPPhCcymZL3fcQhEQG5AELmrxWqtmzacw==} peerDependencies: stylelint: ^14.0.0 @@ -18213,7 +10732,7 @@ packages: stylelint: 
14.5.3 dev: true - /stylelint-config-recommended/7.0.0_stylelint@14.5.3: + /stylelint-config-recommended@7.0.0(stylelint@14.5.3): resolution: {integrity: sha512-yGn84Bf/q41J4luis1AZ95gj0EQwRX8lWmGmBwkwBNSkpGSpl66XcPTulxGa/Z91aPoNGuIGBmFkcM1MejMo9Q==} peerDependencies: stylelint: ^14.4.0 @@ -18221,37 +10740,37 @@ packages: stylelint: 14.5.3 dev: true - /stylelint-config-standard-scss/3.0.0_2vmivtitabo2vy5qmz3lkxyvvi: + /stylelint-config-standard-scss@3.0.0(postcss@8.4.8)(stylelint@14.5.3): resolution: {integrity: sha512-zt3ZbzIbllN1iCmc94e4pDxqpkzeR6CJo5DDXzltshuXr+82B8ylHyMMARNnUYrZH80B7wgY7UkKTYCFM0UUyw==} peerDependencies: stylelint: ^14.0.0 dependencies: stylelint: 14.5.3 - stylelint-config-recommended-scss: 5.0.2_2vmivtitabo2vy5qmz3lkxyvvi - stylelint-config-standard: 24.0.0_stylelint@14.5.3 + stylelint-config-recommended-scss: 5.0.2(postcss@8.4.8)(stylelint@14.5.3) + stylelint-config-standard: 24.0.0(stylelint@14.5.3) transitivePeerDependencies: - postcss dev: true - /stylelint-config-standard/24.0.0_stylelint@14.5.3: + /stylelint-config-standard@24.0.0(stylelint@14.5.3): resolution: {integrity: sha512-+RtU7fbNT+VlNbdXJvnjc3USNPZRiRVp/d2DxOF/vBDDTi0kH5RX2Ny6errdtZJH3boO+bmqIYEllEmok4jiuw==} peerDependencies: stylelint: ^14.0.0 dependencies: stylelint: 14.5.3 - stylelint-config-recommended: 6.0.0_stylelint@14.5.3 + stylelint-config-recommended: 6.0.0(stylelint@14.5.3) dev: true - /stylelint-config-standard/25.0.0_stylelint@14.5.3: + /stylelint-config-standard@25.0.0(stylelint@14.5.3): resolution: {integrity: sha512-21HnP3VSpaT1wFjFvv9VjvOGDtAviv47uTp3uFmzcN+3Lt+RYRv6oAplLaV51Kf792JSxJ6svCJh/G18E9VnCA==} peerDependencies: stylelint: ^14.4.0 dependencies: stylelint: 14.5.3 - stylelint-config-recommended: 7.0.0_stylelint@14.5.3 + stylelint-config-recommended: 7.0.0(stylelint@14.5.3) dev: true - /stylelint-scss/4.2.0_stylelint@14.5.3: + /stylelint-scss@4.2.0(stylelint@14.5.3): resolution: {integrity: sha512-HHHMVKJJ5RM9pPIbgJ/XA67h9H0407G68Rm69H4fzFbFkyDMcTV1Byep3qdze5+fJ3c0U7mJrbj6S0Fg072uZA==} peerDependencies: stylelint: ^14.5.1 @@ -18264,7 +10783,7 @@ packages: stylelint: 14.5.3 dev: true - /stylelint-use-logical-spec/3.2.2_stylelint@14.5.3: + /stylelint-use-logical-spec@3.2.2(stylelint@14.5.3): resolution: {integrity: sha512-NNh1NWIEpponGnBrCQ+jdYgQRvzu0FUnDOO7ZeyPHlNKXHvRz8nvNFkU8zLUCLbpWjc92rN0G0gc0MDsjSRPMA==} engines: {node: '>=8.0.0'} peerDependencies: @@ -18273,7 +10792,7 @@ packages: stylelint: 14.5.3 dev: true - /stylelint/14.5.3: + /stylelint@14.5.3: resolution: {integrity: sha512-omHETL+kGHR+fCXFK1SkZD/A+emCP9esggAdWEl8GPjTNeyRYj+H6uetRDcU+7E451zwWiUYGVAX+lApsAZgsQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} hasBin: true @@ -18306,7 +10825,7 @@ packages: postcss: 8.4.8 postcss-media-query-parser: 0.2.3 postcss-resolve-nested-selector: 0.1.1 - postcss-safe-parser: 6.0.0_postcss@8.4.8 + postcss-safe-parser: 6.0.0(postcss@8.4.8) postcss-selector-parser: 6.0.9 postcss-value-parser: 4.2.0 resolve-from: 5.0.0 @@ -18323,51 +10842,44 @@ packages: - supports-color dev: true - /stylis/4.0.13: + /stylis@4.0.13: resolution: {integrity: sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag==} dev: true - /supports-color/2.0.0: + /supports-color@2.0.0: resolution: {integrity: sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==} engines: {node: '>=0.8.0'} dev: true - /supports-color/3.2.3: + /supports-color@3.2.3: resolution: {integrity: 
sha512-Jds2VIYDrlp5ui7t8abHN2bjAu4LV/q4N2KivFPpGH0lrka0BMq/33AmECUXlKPcHigkNaqfXRENFju+rlcy+A==} engines: {node: '>=0.8.0'} dependencies: has-flag: 1.0.0 dev: true - /supports-color/5.5.0: + /supports-color@5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} dependencies: has-flag: 3.0.0 dev: true - /supports-color/6.1.0: + /supports-color@6.1.0: resolution: {integrity: sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==} engines: {node: '>=6'} dependencies: has-flag: 3.0.0 dev: true - /supports-color/7.2.0: + /supports-color@7.2.0: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} dependencies: has-flag: 4.0.0 dev: true - /supports-color/8.1.1: - resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} - engines: {node: '>=10'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-hyperlinks/2.2.0: + /supports-hyperlinks@2.2.0: resolution: {integrity: sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ==} engines: {node: '>=8'} dependencies: @@ -18375,11 +10887,11 @@ packages: supports-color: 7.2.0 dev: true - /svg-parser/2.0.4: + /svg-parser@2.0.4: resolution: {integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} dev: true - /svg-sprite/2.0.0: + /svg-sprite@2.0.0: resolution: {integrity: sha512-Kt9vz7TZVw7g4p2sNIfmSodUJ1bVuOE1VtHzA1gMmUmGfVQQvjzuf+12KzYBxa37efAhmQgU6n+bPIlZT+68dA==} engines: {node: '>=12'} hasBin: true @@ -18405,11 +10917,11 @@ packages: yargs: 17.6.0 dev: true - /svg-tags/1.0.0: + /svg-tags@1.0.0: resolution: {integrity: sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=} dev: true - /svgo/1.3.2: + /svgo@1.3.2: resolution: {integrity: sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==} engines: {node: '>=4.0.0'} deprecated: This SVGO version is no longer supported. Upgrade to v2.x.x. 
@@ -18430,7 +10942,7 @@ packages: util.promisify: 1.0.0 dev: true - /svgo/2.8.0: + /svgo@2.8.0: resolution: {integrity: sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==} engines: {node: '>=10.13.0'} hasBin: true @@ -18444,25 +10956,11 @@ packages: stable: 0.1.8 dev: true - /symbol-tree/3.2.4: + /symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} dev: true - /symbol.prototype.description/1.0.5: - resolution: {integrity: sha512-x738iXRYsrAt9WBhRCVG5BtIC3B7CUkFwbHW2zOvGtwM33s7JjrCDyq8V0zgMYVb5ymsL8+qkzzpANH63CPQaQ==} - engines: {node: '>= 0.11.15'} - dependencies: - call-bind: 1.0.2 - get-symbol-description: 1.0.0 - has-symbols: 1.0.3 - object.getownpropertydescriptors: 2.1.4 - dev: true - - /synchronous-promise/2.0.16: - resolution: {integrity: sha512-qImOD23aDfnIDNqlG1NOehdB9IYsn1V9oByPjKY1nakv2MQYCEMyX033/q+aEtYCpmYK1cv2+NTmlH+ra6GA5A==} - dev: true - - /table/5.4.6: + /table@5.4.6: resolution: {integrity: sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==} engines: {node: '>=6.0.0'} dependencies: @@ -18472,7 +10970,7 @@ packages: string-width: 3.1.0 dev: true - /table/6.8.0: + /table@6.8.0: resolution: {integrity: sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==} engines: {node: '>=10.0.0'} dependencies: @@ -18483,113 +10981,7 @@ packages: strip-ansi: 6.0.1 dev: true - /tapable/1.1.3: - resolution: {integrity: sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==} - engines: {node: '>=6'} - dev: true - - /tapable/2.2.1: - resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} - engines: {node: '>=6'} - dev: true - - /tar/6.1.11: - resolution: {integrity: sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==} - engines: {node: '>= 10'} - dependencies: - chownr: 2.0.0 - fs-minipass: 2.1.0 - minipass: 3.3.4 - minizlib: 2.1.2 - mkdirp: 1.0.4 - yallist: 4.0.0 - dev: true - - /telejson/6.0.8: - resolution: {integrity: sha512-nerNXi+j8NK1QEfBHtZUN/aLdDcyupA//9kAboYLrtzZlPLpUfqbVGWb9zz91f/mIjRbAYhbgtnJHY8I1b5MBg==} - dependencies: - '@types/is-function': 1.0.1 - global: 4.4.0 - is-function: 1.0.2 - is-regex: 1.1.4 - is-symbol: 1.0.4 - isobject: 4.0.0 - lodash: 4.17.21 - memoizerific: 1.11.3 - dev: true - - /terser-webpack-plugin/1.4.5_webpack@4.46.0: - resolution: {integrity: sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==} - engines: {node: '>= 6.9.0'} - peerDependencies: - webpack: ^4.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - cacache: 12.0.4 - find-cache-dir: 2.1.0 - is-wsl: 1.1.0 - schema-utils: 1.0.0 - serialize-javascript: 4.0.0 - source-map: 0.6.1 - terser: 4.8.0 - webpack: 4.46.0 - webpack-sources: 1.4.3 - worker-farm: 1.7.0 - dev: true - - /terser-webpack-plugin/4.2.3_webpack@4.46.0: - resolution: {integrity: sha512-jTgXh40RnvOrLQNgIkwEKnQ8rmHjHK4u+6UBEi+W+FPmvb+uo+chJXntKe7/3lW5mNysgSWD60KyesnhW8D6MQ==} - engines: {node: '>= 10.13.0'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - cacache: 15.3.0 - find-cache-dir: 3.3.1 - jest-worker: 26.6.2 - p-limit: 3.1.0 - schema-utils: 3.1.1 - serialize-javascript: 5.0.1 - source-map: 0.6.1 - terser: 5.15.0 - webpack: 4.46.0 - 
webpack-sources: 1.4.3 - transitivePeerDependencies: - - bluebird - dev: true - - /terser-webpack-plugin/5.3.6_bhtm7a3ixzishl2uxypy6qnuwu: - resolution: {integrity: sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==} - engines: {node: '>= 10.13.0'} - peerDependencies: - '@swc/core': '*' - esbuild: '*' - uglify-js: '*' - webpack: ^5.1.0 - peerDependenciesMeta: - '@swc/core': - optional: true - esbuild: - optional: true - uglify-js: - optional: true - webpack: - optional: true - dependencies: - '@jridgewell/trace-mapping': 0.3.15 - '@swc/core': 1.2.203 - jest-worker: 27.5.1 - schema-utils: 3.1.1 - serialize-javascript: 6.0.0 - terser: 5.15.0 - webpack: 5.74.0_@swc+core@1.2.203 - dev: true - - /terser/4.8.0: + /terser@4.8.0: resolution: {integrity: sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==} engines: {node: '>=6.0.0'} hasBin: true @@ -18600,18 +10992,7 @@ packages: source-map-support: 0.5.19 dev: true - /terser/5.15.0: - resolution: {integrity: sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==} - engines: {node: '>=10'} - hasBin: true - dependencies: - '@jridgewell/source-map': 0.3.2 - acorn: 8.7.1 - commander: 2.20.3 - source-map-support: 0.5.21 - dev: true - - /test-exclude/5.2.3: + /test-exclude@5.2.3: resolution: {integrity: sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==} engines: {node: '>=6'} dependencies: @@ -18621,88 +11002,61 @@ packages: require-main-filename: 2.0.0 dev: true - /test-exclude/6.0.0: - resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} - engines: {node: '>=8'} - dependencies: - '@istanbuljs/schema': 0.1.3 - glob: 7.2.3 - minimatch: 3.1.2 - dev: true - - /text-hex/1.0.0: + /text-hex@1.0.0: resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==} dev: true - /text-table/0.2.0: + /text-table@0.2.0: resolution: {integrity: sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=} dev: true - /throat/4.1.0: + /throat@4.1.0: resolution: {integrity: sha512-wCVxLDcFxw7ujDxaeJC6nfl2XfHJNYs8yUYJnvMgtPEFlttP9tHSfRUv2vBe6C4hkVFPWoP1P6ZccbYjmSEkKA==} dev: true - /through/2.3.8: + /through@2.3.8: resolution: {integrity: sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=} dev: true - /through2/2.0.5: - resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==} - dependencies: - readable-stream: 2.3.7 - xtend: 4.0.2 - dev: true - - /timers-browserify/2.0.12: - resolution: {integrity: sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==} - engines: {node: '>=0.6.0'} - dependencies: - setimmediate: 1.0.5 - dev: true - - /timsort/0.3.0: + /timsort@0.3.0: resolution: {integrity: sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=} dev: true - /tiny-glob/0.2.8: + /tiny-glob@0.2.8: resolution: {integrity: sha512-vkQP7qOslq63XRX9kMswlby99kyO5OvKptw7AMwBVMjXEI7Tb61eoI5DydyEMOseyGS5anDN1VPoVxEvH01q8w==} dependencies: globalyzer: 0.1.0 globrex: 0.1.2 dev: true - /tiny-warning/1.0.3: + /tiny-warning@1.0.3: resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==} dev: true - /tmp/0.0.33: + /tmp@0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} dependencies: 
os-tmpdir: 1.0.2 dev: true - /tmpl/1.0.4: + /tmpl@1.0.4: resolution: {integrity: sha512-9tP427gQBl7Mx3vzr3mquZ+Rq+1sAqIJb5dPSYEjWMYsqitxARsFCHkZS3sDptHAmrUPCZfzXNZqSuBIHdpV5A==} dev: true - /to-arraybuffer/1.0.1: - resolution: {integrity: sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==} - dev: true - - /to-fast-properties/2.0.0: + /to-fast-properties@2.0.0: resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} engines: {node: '>=4'} dev: true - /to-object-path/0.3.0: + /to-object-path@0.3.0: resolution: {integrity: sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==} engines: {node: '>=0.10.0'} dependencies: kind-of: 3.2.2 dev: true - /to-regex-range/2.1.1: + /to-regex-range@2.1.1: resolution: {integrity: sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==} engines: {node: '>=0.10.0'} dependencies: @@ -18710,14 +11064,14 @@ packages: repeat-string: 1.6.1 dev: true - /to-regex-range/5.0.1: + /to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} dependencies: is-number: 7.0.0 dev: true - /to-regex/3.0.2: + /to-regex@3.0.2: resolution: {integrity: sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==} engines: {node: '>=0.10.0'} dependencies: @@ -18727,16 +11081,11 @@ packages: safe-regex: 1.1.0 dev: true - /toggle-selection/1.0.6: - resolution: {integrity: sha1-bkWxJj8gF/oKzH2J14sVuL932jI=} + /toggle-selection@1.0.6: + resolution: {integrity: sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==} dev: false - /toidentifier/1.0.1: - resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} - engines: {node: '>=0.6'} - dev: true - - /tough-cookie/2.5.0: + /tough-cookie@2.5.0: resolution: {integrity: sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==} engines: {node: '>=0.8'} dependencies: @@ -18744,56 +11093,29 @@ packages: punycode: 2.1.1 dev: true - /tr46/0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - dev: true - - /tr46/1.0.1: + /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} dependencies: punycode: 2.1.1 dev: true - /trim-newlines/1.0.0: - resolution: {integrity: sha512-Nm4cF79FhSTzrLKGDMi3I4utBtFv8qKy4sq1enftf2gMdpqI8oVQTAfySkTz5r49giVzDj88SVZXP4CeYQwjaw==} - engines: {node: '>=0.10.0'} - dev: true - optional: true - - /trim-newlines/3.0.1: + /trim-newlines@3.0.1: resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} engines: {node: '>=8'} dev: true - /trim-repeated/1.0.0: + /trim-repeated@1.0.0: resolution: {integrity: sha1-42RqLqTokTEr9+rObPsFOAvAHCE=} engines: {node: '>=0.10.0'} dependencies: escape-string-regexp: 1.0.5 dev: true - /trim-trailing-lines/1.1.4: - resolution: {integrity: sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==} - dev: true - - /trim/0.0.1: - resolution: {integrity: sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==} - dev: true - - 
/triple-beam/1.3.0: + /triple-beam@1.3.0: resolution: {integrity: sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==} dev: true - /trough/1.0.5: - resolution: {integrity: sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==} - dev: true - - /ts-dedent/2.2.0: - resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} - engines: {node: '>=6.10'} - dev: true - - /ts-node/10.9.1_f2hgdakwhr3ybdo3xzav4gmxom: + /ts-node@10.9.1(@swc/core@1.2.203)(@types/node@18.11.9)(typescript@4.7.4): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} hasBin: true peerDependencies: @@ -18825,30 +11147,19 @@ packages: yn: 3.1.1 dev: true - /ts-pnp/1.2.0_typescript@4.7.4: - resolution: {integrity: sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==} - engines: {node: '>=6'} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - typescript: 4.7.4 - dev: true - - /tslib/1.10.0: + /tslib@1.10.0: resolution: {integrity: sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==} dev: true - /tslib/1.14.1: + /tslib@1.14.1: resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} dev: true - /tslib/2.4.0: + /tslib@2.4.0: resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} + dev: false - /tsutils/3.21.0_typescript@4.7.4: + /tsutils@3.21.0(typescript@4.7.4): resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} engines: {node: '>= 6'} peerDependencies: @@ -18858,279 +11169,124 @@ packages: typescript: 4.7.4 dev: true - /tty-browserify/0.0.0: - resolution: {integrity: sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=} - dev: true - - /tunnel-agent/0.6.0: + /tunnel-agent@0.6.0: resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} dependencies: safe-buffer: 5.2.1 dev: true - /tweetnacl/0.14.5: + /tweetnacl@0.14.5: resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} dev: true - /type-check/0.3.2: + /type-check@0.3.2: resolution: {integrity: sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=} engines: {node: '>= 0.8.0'} dependencies: prelude-ls: 1.1.2 dev: true - /type-fest/0.11.0: + /type-fest@0.11.0: resolution: {integrity: sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==} engines: {node: '>=8'} dev: true - /type-fest/0.18.1: + /type-fest@0.18.1: resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==} engines: {node: '>=10'} dev: true - /type-fest/0.20.2: - resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} - engines: {node: '>=10'} - dev: true - - /type-fest/0.6.0: + /type-fest@0.6.0: resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} engines: {node: '>=8'} dev: true - /type-fest/0.8.1: + /type-fest@0.8.1: resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} 
engines: {node: '>=8'} dev: true - /type-is/1.6.18: - resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} - engines: {node: '>= 0.6'} - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - dev: true - - /typedarray-to-buffer/3.1.5: - resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==} - dependencies: - is-typedarray: 1.0.0 - dev: true - - /typedarray/0.0.6: - resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==} - dev: true - - /typescript/3.9.10: + /typescript@3.9.10: resolution: {integrity: sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==} engines: {node: '>=4.2.0'} hasBin: true dev: true - /typescript/4.7.4: - resolution: {integrity: sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==} - engines: {node: '>=4.2.0'} - hasBin: true - dev: true - - /ua-parser-js/0.7.28: - resolution: {integrity: sha512-6Gurc1n//gjp9eQNXjD9O3M/sMwVtN5S8Lv9bvOYBfKfDNiIIhqiyi01vMBO45u4zkDE420w/e0se7Vs+sIg+g==} - dev: true - - /uglify-js/3.17.0: - resolution: {integrity: sha512-aTeNPVmgIMPpm1cxXr2Q/nEbvkmV8yq66F3om7X3P/cvOXQ0TMQ64Wk63iyT1gPlmdmGzjGpyLh1f3y8MZWXGg==} - engines: {node: '>=0.8.0'} - hasBin: true - requiresBuild: true - dev: true - optional: true - - /unbox-primitive/1.0.1: - resolution: {integrity: sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==} - dependencies: - function-bind: 1.1.1 - has-bigints: 1.0.1 - has-symbols: 1.0.3 - which-boxed-primitive: 1.0.2 - dev: true - - /unbox-primitive/1.0.2: - resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} - dependencies: - call-bind: 1.0.2 - has-bigints: 1.0.2 - has-symbols: 1.0.3 - which-boxed-primitive: 1.0.2 - dev: true - - /unfetch/4.2.0: - resolution: {integrity: sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==} - dev: true - - /unherit/1.1.3: - resolution: {integrity: sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==} - dependencies: - inherits: 2.0.4 - xtend: 4.0.2 - dev: true - - /unicode-canonical-property-names-ecmascript/1.0.4: - resolution: {integrity: sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==} - engines: {node: '>=4'} - dev: true - - /unicode-canonical-property-names-ecmascript/2.0.0: - resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} - engines: {node: '>=4'} - dev: true - - /unicode-match-property-ecmascript/1.0.4: - resolution: {integrity: sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==} - engines: {node: '>=4'} - dependencies: - unicode-canonical-property-names-ecmascript: 1.0.4 - unicode-property-aliases-ecmascript: 1.1.0 - dev: true - - /unicode-match-property-ecmascript/2.0.0: - resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} - engines: {node: '>=4'} - dependencies: - unicode-canonical-property-names-ecmascript: 2.0.0 - unicode-property-aliases-ecmascript: 2.1.0 - dev: true - - /unicode-match-property-value-ecmascript/1.2.0: - resolution: {integrity: 
sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==} - engines: {node: '>=4'} - dev: true - - /unicode-match-property-value-ecmascript/2.0.0: - resolution: {integrity: sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==} - engines: {node: '>=4'} - dev: true - - /unicode-property-aliases-ecmascript/1.1.0: - resolution: {integrity: sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==} - engines: {node: '>=4'} - dev: true - - /unicode-property-aliases-ecmascript/2.1.0: - resolution: {integrity: sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==} - engines: {node: '>=4'} - dev: true - - /unified/9.2.0: - resolution: {integrity: sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==} - dependencies: - bail: 1.0.5 - extend: 3.0.2 - is-buffer: 2.0.5 - is-plain-obj: 2.1.0 - trough: 1.0.5 - vfile: 4.2.1 - dev: true - - /union-value/1.0.1: - resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} - engines: {node: '>=0.10.0'} - dependencies: - arr-union: 3.1.0 - get-value: 2.0.6 - is-extendable: 0.1.1 - set-value: 2.0.1 - dev: true - - /uniq/1.0.1: - resolution: {integrity: sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=} - dev: true - - /uniqs/2.0.0: - resolution: {integrity: sha1-/+3ks2slKQaW5uFl1KWe25mOawI=} - dev: true - - /unique-filename/1.1.1: - resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==} - dependencies: - unique-slug: 2.0.2 - dev: true - - /unique-slug/2.0.2: - resolution: {integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==} - dependencies: - imurmurhash: 0.1.4 + /typescript@4.7.4: + resolution: {integrity: sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==} + engines: {node: '>=4.2.0'} + hasBin: true dev: true - /unist-builder/2.0.3: - resolution: {integrity: sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==} + /ua-parser-js@0.7.28: + resolution: {integrity: sha512-6Gurc1n//gjp9eQNXjD9O3M/sMwVtN5S8Lv9bvOYBfKfDNiIIhqiyi01vMBO45u4zkDE420w/e0se7Vs+sIg+g==} dev: true - /unist-util-generated/1.1.6: - resolution: {integrity: sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==} + /unbox-primitive@1.0.1: + resolution: {integrity: sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==} + dependencies: + function-bind: 1.1.1 + has-bigints: 1.0.1 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 dev: true - /unist-util-is/4.1.0: - resolution: {integrity: sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==} + /unicode-canonical-property-names-ecmascript@1.0.4: + resolution: {integrity: sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==} + engines: {node: '>=4'} dev: true - /unist-util-position/3.1.0: - resolution: {integrity: sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==} + /unicode-match-property-ecmascript@1.0.4: + resolution: {integrity: sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==} + engines: {node: '>=4'} + dependencies: + 
unicode-canonical-property-names-ecmascript: 1.0.4 + unicode-property-aliases-ecmascript: 1.1.0 dev: true - /unist-util-remove-position/2.0.1: - resolution: {integrity: sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==} - dependencies: - unist-util-visit: 2.0.3 + /unicode-match-property-value-ecmascript@1.2.0: + resolution: {integrity: sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==} + engines: {node: '>=4'} dev: true - /unist-util-remove/2.1.0: - resolution: {integrity: sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==} - dependencies: - unist-util-is: 4.1.0 + /unicode-property-aliases-ecmascript@1.1.0: + resolution: {integrity: sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==} + engines: {node: '>=4'} dev: true - /unist-util-stringify-position/2.0.3: - resolution: {integrity: sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==} + /union-value@1.0.1: + resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} + engines: {node: '>=0.10.0'} dependencies: - '@types/unist': 2.0.6 + arr-union: 3.1.0 + get-value: 2.0.6 + is-extendable: 0.1.1 + set-value: 2.0.1 dev: true - /unist-util-visit-parents/3.1.1: - resolution: {integrity: sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 4.1.0 + /uniq@1.0.1: + resolution: {integrity: sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=} dev: true - /unist-util-visit/2.0.3: - resolution: {integrity: sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==} - dependencies: - '@types/unist': 2.0.6 - unist-util-is: 4.1.0 - unist-util-visit-parents: 3.1.1 + /uniqs@2.0.0: + resolution: {integrity: sha1-/+3ks2slKQaW5uFl1KWe25mOawI=} dev: true - /universalify/0.1.2: + /universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} dev: true - /universalify/2.0.0: + /universalify@2.0.0: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} dev: true - /unpipe/1.0.0: - resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} - engines: {node: '>= 0.8'} - dev: true - - /unquote/1.1.1: + /unquote@1.1.1: resolution: {integrity: sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=} dev: true - /unset-value/1.0.0: + /unset-value@1.0.0: resolution: {integrity: sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==} engines: {node: '>=0.10.0'} dependencies: @@ -19138,21 +11294,7 @@ packages: isobject: 3.0.1 dev: true - /untildify/2.1.0: - resolution: {integrity: sha512-sJjbDp2GodvkB0FZZcn7k6afVisqX5BZD7Yq3xp4nN2O15BBK0cLm3Vwn2vQaF7UDS0UUsrQMkkplmDI5fskig==} - engines: {node: '>=0.10.0'} - dependencies: - os-homedir: 1.0.2 - dev: true - optional: true - - /upath/1.2.0: - resolution: {integrity: sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==} - engines: {node: '>=4'} - dev: true - optional: true - - /update-browserslist-db/1.0.9_browserslist@4.21.4: + /update-browserslist-db@1.0.9(browserslist@4.21.4): resolution: {integrity: 
sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==} hasBin: true peerDependencies: @@ -19163,129 +11305,64 @@ packages: picocolors: 1.0.0 dev: true - /uri-js/4.4.0: + /uri-js@4.4.0: resolution: {integrity: sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g==} dependencies: punycode: 2.1.1 dev: true - /urix/0.1.0: + /urix@0.1.0: resolution: {integrity: sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==} deprecated: Please see https://github.com/lydell/urix#deprecated dev: true - /url-loader/4.1.1_lit45vopotvaqup7lrvlnvtxwy: - resolution: {integrity: sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==} - engines: {node: '>= 10.13.0'} - peerDependencies: - file-loader: '*' - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - file-loader: - optional: true - webpack: - optional: true - dependencies: - file-loader: 6.2.0_webpack@4.46.0 - loader-utils: 2.0.2 - mime-types: 2.1.27 - schema-utils: 3.1.1 - webpack: 4.46.0 - dev: true - - /url/0.11.0: - resolution: {integrity: sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==} - dependencies: - punycode: 1.3.2 - querystring: 0.2.0 - dev: true - - /use/3.1.1: + /use@3.1.1: resolution: {integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==} engines: {node: '>=0.10.0'} dev: true - /util-deprecate/1.0.2: + /util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} dev: true - /util.promisify/1.0.0: + /util.promisify@1.0.0: resolution: {integrity: sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==} dependencies: define-properties: 1.1.3 object.getownpropertydescriptors: 2.1.0 dev: true - /util/0.10.3: - resolution: {integrity: sha512-5KiHfsmkqacuKjkRkdV7SsfDJ2EGiPsK92s2MhNSY0craxjTdKTtqKsJaCWp4LW33ZZ0OPUv1WO/TFvNQRiQxQ==} - dependencies: - inherits: 2.0.1 - dev: true - - /util/0.11.1: - resolution: {integrity: sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==} - dependencies: - inherits: 2.0.3 - dev: true - - /utila/0.4.0: - resolution: {integrity: sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==} - dev: true - - /utils-merge/1.0.1: - resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} - engines: {node: '>= 0.4.0'} - dev: true - - /uuid-browser/3.1.0: - resolution: {integrity: sha512-dsNgbLaTrd6l3MMxTtouOCFw4CBFc/3a+GgYA2YyrJvyQ1u6q4pcu3ktLoUZ/VN/Aw9WsauazbgsgdfVWgAKQg==} - dev: true - - /uuid/3.4.0: + /uuid@3.4.0: resolution: {integrity: sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==} deprecated: Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details. 
hasBin: true dev: true - /uuid/8.3.2: + /uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true dev: false - /v8-compile-cache-lib/3.0.1: + /v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} dev: true - /v8-compile-cache/2.3.0: + /v8-compile-cache@2.3.0: resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==} dev: true - /v8-to-istanbul/9.0.1: - resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} - engines: {node: '>=10.12.0'} - dependencies: - '@jridgewell/trace-mapping': 0.3.15 - '@types/istanbul-lib-coverage': 2.0.3 - convert-source-map: 1.7.0 - dev: true - - /validate-npm-package-license/3.0.4: + /validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} dependencies: spdx-correct: 3.1.1 spdx-expression-parse: 3.0.1 dev: true - /vary/1.1.2: - resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} - engines: {node: '>= 0.8'} - dev: true - - /vendors/1.0.4: + /vendors@1.0.4: resolution: {integrity: sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==} dev: true - /verror/1.10.0: + /verror@1.10.0: resolution: {integrity: sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=} engines: {'0': node >=0.6.0} dependencies: @@ -19294,27 +11371,7 @@ packages: extsprintf: 1.3.0 dev: true - /vfile-location/3.2.0: - resolution: {integrity: sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==} - dev: true - - /vfile-message/2.0.4: - resolution: {integrity: sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==} - dependencies: - '@types/unist': 2.0.6 - unist-util-stringify-position: 2.0.3 - dev: true - - /vfile/4.2.1: - resolution: {integrity: sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==} - dependencies: - '@types/unist': 2.0.6 - is-buffer: 2.0.5 - unist-util-stringify-position: 2.0.3 - vfile-message: 2.0.4 - dev: true - - /vinyl/2.2.1: + /vinyl@2.2.1: resolution: {integrity: sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw==} engines: {node: '>= 0.10'} dependencies: @@ -19326,21 +11383,17 @@ packages: replace-ext: 1.0.1 dev: true - /vlq/0.2.3: + /vlq@0.2.3: resolution: {integrity: sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==} dev: true - /vm-browserify/1.1.2: - resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==} - dev: true - - /w3c-hr-time/1.0.2: + /w3c-hr-time@1.0.2: resolution: {integrity: sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==} dependencies: browser-process-hrtime: 1.0.0 dev: true - /w3c-xmlserializer/1.1.2: + /w3c-xmlserializer@1.1.2: resolution: {integrity: sha512-p10l/ayESzrBMYWRID6xbuCKh2Fp77+sA0doRuGn4tTIMrrZVeqfpKjXHY+oDh3K4nLdPgNwMTVP6Vp4pvqbNg==} dependencies: domexception: 1.0.1 @@ -19348,221 +11401,31 @@ packages: xml-name-validator: 3.0.0 dev: true - /walker/1.0.7: + /walker@1.0.7: resolution: {integrity: 
sha512-cF4je9Fgt6sj1PKfuFt9jpQPeHosM+Ryma/hfY9U7uXGKM7pJCsF0v2r55o+Il54+i77SyYWetB4tD1dEygRkw==} dependencies: makeerror: 1.0.11 dev: true - /watchpack-chokidar2/2.0.1: - resolution: {integrity: sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==} - requiresBuild: true - dependencies: - chokidar: 2.1.8 - transitivePeerDependencies: - - supports-color - dev: true - optional: true - - /watchpack/1.7.5: - resolution: {integrity: sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==} - dependencies: - graceful-fs: 4.2.10 - neo-async: 2.6.2 - optionalDependencies: - chokidar: 3.4.3 - watchpack-chokidar2: 2.0.1 - transitivePeerDependencies: - - supports-color - dev: true - - /watchpack/2.4.0: - resolution: {integrity: sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==} - engines: {node: '>=10.13.0'} - dependencies: - glob-to-regexp: 0.4.1 - graceful-fs: 4.2.10 - dev: true - - /web-namespaces/1.1.4: - resolution: {integrity: sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==} - dev: true - - /webidl-conversions/3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - dev: true - - /webidl-conversions/4.0.2: + /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} dev: true - /webpack-dev-middleware/3.7.3_webpack@4.46.0: - resolution: {integrity: sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==} - engines: {node: '>= 6'} - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - memory-fs: 0.4.1 - mime: 2.4.6 - mkdirp: 0.5.5 - range-parser: 1.2.1 - webpack: 4.46.0 - webpack-log: 2.0.0 - dev: true - - /webpack-filter-warnings-plugin/1.2.1_webpack@4.46.0: - resolution: {integrity: sha512-Ez6ytc9IseDMLPo0qCuNNYzgtUl8NovOqjIq4uAU8LTD4uoa1w1KpZyyzFtLTEMZpkkOkLfL9eN+KGYdk1Qtwg==} - engines: {node: '>= 4.3 < 5.0.0 || >= 5.10'} - peerDependencies: - webpack: ^2.0.0 || ^3.0.0 || ^4.0.0 - peerDependenciesMeta: - webpack: - optional: true - dependencies: - webpack: 4.46.0 - dev: true - - /webpack-hot-middleware/2.25.2: - resolution: {integrity: sha512-CVgm3NAQyfdIonRvXisRwPTUYuSbyZ6BY7782tMeUzWOO7RmVI2NaBYuCp41qyD4gYCkJyTneAJdK69A13B0+A==} - dependencies: - ansi-html-community: 0.0.8 - html-entities: 2.3.3 - strip-ansi: 6.0.1 - dev: true - - /webpack-log/2.0.0: - resolution: {integrity: sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==} - engines: {node: '>= 6'} - dependencies: - ansi-colors: 3.2.4 - uuid: 3.4.0 - dev: true - - /webpack-sources/1.4.3: - resolution: {integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==} - dependencies: - source-list-map: 2.0.1 - source-map: 0.6.1 - dev: true - - /webpack-sources/3.2.3: - resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} - engines: {node: '>=10.13.0'} - dev: true - - /webpack-virtual-modules/0.2.2: - resolution: {integrity: sha512-kDUmfm3BZrei0y+1NTHJInejzxfhtU8eDj2M7OKb2IWrPFAeO1SOH2KuQ68MSZu9IGEHcxbkKKR1v18FrUSOmA==} - dependencies: - debug: 3.2.7 - transitivePeerDependencies: - - supports-color - dev: true - - /webpack/4.46.0: - resolution: 
{integrity: sha512-6jJuJjg8znb/xRItk7bkT0+Q7AHCYjjFnvKIWQPkNIOyRqoCGvkOs0ipeQzrqz4l5FtN5ZI/ukEHroeX/o1/5Q==} - engines: {node: '>=6.11.5'} - hasBin: true - peerDependencies: - webpack-cli: '*' - webpack-command: '*' - peerDependenciesMeta: - webpack-cli: - optional: true - webpack-command: - optional: true - dependencies: - '@webassemblyjs/ast': 1.9.0 - '@webassemblyjs/helper-module-context': 1.9.0 - '@webassemblyjs/wasm-edit': 1.9.0 - '@webassemblyjs/wasm-parser': 1.9.0 - acorn: 6.4.2 - ajv: 6.12.6 - ajv-keywords: 3.5.2_ajv@6.12.6 - chrome-trace-event: 1.0.3 - enhanced-resolve: 4.5.0 - eslint-scope: 4.0.3 - json-parse-better-errors: 1.0.2 - loader-runner: 2.4.0 - loader-utils: 1.4.0 - memory-fs: 0.4.1 - micromatch: 3.1.10 - mkdirp: 0.5.5 - neo-async: 2.6.2 - node-libs-browser: 2.2.1 - schema-utils: 1.0.0 - tapable: 1.1.3 - terser-webpack-plugin: 1.4.5_webpack@4.46.0 - watchpack: 1.7.5 - webpack-sources: 1.4.3 - transitivePeerDependencies: - - supports-color - dev: true - - /webpack/5.74.0_@swc+core@1.2.203: - resolution: {integrity: sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==} - engines: {node: '>=10.13.0'} - hasBin: true - peerDependencies: - webpack-cli: '*' - peerDependenciesMeta: - webpack-cli: - optional: true - dependencies: - '@types/eslint-scope': 3.7.4 - '@types/estree': 0.0.51 - '@webassemblyjs/ast': 1.11.1 - '@webassemblyjs/wasm-edit': 1.11.1 - '@webassemblyjs/wasm-parser': 1.11.1 - acorn: 8.7.1 - acorn-import-assertions: 1.8.0_acorn@8.7.1 - browserslist: 4.20.4 - chrome-trace-event: 1.0.3 - enhanced-resolve: 5.10.0 - es-module-lexer: 0.9.3 - eslint-scope: 5.1.1 - events: 3.3.0 - glob-to-regexp: 0.4.1 - graceful-fs: 4.2.10 - json-parse-even-better-errors: 2.3.1 - loader-runner: 4.3.0 - mime-types: 2.1.27 - neo-async: 2.6.2 - schema-utils: 3.1.1 - tapable: 2.2.1 - terser-webpack-plugin: 5.3.6_bhtm7a3ixzishl2uxypy6qnuwu - watchpack: 2.4.0 - webpack-sources: 3.2.3 - transitivePeerDependencies: - - '@swc/core' - - esbuild - - uglify-js - dev: true - - /whatwg-encoding/1.0.5: + /whatwg-encoding@1.0.5: resolution: {integrity: sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==} dependencies: iconv-lite: 0.4.24 dev: true - /whatwg-fetch/3.6.2: + /whatwg-fetch@3.6.2: resolution: {integrity: sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==} dev: true - /whatwg-mimetype/2.3.0: + /whatwg-mimetype@2.3.0: resolution: {integrity: sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==} dev: true - /whatwg-url/5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - dev: true - - /whatwg-url/6.5.0: + /whatwg-url@6.5.0: resolution: {integrity: sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==} dependencies: lodash.sortby: 4.7.0 @@ -19570,7 +11433,7 @@ packages: webidl-conversions: 4.0.2 dev: true - /whatwg-url/7.1.0: + /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} dependencies: lodash.sortby: 4.7.0 @@ -19578,7 +11441,7 @@ packages: webidl-conversions: 4.0.2 dev: true - /which-boxed-primitive/1.0.2: + /which-boxed-primitive@1.0.2: resolution: {integrity: 
sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} dependencies: is-bigint: 1.0.4 @@ -19588,18 +11451,18 @@ packages: is-symbol: 1.0.4 dev: true - /which-module/2.0.0: + /which-module@2.0.0: resolution: {integrity: sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==} dev: true - /which/1.3.1: + /which@1.3.1: resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} hasBin: true dependencies: isexe: 2.0.0 dev: true - /which/2.0.2: + /which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} hasBin: true @@ -19607,20 +11470,7 @@ packages: isexe: 2.0.0 dev: true - /wide-align/1.1.5: - resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} - dependencies: - string-width: 4.2.3 - dev: true - - /widest-line/3.1.0: - resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} - engines: {node: '>=8'} - dependencies: - string-width: 4.2.3 - dev: true - - /winston-transport/4.5.0: + /winston-transport@4.5.0: resolution: {integrity: sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q==} engines: {node: '>= 6.4.0'} dependencies: @@ -19629,7 +11479,7 @@ packages: triple-beam: 1.3.0 dev: true - /winston/3.8.2: + /winston@3.8.2: resolution: {integrity: sha512-MsE1gRx1m5jdTTO9Ld/vND4krP2To+lgDoMEHGGa4HIlAUyXJtfc7CxQcGXVyz2IBpw5hbFkj2b/AtUdQwyRew==} engines: {node: '>= 12.0.0'} dependencies: @@ -19646,28 +11496,12 @@ packages: winston-transport: 4.5.0 dev: true - /word-wrap/1.2.3: + /word-wrap@1.2.3: resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} engines: {node: '>=0.10.0'} dev: true - /wordwrap/1.0.0: - resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} - dev: true - - /worker-farm/1.7.0: - resolution: {integrity: sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==} - dependencies: - errno: 0.1.8 - dev: true - - /worker-rpc/0.1.1: - resolution: {integrity: sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg==} - dependencies: - microevent.ts: 0.1.1 - dev: true - - /wrap-ansi/5.1.0: + /wrap-ansi@5.1.0: resolution: {integrity: sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==} engines: {node: '>=6'} dependencies: @@ -19676,7 +11510,7 @@ packages: strip-ansi: 5.2.0 dev: true - /wrap-ansi/7.0.0: + /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} dependencies: @@ -19685,11 +11519,11 @@ packages: strip-ansi: 6.0.1 dev: true - /wrappy/1.0.2: + /wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} dev: true - /write-file-atomic/2.4.1: + /write-file-atomic@2.4.1: resolution: {integrity: sha512-TGHFeZEZMnv+gBFRfjAcxL5bPHrsGKtnb4qsFAws7/vlh+QfwAaySIw4AXP9ZskTTh5GWu3FLuJhsWVdiJPGvg==} dependencies: graceful-fs: 4.2.10 @@ -19697,16 +11531,7 @@ packages: signal-exit: 3.0.7 dev: true - /write-file-atomic/3.0.3: - resolution: {integrity: 
sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==} - dependencies: - imurmurhash: 0.1.4 - is-typedarray: 1.0.0 - signal-exit: 3.0.7 - typedarray-to-buffer: 3.1.5 - dev: true - - /write-file-atomic/4.0.1: + /write-file-atomic@4.0.1: resolution: {integrity: sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ==} engines: {node: ^12.13.0 || ^14.15.0 || >=16} dependencies: @@ -19714,14 +11539,14 @@ packages: signal-exit: 3.0.7 dev: true - /write/1.0.3: + /write@1.0.3: resolution: {integrity: sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==} engines: {node: '>=4'} dependencies: mkdirp: 0.5.5 dev: true - /ws/5.2.2: + /ws@5.2.2: resolution: {integrity: sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA==} peerDependencies: bufferutil: ^4.0.1 @@ -19735,7 +11560,7 @@ packages: async-limiter: 1.0.1 dev: true - /ws/6.2.1: + /ws@6.2.1: resolution: {integrity: sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==} peerDependencies: bufferutil: ^4.0.1 @@ -19749,83 +11574,60 @@ packages: async-limiter: 1.0.1 dev: true - /ws/8.8.1: - resolution: {integrity: sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - dev: true - - /x-default-browser/0.4.0: - resolution: {integrity: sha512-7LKo7RtWfoFN/rHx1UELv/2zHGMx8MkZKDq1xENmOCTkfIqZJ0zZ26NEJX8czhnPXVcqS0ARjjfJB+eJ0/5Cvw==} - hasBin: true - optionalDependencies: - default-browser-id: 1.0.4 - dev: true - - /xml-name-validator/3.0.0: + /xml-name-validator@3.0.0: resolution: {integrity: sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==} dev: true - /xmlchars/2.2.0: + /xmlchars@2.2.0: resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} dev: true - /xpath/0.0.32: + /xpath@0.0.32: resolution: {integrity: sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==} engines: {node: '>=0.6.0'} dev: true - /xtend/4.0.2: + /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /y18n/4.0.0: + /y18n@4.0.0: resolution: {integrity: sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==} dev: true - /y18n/5.0.8: + /y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} dev: true - /yallist/3.1.1: - resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} - dev: true - - /yallist/4.0.0: + /yallist@4.0.0: resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} dev: true - /yaml/1.10.0: + /yaml@1.10.0: resolution: {integrity: sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==} engines: {node: '>= 6'} dev: true - /yargs-parser/13.1.2: + /yargs-parser@13.1.2: resolution: {integrity: 
sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==} dependencies: camelcase: 5.3.1 decamelize: 1.2.0 dev: true - /yargs-parser/20.2.9: + /yargs-parser@20.2.9: resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} engines: {node: '>=10'} dev: true - /yargs-parser/21.1.1: + /yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} dev: true - /yargs/13.3.2: + /yargs@13.3.2: resolution: {integrity: sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==} dependencies: cliui: 5.0.0 @@ -19840,20 +11642,7 @@ packages: yargs-parser: 13.1.2 dev: true - /yargs/16.2.0: - resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} - engines: {node: '>=10'} - dependencies: - cliui: 7.0.4 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 20.2.9 - dev: true - - /yargs/17.6.0: + /yargs@17.6.0: resolution: {integrity: sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==} engines: {node: '>=12'} dependencies: @@ -19866,16 +11655,7 @@ packages: yargs-parser: 21.1.1 dev: true - /yn/3.1.1: + /yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} dev: true - - /yocto-queue/0.1.0: - resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} - engines: {node: '>=10'} - dev: true - - /zwitch/1.0.5: - resolution: {integrity: sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==} - dev: true diff --git a/dac/ui-lib/pom.xml b/dac/ui-lib/pom.xml index af89df36ad..6b328a4a99 100644 --- a/dac/ui-lib/pom.xml +++ b/dac/ui-lib/pom.xml @@ -21,7 +21,7 @@ com.dremio dremio-dac-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-ui-lib @@ -46,57 +46,6 @@ - - org.apache.maven.plugins - maven-resources-plugin - - - generate-resources - - copy-resources - - - - - ${project.basedir} - - dist/** - dist-cjs/** - dist-esm/** - dist-themes/** - icons/** - images/** - node_modules/** - package.json - pnpm-lock.yaml - - false - - - ${project.build.directory}/src-resources - - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - jar-resources - package - - jar - - - ${project.build.directory}/src-resources - true - resources - - - - com.github.eirslett diff --git a/dac/ui-lib/scripts/writeIconManifest.ts b/dac/ui-lib/scripts/writeIconManifest.ts index 579604499d..d579f73623 100644 --- a/dac/ui-lib/scripts/writeIconManifest.ts +++ b/dac/ui-lib/scripts/writeIconManifest.ts @@ -18,6 +18,6 @@ import path from "path"; import { generateIconManifest } from "./generateIconManifest"; fs.writeFileSync( - path.join(__dirname, "../example/manifest.json"), + path.join(__dirname, "../iconmanifest.json"), JSON.stringify(generateIconManifest(), null, 2) + "\n" ); diff --git a/dac/ui-lib/src/components/CopyToClipboard/CopyToClipboard.scss b/dac/ui-lib/src/components/CopyToClipboard/CopyToClipboard.scss index 3f2facc1ae..30e66a43c7 100644 --- a/dac/ui-lib/src/components/CopyToClipboard/CopyToClipboard.scss +++ b/dac/ui-lib/src/components/CopyToClipboard/CopyToClipboard.scss @@ -30,6 
+30,6 @@ &__content { display: flex; align-items: center; - background-color: var(--dremio--color--neutral--700) !important; + background-color: var(--color--neutral--800) !important; } } diff --git a/dac/ui-lib/src/components/HoverHelp/HoverHelp.scss b/dac/ui-lib/src/components/HoverHelp/HoverHelp.scss index 90c051d4d1..7a4d2420ec 100644 --- a/dac/ui-lib/src/components/HoverHelp/HoverHelp.scss +++ b/dac/ui-lib/src/components/HoverHelp/HoverHelp.scss @@ -20,7 +20,7 @@ .infoIcon { width: 20px; height: 20px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); &:hover { color: var(--dremio--color--link); } diff --git a/dac/ui-lib/src/components/Input/input.scss b/dac/ui-lib/src/components/Input/input.scss index a73d50a115..b764967806 100644 --- a/dac/ui-lib/src/components/Input/input.scss +++ b/dac/ui-lib/src/components/Input/input.scss @@ -69,7 +69,7 @@ } .input__prefix { - background-color: var(--dremio--color--neutral--100); + background-color: var(--color--neutral--25); border-top-left-radius: $input-border-radius; border-bottom-left-radius: $input-border-radius; padding: 0px map-get($map: $gutter-sizes, $key: double); diff --git a/dac/ui-lib/src/components/JsonSyntaxHighlighter/JsonSyntaxHighlighter.scss b/dac/ui-lib/src/components/JsonSyntaxHighlighter/JsonSyntaxHighlighter.scss index 4d8ab08531..15a0d171b2 100644 --- a/dac/ui-lib/src/components/JsonSyntaxHighlighter/JsonSyntaxHighlighter.scss +++ b/dac/ui-lib/src/components/JsonSyntaxHighlighter/JsonSyntaxHighlighter.scss @@ -18,7 +18,7 @@ &:focus-within { .json-container, .label-container { - border-color: var(--dremio--color--primary--500); + border-color: var(--color--brand--300); } } } @@ -27,7 +27,7 @@ overflow: auto; height: 130px; margin-bottom: 15px; - border: 1px solid var(--dremio--color--neutral--200); + border: 1px solid var(--color--neutral--100); border-top: none; border-radius: 0 0 var(--dremio--radius--1) var(--dremio--radius--1); } @@ -37,7 +37,7 @@ align-items: center; justify-content: space-between; border-radius: var(--dremio--radius--1) var(--dremio--radius--1) 0 0; - background-color: var(--dremio--color--neutral--100); - border: 1px solid var(--dremio--color--neutral--200); - border-bottom: 1px solid var(--dremio--color--neutral--150) !important; + background-color: var(--color--neutral--25); + border: 1px solid var(--color--neutral--100); + border-bottom: 1px solid var(--color--neutral--50) !important; } diff --git a/dac/ui-lib/src/components/ModalForm/ModalFormActionContainer/modalFormActionContainer.scss b/dac/ui-lib/src/components/ModalForm/ModalFormActionContainer/modalFormActionContainer.scss index c7fdf2002b..82d3186db8 100644 --- a/dac/ui-lib/src/components/ModalForm/ModalFormActionContainer/modalFormActionContainer.scss +++ b/dac/ui-lib/src/components/ModalForm/ModalFormActionContainer/modalFormActionContainer.scss @@ -22,7 +22,7 @@ align-items: center; justify-content: flex-end; padding: 0 16px; - border-top: 1px solid var(--dremio--color--neutral--150); + border-top: 1px solid var(--color--neutral--50); .modalFormActionContainer__container { display: flex; diff --git a/dac/ui-lib/src/components/MultiSelect/MultiSelect.js b/dac/ui-lib/src/components/MultiSelect/MultiSelect.js index 2bfc0bfdbd..3ca6f3eac4 100644 --- a/dac/ui-lib/src/components/MultiSelect/MultiSelect.js +++ b/dac/ui-lib/src/components/MultiSelect/MultiSelect.js @@ -23,6 +23,7 @@ import Checkbox from "@mui/material/Checkbox"; import Chip from "@mui/material/Chip"; import Menu from "@mui/material/Menu"; import 
MenuItem from "@mui/material/MenuItem"; +import Tooltip from "../Tooltip/index"; import { ReactComponent as XIcon } from "../../art/XLarge.svg"; @@ -30,7 +31,7 @@ import Label from "../Label"; import "./multiSelect.scss"; -const MultiSelect = (props) => { +const MultiSelectComponent = (props) => { const { classes, form: { errors, touched, setFieldValue } = {}, @@ -46,6 +47,8 @@ const MultiSelect = (props) => { value, onChange, loadNextRecords, + nonClearableValue, + getCustomChipIcon, } = props; const [showMenu, setShowMenu] = useState(false); @@ -185,7 +188,11 @@ const MultiSelect = (props) => { }; const handleClear = (e) => { - updateValue([]); + if (nonClearableValue) { + updateValue([nonClearableValue]); + } else { + updateValue([]); + } onChange && onChange(""); e.stopPropagation(); }; @@ -211,8 +218,18 @@ const MultiSelect = (props) => { const renderValue = () => { const hasValue = value && value.length > 0; + const { innerRef } = props; return ( -
+        <div
+          ref={(node) => {
+            valueContainerRef.current = node;
+            if (innerRef) {
+              innerRef.current = node;
+            }
+          }}
+          className={valueClass}
+          onClick={handleOpen}
+        >
        {visibleValues.map((selectedVal) => { const KEY = displayValues.length > 0 ? selectedVal.id : selectedVal; @@ -220,13 +237,25 @@ const MultiSelect = (props) => { + {getDisplayName(selectedVal)} + + } onClick={handleChipClick} - onDelete={(ev) => handleDelete(ev, selectedVal)} + onDelete={ + selectedVal !== nonClearableValue + ? (ev) => handleDelete(ev, selectedVal) + : null + } deleteIcon={} /> ); @@ -241,6 +270,7 @@ const MultiSelect = (props) => { name={`${name}_typeahead`} onChange={handleTypeAhead} className={inputClass} + autoComplete="off" value={filterText} ref={inputRef} onKeyDown={handleInputKeyDown} @@ -259,48 +289,74 @@ const MultiSelect = (props) => { ); }; + const renderMenuItemChipIcon = (item) => { + const { icon: IconComponent } = item; + if (getCustomChipIcon?.(item)) { + return getCustomChipIcon?.(item); + } else + return IconComponent ? ( + + + + ) : null; + }; + + const EllipisedMenuItem = ({ label }) => { + const [showTooltip, setShowTooltip] = useState(false); + return showTooltip ? ( + + {label} + + ) : ( + { + if (elem?.offsetWidth < elem?.scrollWidth) { + setShowTooltip(true); + } + }} + > + {label} + + ); + }; + const renderMenuItems = () => { if (filteredValues.length === 0) { return No values; } - return filteredValues.map( - ( - { label: optionLabel, value: optionValue, icon: IconComponent }, - idx - ) => { - const isSelected = value.indexOf(optionValue) !== -1; - return ( - handleMenuItemClick(optionValue)} - selected={isSelected} + return filteredValues.map((item, idx) => { + const isSelected = value.indexOf(item.value) !== -1; + const chip = renderMenuItemChipIcon(item); + return ( + handleMenuItemClick(item.value)} + selected={isSelected} + classes={{ + root: "multiSelect__option", + selected: "multiSelect__option --selected", + }} + disabled={item.disabled} + > + {/* Todo: Use font icons for checkboxes */} + - {/* Todo: Use font icons for checkboxes */} - -
        - {IconComponent && ( - - - - )} - {optionLabel} -
        -
        - ); - } - ); + disabled={item.disabled} + /> +
        + {chip} + +
        +
        + ); + }); }; return ( @@ -340,12 +396,14 @@ const MultiSelect = (props) => { ); }; -MultiSelect.propTypes = { +MultiSelectComponent.propTypes = { + innerRef: PropTypes.any, classes: PropTypes.shape({ root: PropTypes.string, value: PropTypes.string, input: PropTypes.string, label: PropTypes.string, + nonClearableChip: PropTypes.string, }), value: PropTypes.array, options: PropTypes.arrayOf( @@ -371,9 +429,11 @@ MultiSelect.propTypes = { }) ), disabled: PropTypes.bool, + nonClearableValue: PropTypes.string, + getCustomChipIcon: PropTypes.func, }; -MultiSelect.defaultProps = { +MultiSelectComponent.defaultProps = { classes: {}, value: [], displayValues: [], @@ -384,4 +444,7 @@ MultiSelect.defaultProps = { hasChipIcon: false, }; +const MultiSelect = React.forwardRef((props, ref) => { + return ; +}); export default MultiSelect; diff --git a/dac/ui-lib/src/components/MultiSelect/multiSelect.scss b/dac/ui-lib/src/components/MultiSelect/multiSelect.scss index 540b0e2807..c5379acf9d 100644 --- a/dac/ui-lib/src/components/MultiSelect/multiSelect.scss +++ b/dac/ui-lib/src/components/MultiSelect/multiSelect.scss @@ -83,12 +83,17 @@ color: $trout !important; } svg { + min-width: 24px; path:last-of-type { fill: $trout !important; } } } + &__chip__public { + padding: 4px 4px !important; + } + &__inputContainer { width: 100%; display: flex; @@ -129,4 +134,11 @@ cursor: pointer; padding-right: 4px; } + &__label { + &__container { + overflow: hidden; + } + overflow: hidden; + text-overflow: ellipsis; + } } diff --git a/dac/ui-lib/src/components/Select/select.scss b/dac/ui-lib/src/components/Select/select.scss index 33835c9440..d10a859751 100644 --- a/dac/ui-lib/src/components/Select/select.scss +++ b/dac/ui-lib/src/components/Select/select.scss @@ -29,17 +29,17 @@ //There was no hover style prior to mui upgrade fieldset { - border: 1px solid var(--dremio--color--neutral--200); + border: 1px solid var(--color--neutral--100); } &:hover { fieldset { - border-color: var(--dremio--color--neutral--300) !important; + border-color: var(--color--neutral--200) !important; } } &:focus, &.Mui-focused { fieldset { - border-color: var(--dremio--color--primary--500) !important; + border-color: var(--color--brand--300) !important; border-width: 1px !important; } } @@ -69,10 +69,6 @@ top: unset; } - &__no-padding { - padding: 0px; - } - .MuiInputBase-root { display: flex; background-color: $white; diff --git a/dac/ui-lib/src/components/TagList/TagList.scss b/dac/ui-lib/src/components/TagList/TagList.scss index d10ca667c9..a2c93eb5d8 100644 --- a/dac/ui-lib/src/components/TagList/TagList.scss +++ b/dac/ui-lib/src/components/TagList/TagList.scss @@ -28,8 +28,8 @@ line-height: 18px; font-weight: 400; margin-right: 4px; - background: var(--dremio--color--primary--150); - border: 1px solid var(--dremio--color--primary--200); + background: var(--color--brand--50); + border: 1px solid var(--color--brand--100); } .tagPopover { display: flex; diff --git a/dac/ui-lib/src/components/Tooltip/Tooltip.scss b/dac/ui-lib/src/components/Tooltip/Tooltip.scss index 8751b1eb85..a9339fe714 100644 --- a/dac/ui-lib/src/components/Tooltip/Tooltip.scss +++ b/dac/ui-lib/src/components/Tooltip/Tooltip.scss @@ -18,12 +18,12 @@ // Default tooltip colors. 
Moved out of the main theme for readability .MuiTooltip-tooltip, .tooltip { - background-color: var(--dremio--color--neutral--700) !important; + background-color: var(--color--neutral--800) !important; .MuiTooltip-arrow { - color: var(--dremio--color--neutral--700) !important; + color: var(--color--neutral--800) !important; } .MuiTooltip-arrow::before { - background-color: var(--dremio--color--neutral--700) !important; + background-color: var(--color--neutral--800) !important; } &.MuiTooltip-tooltip { @@ -38,15 +38,15 @@ // Custom tooltip .richTooltip { - background-color: var(--dremio--color--neutral--000) !important; + background-color: white !important; max-width: 400px !important; color: var(--dremio--color--text--main) !important; padding: 16px 0px 20px 0px !important; box-shadow: var(--dremio--shadow--layer-1) !important; .MuiTooltip-arrow { - color: var(--dremio--color--neutral--000) !important; + color: white !important; } .MuiTooltip-arrow::before { - background-color: var(--dremio--color--neutral--000) !important; + background-color: white !important; } } diff --git a/dac/ui-lib/src/stories/AppLayout.stories.tsx b/dac/ui-lib/src/stories/AppLayout.stories.tsx deleted file mode 100644 index a2ea18d2d5..0000000000 --- a/dac/ui-lib/src/stories/AppLayout.stories.tsx +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import React from "react"; -import { ComponentStory } from "@storybook/react"; - -const Shell = () => { - return ( -
        - -
        -
        Breadcrumbs
        -
        -
        Page header
        -
        -

-                Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam
-                ultricies augue a arcu iaculis, ac maximus massa hendrerit. Mauris
-                sed finibus eros, et laoreet nulla. Morbi in magna in justo
-                malesuada placerat. Aenean fermentum diam sed orci consectetur
-                aliquet. Cras ac ornare risus, ac cursus lacus. Pellentesque
-                interdum lacus vitae diam dignissim, a tempus augue eleifend.
-                Morbi in dapibus urna, vel viverra mauris.
-

        - -

-                Vivamus aliquam convallis purus nec egestas. In hac habitasse
-                platea dictumst. Vivamus placerat id arcu ut feugiat. Interdum et
-                malesuada fames ac ante ipsum primis in faucibus. Integer
-                bibendum, eros eget faucibus condimentum, quam nunc mattis sem,
-                commodo tempor leo erat in ligula. Vestibulum pretium quam velit,
-                nec aliquet arcu tincidunt at. Pellentesque sollicitudin imperdiet
-                tortor, ut bibendum neque tincidunt sed. Pellentesque blandit
-                risus finibus semper interdum.
-

        - -

-                Mauris sit amet tortor id massa ornare ornare at non dolor. Duis
-                eu leo non nunc pretium vestibulum. Nulla vitae diam non sapien
-                molestie maximus id nec lectus. Class aptent taciti sociosqu ad
-                litora torquent per conubia nostra, per inceptos himenaeos. Nam
-                sodales ante et nibh consequat auctor. Mauris sed dictum odio.
-                Etiam interdum eget metus ac cursus. Aliquam fringilla
-                pellentesque iaculis. Curabitur vel turpis ultrices nunc posuere
-                aliquam eu non diam. Sed consectetur gravida enim vitae pharetra.
-                Sed a turpis eget ex accumsan feugiat sit amet vitae massa. Nullam
-                mollis at ipsum ac iaculis. Curabitur congue tortor at nunc
-                fringilla, nec ultricies orci rutrum. Nulla aliquam fermentum sem,
-                ornare convallis libero dapibus vel. Vivamus dapibus quam eu enim
-                tempor, sit amet auctor mauris consectetur. Praesent in mauris
-                ultrices, luctus ex vitae, mollis velit.
-

        - -

-                Nullam interdum maximus urna, sit amet facilisis quam commodo non.
-                Donec ullamcorper auctor ex, eu tincidunt quam gravida ut.
-                Pellentesque condimentum nibh et velit laoreet dignissim. Nunc
-                quis convallis ex. Donec luctus et urna sit amet vestibulum. Sed
-                massa lacus, ultrices et urna et, mollis consectetur nisl. Etiam
-                volutpat diam pretium nunc pellentesque, id pellentesque erat
-                condimentum. Aenean fringilla, mauris condimentum pretium sodales,
-                risus mauris aliquet ipsum, maximus dignissim leo erat ut nibh.
-                Nulla a ornare libero. Phasellus vitae neque nisi. Duis vitae
-                massa scelerisque, luctus urna vitae, vehicula ex. Duis lacus
-                lacus, pulvinar non urna eu, facilisis consequat odio.
-

        - -

-                Aliquam euismod, leo eget pellentesque lobortis, neque nisi
-                volutpat libero, eget finibus justo erat in leo. Etiam iaculis
-                metus lorem, at molestie neque feugiat non. Morbi neque massa,
-                consequat at efficitur a, facilisis vel justo. Donec consectetur
-                ligula in nulla elementum ultrices. Mauris eu feugiat erat. Donec
-                nec lorem in nisi auctor lobortis. Nullam ex nunc, posuere sed
-                ornare vitae, ultrices id tortor. Sed et luctus lectus, in
-                suscipit lectus. Nam eu nibh erat.
-

        -
        -
        -
        -
-  );
-};
-
-export default {
-  title: "Patterns/App Layout",
-  component: Shell,
-};
-
-const Template: ComponentStory<typeof Shell> = () => <Shell />;
-
-export const Example = Template.bind({});
-Example.args = {
-  children: <div>I am a child</div>,
-};
diff --git a/dac/ui-lib/src/stories/Drawer.stories.tsx b/dac/ui-lib/src/stories/Drawer.stories.tsx
deleted file mode 100644
index e4e7c104e1..0000000000
--- a/dac/ui-lib/src/stories/Drawer.stories.tsx
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import React from "react";
-import { ComponentStory } from "@storybook/react";
-import { Button, DialogContent, Drawer, useDrawer } from "../../components";
-
-const Shell = () => {
-  const drawer = useDrawer();
-  return (
-
        - -
        -
        Breadcrumbs
        -
        -
        Page header
        -
        - - - - {/* width should be set by the inner content */} -
        -

-                  Mauris sit amet tortor id massa ornare ornare at non dolor.
-                  Duis eu leo non nunc pretium vestibulum. Nulla vitae diam
-                  non sapien molestie maximus id nec lectus. Class aptent
-                  taciti sociosqu ad litora torquent per conubia nostra, per
-                  inceptos himenaeos. Nam sodales ante et nibh consequat
-                  auctor. Mauris sed dictum odio. Etiam interdum eget metus ac
-                  cursus. Aliquam fringilla pellentesque iaculis. Curabitur
-                  vel turpis ultrices nunc posuere aliquam eu non diam. Sed
-                  consectetur gravida enim vitae pharetra. Sed a turpis eget
-                  ex accumsan feugiat sit amet vitae massa. Nullam mollis at
-                  ipsum ac iaculis. Curabitur congue tortor at nunc fringilla,
-                  nec ultricies orci rutrum. Nulla aliquam fermentum sem,
-                  ornare convallis libero dapibus vel. Vivamus dapibus quam eu
-                  enim tempor, sit amet auctor mauris consectetur. Praesent in
-                  mauris ultrices, luctus ex vitae, mollis velit.
-

        - -

-                  Nullam interdum maximus urna, sit amet facilisis quam
-                  commodo non. Donec ullamcorper auctor ex, eu tincidunt quam
-                  gravida ut. Pellentesque condimentum nibh et velit laoreet
-                  dignissim. Nunc quis convallis ex. Donec luctus et urna sit
-                  amet vestibulum. Sed massa lacus, ultrices et urna et,
-                  mollis consectetur nisl. Etiam volutpat diam pretium nunc
-                  pellentesque, id pellentesque erat condimentum. Aenean
-                  fringilla, mauris condimentum pretium sodales, risus mauris
-                  aliquet ipsum, maximus dignissim leo erat ut nibh. Nulla a
-                  ornare libero. Phasellus vitae neque nisi. Duis vitae massa
-                  scelerisque, luctus urna vitae, vehicula ex. Duis lacus
-                  lacus, pulvinar non urna eu, facilisis consequat odio.
-

        - -

-                  Aliquam euismod, leo eget pellentesque lobortis, neque nisi
-                  volutpat libero, eget finibus justo erat in leo. Etiam
-                  iaculis metus lorem, at molestie neque feugiat non. Morbi
-                  neque massa, consequat at efficitur a, facilisis vel justo.
-                  Donec consectetur ligula in nulla elementum ultrices. Mauris
-                  eu feugiat erat. Donec nec lorem in nisi auctor lobortis.
-                  Nullam ex nunc, posuere sed ornare vitae, ultrices id
-                  tortor. Sed et luctus lectus, in suscipit lectus. Nam eu
-                  nibh erat.
-

        -
        -
        -
        -

-                Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam
-                ultricies augue a arcu iaculis, ac maximus massa hendrerit. Mauris
-                sed finibus eros, et laoreet nulla. Morbi in magna in justo
-                malesuada placerat. Aenean fermentum diam sed orci consectetur
-                aliquet. Cras ac ornare risus, ac cursus lacus. Pellentesque
-                interdum lacus vitae diam dignissim, a tempus augue eleifend.
-                Morbi in dapibus urna, vel viverra mauris.
-

        - -

-                Vivamus aliquam convallis purus nec egestas. In hac habitasse
-                platea dictumst. Vivamus placerat id arcu ut feugiat. Interdum et
-                malesuada fames ac ante ipsum primis in faucibus. Integer
-                bibendum, eros eget faucibus condimentum, quam nunc mattis sem,
-                commodo tempor leo erat in ligula. Vestibulum pretium quam velit,
-                nec aliquet arcu tincidunt at. Pellentesque sollicitudin imperdiet
-                tortor, ut bibendum neque tincidunt sed. Pellentesque blandit
-                risus finibus semper interdum.
-

        -
        -
        -
        -
-  );
-};
-
-export default {
-  title: "Components/Drawer",
-  component: Shell,
-};
-
-const Template: ComponentStory<typeof Shell> = () => <Shell />;
-
-export const Example = Template.bind({});
diff --git a/dac/ui-lib/src/stories/Page.stories.tsx b/dac/ui-lib/src/stories/Page.stories.tsx
deleted file mode 100644
index 7081edf189..0000000000
--- a/dac/ui-lib/src/stories/Page.stories.tsx
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import React from "react";
-import { ComponentStory, ComponentMeta } from "@storybook/react";
-
-import { Page } from "../../components";
-
-export default {
-  title: "Components/Page",
-  component: Page,
-} as ComponentMeta<typeof Page>;
-
-const Template: ComponentStory<typeof Page> = (args) => (
-
        - Sample Title} - > -
        - -
        -

-              Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam
-              ultricies augue a arcu iaculis, ac maximus massa hendrerit. Mauris
-              sed finibus eros, et laoreet nulla. Morbi in magna in justo
-              malesuada placerat. Aenean fermentum diam sed orci consectetur
-              aliquet. Cras ac ornare risus, ac cursus lacus. Pellentesque
-              interdum lacus vitae diam dignissim, a tempus augue eleifend. Morbi
-              in dapibus urna, vel viverra mauris.
-

        - -

-              Vivamus aliquam convallis purus nec egestas. In hac habitasse platea
-              dictumst. Vivamus placerat id arcu ut feugiat. Interdum et malesuada
-              fames ac ante ipsum primis in faucibus. Integer bibendum, eros eget
-              faucibus condimentum, quam nunc mattis sem, commodo tempor leo erat
-              in ligula. Vestibulum pretium quam velit, nec aliquet arcu tincidunt
-              at. Pellentesque sollicitudin imperdiet tortor, ut bibendum neque
-              tincidunt sed. Pellentesque blandit risus finibus semper interdum.
-

        - -

        - Mauris sit amet tortor id massa ornare ornare at non dolor. Duis eu - leo non nunc pretium vestibulum. Nulla vitae diam non sapien - molestie maximus id nec lectus. Class aptent taciti sociosqu ad - litora torquent per conubia nostra, per inceptos himenaeos. Nam - sodales ante et nibh consequat auctor. Mauris sed dictum odio. Etiam - interdum eget metus ac cursus. Aliquam fringilla pellentesque - iaculis. Curabitur vel turpis ultrices nunc posuere aliquam eu non - diam. Sed consectetur gravida enim vitae pharetra. Sed a turpis eget - ex accumsan feugiat sit amet vitae massa. Nullam mollis at ipsum ac - iaculis. Curabitur congue tortor at nunc fringilla, nec ultricies - orci rutrum. Nulla aliquam fermentum sem, ornare convallis libero - dapibus vel. Vivamus dapibus quam eu enim tempor, sit amet auctor - mauris consectetur. Praesent in mauris ultrices, luctus ex vitae, - mollis velit. -

        -
        -
        -
        -
        -); - -export const MasterDetail = Template.bind({}); -MasterDetail.args = {}; diff --git a/dac/ui-lib/src/styles/constants/colorConstants.scss b/dac/ui-lib/src/styles/constants/colorConstants.scss index e6c9a283c2..fc0742feec 100644 --- a/dac/ui-lib/src/styles/constants/colorConstants.scss +++ b/dac/ui-lib/src/styles/constants/colorConstants.scss @@ -26,7 +26,7 @@ $tooltip-color: #32383e; $mine-shaft: #333; -$border-color: var(--dremio--color--neutral--200); +$border-color: var(--color--neutral--100); $neutral-color: #f2f2f2; diff --git a/dac/ui-lib/storybook/.gitignore b/dac/ui-lib/storybook/.gitignore new file mode 100644 index 0000000000..20687473be --- /dev/null +++ b/dac/ui-lib/storybook/.gitignore @@ -0,0 +1 @@ +storybook-static diff --git a/dac/ui-lib/storybook/.storybook/main.ts b/dac/ui-lib/storybook/.storybook/main.ts new file mode 100644 index 0000000000..cf311ac671 --- /dev/null +++ b/dac/ui-lib/storybook/.storybook/main.ts @@ -0,0 +1,27 @@ +import type { StorybookConfig } from "@storybook/react-vite"; +const config: StorybookConfig = { + addons: [ + "@storybook/addon-links", + "@storybook/addon-essentials", + "@storybook/addon-interactions", + "@storybook/addon-storysource", + "storybook-dark-mode", + ], + core: { + disableTelemetry: true, + }, + docs: { + autodocs: "tag", + }, + framework: { + name: "@storybook/react-vite", + options: {}, + }, + stories: ["../stories/**/*.stories.@(ts|tsx)"], + typescript: { + check: false, + reactDocgen: "react-docgen-typescript", + }, +}; + +export default config; diff --git a/dac/ui-lib/storybook/.storybook/preview.ts b/dac/ui-lib/storybook/.storybook/preview.ts new file mode 100644 index 0000000000..5c4744937f --- /dev/null +++ b/dac/ui-lib/storybook/.storybook/preview.ts @@ -0,0 +1,16 @@ +import "../../themes/utilities/reset.scss"; +import "../assets/fonts/inter-ui/inter.css"; +import "../assets/fonts/FiraCode/FiraCode.css"; +import "../../themes/dremio/index.scss"; +import { configureDremioIcon } from "../../components/icon/configureDremioIcon"; +import "../../themes/dremio/components/table.scss"; + +configureDremioIcon("/static/icons/dremio"); + +export const parameters = { + darkMode: { + classTarget: "html", + darkClass: "dremio-dark", + stylePreview: true, + }, +}; diff --git a/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode-VF.woff2 b/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode-VF.woff2 new file mode 100644 index 0000000000..e755a9dc72 Binary files /dev/null and b/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode-VF.woff2 differ diff --git a/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode.css b/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode.css new file mode 100644 index 0000000000..401e9000bd --- /dev/null +++ b/dac/ui-lib/storybook/assets/fonts/FiraCode/FiraCode.css @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+@font-face {
+  font-family: "Fira Code";
+  src: url("./FiraCode-VF.woff2") format("woff2-variations");
+  font-weight: 300 700;
+  font-style: normal;
+}
diff --git a/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-italic.var.woff2 b/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-italic.var.woff2
new file mode 100644
index 0000000000..b826d5af84
Binary files /dev/null and b/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-italic.var.woff2 differ
diff --git a/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-roman.var.woff2 b/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-roman.var.woff2
new file mode 100644
index 0000000000..6a256a068f
Binary files /dev/null and b/dac/ui-lib/storybook/assets/fonts/inter-ui/Inter-roman.var.woff2 differ
diff --git a/dac/ui-lib/storybook/assets/fonts/inter-ui/inter.css b/dac/ui-lib/storybook/assets/fonts/inter-ui/inter.css
new file mode 100644
index 0000000000..70f4ba1884
--- /dev/null
+++ b/dac/ui-lib/storybook/assets/fonts/inter-ui/inter.css
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@font-face {
+  font-family: Inter;
+  font-weight: 100 900;
+  font-display: swap;
+  font-style: normal;
+
+  /* stylelint-disable-next-line property-no-unknown */
+  font-named-instance: "Regular";
+  src: url("./Inter-roman.var.woff2") format("woff2");
+}
+@font-face {
+  font-family: Inter;
+  font-weight: 100 900;
+  font-display: swap;
+  font-style: italic;
+
+  /* stylelint-disable-next-line property-no-unknown */
+  font-named-instance: "Italic";
+  src: url("./Inter-italic.var.woff2") format("woff2");
+}
diff --git a/dac/ui-lib/storybook/package.json b/dac/ui-lib/storybook/package.json
new file mode 100644
index 0000000000..b9751dcc17
--- /dev/null
+++ b/dac/ui-lib/storybook/package.json
@@ -0,0 +1,32 @@
+{
+  "private": true,
+  "scripts": {
+    "start": "storybook dev -p 6006",
+    "build": "storybook build"
+  },
+  "dependencies": {
+    "leantable": "^0.4.12",
+    "react": "^18.2.0",
+    "react-dom": "^18.2.0",
+    "storybook-dark-mode": "^3.0.0"
+  },
+  "devDependencies": {
+    "@storybook/addon-essentials": "^7.0.2",
+    "@storybook/addon-interactions": "^7.0.2",
+    "@storybook/addon-links": "^7.0.2",
+    "@storybook/addon-storysource": "^7.0.3",
+    "@storybook/blocks": "^7.0.2",
+    "@storybook/builder-vite": "^7.0.2",
+    "@storybook/react": "^7.0.2",
+    "@storybook/react-vite": "^7.0.2",
+    "@types/react": "^18.0.28",
+    "@types/react-dom": "^18.0.11",
+    "@vitejs/plugin-react": "^3.1.0",
+    "prop-types": "^15.8.1",
+    "storybook": "^7.0.2",
+    "typescript": "5.0.4",
+    "vite": "^4.2.0",
+    "vite-plugin-static-copy": "^0.13.1"
+  },
+  "packageManager": "pnpm@8.1.0"
+}
diff --git a/dac/ui-lib/storybook/pnpm-lock.yaml b/dac/ui-lib/storybook/pnpm-lock.yaml
new file mode 100644
index 0000000000..04ef6c008f
--- /dev/null
+++ b/dac/ui-lib/storybook/pnpm-lock.yaml
@@ -0,0 +1,6990 @@
+lockfileVersion: '6.0' + +dependencies: + leantable: + specifier: ^0.4.12 + version: 0.4.12(react@18.2.0) + react: + specifier: ^18.2.0 + 
version: 18.2.0 + react-dom: + specifier: ^18.2.0 + version: 18.2.0(react@18.2.0) + storybook-dark-mode: + specifier: ^3.0.0 + version: 3.0.0(react-dom@18.2.0)(react@18.2.0) + +devDependencies: + '@storybook/addon-essentials': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-interactions': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-links': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-storysource': + specifier: ^7.0.3 + version: 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/blocks': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/builder-vite': + specifier: ^7.0.2 + version: 7.0.2(typescript@5.0.4)(vite@4.2.0) + '@storybook/react': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4) + '@storybook/react-vite': + specifier: ^7.0.2 + version: 7.0.2(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4)(vite@4.2.0) + '@types/react': + specifier: ^18.0.28 + version: 18.0.28 + '@types/react-dom': + specifier: ^18.0.11 + version: 18.0.11 + '@vitejs/plugin-react': + specifier: ^3.1.0 + version: 3.1.0(vite@4.2.0) + prop-types: + specifier: ^15.8.1 + version: 15.8.1 + storybook: + specifier: ^7.0.2 + version: 7.0.2 + typescript: + specifier: 5.0.4 + version: 5.0.4 + vite: + specifier: ^4.2.0 + version: 4.2.0 + vite-plugin-static-copy: + specifier: ^0.13.1 + version: 0.13.1(vite@4.2.0) + +packages: + + /@ampproject/remapping@2.2.1: + resolution: {integrity: sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/gen-mapping': 0.3.3 + '@jridgewell/trace-mapping': 0.3.18 + dev: true + + /@aw-web-design/x-default-browser@1.4.88: + resolution: {integrity: sha512-AkEmF0wcwYC2QkhK703Y83fxWARttIWXDmQN8+cof8FmFZ5BRhnNXGymeb1S73bOCLfWjYELxtujL56idCN/XA==} + hasBin: true + dependencies: + default-browser-id: 3.0.0 + dev: true + + /@babel/code-frame@7.21.4: + resolution: {integrity: sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.18.6 + dev: true + + /@babel/compat-data@7.21.4: + resolution: {integrity: sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/core@7.21.4: + resolution: {integrity: sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.1 + '@babel/code-frame': 7.21.4 + '@babel/generator': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-module-transforms': 7.21.2 + '@babel/helpers': 7.21.0 + '@babel/parser': 7.21.4 + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + convert-source-map: 1.9.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/generator@7.21.4: + resolution: {integrity: sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + '@jridgewell/gen-mapping': 0.3.3 + '@jridgewell/trace-mapping': 0.3.18 + jsesc: 2.5.2 + dev: true + + /@babel/helper-annotate-as-pure@7.18.6: + resolution: {integrity: 
sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-builder-binary-assignment-operator-visitor@7.18.9: + resolution: {integrity: sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-explode-assignable-expression': 7.18.6 + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-compilation-targets@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/compat-data': 7.21.4 + '@babel/core': 7.21.4 + '@babel/helper-validator-option': 7.21.0 + browserslist: 4.21.5 + lru-cache: 5.1.1 + semver: 6.3.0 + dev: true + + /@babel/helper-create-class-features-plugin@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-46QrX2CQlaFRF4TkwfTt6nJD7IHq8539cCL7SDpqWSDeJKY1xylKKY5F/33mJhLZ3mFvKv2gGrVS6NkyF6qs+Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.21.0 + '@babel/helper-member-expression-to-functions': 7.21.0 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-replace-supers': 7.20.7 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + '@babel/helper-split-export-declaration': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-create-regexp-features-plugin@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-M00OuhU+0GyZ5iBBN9czjugzWrEq2vDpf/zCYHxxf93ul/Q5rv+a5h+/+0WnI1AebHNVtl5bFV0qsJoH23DbfA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + regexpu-core: 5.3.2 + dev: true + + /@babel/helper-define-polyfill-provider@0.3.3(@babel/core@7.21.4): + resolution: {integrity: sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==} + peerDependencies: + '@babel/core': ^7.4.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + debug: 4.3.4 + lodash.debounce: 4.0.8 + resolve: 1.22.2 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-environment-visitor@7.18.9: + resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-explode-assignable-expression@7.18.6: + resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-function-name@7.21.0: + resolution: {integrity: sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.20.7 + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-hoist-variables@7.18.6: + resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} + 
engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-member-expression-to-functions@7.21.0: + resolution: {integrity: sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-module-imports@7.21.4: + resolution: {integrity: sha512-orajc5T2PsRYUN3ZryCEFeMDYwyw09c/pZeaQEZPH0MpKzSvn3e0uXsDBu3k03VI+9DBiRo+l22BfKTpKwa/Wg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-module-transforms@7.21.2: + resolution: {integrity: sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-module-imports': 7.21.4 + '@babel/helper-simple-access': 7.20.2 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/helper-validator-identifier': 7.19.1 + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-optimise-call-expression@7.18.6: + resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-plugin-utils@7.20.2: + resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-remap-async-to-generator@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-wrap-function': 7.20.5 + '@babel/types': 7.21.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-replace-supers@7.20.7: + resolution: {integrity: sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-member-expression-to-functions': 7.21.0 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-simple-access@7.20.2: + resolution: {integrity: sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-skip-transparent-expression-wrappers@7.20.0: + resolution: {integrity: sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-split-export-declaration@7.18.6: + resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.21.4 + dev: true + + /@babel/helper-string-parser@7.19.4: + resolution: {integrity: 
sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-identifier@7.19.1: + resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-option@7.21.0: + resolution: {integrity: sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-wrap-function@7.20.5: + resolution: {integrity: sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-function-name': 7.21.0 + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helpers@7.21.0: + resolution: {integrity: sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/highlight@7.18.6: + resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.19.1 + chalk: 2.4.2 + js-tokens: 4.0.0 + dev: true + + /@babel/parser@7.21.4: + resolution: {integrity: sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.21.4 + + /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.13.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-async-generator-functions@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-remap-async-to-generator': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-class-properties@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-class-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-class-static-block@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.12.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-class-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-dynamic-import@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-export-namespace-from@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-json-strings@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-logical-assignment-operators@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-nullish-coalescing-operator@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-numeric-separator@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-object-rest-spread@7.20.7(@babel/core@7.21.4): + resolution: {integrity: 
sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.21.4 + '@babel/core': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-transform-parameters': 7.21.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-optional-catch-binding@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-optional-chaining@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.21.4) + dev: true + + /@babel/plugin-proposal-private-methods@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-class-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-private-property-in-object@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-ha4zfehbJjc5MmXBlHec1igel5TJXXLDDRbuJ4+XT2TJcyD9/V1919BA8gMvsdHcNMBy4WBUBiRb3nw/EQUtBw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-proposal-unicode-property-regex@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==} + engines: {node: '>=4'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.21.4): + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.21.4): + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + 
dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.21.4): + resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-dynamic-import@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-export-namespace-from@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-flow@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-l9xd3N+XG4fZRxEP3vXdK6RW7vN1Uf5dxzRC/09wV86wqZ/YYQooBIGNsiRdfNR3/q2/5pPzV4B54J/9ctX5jw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-import-assertions@7.20.0(@babel/core@7.21.4): + resolution: {integrity: sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-jsx@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-5hewiLct5OKyh6PLKEYaFclcqtIgCb6bmELouxjF6up5q3Sov7rOayW4RwhbaBL0dit8rA80GNfY+UuDp2mBbQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.21.4): + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.21.4): + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + 
/@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.21.4): + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.21.4): + resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.21.4): + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-typescript@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-xz0D39NvhQn4t4RNsHmDnnsaQizIlUkdtYvLs8La1BlfjQ6JEwxkJGeqJMW2tAXx+q6H+WFuUTXNdYVpEya0YA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-arrow-functions@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-async-to-generator@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-module-imports': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-remap-async-to-generator': 7.18.9(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-block-scoped-functions@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-block-scoping@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ==} + engines: {node: 
'>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-classes@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.21.0 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-replace-supers': 7.20.7 + '@babel/helper-split-export-declaration': 7.18.6 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-computed-properties@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/template': 7.20.7 + dev: true + + /@babel/plugin-transform-destructuring@7.21.3(@babel/core@7.21.4): + resolution: {integrity: sha512-bp6hwMFzuiE4HqYEyoGJ/V2LeIWn+hLVKc4pnj++E5XQptwhtcGmSayM029d/j2X1bPKGTlsyPwAubuU22KhMA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-dotall-regex@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-duplicate-keys@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-exponentiation-operator@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.9 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-flow-strip-types@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-FlFA2Mj87a6sDkW4gfGrQQqwY/dLlBAyJa2dJEZ+FHXUVHBflO2wyKvg+OOEzXfrKYIa4HWl0mgmbCzt0cMb7w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-flow': 7.21.4(@babel/core@7.21.4) + dev: true + + /@babel/plugin-transform-for-of@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ==} + engines: {node: '>=6.9.0'} + peerDependencies: 
+ '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-function-name@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-function-name': 7.21.0 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-literals@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-member-expression-literals@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-modules-amd@7.20.11(@babel/core@7.21.4): + resolution: {integrity: sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-module-transforms': 7.21.2 + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-commonjs@7.21.2(@babel/core@7.21.4): + resolution: {integrity: sha512-Cln+Yy04Gxua7iPdj6nOV96smLGjpElir5YwzF0LBPKoPlLDNJePNlrGGaybAJkd0zKRnOVXOgizSqPYMNYkzA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-module-transforms': 7.21.2 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-simple-access': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-systemjs@7.20.11(@babel/core@7.21.4): + resolution: {integrity: sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-module-transforms': 7.21.2 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-validator-identifier': 7.19.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-modules-umd@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-module-transforms': 7.21.2 + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-named-capturing-groups-regex@7.20.5(@babel/core@7.21.4): + resolution: {integrity: sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: 
+ '@babel/core': 7.21.4 + '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-new-target@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-object-super@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-replace-supers': 7.20.7 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-parameters@7.21.3(@babel/core@7.21.4): + resolution: {integrity: sha512-Wxc+TvppQG9xWFYatvCGPvZ6+SIUxQ2ZdiBP+PHYMIjnPXD+uThCshaz4NZOnODAtBjjcVQQ/3OKs9LW28purQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-property-literals@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-react-jsx-self@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-f/Eq+79JEu+KUANFks9UZCcvydOOGMgF7jBrcwjHa5jTZD8JivnhCJYvmlhR/WTXBWonDExPoW0eO/CR4QJirA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-react-jsx-source@7.19.6(@babel/core@7.21.4): + resolution: {integrity: sha512-RpAi004QyMNisst/pvSanoRdJ4q+jMCWyk9zdw/CyLB9j8RXEahodR6l2GyttDRyEVWZtbN+TpLiHJ3t34LbsQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-react-jsx@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-6OAWljMvQrZjR2DaNhVfRz6dkCAVV+ymcLUmaf8bccGOHn2v5rHJK3tTpij0BuhdYWP4LLaqj5lwcdlpAAPuvg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-module-imports': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-jsx': 7.21.4(@babel/core@7.21.4) + '@babel/types': 7.21.4 + dev: true + + /@babel/plugin-transform-regenerator@7.20.5(@babel/core@7.21.4): + resolution: {integrity: sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + regenerator-transform: 0.15.1 + dev: true + + /@babel/plugin-transform-reserved-words@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-shorthand-properties@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-spread@7.20.7(@babel/core@7.21.4): + resolution: {integrity: sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + dev: true + + /@babel/plugin-transform-sticky-regex@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-template-literals@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-typeof-symbol@7.18.9(@babel/core@7.21.4): + resolution: {integrity: sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-typescript@7.21.3(@babel/core@7.21.4): + resolution: {integrity: sha512-RQxPz6Iqt8T0uw/WsJNReuBpWpBqs/n7mNo18sKLoTbMp+UrEekhH+pKSVC7gWz+DNjo9gryfV8YzCiT45RgMw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-create-class-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-typescript': 7.21.4(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-unicode-escapes@7.18.10(@babel/core@7.21.4): + resolution: {integrity: sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-unicode-regex@7.18.6(@babel/core@7.21.4): + resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/preset-env@7.21.4(@babel/core@7.21.4): + resolution: {integrity: 
sha512-2W57zHs2yDLm6GD5ZpvNn71lZ0B/iypSdIeq25OurDKji6AdzV07qp4s3n1/x5BqtiGaTrPN3nerlSCaC5qNTw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.21.4 + '@babel/core': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-validator-option': 7.21.0 + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-proposal-async-generator-functions': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-class-static-block': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-proposal-dynamic-import': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-export-namespace-from': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-proposal-json-strings': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-logical-assignment-operators': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-numeric-separator': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-proposal-optional-catch-binding': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-proposal-private-methods': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-proposal-private-property-in-object': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.21.4) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.21.4) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.21.4) + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-import-assertions': 7.20.0(@babel/core@7.21.4) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.21.4) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.21.4) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.21.4) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.21.4) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.21.4) + '@babel/plugin-transform-arrow-functions': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-transform-async-to-generator': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-transform-block-scoped-functions': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-block-scoping': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-transform-classes': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-transform-computed-properties': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-transform-destructuring': 7.21.3(@babel/core@7.21.4) + '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-duplicate-keys': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-transform-exponentiation-operator': 7.18.6(@babel/core@7.21.4) + 
'@babel/plugin-transform-for-of': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-transform-function-name': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-transform-literals': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-transform-member-expression-literals': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-modules-amd': 7.20.11(@babel/core@7.21.4) + '@babel/plugin-transform-modules-commonjs': 7.21.2(@babel/core@7.21.4) + '@babel/plugin-transform-modules-systemjs': 7.20.11(@babel/core@7.21.4) + '@babel/plugin-transform-modules-umd': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-named-capturing-groups-regex': 7.20.5(@babel/core@7.21.4) + '@babel/plugin-transform-new-target': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-object-super': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-parameters': 7.21.3(@babel/core@7.21.4) + '@babel/plugin-transform-property-literals': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-regenerator': 7.20.5(@babel/core@7.21.4) + '@babel/plugin-transform-reserved-words': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-shorthand-properties': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-spread': 7.20.7(@babel/core@7.21.4) + '@babel/plugin-transform-sticky-regex': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-template-literals': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-transform-typeof-symbol': 7.18.9(@babel/core@7.21.4) + '@babel/plugin-transform-unicode-escapes': 7.18.10(@babel/core@7.21.4) + '@babel/plugin-transform-unicode-regex': 7.18.6(@babel/core@7.21.4) + '@babel/preset-modules': 0.1.5(@babel/core@7.21.4) + '@babel/types': 7.21.4 + babel-plugin-polyfill-corejs2: 0.3.3(@babel/core@7.21.4) + babel-plugin-polyfill-corejs3: 0.6.0(@babel/core@7.21.4) + babel-plugin-polyfill-regenerator: 0.4.1(@babel/core@7.21.4) + core-js-compat: 3.30.0 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/preset-flow@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-F24cSq4DIBmhq4OzK3dE63NHagb27OPE3eWR+HLekt4Z3Y5MzIIUGF3LlLgV0gN8vzbDViSY7HnrReNVCJXTeA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-validator-option': 7.21.0 + '@babel/plugin-transform-flow-strip-types': 7.21.0(@babel/core@7.21.4) + dev: true + + /@babel/preset-modules@0.1.5(@babel/core@7.21.4): + resolution: {integrity: sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/core@7.21.4) + '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/core@7.21.4) + '@babel/types': 7.21.4 + esutils: 2.0.3 + dev: true + + /@babel/preset-typescript@7.21.4(@babel/core@7.21.4): + resolution: {integrity: sha512-sMLNWY37TCdRH/bJ6ZeeOH1nPuanED7Ai9Y/vH31IPqalioJ6ZNFUWONsakhv4r4n+I6gm5lmoE0olkgib/j/A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-validator-option': 7.21.0 + '@babel/plugin-syntax-jsx': 7.21.4(@babel/core@7.21.4) + '@babel/plugin-transform-modules-commonjs': 7.21.2(@babel/core@7.21.4) + '@babel/plugin-transform-typescript': 7.21.3(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + 
/@babel/register@7.21.0(@babel/core@7.21.4): + resolution: {integrity: sha512-9nKsPmYDi5DidAqJaQooxIhsLJiNMkGr8ypQ8Uic7cIox7UCDsM7HuUGxdGT7mSDTYbqzIdsOWzfBton/YJrMw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + clone-deep: 4.0.1 + find-cache-dir: 2.1.0 + make-dir: 2.1.0 + pirates: 4.0.5 + source-map-support: 0.5.21 + dev: true + + /@babel/regjsgen@0.8.0: + resolution: {integrity: sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==} + dev: true + + /@babel/runtime@7.21.0: + resolution: {integrity: sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: true + + /@babel/template@7.20.7: + resolution: {integrity: sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.21.4 + '@babel/parser': 7.21.4 + '@babel/types': 7.21.4 + dev: true + + /@babel/traverse@7.21.4: + resolution: {integrity: sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.21.4 + '@babel/generator': 7.21.4 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.21.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.21.4 + '@babel/types': 7.21.4 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/types@7.21.4: + resolution: {integrity: sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + + /@base2/pretty-print-object@1.0.1: + resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==} + dev: true + + /@bcoe/v8-coverage@0.2.3: + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + dev: true + + /@colors/colors@1.5.0: + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + requiresBuild: true + dev: true + optional: true + + /@discoveryjs/json-ext@0.5.7: + resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} + engines: {node: '>=10.0.0'} + dev: true + + /@emotion/use-insertion-effect-with-fallbacks@1.0.0(react@18.2.0): + resolution: {integrity: sha512-1eEgUGmkaljiBnRMTdksDV1W4kUnmwgp7X9G8B++9GYwl1lUdqSndSriIrTJ0N7LQaoauY9JJ2yhiOYK5+NI4A==} + peerDependencies: + react: '>=16.8.0' + dependencies: + react: 18.2.0 + + /@esbuild/android-arm64@0.17.16: + resolution: {integrity: sha512-QX48qmsEZW+gcHgTmAj+x21mwTz8MlYQBnzF6861cNdQGvj2jzzFjqH0EBabrIa/WVZ2CHolwMoqxVryqKt8+Q==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/android-arm@0.17.16: + resolution: {integrity: sha512-baLqRpLe4JnKrUXLJChoTN0iXZH7El/mu58GE3WIA6/H834k0XWvLRmGLG8y8arTRS9hJJibPnF0tiGhmWeZgw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + requiresBuild: true + dev: true + optional: true + 
+ /@esbuild/android-x64@0.17.16: + resolution: {integrity: sha512-G4wfHhrrz99XJgHnzFvB4UwwPxAWZaZBOFXh+JH1Duf1I4vIVfuYY9uVLpx4eiV2D/Jix8LJY+TAdZ3i40tDow==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/darwin-arm64@0.17.16: + resolution: {integrity: sha512-/Ofw8UXZxuzTLsNFmz1+lmarQI6ztMZ9XktvXedTbt3SNWDn0+ODTwxExLYQ/Hod91EZB4vZPQJLoqLF0jvEzA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@esbuild/darwin-x64@0.17.16: + resolution: {integrity: sha512-SzBQtCV3Pdc9kyizh36Ol+dNVhkDyIrGb/JXZqFq8WL37LIyrXU0gUpADcNV311sCOhvY+f2ivMhb5Tuv8nMOQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@esbuild/freebsd-arm64@0.17.16: + resolution: {integrity: sha512-ZqftdfS1UlLiH1DnS2u3It7l4Bc3AskKeu+paJSfk7RNOMrOxmeFDhLTMQqMxycP1C3oj8vgkAT6xfAuq7ZPRA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/freebsd-x64@0.17.16: + resolution: {integrity: sha512-rHV6zNWW1tjgsu0dKQTX9L0ByiJHHLvQKrWtnz8r0YYJI27FU3Xu48gpK2IBj1uCSYhJ+pEk6Y0Um7U3rIvV8g==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-arm64@0.17.16: + resolution: {integrity: sha512-8yoZhGkU6aHu38WpaM4HrRLTFc7/VVD9Q2SvPcmIQIipQt2I/GMTZNdEHXoypbbGao5kggLcxg0iBKjo0SQYKA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-arm@0.17.16: + resolution: {integrity: sha512-n4O8oVxbn7nl4+m+ISb0a68/lcJClIbaGAoXwqeubj/D1/oMMuaAXmJVfFlRjJLu/ZvHkxoiFJnmbfp4n8cdSw==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-ia32@0.17.16: + resolution: {integrity: sha512-9ZBjlkdaVYxPNO8a7OmzDbOH9FMQ1a58j7Xb21UfRU29KcEEU3VTHk+Cvrft/BNv0gpWJMiiZ/f4w0TqSP0gLA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-loong64@0.17.16: + resolution: {integrity: sha512-TIZTRojVBBzdgChY3UOG7BlPhqJz08AL7jdgeeu+kiObWMFzGnQD7BgBBkWRwOtKR1i2TNlO7YK6m4zxVjjPRQ==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-mips64el@0.17.16: + resolution: {integrity: sha512-UPeRuFKCCJYpBbIdczKyHLAIU31GEm0dZl1eMrdYeXDH+SJZh/i+2cAmD3A1Wip9pIc5Sc6Kc5cFUrPXtR0XHA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-ppc64@0.17.16: + resolution: {integrity: sha512-io6yShgIEgVUhExJejJ21xvO5QtrbiSeI7vYUnr7l+v/O9t6IowyhdiYnyivX2X5ysOVHAuyHW+Wyi7DNhdw6Q==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-riscv64@0.17.16: + resolution: {integrity: sha512-WhlGeAHNbSdG/I2gqX2RK2gfgSNwyJuCiFHMc8s3GNEMMHUI109+VMBfhVqRb0ZGzEeRiibi8dItR3ws3Lk+cA==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-s390x@0.17.16: + resolution: {integrity: sha512-gHRReYsJtViir63bXKoFaQ4pgTyah4ruiMRQ6im9YZuv+gp3UFJkNTY4sFA73YDynmXZA6hi45en4BGhNOJUsw==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-x64@0.17.16: + resolution: {integrity: 
sha512-mfiiBkxEbUHvi+v0P+TS7UnA9TeGXR48aK4XHkTj0ZwOijxexgMF01UDFaBX7Q6CQsB0d+MFNv9IiXbIHTNd4g==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/netbsd-x64@0.17.16: + resolution: {integrity: sha512-n8zK1YRDGLRZfVcswcDMDM0j2xKYLNXqei217a4GyBxHIuPMGrrVuJ+Ijfpr0Kufcm7C1k/qaIrGy6eG7wvgmA==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/openbsd-x64@0.17.16: + resolution: {integrity: sha512-lEEfkfsUbo0xC47eSTBqsItXDSzwzwhKUSsVaVjVji07t8+6KA5INp2rN890dHZeueXJAI8q0tEIfbwVRYf6Ew==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/sunos-x64@0.17.16: + resolution: {integrity: sha512-jlRjsuvG1fgGwnE8Afs7xYDnGz0dBgTNZfgCK6TlvPH3Z13/P5pi6I57vyLE8qZYLrGVtwcm9UbUx1/mZ8Ukag==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-arm64@0.17.16: + resolution: {integrity: sha512-TzoU2qwVe2boOHl/3KNBUv2PNUc38U0TNnzqOAcgPiD/EZxT2s736xfC2dYQbszAwo4MKzzwBV0iHjhfjxMimg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-ia32@0.17.16: + resolution: {integrity: sha512-B8b7W+oo2yb/3xmwk9Vc99hC9bNolvqjaTZYEfMQhzdpBsjTvZBlXQ/teUE55Ww6sg//wlcDjOaqldOKyigWdA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-x64@0.17.16: + resolution: {integrity: sha512-xJ7OH/nanouJO9pf03YsL9NAFQBHd8AqfrQd7Pf5laGyyTt/gToul6QYOA/i5i/q8y9iaM5DQFNTgpi995VkOg==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@fal-works/esbuild-plugin-global-externals@2.1.2: + resolution: {integrity: sha512-cEee/Z+I12mZcFJshKcCqC8tuX5hG3s+d+9nZ3LabqKF1vKdF41B92pJVCBggjAGORAeOzyyDDKrZwIkLffeOQ==} + dev: true + + /@istanbuljs/load-nyc-config@1.1.0: + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.1 + resolve-from: 5.0.0 + dev: true + + /@istanbuljs/schema@0.1.3: + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + dev: true + + /@jest/schemas@29.4.3: + resolution: {integrity: sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@sinclair/typebox': 0.25.24 + dev: true + + /@jest/transform@29.5.0: + resolution: {integrity: sha512-8vbeZWqLJOvHaDfeMuoHITGKSz5qWc9u04lnWrQE3VyuSw604PzQM824ZeX9XSjUCeDiE3GuxZe5UKa8J61NQw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@babel/core': 7.21.4 + '@jest/types': 29.5.0 + '@jridgewell/trace-mapping': 0.3.18 + babel-plugin-istanbul: 6.1.1 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 29.5.0 + jest-regex-util: 29.4.3 + jest-util: 29.5.0 + micromatch: 4.0.5 + pirates: 4.0.5 + slash: 3.0.0 + write-file-atomic: 4.0.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/types@27.5.1: + resolution: {integrity: sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==} + engines: {node: 
^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + dependencies: + '@types/istanbul-lib-coverage': 2.0.4 + '@types/istanbul-reports': 3.0.1 + '@types/node': 18.15.11 + '@types/yargs': 16.0.5 + chalk: 4.1.2 + dev: true + + /@jest/types@29.5.0: + resolution: {integrity: sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/schemas': 29.4.3 + '@types/istanbul-lib-coverage': 2.0.4 + '@types/istanbul-reports': 3.0.1 + '@types/node': 18.15.11 + '@types/yargs': 17.0.24 + chalk: 4.1.2 + dev: true + + /@joshwooding/vite-plugin-react-docgen-typescript@0.2.2(typescript@5.0.4)(vite@4.2.0): + resolution: {integrity: sha512-BlArZRyCNaQXo9jSW1crabSqdQXlgIB9bh3W7WpKTeopUFy2PqOkVFdOv3FvvcJOu0A3pC/ECyQMiXxXK547MQ==} + peerDependencies: + typescript: '>= 4.3.x' + vite: ^3.0.0 || ^4.0.0 + peerDependenciesMeta: + typescript: + optional: true + dependencies: + magic-string: 0.27.0 + react-docgen-typescript: 2.2.2(typescript@5.0.4) + typescript: 5.0.4 + vite: 4.2.0 + dev: true + + /@jridgewell/gen-mapping@0.3.3: + resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping': 0.3.18 + dev: true + + /@jridgewell/resolve-uri@3.1.0: + resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array@1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/sourcemap-codec@1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + dev: true + + /@jridgewell/sourcemap-codec@1.4.15: + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + dev: true + + /@jridgewell/trace-mapping@0.3.18: + resolution: {integrity: sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@juggle/resize-observer@3.4.0: + resolution: {integrity: sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==} + + /@mdx-js/react@2.3.0(react@18.2.0): + resolution: {integrity: sha512-zQH//gdOmuu7nt2oJR29vFhDv88oGPmVw6BggmrHeMI+xgEkp1B2dX9/bMBSYtK0dyLX/aOmesKS09g222K1/g==} + peerDependencies: + react: '>=16' + dependencies: + '@types/mdx': 2.0.4 + '@types/react': 18.0.28 + react: 18.2.0 + dev: true + + /@ndelangen/get-tarball@3.0.7: + resolution: {integrity: sha512-NqGfTZIZpRFef1GoVaShSSRwDC3vde3ThtTeqFdcYd6ipKqnfEVhjK2hUeHjCQUcptyZr2TONqcloFXM+5QBrQ==} + dependencies: + gunzip-maybe: 1.4.2 + pump: 3.0.0 + tar-fs: 2.1.1 + dev: true + + /@nodelib/fs.scandir@2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + dev: true + + /@nodelib/fs.stat@2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + 
dev: true + + /@nodelib/fs.walk@1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.15.0 + dev: true + + /@rollup/pluginutils@4.2.1: + resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==} + engines: {node: '>= 8.0.0'} + dependencies: + estree-walker: 2.0.2 + picomatch: 2.3.1 + dev: true + + /@sinclair/typebox@0.25.24: + resolution: {integrity: sha512-XJfwUVUKDHF5ugKwIcxEgc9k8b7HbznCp6eUfWgu710hMPNIO4aw4/zB5RogDQz8nd6gyCDpU9O/m6qYEWY6yQ==} + dev: true + + /@storybook/addon-actions@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-rcj39u9MrmzsrDWYt1zsoVxrogZ1Amrv9xkEofEY/QKUr2R3xpHhTALveY9BKIlG1GoE8zLlLoP2k4nz3sNNwQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + dequal: 2.0.3 + lodash: 4.17.21 + polished: 4.2.2 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-inspector: 6.0.1(react@18.2.0) + telejson: 7.1.0 + ts-dedent: 2.2.0 + uuid-browser: 3.1.0 + dev: true + + /@storybook/addon-backgrounds@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-yRNHQ4PPRJ+HIORQPhDGxn5xolw1xW0ByQZoNRpMD+AMEyfUNFdWbCsRQAOWjNhawxVMHM7EeA2Exrb41zhEjA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + dev: true + + /@storybook/addon-controls@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-dMpRtj5cmfC9vEMve5ncvbWCEC+WD9YuzJ+grdc48E/Hd//p+O2FE6klSkrz5FAjrc+rHINixdyssekpEL6nYQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/blocks': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-common': 7.0.2 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/node-logger': 7.0.2 + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + lodash: 4.17.21 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/addon-docs@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: 
{integrity: sha512-q3rDWoZEym6Lkmhqc/HBNfLDAmTY8l0WINGUZo/nF98eP5iu4B7Nk7V6BRGYGQt6Y6ZyIQ8WKH0e/eJww2zIog==} + peerDependencies: + '@storybook/mdx1-csf': '>=1.0.0-0' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@storybook/mdx1-csf': + optional: true + dependencies: + '@babel/core': 7.21.4 + '@babel/plugin-transform-react-jsx': 7.21.0(@babel/core@7.21.4) + '@jest/transform': 29.5.0 + '@mdx-js/react': 2.3.0(react@18.2.0) + '@storybook/blocks': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/csf-plugin': 7.0.2 + '@storybook/csf-tools': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/mdx2-csf': 1.0.0 + '@storybook/node-logger': 7.0.2 + '@storybook/postinstall': 7.0.2 + '@storybook/preview-api': 7.0.2 + '@storybook/react-dom-shim': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + fs-extra: 11.1.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + remark-external-links: 8.0.0 + remark-slug: 6.1.0 + ts-dedent: 2.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/addon-essentials@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-LAsWsXa/Pp2B4Ve2WVgc990FtsiHpFDRsq7S3V7xRrZP8DYRbtJIVdszPMDS5uKC+yzbswFEXz08lqbGvq8zgQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/addon-actions': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-backgrounds': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-controls': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-docs': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-highlight': 7.0.2 + '@storybook/addon-measure': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-outline': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-toolbars': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/addon-viewport': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-common': 7.0.2 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/node-logger': 7.0.2 + '@storybook/preview-api': 7.0.2 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + transitivePeerDependencies: + - '@storybook/mdx1-csf' + - supports-color + dev: true + + /@storybook/addon-highlight@7.0.2: + resolution: {integrity: sha512-9BkL1OOanguuy73S6nLK0isUb045tOkFONd/PQldOJ0PV3agCvKxKHyzlBz7Hsba8KZhY5jQs+nVW2NiREyGYg==} + dependencies: + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/preview-api': 7.0.2 + dev: true + + /@storybook/addon-interactions@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-vPWnyGND4s9nVp+U21N/jE00dCRsHcKU68SoL4OiIZioTTRbLvrTG9eAdBkZXsVPpFHq8gndma3nXfplOSSckg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-common': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/instrumenter': 7.0.2 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + 
'@storybook/types': 7.0.2 + jest-mock: 27.5.1 + polished: 4.2.2 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/addon-links@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-lPtfy2MqrcI9YjupBM2eRKGPdFKVPCz7WgO/JQQakGugORJTEGCyJrNJNtWY9jDenv8ynLZ40OxtPBZi54Sr6Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/router': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + dev: true + + /@storybook/addon-measure@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-cf/d5MXpHAjyUiDIVfc8pLn79CPHgnryDmNNlSiP2zEFKcivrRWiu8Rmrad8pGqLkuAh+PXLKCGn9uiqDvg7QQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/types': 7.0.2 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/addon-outline@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-thVISO4NM22xlETisBvAPvz2yFD3qLGOjgzBmj8l8r9Rv0IEdwdPrwm5j0WTv8OtbhC4A8lPpvMsn5FhY5mDXg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/types': 7.0.2 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + ts-dedent: 2.2.0 + dev: true + + /@storybook/addon-storysource@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-5CyRY3k+BSjTS9KA03uWynn0I/8K9P/Q3yWxFIslZDZZj30Z/kI++fE4O9fWgK4/vQeaLAQvK1LXinIXWnsyRA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.3 + '@storybook/components': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/manager-api': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.3 + '@storybook/router': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/source-loader': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/theming': 7.0.3(react-dom@18.2.0)(react@18.2.0) + estraverse: 5.3.0 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-syntax-highlighter: 15.5.0(react@18.2.0) + dev: true + + /@storybook/addon-toolbars@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: 
sha512-tAxZ2+nUYsJdT1sx3BrmoMAZFM19+OzWJY6qSnbEq5zoRgvGZaXGR6tLMKydDoHQBU9Ta9YHGo7N7u7h1C23yg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/addon-viewport@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-TaHJWIIazPM/TerRbka9RqjMPNpwaRsGRdVRBtVoVosy1FzsEjAdQSO7RBMe4G03m5CacSqdsDiJCblI2AXaew==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + memoizerific: 1.11.3 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/addons@7.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-dkpyKNwAY+Ev9ZbgiLB0ki7H6AbAMqYcBx1qYvyFR2hv3k1Ta2OQIMTkODWdkYsgffH00SSpgaBwb2lBUrMZkw==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/manager-api': 7.0.4(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.4 + '@storybook/types': 7.0.4 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false + + /@storybook/api@7.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-6Zd83ByPhEeDlNmtVOvlurw5AmjELjun4hUYdy6awy6WGgroAPGO1639LLZTToCa0YxIuf7ZAWa6p3dMZOluMA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + react: + optional: true + react-dom: + optional: true + dependencies: + '@storybook/client-logger': 7.0.4 + '@storybook/manager-api': 7.0.4(react-dom@18.2.0)(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false + + /@storybook/blocks@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-JzHmU8jZLzeQ6bunzci8j/2Ji18GBTyhrPFLk5RjEbMNGWpGjvER/yR127tZOdbPguVNr4iVbRfGzd1wGHlrzA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/components': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/core-events': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/docs-tools': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/manager-api': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.0.2 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + '@types/lodash': 4.14.192 + color-convert: 2.0.1 + dequal: 2.0.3 + lodash: 4.17.21 + markdown-to-jsx: 7.2.0(react@18.2.0) + memoizerific: 1.11.3 + polished: 4.2.2 + react: 18.2.0 + react-colorful: 5.6.1(react-dom@18.2.0)(react@18.2.0) + react-dom: 18.2.0(react@18.2.0) + telejson: 7.1.0 + ts-dedent: 2.2.0 + util-deprecate: 
1.0.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/builder-manager@7.0.2: + resolution: {integrity: sha512-Oej/n8D7eaWgmWF7nN2hXLRM53lcYOdh6umSN8Mh/LcYUfxB+dvUBFzUjoLE0xjhW6xRinrKrENT5LcP/f/HBQ==} + dependencies: + '@fal-works/esbuild-plugin-global-externals': 2.1.2 + '@storybook/core-common': 7.0.2 + '@storybook/manager': 7.0.2 + '@storybook/node-logger': 7.0.2 + '@types/ejs': 3.1.2 + '@types/find-cache-dir': 3.2.1 + '@yarnpkg/esbuild-plugin-pnp': 3.0.0-rc.15(esbuild@0.17.16) + browser-assert: 1.2.1 + ejs: 3.1.9 + esbuild: 0.17.16 + esbuild-plugin-alias: 0.2.1 + express: 4.18.2 + find-cache-dir: 3.3.2 + fs-extra: 11.1.1 + process: 0.11.10 + util: 0.12.5 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/builder-vite@7.0.2(typescript@5.0.4)(vite@4.2.0): + resolution: {integrity: sha512-G6CD2Gf2zwzRslvNvqgz4FeADVEA9XA4Mw6+NM6Twc+Wy/Ah482dvHS9ApSgirtGyBKjOfdHn1xQT4Z+kzbJnw==} + peerDependencies: + '@preact/preset-vite': '*' + '@storybook/mdx1-csf': '>=1.0.0-next.1' + typescript: '>= 4.3.x' + vite: ^3.0.0 || ^4.0.0 + vite-plugin-glimmerx: '*' + peerDependenciesMeta: + '@preact/preset-vite': + optional: true + '@storybook/mdx1-csf': + optional: true + typescript: + optional: true + vite-plugin-glimmerx: + optional: true + dependencies: + '@storybook/channel-postmessage': 7.0.2 + '@storybook/channel-websocket': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/core-common': 7.0.2 + '@storybook/csf-plugin': 7.0.2 + '@storybook/mdx2-csf': 1.0.0 + '@storybook/node-logger': 7.0.2 + '@storybook/preview': 7.0.2 + '@storybook/preview-api': 7.0.2 + '@storybook/types': 7.0.2 + browser-assert: 1.2.1 + es-module-lexer: 0.9.3 + express: 4.18.2 + fs-extra: 11.1.1 + glob: 8.1.0 + glob-promise: 6.0.2(glob@8.1.0) + magic-string: 0.27.0 + remark-external-links: 8.0.0 + remark-slug: 6.1.0 + rollup: 3.20.2 + typescript: 5.0.4 + vite: 4.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/channel-postmessage@7.0.2: + resolution: {integrity: sha512-SZ/KqnZcx10W9hJbrzBKcP9dmgaeTaXugUhcgw1IkmjKWdsKazqFZCPwQWZZKAmhO4wYbyYOhkz3wfSIeB4mFw==} + dependencies: + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + qs: 6.11.1 + telejson: 7.1.0 + dev: true + + /@storybook/channel-postmessage@7.0.3: + resolution: {integrity: sha512-bDTCVpEJ9UaFnbcgfMi13xVWzGC4Kc5/3iPEjOEWY8qUN4AEXw4Svken1Q4waSoadYLnAwGfjnc4UWBjitUSoQ==} + dependencies: + '@storybook/channels': 7.0.3 + '@storybook/client-logger': 7.0.3 + '@storybook/core-events': 7.0.3 + '@storybook/global': 5.0.0 + qs: 6.11.1 + telejson: 7.1.0 + dev: true + + /@storybook/channel-postmessage@7.0.4: + resolution: {integrity: sha512-KInHB3iSBgMxGkDmOMBu+B+ohxi2NzDpcl9yA5+xVuqG8Q6gJBurDYBsinq2zEZ1ceZYSoCseqJaH2jQFh/Oeg==} + dependencies: + '@storybook/channels': 7.0.4 + '@storybook/client-logger': 7.0.4 + '@storybook/core-events': 7.0.4 + '@storybook/global': 5.0.0 + qs: 6.11.1 + telejson: 7.1.0 + dev: false + + /@storybook/channel-websocket@7.0.2: + resolution: {integrity: sha512-YU3lFId6Nsi75ddA+3qfbnLfNUPswboYyx+SALhaLuXqz7zqfzX4ezMgxeS/h0gRlUJ7nf2/yJ5qie/kZaizjw==} + dependencies: + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/global': 5.0.0 + telejson: 7.1.0 + dev: true + + /@storybook/channels@7.0.2: + resolution: {integrity: sha512-qkI8mFy9c8mxN2f01etayKhCaauL6RAsxRzbX1/pKj6UqhHWqqUbtHwymrv4hG5qDYjV1e9pd7ae5eNF8Kui0g==} + dev: true + + /@storybook/channels@7.0.3: + 
resolution: {integrity: sha512-X+78KlKRPfqojOj2j34qpXqcwh5O6/oIhqXB1PORPmoVPuvngSvjqeczJI11g+qEiBbs3l1CkwA/RRGnnuE1ew==} + + /@storybook/channels@7.0.4: + resolution: {integrity: sha512-1HT8VM8G72XQ88wGcXVYl2g6OFsglUBW8L7uWWZoh96xWpNViaptaN/4OKwiUrThrc0DbEkAKmhPT3zQ7McoyA==} + dev: false + + /@storybook/cli@7.0.2: + resolution: {integrity: sha512-xMM2QdXNGg09wuXzAGroKrbsnaHSFPmtmefX1XGALhHuKVwxOoC2apWMpek6gY/9vh5EIRTog2Dvfd2BzNrT6Q==} + hasBin: true + dependencies: + '@babel/core': 7.21.4 + '@babel/preset-env': 7.21.4(@babel/core@7.21.4) + '@ndelangen/get-tarball': 3.0.7 + '@storybook/codemod': 7.0.2 + '@storybook/core-common': 7.0.2 + '@storybook/core-server': 7.0.2 + '@storybook/csf-tools': 7.0.2 + '@storybook/node-logger': 7.0.2 + '@storybook/telemetry': 7.0.2 + '@storybook/types': 7.0.2 + '@types/semver': 7.3.13 + boxen: 5.1.2 + chalk: 4.1.2 + commander: 6.2.1 + cross-spawn: 7.0.3 + detect-indent: 6.1.0 + envinfo: 7.8.1 + execa: 5.1.1 + express: 4.18.2 + find-up: 5.0.0 + fs-extra: 11.1.1 + get-npm-tarball-url: 2.0.3 + get-port: 5.1.1 + giget: 1.1.2 + globby: 11.1.0 + jscodeshift: 0.14.0(@babel/preset-env@7.21.4) + leven: 3.1.0 + prettier: 2.8.7 + prompts: 2.4.2 + puppeteer-core: 2.1.1 + read-pkg-up: 7.0.1 + semver: 7.4.0 + shelljs: 0.8.5 + simple-update-notifier: 1.1.0 + strip-json-comments: 3.1.1 + tempy: 1.0.1 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + transitivePeerDependencies: + - bufferutil + - encoding + - supports-color + - utf-8-validate + dev: true + + /@storybook/client-logger@7.0.2: + resolution: {integrity: sha512-rv7W2BhzIQHbFpUM5/CP/acS6T5lTmaxT0MbZ9n+9h++9QQU/cFOdkZgSUbLVAb1AeUGoLsk0HYzcqPpV35Xsw==} + dependencies: + '@storybook/global': 5.0.0 + dev: true + + /@storybook/client-logger@7.0.3: + resolution: {integrity: sha512-Hb+ecOEDDvXM61ElbAuKfQ1a0I2mIDkbtlS26C7S2GxRHRGE8aXA2toAKvBaJ4niQrXlVCR0umYfAZeDL85veg==} + dependencies: + '@storybook/global': 5.0.0 + + /@storybook/client-logger@7.0.4: + resolution: {integrity: sha512-3sEUIt6/ry+RdTpP+6Ic1QqoQh6Pn9ugCaP54Bc0z4wDI+NIJtJ5E2j4bcml/1/l9h9zNlmAAMgpZizm8KtIdA==} + dependencies: + '@storybook/global': 5.0.0 + dev: false + + /@storybook/codemod@7.0.2: + resolution: {integrity: sha512-D9PdByxJlFiaDJcLkM+RN1DHCj4VfQIlSZkADOcNtI4o9H064oiMloWDGZiR1i1FCYMSXuWmW6tMsuCVebA+Nw==} + dependencies: + '@babel/core': 7.21.4 + '@babel/preset-env': 7.21.4(@babel/core@7.21.4) + '@babel/types': 7.21.4 + '@storybook/csf': 0.1.0 + '@storybook/csf-tools': 7.0.2 + '@storybook/node-logger': 7.0.2 + '@storybook/types': 7.0.2 + cross-spawn: 7.0.3 + globby: 11.1.0 + jscodeshift: 0.14.0(@babel/preset-env@7.21.4) + lodash: 4.17.21 + prettier: 2.8.7 + recast: 0.23.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/components@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-Ee9pY6WlpricPUdYiyR0Ov8zgHkUt541yl1CZ6Ytaom2TA12cAnRjKewbLAgVPPhIE1LsMRhOPFYql0JMtnN4Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + use-resize-observer: 9.1.0(react-dom@18.2.0)(react@18.2.0) + util-deprecate: 1.0.2 + dev: true + + /@storybook/components@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: 
sha512-4Iiguje+Vz5QHreeSKD1rYEWi2AaycC3MLJMiu35GijFZrOvpA17ehka1RNhRDAKQ02OjG655nttp0XLf7Tu7Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/client-logger': 7.0.3 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/theming': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.3 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + use-resize-observer: 9.1.0(react-dom@18.2.0)(react@18.2.0) + util-deprecate: 1.0.2 + + /@storybook/core-client@7.0.2: + resolution: {integrity: sha512-tr6Uv41YD2O0xiUrtgujiY1QxuznhbyUI0BRsSh49e8cx3QoW7FgPy7IVZHgb17DXKZ/wY/hgdyTTB87H6IbLA==} + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/preview-api': 7.0.2 + dev: true + + /@storybook/core-common@7.0.2: + resolution: {integrity: sha512-DayFPTCj695tnEKLuDlogclBim8mzdrbj9U1xzFm23BUReheGSGdLl2zrb3mP1l9Zj4xJ/Ctst1KN9SFbW84vw==} + dependencies: + '@storybook/node-logger': 7.0.2 + '@storybook/types': 7.0.2 + '@types/node': 16.18.23 + '@types/pretty-hrtime': 1.0.1 + chalk: 4.1.2 + esbuild: 0.17.16 + esbuild-register: 3.4.2(esbuild@0.17.16) + file-system-cache: 2.0.2 + find-up: 5.0.0 + fs-extra: 11.1.1 + glob: 8.1.0 + glob-promise: 6.0.2(glob@8.1.0) + handlebars: 4.7.7 + lazy-universal-dotenv: 4.0.0 + picomatch: 2.3.1 + pkg-dir: 5.0.0 + pretty-hrtime: 1.0.3 + resolve-from: 5.0.0 + ts-dedent: 2.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/core-events@7.0.2: + resolution: {integrity: sha512-1DCHCwHRL3+rlvnVVc/BCfReP31XaT2WYgcLeGTmkX1E43Po1MkgcM7PnJPSaa9POvSqZ+6YLZv5Bs1SXbufow==} + dev: true + + /@storybook/core-events@7.0.3: + resolution: {integrity: sha512-r8+pUMf9cQlXMWgZGrBBG9SKK6uWHdb1Lub+vjuAz6I5jXVI/IyfnGbfRPGkLmLynY6NugsAMnjn9oifSBD4cA==} + + /@storybook/core-events@7.0.4: + resolution: {integrity: sha512-3gYyJZdHrf69tGueN7SQCgPxnLYYow8n5BeBcBlehYAutfLOafpd36HPIXSHIvJaLDNUzGqLcFiGub04ts1pJA==} + dev: false + + /@storybook/core-server@7.0.2: + resolution: {integrity: sha512-7ipGws8YffVaiwkc+D0+MfZc/Sy52aKenG3nDJdK4Ajmp5LPAlelb/sxIhfRvoHDbDsy2FQNz++Mb55Yh03KkA==} + dependencies: + '@aw-web-design/x-default-browser': 1.4.88 + '@discoveryjs/json-ext': 0.5.7 + '@storybook/builder-manager': 7.0.2 + '@storybook/core-common': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/csf-tools': 7.0.2 + '@storybook/docs-mdx': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/manager': 7.0.2 + '@storybook/node-logger': 7.0.2 + '@storybook/preview-api': 7.0.2 + '@storybook/telemetry': 7.0.2 + '@storybook/types': 7.0.2 + '@types/detect-port': 1.3.2 + '@types/node': 16.18.23 + '@types/node-fetch': 2.6.3 + '@types/pretty-hrtime': 1.0.1 + '@types/semver': 7.3.13 + better-opn: 2.1.1 + boxen: 5.1.2 + chalk: 4.1.2 + cli-table3: 0.6.3 + compression: 1.7.4 + detect-port: 1.5.1 + express: 4.18.2 + fs-extra: 11.1.1 + globby: 11.1.0 + ip: 2.0.0 + lodash: 4.17.21 + node-fetch: 2.6.9 + open: 8.4.2 + pretty-hrtime: 1.0.3 + prompts: 2.4.2 + read-pkg-up: 7.0.1 + semver: 7.4.0 + serve-favicon: 2.5.0 + telejson: 7.1.0 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + watchpack: 2.4.0 + ws: 8.13.0 + transitivePeerDependencies: + - bufferutil + - encoding + - supports-color + - utf-8-validate + dev: true + + /@storybook/csf-plugin@7.0.2: + resolution: {integrity: sha512-aGuo+G6G5IwSGkmc+OUA796sOfvJMaQj8QS/Zh5F0nL4ZlQvghHpXON8cRHHvmXHQqUo07KLiy7CZh2I2oq4iQ==} + dependencies: + '@storybook/csf-tools': 7.0.2 + unplugin: 0.10.2 + 
transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/csf-tools@7.0.2: + resolution: {integrity: sha512-sOp355yQSpYiMqNSopmFYWZkPPRJdGgy4tpxGGLxpOZMygK3j1wQ/WQtl2Z0h61KP0S0dl6hrs0pHQz3A/eVrw==} + dependencies: + '@babel/generator': 7.21.4 + '@babel/parser': 7.21.4 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + '@storybook/csf': 0.1.0 + '@storybook/types': 7.0.2 + fs-extra: 11.1.1 + recast: 0.23.1 + ts-dedent: 2.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/csf@0.1.0: + resolution: {integrity: sha512-uk+jMXCZ8t38jSTHk2o5btI+aV2Ksbvl6DoOv3r6VaCM1KZqeuMwtwywIQdflkA8/6q/dKT8z8L+g8hC4GC3VQ==} + dependencies: + type-fest: 2.19.0 + + /@storybook/docs-mdx@0.1.0: + resolution: {integrity: sha512-JDaBR9lwVY4eSH5W8EGHrhODjygPd6QImRbwjAuJNEnY0Vw4ie3bPkeGfnacB3OBW6u/agqPv2aRlR46JcAQLg==} + dev: true + + /@storybook/docs-tools@7.0.2: + resolution: {integrity: sha512-w4D5BURrYjLbLGG9VKAaKU2dSdukszxRE3HWkJyhQU9R1JHvS3n8ntcMqYPqRfoHCOeBLBxP0edDYcAfzGNDYQ==} + dependencies: + '@babel/core': 7.21.4 + '@storybook/core-common': 7.0.2 + '@storybook/preview-api': 7.0.2 + '@storybook/types': 7.0.2 + '@types/doctrine': 0.0.3 + doctrine: 3.0.0 + lodash: 4.17.21 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/global@5.0.0: + resolution: {integrity: sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==} + + /@storybook/instrumenter@7.0.2: + resolution: {integrity: sha512-zr9/fuaYtGVUtcL8XgjA4Iq5jtzdcqQyOSH4XLXtz6JtSad3lkRagbJo2Vzbw7dO/4vzjfTMxEzvWjUuPxLOhA==} + dependencies: + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/preview-api': 7.0.2 + dev: true + + /@storybook/manager-api@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-PbLj9Rc5uCMPfMdaXv1wE3koA3+d0rmZ3BJI8jeq+mfZEvpvfI4OOpRioT1q04CkkVomFOVFTyO0Q/o6Rb5N7g==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/router': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/theming': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + semver: 7.4.0 + store2: 2.14.2 + telejson: 7.1.0 + ts-dedent: 2.2.0 + dev: true + + /@storybook/manager-api@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-sfuy4JKaOcxhmYDZU3GH9Rac8dM/rNWUK69bHIogOz6t+hthwKxYRnoLwob2pSKaUZhZylffm0ptO5odY6UZTg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/channels': 7.0.3 + '@storybook/client-logger': 7.0.3 + '@storybook/core-events': 7.0.3 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/router': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/theming': 7.0.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.3 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + semver: 7.4.0 + store2: 2.14.2 + telejson: 7.1.0 + ts-dedent: 2.2.0 + dev: true + + /@storybook/manager-api@7.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: 
sha512-kydmycU7EdlngXRL+9rmNQ6WE4VsbW9TvSeuzfmZ1RVbbl1yF3jpwU/9xK23I4ci4jWk6xilAJgs7FkPBVCeJQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/channels': 7.0.4 + '@storybook/client-logger': 7.0.4 + '@storybook/core-events': 7.0.4 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/router': 7.0.4(react-dom@18.2.0)(react@18.2.0) + '@storybook/theming': 7.0.4(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.4 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + semver: 7.4.0 + store2: 2.14.2 + telejson: 7.1.0 + ts-dedent: 2.2.0 + dev: false + + /@storybook/manager@7.0.2: + resolution: {integrity: sha512-jsFsFKG0rPNYfuRm/WSXGMBy8vnALyFWU330ObDmfU0JID3SeLlVqAOZT1GlwI6vupYpWodsN6qPZKRmC8onRw==} + dev: true + + /@storybook/mdx2-csf@1.0.0: + resolution: {integrity: sha512-dBAnEL4HfxxJmv7LdEYUoZlQbWj9APZNIbOaq0tgF8XkxiIbzqvgB0jhL/9UOrysSDbQWBiCRTu2wOVxedGfmw==} + dev: true + + /@storybook/node-logger@7.0.2: + resolution: {integrity: sha512-UENpXxB1yDqP7JXaODJo+pbGt5y3NFBNurBr4+pI4bMAC4ARjpgRE4wp6fxUKFPu9MAR10oCdcLEHkaVUAjYRg==} + dependencies: + '@types/npmlog': 4.1.4 + chalk: 4.1.2 + npmlog: 5.0.1 + pretty-hrtime: 1.0.3 + dev: true + + /@storybook/postinstall@7.0.2: + resolution: {integrity: sha512-Hhiu3+N3ZDcbrhOCBJTDJbn/mC4l0v3ziyAP3yalq/2ZR9R5kfsEHHakKmswsKKV+ey0gNGijFTy3soU5oSs+A==} + dev: true + + /@storybook/preview-api@7.0.2: + resolution: {integrity: sha512-QAlJM/r92+dQe/kB7MTTR9b/1mt9UJjxNjazGdEWipA/nw23kOF3o/hBcvKwBYkit4zGYsX70H+vuzW8hCo/lA==} + dependencies: + '@storybook/channel-postmessage': 7.0.2 + '@storybook/channels': 7.0.2 + '@storybook/client-logger': 7.0.2 + '@storybook/core-events': 7.0.2 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/types': 7.0.2 + '@types/qs': 6.9.7 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + qs: 6.11.1 + synchronous-promise: 2.0.17 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + dev: true + + /@storybook/preview-api@7.0.3: + resolution: {integrity: sha512-6QsJikmBTc+/5LnNpPB5aI0um8MlJhbeQRycI+ISclwBDEJDsZheoz8yha4KBSOqLBwKpOFtKB1IXarBjkuigA==} + dependencies: + '@storybook/channel-postmessage': 7.0.3 + '@storybook/channels': 7.0.3 + '@storybook/client-logger': 7.0.3 + '@storybook/core-events': 7.0.3 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/types': 7.0.3 + '@types/qs': 6.9.7 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + qs: 6.11.1 + synchronous-promise: 2.0.17 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + dev: true + + /@storybook/preview-api@7.0.4: + resolution: {integrity: sha512-v1DDhJ2gPUqKhidHPDs/bjbBGEuFIBEZy5ZPA/cZHCZjH3vK70p+ZuihEiD2dl64M/7FtEF4tb6e0ZlRCcLKQA==} + dependencies: + '@storybook/channel-postmessage': 7.0.4 + '@storybook/channels': 7.0.4 + '@storybook/client-logger': 7.0.4 + '@storybook/core-events': 7.0.4 + '@storybook/csf': 0.1.0 + '@storybook/global': 5.0.0 + '@storybook/types': 7.0.4 + '@types/qs': 6.9.7 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + qs: 6.11.1 + synchronous-promise: 2.0.17 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + dev: false + + /@storybook/preview@7.0.2: + resolution: {integrity: sha512-U7MZkDT9bBq7HggLAXmTO9gI4eqhYs26fZS0L6iTE/PCX4Wg2TJBJSq2X8jhDXRqJFOt8SrQ756+V5Vtwrh4Og==} + dev: true + + /@storybook/react-dom-shim@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: 
sha512-fMl0aV7mJ3wyQKvt6z+rZuiIiSd9YinS77IJ1ETHqVZ4SxWriOS0GFKP6sZflrlpShoZBh+zl1lDPG7ZZdrQGw==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/react-vite@7.0.2(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4)(vite@4.2.0): + resolution: {integrity: sha512-1bDrmGo6imxBzZKJJ+SEHPuDn474JY3Yatm0cPaNVtlYhbnbiTPa3PxhI4U3233l4Qsc6DXNLKvi++j/knXDCw==} + engines: {node: '>=16'} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + vite: ^3.0.0 || ^4.0.0 + dependencies: + '@joshwooding/vite-plugin-react-docgen-typescript': 0.2.2(typescript@5.0.4)(vite@4.2.0) + '@rollup/pluginutils': 4.2.1 + '@storybook/builder-vite': 7.0.2(typescript@5.0.4)(vite@4.2.0) + '@storybook/react': 7.0.2(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4) + '@vitejs/plugin-react': 3.1.0(vite@4.2.0) + ast-types: 0.14.2 + magic-string: 0.27.0 + react: 18.2.0 + react-docgen: 6.0.0-alpha.3 + react-dom: 18.2.0(react@18.2.0) + vite: 4.2.0 + transitivePeerDependencies: + - '@preact/preset-vite' + - '@storybook/mdx1-csf' + - supports-color + - typescript + - vite-plugin-glimmerx + dev: true + + /@storybook/react@7.0.2(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4): + resolution: {integrity: sha512-2P7Oju1XKWMyn75dO0vjL4gthzBL/lLiCBRyAHKXZJ1H2eNdWjXkOOtH1HxnbRcXjWSU4tW96dqKY8m0iR9zAA==} + engines: {node: '>=16.0.0'} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/core-client': 7.0.2 + '@storybook/docs-tools': 7.0.2 + '@storybook/global': 5.0.0 + '@storybook/preview-api': 7.0.2 + '@storybook/react-dom-shim': 7.0.2(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.0.2 + '@types/escodegen': 0.0.6 + '@types/estree': 0.0.51 + '@types/node': 16.18.23 + acorn: 7.4.1 + acorn-jsx: 5.3.2(acorn@7.4.1) + acorn-walk: 7.2.0 + escodegen: 2.0.0 + html-tags: 3.3.1 + lodash: 4.17.21 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-element-to-jsx-string: 15.0.0(react-dom@18.2.0)(react@18.2.0) + ts-dedent: 2.2.0 + type-fest: 2.19.0 + typescript: 5.0.4 + util-deprecate: 1.0.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@storybook/router@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-ZB2vucfayZUrMLBlXju4v6CNOQQb0YKDLw5RoojdBxOsUFtnp5UiPOE+I8PQR63EBwnRjozeibV1XSM+GlQb5w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/client-logger': 7.0.2 + memoizerific: 1.11.3 + qs: 6.11.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/router@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-GD3qGp/7JlhQ+ZtNmKEZ+QVNBj6RWLOw6XXVx/gsEV3NWEn3/etogu8Fwz5z46kT4kDt6Ac3+zVUe5zTKhNU5A==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/client-logger': 7.0.3 + memoizerific: 1.11.3 + qs: 6.11.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/router@7.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-pVUSYBYbf+eIiWpO0i3kOZwvETM26txd7Q4IZqFcORX+BhWgPgcDZk9uebxii2SmwZ1VqdMKbhgeXsNcQxtnrw==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 
+ react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/client-logger': 7.0.4 + memoizerific: 1.11.3 + qs: 6.11.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false + + /@storybook/source-loader@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-4NzlkXFjSW5n5r1gQ5fUnRatEkBG9wnhGXms9WLt66XwgeWmuT1pwFeSWJmrDx+IR50twHU2UdOem0IbifkbZg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@storybook/csf': 0.1.0 + '@storybook/types': 7.0.3 + estraverse: 5.3.0 + lodash: 4.17.21 + prettier: 2.8.7 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/telemetry@7.0.2: + resolution: {integrity: sha512-s2PIwI9nVYQBf3h40EFHLynYUfdqzRJMXyaCWJdVQuvdQfRkAn3CLXaubK+VdjC869z3ZfW20EMu3Mbgzcc0HA==} + dependencies: + '@storybook/client-logger': 7.0.2 + '@storybook/core-common': 7.0.2 + chalk: 4.1.2 + detect-package-manager: 2.0.1 + fetch-retry: 5.0.4 + fs-extra: 11.1.1 + isomorphic-unfetch: 3.1.0 + nanoid: 3.3.6 + read-pkg-up: 7.0.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@storybook/theming@7.0.2(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-c9sE+QAZNbopPvLiJ6BMxBERfTaq1ATyIri97FBvTucuSotNXw7X5q+ip5/nrCOPZuvK2f5wF4DRyD2HnB/rIQ==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@emotion/use-insertion-effect-with-fallbacks': 1.0.0(react@18.2.0) + '@storybook/client-logger': 7.0.2 + '@storybook/global': 5.0.0 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: true + + /@storybook/theming@7.0.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-WCYnariRFvi5TvXEsrX+ghk/rN3GWKzfPJ5mfoN62k53lP1lI4ctRiHHA4xZ95n4UrzHxkjDNVkPhZgAMuM/Sg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@emotion/use-insertion-effect-with-fallbacks': 1.0.0(react@18.2.0) + '@storybook/client-logger': 7.0.3 + '@storybook/global': 5.0.0 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + + /@storybook/theming@7.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-BahlmB86Q9wlvUT9Otx7vmJ7IAiytCBYyx5uLY3Ypt4JHyh5dT8UI8u4uowor9QW20YdfwPSIdaJwF1qzVuWNg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@emotion/use-insertion-effect-with-fallbacks': 1.0.0(react@18.2.0) + '@storybook/client-logger': 7.0.4 + '@storybook/global': 5.0.0 + memoizerific: 1.11.3 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false + + /@storybook/types@7.0.2: + resolution: {integrity: sha512-0OCt/kAexa8MCcljxA+yZxGMn0n2U2Ync0KxotItqNbKBKVkaLQUls0+IXTWSCpC/QJvNZ049jxUHHanNi/96w==} + dependencies: + '@storybook/channels': 7.0.2 + '@types/babel__core': 7.20.0 + '@types/express': 4.17.17 + file-system-cache: 2.0.2 + dev: true + + /@storybook/types@7.0.3: + resolution: {integrity: sha512-AafPE2QbEaT+rQn53W1UOcteP1W7iYWt1DbMJZSxXMLYVatjsoirbfMKHy/9S63Whee39CTjOxcXUBYNs+5jCA==} + dependencies: + '@storybook/channels': 7.0.3 + '@types/babel__core': 7.20.0 + '@types/express': 4.17.17 + file-system-cache: 2.0.2 + + /@storybook/types@7.0.4: + resolution: {integrity: sha512-CRGugXpTJ3K3IGuSyHA+/r2nmZluWkgRBGpbl1OQlGY/vAI7YlrJhLg1Lwf5dp66etUsjZN6d/vJeivNcyD68g==} + dependencies: + '@storybook/channels': 7.0.4 + '@types/babel__core': 7.20.0 + '@types/express': 4.17.17 + 
file-system-cache: 2.0.2 + dev: false + + /@types/babel__core@7.20.0: + resolution: {integrity: sha512-+n8dL/9GWblDO0iU6eZAwEIJVr5DWigtle+Q6HLOrh/pdbXOhOtqzq8VPPE2zvNJzSKY4vH/z3iT3tn0A3ypiQ==} + dependencies: + '@babel/parser': 7.21.4 + '@babel/types': 7.21.4 + '@types/babel__generator': 7.6.4 + '@types/babel__template': 7.4.1 + '@types/babel__traverse': 7.18.3 + + /@types/babel__generator@7.6.4: + resolution: {integrity: sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==} + dependencies: + '@babel/types': 7.21.4 + + /@types/babel__template@7.4.1: + resolution: {integrity: sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==} + dependencies: + '@babel/parser': 7.21.4 + '@babel/types': 7.21.4 + + /@types/babel__traverse@7.18.3: + resolution: {integrity: sha512-1kbcJ40lLB7MHsj39U4Sh1uTd2E7rLEa79kmDpI6cy+XiXsteB3POdQomoq4FxszMrO3ZYchkhYJw7A2862b3w==} + dependencies: + '@babel/types': 7.21.4 + + /@types/body-parser@1.19.2: + resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==} + dependencies: + '@types/connect': 3.4.35 + '@types/node': 18.15.11 + + /@types/connect@3.4.35: + resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==} + dependencies: + '@types/node': 18.15.11 + + /@types/detect-port@1.3.2: + resolution: {integrity: sha512-xxgAGA2SAU4111QefXPSp5eGbDm/hW6zhvYl9IeEPZEry9F4d66QAHm5qpUXjb6IsevZV/7emAEx5MhP6O192g==} + dev: true + + /@types/doctrine@0.0.3: + resolution: {integrity: sha512-w5jZ0ee+HaPOaX25X2/2oGR/7rgAQSYII7X7pp0m9KgBfMP7uKfMfTvcpl5Dj+eDBbpxKGiqE+flqDr6XTd2RA==} + dev: true + + /@types/ejs@3.1.2: + resolution: {integrity: sha512-ZmiaE3wglXVWBM9fyVC17aGPkLo/UgaOjEiI2FXQfyczrCefORPxIe+2dVmnmk3zkVIbizjrlQzmPGhSYGXG5g==} + dev: true + + /@types/escodegen@0.0.6: + resolution: {integrity: sha512-AjwI4MvWx3HAOaZqYsjKWyEObT9lcVV0Y0V8nXo6cXzN8ZiMxVhf6F3d/UNvXVGKrEzL/Dluc5p+y9GkzlTWig==} + dev: true + + /@types/estree@0.0.51: + resolution: {integrity: sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==} + dev: true + + /@types/express-serve-static-core@4.17.33: + resolution: {integrity: sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==} + dependencies: + '@types/node': 18.15.11 + '@types/qs': 6.9.7 + '@types/range-parser': 1.2.4 + + /@types/express@4.17.17: + resolution: {integrity: sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==} + dependencies: + '@types/body-parser': 1.19.2 + '@types/express-serve-static-core': 4.17.33 + '@types/qs': 6.9.7 + '@types/serve-static': 1.15.1 + + /@types/find-cache-dir@3.2.1: + resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==} + dev: true + + /@types/glob@8.1.0: + resolution: {integrity: sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==} + dependencies: + '@types/minimatch': 5.1.2 + '@types/node': 18.15.11 + dev: true + + /@types/graceful-fs@4.1.6: + resolution: {integrity: sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==} + dependencies: + '@types/node': 18.15.11 + dev: true + + /@types/hast@2.3.4: + resolution: {integrity: sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==} + dependencies: + 
'@types/unist': 2.0.6 + dev: true + + /@types/istanbul-lib-coverage@2.0.4: + resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} + dev: true + + /@types/istanbul-lib-report@3.0.0: + resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} + dependencies: + '@types/istanbul-lib-coverage': 2.0.4 + dev: true + + /@types/istanbul-reports@3.0.1: + resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} + dependencies: + '@types/istanbul-lib-report': 3.0.0 + dev: true + + /@types/lodash@4.14.192: + resolution: {integrity: sha512-km+Vyn3BYm5ytMO13k9KTp27O75rbQ0NFw+U//g+PX7VZyjCioXaRFisqSIJRECljcTv73G3i6BpglNGHgUQ5A==} + dev: true + + /@types/mdx@2.0.4: + resolution: {integrity: sha512-qCYrNdpKwN6YO6FVnx+ulfqifKlE3lQGsNhvDaW9Oxzyob/cRLBJWow8GHBBD4NxQ7BVvtsATgLsX0vZAWmtrg==} + dev: true + + /@types/mime-types@2.1.1: + resolution: {integrity: sha512-vXOTGVSLR2jMw440moWTC7H19iUyLtP3Z1YTj7cSsubOICinjMxFeb/V57v9QdyyPGbbWolUFSSmSiRSn94tFw==} + dev: true + + /@types/mime@3.0.1: + resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} + + /@types/minimatch@5.1.2: + resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} + dev: true + + /@types/node-fetch@2.6.3: + resolution: {integrity: sha512-ETTL1mOEdq/sxUtgtOhKjyB2Irra4cjxksvcMUR5Zr4n+PxVhsCD9WS46oPbHL3et9Zde7CNRr+WUNlcHvsX+w==} + dependencies: + '@types/node': 16.18.23 + form-data: 3.0.1 + dev: true + + /@types/node@16.18.23: + resolution: {integrity: sha512-XAMpaw1s1+6zM+jn2tmw8MyaRDIJfXxqmIQIS0HfoGYPuf7dUWeiUKopwq13KFX9lEp1+THGtlaaYx39Nxr58g==} + dev: true + + /@types/node@18.15.11: + resolution: {integrity: sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==} + + /@types/normalize-package-data@2.4.1: + resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} + dev: true + + /@types/npmlog@4.1.4: + resolution: {integrity: sha512-WKG4gTr8przEZBiJ5r3s8ZIAoMXNbOgQ+j/d5O4X3x6kZJRLNvyUJuUK/KoG3+8BaOHPhp2m7WC6JKKeovDSzQ==} + dev: true + + /@types/pretty-hrtime@1.0.1: + resolution: {integrity: sha512-VjID5MJb1eGKthz2qUerWT8+R4b9N+CHvGCzg9fn4kWZgaF9AhdYikQio3R7wV8YY1NsQKPaCwKz1Yff+aHNUQ==} + dev: true + + /@types/prop-types@15.7.5: + resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==} + dev: true + + /@types/qs@6.9.7: + resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} + + /@types/range-parser@1.2.4: + resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==} + + /@types/react-dom@18.0.11: + resolution: {integrity: sha512-O38bPbI2CWtgw/OoQoY+BRelw7uysmXbWvw3nLWO21H1HSh+GOlqPuXshJfjmpNlKiiSDG9cc1JZAaMmVdcTlw==} + dependencies: + '@types/react': 18.0.28 + dev: true + + /@types/react@18.0.28: + resolution: {integrity: sha512-RD0ivG1kEztNBdoAK7lekI9M+azSnitIn85h4iOiaLjaTrMjzslhaqCGaI4IyCJ1RljWiLCEu4jyrLLgqxBTew==} + dependencies: + '@types/prop-types': 15.7.5 + '@types/scheduler': 0.16.3 + csstype: 3.1.2 + dev: true + + /@types/scheduler@0.16.3: + resolution: {integrity: 
sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==} + dev: true + + /@types/semver@7.3.13: + resolution: {integrity: sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==} + dev: true + + /@types/serve-static@1.15.1: + resolution: {integrity: sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==} + dependencies: + '@types/mime': 3.0.1 + '@types/node': 18.15.11 + + /@types/unist@2.0.6: + resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==} + dev: true + + /@types/yargs-parser@21.0.0: + resolution: {integrity: sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==} + dev: true + + /@types/yargs@16.0.5: + resolution: {integrity: sha512-AxO/ADJOBFJScHbWhq2xAhlWP24rY4aCEG/NFaMvbT3X2MgRsLjhjQwsn0Zi5zn0LG9jUhCCZMeX9Dkuw6k+vQ==} + dependencies: + '@types/yargs-parser': 21.0.0 + dev: true + + /@types/yargs@17.0.24: + resolution: {integrity: sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw==} + dependencies: + '@types/yargs-parser': 21.0.0 + dev: true + + /@vitejs/plugin-react@3.1.0(vite@4.2.0): + resolution: {integrity: sha512-AfgcRL8ZBhAlc3BFdigClmTUMISmmzHn7sB2h9U1odvc5U/MjWXsAaz18b/WoppUTDBzxOJwo2VdClfUcItu9g==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + vite: ^4.1.0-beta.0 + dependencies: + '@babel/core': 7.21.4 + '@babel/plugin-transform-react-jsx-self': 7.21.0(@babel/core@7.21.4) + '@babel/plugin-transform-react-jsx-source': 7.19.6(@babel/core@7.21.4) + magic-string: 0.27.0 + react-refresh: 0.14.0 + vite: 4.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@yarnpkg/esbuild-plugin-pnp@3.0.0-rc.15(esbuild@0.17.16): + resolution: {integrity: sha512-kYzDJO5CA9sy+on/s2aIW0411AklfCi8Ck/4QDivOqsMKpStZA2SsR+X27VTggGwpStWaLrjJcDcdDMowtG8MA==} + engines: {node: '>=14.15.0'} + peerDependencies: + esbuild: '>=0.10.0' + dependencies: + esbuild: 0.17.16 + tslib: 2.5.0 + dev: true + + /accepts@1.3.8: + resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} + engines: {node: '>= 0.6'} + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + dev: true + + /acorn-jsx@5.3.2(acorn@7.4.1): + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 7.4.1 + dev: true + + /acorn-walk@7.2.0: + resolution: {integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} + engines: {node: '>=0.4.0'} + dev: true + + /acorn@7.4.1: + resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /acorn@8.8.2: + resolution: {integrity: sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /address@1.2.2: + resolution: {integrity: sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==} + engines: {node: '>= 10.0.0'} + dev: true + + /agent-base@5.1.1: + resolution: {integrity: sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g==} + engines: {node: '>= 
6.0.0'} + dev: true + + /agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + dev: true + + /ansi-align@3.0.1: + resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} + dependencies: + string-width: 4.2.3 + dev: true + + /ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + dev: true + + /ansi-styles@3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + dev: true + + /ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + dependencies: + color-convert: 2.0.1 + dev: true + + /anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + dev: true + + /app-root-dir@1.0.2: + resolution: {integrity: sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g==} + dev: true + + /aproba@2.0.0: + resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} + dev: true + + /are-we-there-yet@2.0.0: + resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} + engines: {node: '>=10'} + dependencies: + delegates: 1.0.0 + readable-stream: 3.6.2 + dev: true + + /argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + dependencies: + sprintf-js: 1.0.3 + dev: true + + /array-flatten@1.1.1: + resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} + dev: true + + /array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + dev: true + + /assert@2.0.0: + resolution: {integrity: sha512-se5Cd+js9dXJnu6Ag2JFc00t+HmHOen+8Q+L7O9zI0PqQXr20uk2J0XQqMxZEeo5U50o8Nvmmx7dZrl+Ufr35A==} + dependencies: + es6-object-assign: 1.1.0 + is-nan: 1.3.2 + object-is: 1.1.5 + util: 0.12.5 + dev: true + + /ast-types@0.14.2: + resolution: {integrity: sha512-O0yuUDnZeQDL+ncNGlJ78BiO4jnYI3bvMsD5prT0/nsgijG/LpNBIr63gTjVTNsiGkgQhiyCShTgxt8oXOrklA==} + engines: {node: '>=4'} + dependencies: + tslib: 2.5.0 + dev: true + + /ast-types@0.15.2: + resolution: {integrity: sha512-c27loCv9QkZinsa5ProX751khO9DJl/AcB5c2KNtA6NRvHKS0PgLfcftz72KVq504vB0Gku5s2kUZzDBvQWvHg==} + engines: {node: '>=4'} + dependencies: + tslib: 2.5.0 + dev: true + + /ast-types@0.16.1: + resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} + engines: {node: '>=4'} + 
dependencies: + tslib: 2.5.0 + dev: true + + /async-limiter@1.0.1: + resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} + dev: true + + /async@3.2.4: + resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} + dev: true + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: true + + /available-typed-arrays@1.0.5: + resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} + engines: {node: '>= 0.4'} + dev: true + + /babel-core@7.0.0-bridge.0(@babel/core@7.21.4): + resolution: {integrity: sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + dev: true + + /babel-plugin-istanbul@6.1.1: + resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} + engines: {node: '>=8'} + dependencies: + '@babel/helper-plugin-utils': 7.20.2 + '@istanbuljs/load-nyc-config': 1.1.0 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 5.2.1 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-polyfill-corejs2@0.3.3(@babel/core@7.21.4): + resolution: {integrity: sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.21.4 + '@babel/core': 7.21.4 + '@babel/helper-define-polyfill-provider': 0.3.3(@babel/core@7.21.4) + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-polyfill-corejs3@0.6.0(@babel/core@7.21.4): + resolution: {integrity: sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-define-polyfill-provider': 0.3.3(@babel/core@7.21.4) + core-js-compat: 3.30.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-polyfill-regenerator@0.4.1(@babel/core@7.21.4): + resolution: {integrity: sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.21.4 + '@babel/helper-define-polyfill-provider': 0.3.3(@babel/core@7.21.4) + transitivePeerDependencies: + - supports-color + dev: true + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + dev: true + + /better-opn@2.1.1: + resolution: {integrity: sha512-kIPXZS5qwyKiX/HcRvDYfmBQUa8XP17I0mYZZ0y4UhpYOSvtsLHDYqmomS+Mj20aDvD3knEiQ0ecQy2nhio3yA==} + engines: {node: '>8.0.0'} + dependencies: + open: 7.4.2 + dev: true + + /big-integer@1.6.51: + resolution: {integrity: sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==} + engines: {node: '>=0.6'} + dev: true + + /binary-extensions@2.2.0: + resolution: {integrity: 
sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + engines: {node: '>=8'} + dev: true + + /bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: true + + /body-parser@1.20.1: + resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.11.0 + raw-body: 2.5.1 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /boxen@5.1.2: + resolution: {integrity: sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==} + engines: {node: '>=10'} + dependencies: + ansi-align: 3.0.1 + camelcase: 6.3.0 + chalk: 4.1.2 + cli-boxes: 2.2.1 + string-width: 4.2.3 + type-fest: 0.20.2 + widest-line: 3.1.0 + wrap-ansi: 7.0.0 + dev: true + + /bplist-parser@0.2.0: + resolution: {integrity: sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==} + engines: {node: '>= 5.10.0'} + dependencies: + big-integer: 1.6.51 + dev: true + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + dependencies: + balanced-match: 1.0.2 + dev: true + + /braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + dependencies: + fill-range: 7.0.1 + dev: true + + /browser-assert@1.2.1: + resolution: {integrity: sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==} + dev: true + + /browserify-zlib@0.1.4: + resolution: {integrity: sha512-19OEpq7vWgsH6WkvkBJQDFvJS1uPcbFOQ4v9CU839dO+ZZXUZO6XpE6hNCqvlIIj+4fZvRiJ6DsAQ382GwiyTQ==} + dependencies: + pako: 0.2.9 + dev: true + + /browserslist@4.21.5: + resolution: {integrity: sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001477 + electron-to-chromium: 1.4.357 + node-releases: 2.0.10 + update-browserslist-db: 1.0.10(browserslist@4.21.5) + dev: true + + /bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + dependencies: + node-int64: 0.4.0 + dev: true + + /buffer-crc32@0.2.13: + resolution: {integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==} + dev: true + + /buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true + + /buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: true + + 
/bytes@3.0.0: + resolution: {integrity: sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=} + engines: {node: '>= 0.8'} + dev: true + + /bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + dev: true + + /c8@7.13.0: + resolution: {integrity: sha512-/NL4hQTv1gBL6J6ei80zu3IiTrmePDKXKXOTLpHvcIWZTVYQlDhVWjjWvkhICylE8EwwnMVzDZugCvdx0/DIIA==} + engines: {node: '>=10.12.0'} + hasBin: true + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@istanbuljs/schema': 0.1.3 + find-up: 5.0.0 + foreground-child: 2.0.0 + istanbul-lib-coverage: 3.2.0 + istanbul-lib-report: 3.0.0 + istanbul-reports: 3.1.5 + rimraf: 3.0.2 + test-exclude: 6.0.0 + v8-to-istanbul: 9.1.0 + yargs: 16.2.0 + yargs-parser: 20.2.9 + dev: true + + /call-bind@1.0.2: + resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.2.0 + + /camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + dev: true + + /camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + dev: true + + /caniuse-lite@1.0.30001477: + resolution: {integrity: sha512-lZim4iUHhGcy5p+Ri/G7m84hJwncj+Kz7S5aD4hoQfslKZJgt0tHc/hafVbqHC5bbhHb+mrW2JOUHkI5KH7toQ==} + dev: true + + /chalk@2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: true + + /character-entities-legacy@1.1.4: + resolution: {integrity: sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==} + dev: true + + /character-entities@1.2.4: + resolution: {integrity: sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==} + dev: true + + /character-reference-invalid@1.1.4: + resolution: {integrity: sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==} + dev: true + + /chokidar@3.5.3: + resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + engines: {node: '>= 8.10.0'} + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /chownr@1.1.4: + resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + dev: true + + /chownr@2.0.0: + resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} + engines: {node: '>=10'} + dev: true + + /ci-info@3.8.0: + resolution: {integrity: sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==} + engines: {node: '>=8'} + dev: true + + /clean-stack@2.2.0: + resolution: {integrity: 
sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + dev: true + + /cli-boxes@2.2.1: + resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} + engines: {node: '>=6'} + dev: true + + /cli-table3@0.6.3: + resolution: {integrity: sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==} + engines: {node: 10.* || >= 12.*} + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + dev: true + + /cliui@7.0.4: + resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + + /clone-deep@4.0.1: + resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==} + engines: {node: '>=6'} + dependencies: + is-plain-object: 2.0.4 + kind-of: 6.0.3 + shallow-clone: 3.0.1 + dev: true + + /clsx@1.2.1: + resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} + engines: {node: '>=6'} + dev: false + + /color-convert@1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + dev: true + + /color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + dependencies: + color-name: 1.1.4 + dev: true + + /color-name@1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + dev: true + + /color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + dev: true + + /color-support@1.1.3: + resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} + hasBin: true + dev: true + + /colorette@2.0.19: + resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} + dev: true + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: true + + /comma-separated-tokens@1.0.8: + resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==} + dev: true + + /commander@2.20.3: + resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} + dev: true + + /commander@6.2.1: + resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==} + engines: {node: '>= 6'} + dev: true + + /commondir@1.0.1: + resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} + dev: true + + /compressible@2.0.18: + resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: true + + /compression@1.7.4: + resolution: {integrity: 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==} + engines: {node: '>= 0.8.0'} + dependencies: + accepts: 1.3.8 + bytes: 3.0.0 + compressible: 2.0.18 + debug: 2.6.9 + on-headers: 1.0.2 + safe-buffer: 5.1.2 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /concat-map@0.0.1: + resolution: {integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=} + dev: true + + /concat-stream@1.6.2: + resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==} + engines: {'0': node >= 0.8} + dependencies: + buffer-from: 1.1.2 + inherits: 2.0.4 + readable-stream: 2.3.8 + typedarray: 0.0.6 + dev: true + + /console-control-strings@1.1.0: + resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} + dev: true + + /content-disposition@0.5.4: + resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} + engines: {node: '>= 0.6'} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + dev: true + + /convert-source-map@1.9.0: + resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} + dev: true + + /convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + dev: true + + /cookie-signature@1.0.6: + resolution: {integrity: sha1-4wOogrNCzD7oylE6eZmXNNqzriw=} + dev: true + + /cookie@0.5.0: + resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + engines: {node: '>= 0.6'} + dev: true + + /core-js-compat@3.30.0: + resolution: {integrity: sha512-P5A2h/9mRYZFIAP+5Ab8ns6083IyVpSclU74UNvbGVQ8VM7n3n3/g2yF3AkKQ9NXz2O+ioxLbEWKnDtgsFamhg==} + dependencies: + browserslist: 4.21.5 + dev: true + + /core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + dev: true + + /cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /crypto-random-string@2.0.0: + resolution: {integrity: sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==} + engines: {node: '>=8'} + dev: true + + /csstype@3.1.2: + resolution: {integrity: sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==} + dev: true + + /debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: true + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: true + + /deep-is@0.1.4: + resolution: {integrity: 
sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + dev: true + + /default-browser-id@3.0.0: + resolution: {integrity: sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==} + engines: {node: '>=12'} + dependencies: + bplist-parser: 0.2.0 + untildify: 4.0.0 + dev: true + + /define-lazy-prop@2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + dev: true + + /define-properties@1.2.0: + resolution: {integrity: sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==} + engines: {node: '>= 0.4'} + dependencies: + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 + dev: true + + /defu@6.1.2: + resolution: {integrity: sha512-+uO4+qr7msjNNWKYPHqN/3+Dx3NFkmIzayk2L1MyZQlvgZb/J1A0fo410dpKrN2SnqFjt8n4JL8fDJE0wIgjFQ==} + dev: true + + /del@6.1.1: + resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} + engines: {node: '>=10'} + dependencies: + globby: 11.1.0 + graceful-fs: 4.2.11 + is-glob: 4.0.3 + is-path-cwd: 2.2.0 + is-path-inside: 3.0.3 + p-map: 4.0.0 + rimraf: 3.0.2 + slash: 3.0.0 + dev: true + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: true + + /delegates@1.0.0: + resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} + dev: true + + /depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dev: true + + /dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + /destroy@1.2.0: + resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + dev: true + + /detect-indent@6.1.0: + resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} + engines: {node: '>=8'} + dev: true + + /detect-package-manager@2.0.1: + resolution: {integrity: sha512-j/lJHyoLlWi6G1LDdLgvUtz60Zo5GEj+sVYtTVXnYLDPuzgC3llMxonXym9zIwhhUII8vjdw0LXxavpLqTbl1A==} + engines: {node: '>=12'} + dependencies: + execa: 5.1.1 + dev: true + + /detect-port@1.5.1: + resolution: {integrity: sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==} + hasBin: true + dependencies: + address: 1.2.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + dependencies: + path-type: 4.0.0 + dev: true + + /doctrine@3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} + dependencies: + esutils: 2.0.3 + dev: true + + /dotenv-expand@10.0.0: + resolution: {integrity: sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A==} + engines: {node: '>=12'} + dev: true + + /dotenv@16.0.3: + resolution: 
{integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} + engines: {node: '>=12'} + dev: true + + /duplexify@3.7.1: + resolution: {integrity: sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==} + dependencies: + end-of-stream: 1.4.4 + inherits: 2.0.4 + readable-stream: 2.3.8 + stream-shift: 1.0.1 + dev: true + + /ee-first@1.1.1: + resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=} + dev: true + + /ejs@3.1.9: + resolution: {integrity: sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==} + engines: {node: '>=0.10.0'} + hasBin: true + dependencies: + jake: 10.8.5 + dev: true + + /electron-to-chromium@1.4.357: + resolution: {integrity: sha512-UTkCbNTAcGXABmEnQrGcW4m3cG6fcyBfD4KDF0iyEAlbrGZiY9dmslyDAGOD1Kr5biN2F743Y30aRCOtau35Vw==} + dev: true + + /emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + dev: true + + /encodeurl@1.0.2: + resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} + engines: {node: '>= 0.8'} + dev: true + + /end-of-stream@1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + dependencies: + once: 1.4.0 + dev: true + + /envinfo@7.8.1: + resolution: {integrity: sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + dependencies: + is-arrayish: 0.2.1 + dev: true + + /es-module-lexer@0.9.3: + resolution: {integrity: sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==} + dev: true + + /es6-object-assign@1.1.0: + resolution: {integrity: sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw==} + dev: true + + /esbuild-plugin-alias@0.2.1: + resolution: {integrity: sha512-jyfL/pwPqaFXyKnj8lP8iLk6Z0m099uXR45aSN8Av1XD4vhvQutxxPzgA2bTcAwQpa1zCXDcWOlhFgyP3GKqhQ==} + dev: true + + /esbuild-register@3.4.2(esbuild@0.17.16): + resolution: {integrity: sha512-kG/XyTDyz6+YDuyfB9ZoSIOOmgyFCH+xPRtsCa8W85HLRV5Csp+o3jWVbOSHgSLfyLc5DmP+KFDNwty4mEjC+Q==} + peerDependencies: + esbuild: '>=0.12 <1' + dependencies: + debug: 4.3.4 + esbuild: 0.17.16 + transitivePeerDependencies: + - supports-color + dev: true + + /esbuild@0.17.16: + resolution: {integrity: sha512-aeSuUKr9aFVY9Dc8ETVELGgkj4urg5isYx8pLf4wlGgB0vTFjxJQdHnNH6Shmx4vYYrOTLCHtRI5i1XZ9l2Zcg==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + optionalDependencies: + '@esbuild/android-arm': 0.17.16 + '@esbuild/android-arm64': 0.17.16 + '@esbuild/android-x64': 0.17.16 + '@esbuild/darwin-arm64': 0.17.16 + '@esbuild/darwin-x64': 0.17.16 + '@esbuild/freebsd-arm64': 0.17.16 + '@esbuild/freebsd-x64': 0.17.16 + '@esbuild/linux-arm': 0.17.16 + '@esbuild/linux-arm64': 0.17.16 + '@esbuild/linux-ia32': 0.17.16 + '@esbuild/linux-loong64': 0.17.16 + '@esbuild/linux-mips64el': 0.17.16 + '@esbuild/linux-ppc64': 0.17.16 + '@esbuild/linux-riscv64': 0.17.16 + '@esbuild/linux-s390x': 0.17.16 + '@esbuild/linux-x64': 0.17.16 + '@esbuild/netbsd-x64': 0.17.16 + '@esbuild/openbsd-x64': 0.17.16 + '@esbuild/sunos-x64': 0.17.16 + '@esbuild/win32-arm64': 
0.17.16 + '@esbuild/win32-ia32': 0.17.16 + '@esbuild/win32-x64': 0.17.16 + dev: true + + /escalade@3.1.1: + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + dev: true + + /escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + dev: true + + /escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + dev: true + + /escodegen@2.0.0: + resolution: {integrity: sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==} + engines: {node: '>=6.0'} + hasBin: true + dependencies: + esprima: 4.0.1 + estraverse: 5.3.0 + esutils: 2.0.3 + optionator: 0.8.3 + optionalDependencies: + source-map: 0.6.1 + dev: true + + /esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + dev: true + + /estree-to-babel@3.2.1: + resolution: {integrity: sha512-YNF+mZ/Wu2FU/gvmzuWtYc8rloubL7wfXCTgouFrnjGVXPA/EeYYA7pupXWrb3Iv1cTBeSSxxJIbK23l4MRNqg==} + engines: {node: '>=8.3.0'} + dependencies: + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + c8: 7.13.0 + transitivePeerDependencies: + - supports-color + dev: true + + /estree-walker@2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + dev: true + + /esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + dev: true + + /etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + dev: true + + /execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /express@4.18.2: + resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==} + engines: {node: '>= 0.10.0'} + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.1 + content-disposition: 0.5.4 + content-type: 1.0.5 + cookie: 0.5.0 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.2.0 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.1 + methods: 1.1.2 + on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.7 + proxy-addr: 2.0.7 + qs: 6.11.0 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.18.0 + serve-static: 1.15.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /extend@3.0.2: + resolution: {integrity: 
sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + dev: true + + /extract-zip@1.7.0: + resolution: {integrity: sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA==} + hasBin: true + dependencies: + concat-stream: 1.6.2 + debug: 2.6.9 + mkdirp: 0.5.6 + yauzl: 2.10.0 + transitivePeerDependencies: + - supports-color + dev: true + + /fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + dev: false + + /fast-glob@3.2.12: + resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: true + + /fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + dev: true + + /fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + dev: true + + /fastq@1.15.0: + resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} + dependencies: + reusify: 1.0.4 + dev: true + + /fault@1.0.4: + resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==} + dependencies: + format: 0.2.2 + dev: true + + /fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + dependencies: + bser: 2.1.1 + dev: true + + /fd-slicer@1.1.0: + resolution: {integrity: sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==} + dependencies: + pend: 1.2.0 + dev: true + + /fetch-retry@5.0.4: + resolution: {integrity: sha512-LXcdgpdcVedccGg0AZqg+S8lX/FCdwXD92WNZ5k5qsb0irRhSFsBOpcJt7oevyqT2/C2nEE0zSFNdBEpj3YOSw==} + dev: true + + /file-system-cache@2.0.2: + resolution: {integrity: sha512-lp4BHO4CWqvRyx88Tt3quZic9ZMf4cJyquYq7UI8sH42Bm2ArlBBjKQAalZOo+UfaBassb7X123Lik5qZ/tSAA==} + dependencies: + fs-extra: 11.1.1 + ramda: 0.28.0 + + /filelist@1.0.4: + resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} + dependencies: + minimatch: 5.1.6 + dev: true + + /fill-range@7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + dev: true + + /finalhandler@1.2.0: + resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==} + engines: {node: '>= 0.8'} + dependencies: + debug: 2.6.9 + encodeurl: 1.0.2 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /find-cache-dir@2.1.0: + resolution: {integrity: sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==} + engines: {node: '>=6'} + dependencies: + commondir: 1.0.1 + make-dir: 2.1.0 + pkg-dir: 3.0.0 + dev: true + + /find-cache-dir@3.3.2: + resolution: {integrity: sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==} + 
engines: {node: '>=8'} + dependencies: + commondir: 1.0.1 + make-dir: 3.1.0 + pkg-dir: 4.2.0 + dev: true + + /find-up@3.0.0: + resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==} + engines: {node: '>=6'} + dependencies: + locate-path: 3.0.0 + dev: true + + /find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + dev: true + + /find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + dev: true + + /flow-parser@0.203.1: + resolution: {integrity: sha512-Nw2M8MPP/Zb+yhvmPDEjzkCXLtgyWGKXZjAYOVftm+wIf3xd4FKa7nRI9v67rODs0WzxMbPc8IPs/7o/dyxo/Q==} + engines: {node: '>=0.4.0'} + dev: true + + /for-each@0.3.3: + resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} + dependencies: + is-callable: 1.2.7 + dev: true + + /foreground-child@2.0.0: + resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==} + engines: {node: '>=8.0.0'} + dependencies: + cross-spawn: 7.0.3 + signal-exit: 3.0.7 + dev: true + + /form-data@3.0.1: + resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: true + + /format@0.2.2: + resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} + engines: {node: '>=0.4.x'} + dev: true + + /forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + dev: true + + /fresh@0.5.2: + resolution: {integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=} + engines: {node: '>= 0.6'} + dev: true + + /fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + dev: true + + /fs-extra@11.1.1: + resolution: {integrity: sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==} + engines: {node: '>=14.14'} + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.1.0 + universalify: 2.0.0 + + /fs-minipass@2.1.0: + resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} + dependencies: + minipass: 3.3.6 + dev: true + + /fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: true + + /fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /function-bind@1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + + /gauge@3.0.2: + resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} + engines: {node: '>=10'} + 
dependencies: + aproba: 2.0.0 + color-support: 1.1.3 + console-control-strings: 1.1.0 + has-unicode: 2.0.1 + object-assign: 4.1.1 + signal-exit: 3.0.7 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wide-align: 1.1.5 + dev: true + + /gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + dev: true + + /get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + dev: true + + /get-intrinsic@1.2.0: + resolution: {integrity: sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==} + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + + /get-npm-tarball-url@2.0.3: + resolution: {integrity: sha512-R/PW6RqyaBQNWYaSyfrh54/qtcnOp22FHCCiRhSSZj0FP3KQWCsxxt0DzIdVTbwTqe9CtQfvl/FPD4UIPt4pqw==} + engines: {node: '>=12.17'} + dev: true + + /get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + dev: true + + /get-port@5.1.1: + resolution: {integrity: sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==} + engines: {node: '>=8'} + dev: true + + /get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + dev: true + + /giget@1.1.2: + resolution: {integrity: sha512-HsLoS07HiQ5oqvObOI+Qb2tyZH4Gj5nYGfF9qQcZNrPw+uEFhdXtgJr01aO2pWadGHucajYDLxxbtQkm97ON2A==} + hasBin: true + dependencies: + colorette: 2.0.19 + defu: 6.1.2 + https-proxy-agent: 5.0.1 + mri: 1.2.0 + node-fetch-native: 1.1.0 + pathe: 1.1.0 + tar: 6.1.13 + transitivePeerDependencies: + - supports-color + dev: true + + /github-slugger@1.5.0: + resolution: {integrity: sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==} + dev: true + + /glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + dependencies: + is-glob: 4.0.3 + dev: true + + /glob-promise@6.0.2(glob@8.1.0): + resolution: {integrity: sha512-Ni2aDyD1ekD6x8/+K4hDriRDbzzfuK4yKpqSymJ4P7IxbtARiOOuU+k40kbHM0sLIlbf1Qh0qdMkAHMZYE6XJQ==} + engines: {node: '>=16'} + peerDependencies: + glob: ^8.0.3 + dependencies: + '@types/glob': 8.1.0 + glob: 8.1.0 + dev: true + + /glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} + dev: true + + /glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /glob@8.1.0: + resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} + engines: {node: '>=12'} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 5.1.6 + once: 1.4.0 + dev: true + + /globals@11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + dev: true + + 
/globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.2.12 + ignore: 5.2.4 + merge2: 1.4.1 + slash: 3.0.0 + dev: true + + /gopd@1.0.1: + resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} + dependencies: + get-intrinsic: 1.2.0 + dev: true + + /graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + /gunzip-maybe@1.4.2: + resolution: {integrity: sha512-4haO1M4mLO91PW57BMsDFf75UmwoRX0GkdD+Faw+Lr+r/OZrOCS0pIBwOL1xCKQqnQzbNFGgK2V2CpBUPeFNTw==} + hasBin: true + dependencies: + browserify-zlib: 0.1.4 + is-deflate: 1.0.0 + is-gzip: 1.0.0 + peek-stream: 1.1.3 + pumpify: 1.5.1 + through2: 2.0.5 + dev: true + + /handlebars@4.7.7: + resolution: {integrity: sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==} + engines: {node: '>=0.4.7'} + hasBin: true + dependencies: + minimist: 1.2.8 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.17.4 + dev: true + + /has-flag@3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + dev: true + + /has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + dev: true + + /has-property-descriptors@1.0.0: + resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} + dependencies: + get-intrinsic: 1.2.0 + dev: true + + /has-symbols@1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + + /has-tostringtag@1.0.0: + resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /has-unicode@2.0.1: + resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} + dev: true + + /has@1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + + /hast-util-parse-selector@2.2.5: + resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==} + dev: true + + /hastscript@6.0.0: + resolution: {integrity: sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==} + dependencies: + '@types/hast': 2.3.4 + comma-separated-tokens: 1.0.8 + hast-util-parse-selector: 2.2.5 + property-information: 5.6.0 + space-separated-tokens: 1.1.5 + dev: true + + /highlight.js@10.7.3: + resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==} + dev: true + + /hosted-git-info@2.8.9: + resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} + dev: true + + /html-escaper@2.0.2: + resolution: {integrity: 
sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true + + /html-tags@3.3.1: + resolution: {integrity: sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==} + engines: {node: '>=8'} + dev: true + + /http-errors@2.0.0: + resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} + engines: {node: '>= 0.8'} + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + dev: true + + /https-proxy-agent@4.0.0: + resolution: {integrity: sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==} + engines: {node: '>= 6.0.0'} + dependencies: + agent-base: 5.1.1 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + dependencies: + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + dev: true + + /iconv-lite@0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: true + + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: true + + /ignore@5.2.4: + resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} + engines: {node: '>= 4'} + dev: true + + /imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + dev: true + + /indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + dev: true + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: true + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: true + + /interpret@1.4.0: + resolution: {integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==} + engines: {node: '>= 0.10'} + dev: true + + /ip@2.0.0: + resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==} + dev: true + + /ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + dev: true + + /is-absolute-url@3.0.3: + resolution: {integrity: sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==} + engines: {node: '>=8'} + dev: true + + /is-alphabetical@1.0.4: + resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==} + dev: true + + 
+  /is-alphanumerical@1.0.4:
+    resolution: {integrity: sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==}
+    dependencies:
+      is-alphabetical: 1.0.4
+      is-decimal: 1.0.4
+    dev: true
+
+  /is-arguments@1.1.1:
+    resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.2
+      has-tostringtag: 1.0.0
+    dev: true
+
+  /is-arrayish@0.2.1:
+    resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==}
+    dev: true
+
+  /is-binary-path@2.1.0:
+    resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
+    engines: {node: '>=8'}
+    dependencies:
+      binary-extensions: 2.2.0
+    dev: true
+
+  /is-callable@1.2.7:
+    resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /is-core-module@2.12.0:
+    resolution: {integrity: sha512-RECHCBCd/viahWmwj6enj19sKbHfJrddi/6cBDsNTKbNq0f7VeaUkBo60BqzvPqo/W54ChS62Z5qyun7cfOMqQ==}
+    dependencies:
+      has: 1.0.3
+    dev: true
+
+  /is-decimal@1.0.4:
+    resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==}
+    dev: true
+
+  /is-deflate@1.0.0:
+    resolution: {integrity: sha512-YDoFpuZWu1VRXlsnlYMzKyVRITXj7Ej/V9gXQ2/pAe7X1J7M/RNOqaIYi6qUn+B7nGyB9pDXrv02dsB58d2ZAQ==}
+    dev: true
+
+  /is-docker@2.2.1:
+    resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==}
+    engines: {node: '>=8'}
+    hasBin: true
+    dev: true
+
+  /is-extglob@2.1.1:
+    resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /is-fullwidth-code-point@3.0.0:
+    resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /is-generator-function@1.0.10:
+    resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-tostringtag: 1.0.0
+    dev: true
+
+  /is-glob@4.0.3:
+    resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      is-extglob: 2.1.1
+    dev: true
+
+  /is-gzip@1.0.0:
+    resolution: {integrity: sha512-rcfALRIb1YewtnksfRIHGcIY93QnK8BIQ/2c9yDYcG/Y6+vRoJuTWBmmSEbyLLYtXm7q35pHOHbZFQBaLrhlWQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /is-hexadecimal@1.0.4:
+    resolution: {integrity: sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==}
+    dev: true
+
+  /is-nan@1.3.2:
+    resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.2
+      define-properties: 1.2.0
+    dev: true
+
+  /is-number@7.0.0:
+    resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
+    engines: {node: '>=0.12.0'}
+    dev: true
+
+  /is-path-cwd@2.2.0:
+    resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /is-path-inside@3.0.3:
+    resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /is-plain-object@2.0.4:
+    resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      isobject: 3.0.1
+    dev: true
+
+  /is-plain-object@5.0.0:
+    resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /is-stream@2.0.1:
+    resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /is-typed-array@1.1.10:
+    resolution: {integrity: sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      available-typed-arrays: 1.0.5
+      call-bind: 1.0.2
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-tostringtag: 1.0.0
+    dev: true
+
+  /is-wsl@2.2.0:
+    resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==}
+    engines: {node: '>=8'}
+    dependencies:
+      is-docker: 2.2.1
+    dev: true
+
+  /isarray@1.0.0:
+    resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
+    dev: true
+
+  /isexe@2.0.0:
+    resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
+    dev: true
+
+  /isobject@3.0.1:
+    resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /isomorphic-unfetch@3.1.0:
+    resolution: {integrity: sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q==}
+    dependencies:
+      node-fetch: 2.6.9
+      unfetch: 4.2.0
+    transitivePeerDependencies:
+      - encoding
+    dev: true
+
+  /istanbul-lib-coverage@3.2.0:
+    resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /istanbul-lib-instrument@5.2.1:
+    resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==}
+    engines: {node: '>=8'}
+    dependencies:
+      '@babel/core': 7.21.4
+      '@babel/parser': 7.21.4
+      '@istanbuljs/schema': 0.1.3
+      istanbul-lib-coverage: 3.2.0
+      semver: 6.3.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /istanbul-lib-report@3.0.0:
+    resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==}
+    engines: {node: '>=8'}
+    dependencies:
+      istanbul-lib-coverage: 3.2.0
+      make-dir: 3.1.0
+      supports-color: 7.2.0
+    dev: true
+
+  /istanbul-reports@3.1.5:
+    resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==}
+    engines: {node: '>=8'}
+    dependencies:
+      html-escaper: 2.0.2
+      istanbul-lib-report: 3.0.0
+    dev: true
+
+  /jake@10.8.5:
+    resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==}
+    engines: {node: '>=10'}
+    hasBin: true
+    dependencies:
+      async: 3.2.4
+      chalk: 4.1.2
+      filelist: 1.0.4
+      minimatch: 3.1.2
+    dev: true
+
+  /jest-haste-map@29.5.0:
+    resolution: {integrity: sha512-IspOPnnBro8YfVYSw6yDRKh/TiCdRngjxeacCps1cQ9cgVN6+10JUcuJ1EabrgYLOATsIAigxA0rLR9x/YlrSA==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dependencies:
+      '@jest/types': 29.5.0
+      '@types/graceful-fs': 4.1.6
+      '@types/node': 18.15.11
+      anymatch: 3.1.3
+      fb-watchman: 2.0.2
+      graceful-fs: 4.2.11
+      jest-regex-util: 29.4.3
+      jest-util: 29.5.0
+      jest-worker: 29.5.0
+      micromatch: 4.0.5
+      walker: 1.0.8
+    optionalDependencies:
+      fsevents: 2.3.2
+    dev: true
+
+  /jest-mock@27.5.1:
+    resolution: {integrity: sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==}
+    engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0}
+    dependencies:
+      '@jest/types': 27.5.1
+      '@types/node': 18.15.11
+    dev: true
+
+  /jest-regex-util@29.4.3:
+    resolution: {integrity: sha512-O4FglZaMmWXbGHSQInfXewIsd1LMn9p3ZXB/6r4FOkyhX2/iP/soMG98jGvk/A3HAN78+5VWcBGO0BJAPRh4kg==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dev: true
+
+  /jest-util@29.5.0:
+    resolution: {integrity: sha512-RYMgG/MTadOr5t8KdhejfvUU82MxsCu5MF6KuDUHl+NuwzUt+Sm6jJWxTJVrDR1j5M/gJVCPKQEpWXY+yIQ6lQ==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dependencies:
+      '@jest/types': 29.5.0
+      '@types/node': 18.15.11
+      chalk: 4.1.2
+      ci-info: 3.8.0
+      graceful-fs: 4.2.11
+      picomatch: 2.3.1
+    dev: true
+
+  /jest-worker@29.5.0:
+    resolution: {integrity: sha512-NcrQnevGoSp4b5kg+akIpthoAFHxPBcb5P6mYPY0fUNT+sSvmtu6jlkEle3anczUKIKEbMxFimk9oTP/tpIPgA==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dependencies:
+      '@types/node': 18.15.11
+      jest-util: 29.5.0
+      merge-stream: 2.0.0
+      supports-color: 8.1.1
+    dev: true
+
+  /js-tokens@4.0.0:
+    resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
+
+  /js-yaml@3.14.1:
+    resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==}
+    hasBin: true
+    dependencies:
+      argparse: 1.0.10
+      esprima: 4.0.1
+    dev: true
+
+  /jscodeshift@0.14.0(@babel/preset-env@7.21.4):
+    resolution: {integrity: sha512-7eCC1knD7bLUPuSCwXsMZUH51O8jIcoVyKtI6P0XM0IVzlGjckPy3FIwQlorzbN0Sg79oK+RlohN32Mqf/lrYA==}
+    hasBin: true
+    peerDependencies:
+      '@babel/preset-env': ^7.1.6
+    dependencies:
+      '@babel/core': 7.21.4
+      '@babel/parser': 7.21.4
+      '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.21.4)
+      '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/core@7.21.4)
+      '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/core@7.21.4)
+      '@babel/plugin-transform-modules-commonjs': 7.21.2(@babel/core@7.21.4)
+      '@babel/preset-env': 7.21.4(@babel/core@7.21.4)
+      '@babel/preset-flow': 7.21.4(@babel/core@7.21.4)
+      '@babel/preset-typescript': 7.21.4(@babel/core@7.21.4)
+      '@babel/register': 7.21.0(@babel/core@7.21.4)
+      babel-core: 7.0.0-bridge.0(@babel/core@7.21.4)
+      chalk: 4.1.2
+      flow-parser: 0.203.1
+      graceful-fs: 4.2.11
+      micromatch: 4.0.5
+      neo-async: 2.6.2
+      node-dir: 0.1.17
+      recast: 0.21.5
+      temp: 0.8.4
+      write-file-atomic: 2.4.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /jsesc@0.5.0:
+    resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==}
+    hasBin: true
+    dev: true
+
+  /jsesc@2.5.2:
+    resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==}
+    engines: {node: '>=4'}
+    hasBin: true
+    dev: true
+
+  /json-parse-even-better-errors@2.3.1:
+    resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
+    dev: true
+
+  /json5@2.2.3:
+    resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==}
+    engines: {node: '>=6'}
+    hasBin: true
+    dev: true
+
+  /jsonfile@6.1.0:
+    resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==}
+    dependencies:
+      universalify: 2.0.0
+    optionalDependencies:
+      graceful-fs: 4.2.11
+
+  /kind-of@6.0.3:
+    resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /kleur@3.0.3:
+    resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /lazy-universal-dotenv@4.0.0:
+    resolution: {integrity: sha512-aXpZJRnTkpK6gQ/z4nk+ZBLd/Qdp118cvPruLSIQzQNRhKwEcdXCOzXuF55VDqIiuAaY3UGZ10DJtvZzDcvsxg==}
+    engines: {node: '>=14.0.0'}
+    dependencies:
+      app-root-dir: 1.0.2
+      dotenv: 16.0.3
+      dotenv-expand: 10.0.0
+    dev: true
+
+  /leantable@0.4.12(react@18.2.0):
+    resolution: {integrity: sha512-ilBvzlHIDFle9xpTHQV2zzXhsLc9UDbn/eLOxRM3J8CuiJ5M/orY0tJ7fuu5+jr9oUIRPwotA/3Mo6CqK5RgNQ==}
+    peerDependencies:
+      react: '>= 16.8.0'
+    dependencies:
+      clsx: 1.2.1
+      react: 18.2.0
+      rxjs: 7.8.0
+    dev: false
+
+  /leven@3.1.0:
+    resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /levn@0.3.0:
+    resolution: {integrity: sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      prelude-ls: 1.1.2
+      type-check: 0.3.2
+    dev: true
+
+  /lines-and-columns@1.2.4:
+    resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
+    dev: true
+
+  /locate-path@3.0.0:
+    resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==}
+    engines: {node: '>=6'}
+    dependencies:
+      p-locate: 3.0.0
+      path-exists: 3.0.0
+    dev: true
+
+  /locate-path@5.0.0:
+    resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
+    engines: {node: '>=8'}
+    dependencies:
+      p-locate: 4.1.0
+    dev: true
+
+  /locate-path@6.0.0:
+    resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
+    engines: {node: '>=10'}
+    dependencies:
+      p-locate: 5.0.0
+    dev: true
+
+  /lodash.debounce@4.0.8:
+    resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
+    dev: true
+
+  /lodash@4.17.21:
+    resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+
+  /loose-envify@1.4.0:
+    resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==}
+    hasBin: true
+    dependencies:
+      js-tokens: 4.0.0
+
+  /lowlight@1.20.0:
+    resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==}
+    dependencies:
+      fault: 1.0.4
+      highlight.js: 10.7.3
+    dev: true
+
+  /lru-cache@5.1.1:
+    resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==}
+    dependencies:
+      yallist: 3.1.1
+    dev: true
+
+  /lru-cache@6.0.0:
+    resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
+    engines: {node: '>=10'}
+    dependencies:
+      yallist: 4.0.0
+
+  /magic-string@0.27.0:
+    resolution: {integrity: sha512-8UnnX2PeRAPZuN12svgR9j7M1uWMovg/CEnIwIG0LFkXSJJe4PdfUGiTGl8V9bsBHFUtfVINcSyYxd7q+kx9fA==}
+    engines: {node: '>=12'}
+    dependencies:
+      '@jridgewell/sourcemap-codec': 1.4.15
+    dev: true
+
+  /make-dir@2.1.0:
+    resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==}
+    engines: {node: '>=6'}
+    dependencies:
+      pify: 4.0.1
+      semver: 5.7.1
+    dev: true
+
+  /make-dir@3.1.0:
+    resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==}
+    engines: {node: '>=8'}
+    dependencies:
+      semver: 6.3.0
+    dev: true
+
+  /makeerror@1.0.12:
+    resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==}
+    dependencies:
+      tmpl: 1.0.5
+    dev: true
+
+  /map-or-similar@1.5.0:
+    resolution: {integrity: sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==}
+
+  /markdown-to-jsx@7.2.0(react@18.2.0):
+    resolution: {integrity: sha512-3l4/Bigjm4bEqjCR6Xr+d4DtM1X6vvtGsMGSjJYyep8RjjIvcWtrXBS8Wbfe1/P+atKNMccpsraESIaWVplzVg==}
+    engines: {node: '>= 10'}
+    peerDependencies:
+      react: '>= 0.14.0'
+    dependencies:
+      react: 18.2.0
+    dev: true
+
+  /mdast-util-definitions@4.0.0:
+    resolution: {integrity: sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==}
+    dependencies:
+      unist-util-visit: 2.0.3
+    dev: true
+
+  /mdast-util-to-string@1.1.0:
+    resolution: {integrity: sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A==}
+    dev: true
+
+  /media-typer@0.3.0:
+    resolution: {integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=}
+    engines: {node: '>= 0.6'}
+    dev: true
+
+  /memoizerific@1.11.3:
+    resolution: {integrity: sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==}
+    dependencies:
+      map-or-similar: 1.5.0
+
+  /merge-descriptors@1.0.1:
+    resolution: {integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=}
+    dev: true
+
+  /merge-stream@2.0.0:
+    resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
+    dev: true
+
+  /merge2@1.4.1:
+    resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
+    engines: {node: '>= 8'}
+    dev: true
+
+  /methods@1.1.2:
+    resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==}
+    engines: {node: '>= 0.6'}
+    dev: true
+
+  /micromatch@4.0.5:
+    resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==}
+    engines: {node: '>=8.6'}
+    dependencies:
+      braces: 3.0.2
+      picomatch: 2.3.1
+    dev: true
+
+  /mime-db@1.52.0:
+    resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
+    engines: {node: '>= 0.6'}
+    dev: true
+
+  /mime-types@2.1.35:
+    resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
+    engines: {node: '>= 0.6'}
+    dependencies:
+      mime-db: 1.52.0
+    dev: true
+
+  /mime@1.6.0:
+    resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
+    engines: {node: '>=4'}
+    hasBin: true
+    dev: true
+
+  /mime@2.6.0:
+    resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==}
+    engines: {node: '>=4.0.0'}
+    hasBin: true
+    dev: true
+
+  /mimic-fn@2.1.0:
+    resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /min-indent@1.0.1:
+    resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /minimatch@3.1.2:
+    resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
+    dependencies:
+      brace-expansion: 1.1.11
+    dev: true
+
+  /minimatch@5.1.6:
+    resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==}
+    engines: {node: '>=10'}
+    dependencies:
+      brace-expansion: 2.0.1
+    dev: true
+
+  /minimist@1.2.8:
+    resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
+    dev: true
+
+  /minipass@3.3.6:
+    resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
+    engines: {node: '>=8'}
+    dependencies:
+      yallist: 4.0.0
+    dev: true
+
+  /minipass@4.2.8:
+    resolution: {integrity: sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /minizlib@2.1.2:
+    resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
+    engines: {node: '>= 8'}
+    dependencies:
+      minipass: 3.3.6
+      yallist: 4.0.0
+    dev: true
+
+  /mkdirp-classic@0.5.3:
+    resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}
+    dev: true
+
+  /mkdirp@0.5.6:
+    resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
+    hasBin: true
+    dependencies:
+      minimist: 1.2.8
+    dev: true
+
+  /mkdirp@1.0.4:
+    resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
+    engines: {node: '>=10'}
+    hasBin: true
+    dev: true
+
+  /mri@1.2.0:
+    resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /ms@2.0.0:
+    resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
+    dev: true
+
+  /ms@2.1.1:
+    resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==}
+    dev: true
+
+  /ms@2.1.2:
+    resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
+    dev: true
+
+  /ms@2.1.3:
+    resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+    dev: true
+
+  /nanoid@3.3.6:
+    resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==}
+    engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
+    hasBin: true
+    dev: true
+
+  /negotiator@0.6.3:
+    resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
+    engines: {node: '>= 0.6'}
+    dev: true
+
+  /neo-async@2.6.2:
+    resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
+    dev: true
+
+  /node-dir@0.1.17:
+    resolution: {integrity: sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==}
+    engines: {node: '>= 0.10.5'}
+    dependencies:
+      minimatch: 3.1.2
+    dev: true
+
+  /node-fetch-native@1.1.0:
+    resolution: {integrity: sha512-nl5goFCig93JZ9FIV8GHT9xpNqXbxQUzkOmKIMKmncsBH9jhg7qKex8hirpymkBFmNQ114chEEG5lS4wgK2I+Q==}
+    dev: true
+
+  /node-fetch@2.6.9:
+    resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==}
+    engines: {node: 4.x || >=6.0.0}
+    peerDependencies:
+      encoding: ^0.1.0
+    peerDependenciesMeta:
+      encoding:
+        optional: true
+    dependencies:
+      whatwg-url: 5.0.0
+    dev: true
+
+  /node-int64@0.4.0:
+    resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==}
+    dev: true
+
+  /node-releases@2.0.10:
+    resolution: {integrity: sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==}
+    dev: true
+
+  /normalize-package-data@2.5.0:
+    resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==}
+    dependencies:
+      hosted-git-info: 2.8.9
+      resolve: 1.22.2
+      semver: 5.7.1
+      validate-npm-package-license: 3.0.4
+    dev: true
+
+  /normalize-path@3.0.0:
+    resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /npm-run-path@4.0.1:
+    resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==}
+    engines: {node: '>=8'}
+    dependencies:
+      path-key: 3.1.1
+    dev: true
+
+  /npmlog@5.0.1:
+    resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==}
+    dependencies:
+      are-we-there-yet: 2.0.0
+      console-control-strings: 1.1.0
+      gauge: 3.0.2
+      set-blocking: 2.0.0
+    dev: true
+
+  /object-assign@4.1.1:
+    resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /object-inspect@1.12.3:
+    resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==}
+
+  /object-is@1.1.5:
+    resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.2
+      define-properties: 1.2.0
+    dev: true
+
+  /object-keys@1.1.1:
+    resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /on-finished@2.4.1:
+    resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
+    engines: {node: '>= 0.8'}
+    dependencies:
+      ee-first: 1.1.1
+    dev: true
+
+  /on-headers@1.0.2:
+    resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /once@1.4.0:
+    resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
+    dependencies:
+      wrappy: 1.0.2
+    dev: true
+
+  /onetime@5.1.2:
+    resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==}
+    engines: {node: '>=6'}
+    dependencies:
+      mimic-fn: 2.1.0
+    dev: true
+
+  /open@7.4.2:
+    resolution: {integrity: sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==}
+    engines: {node: '>=8'}
+    dependencies:
+      is-docker: 2.2.1
+      is-wsl: 2.2.0
+    dev: true
+
+  /open@8.4.2:
+    resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==}
+    engines: {node: '>=12'}
+    dependencies:
+      define-lazy-prop: 2.0.0
+      is-docker: 2.2.1
+      is-wsl: 2.2.0
+    dev: true
+
+  /optionator@0.8.3:
+    resolution: {integrity: sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      deep-is: 0.1.4
+      fast-levenshtein: 2.0.6
+      levn: 0.3.0
+      prelude-ls: 1.1.2
+      type-check: 0.3.2
+      word-wrap: 1.2.3
+    dev: true
+
+  /p-limit@2.3.0:
+    resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
+    engines: {node: '>=6'}
+    dependencies:
+      p-try: 2.2.0
+    dev: true
+
+  /p-limit@3.1.0:
+    resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
+    engines: {node: '>=10'}
+    dependencies:
+      yocto-queue: 0.1.0
+    dev: true
+
+  /p-locate@3.0.0:
+    resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==}
+    engines: {node: '>=6'}
+    dependencies:
+      p-limit: 2.3.0
+    dev: true
+
+  /p-locate@4.1.0:
+    resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
+    engines: {node: '>=8'}
+    dependencies:
+      p-limit: 2.3.0
+    dev: true
+
+  /p-locate@5.0.0:
+    resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
+    engines: {node: '>=10'}
+    dependencies:
+      p-limit: 3.1.0
+    dev: true
+
+  /p-map@4.0.0:
+    resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==}
+    engines: {node: '>=10'}
+    dependencies:
+      aggregate-error: 3.1.0
+    dev: true
+
+  /p-try@2.2.0:
+    resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /pako@0.2.9:
+    resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==}
+    dev: true
+
+  /parse-entities@2.0.0:
+    resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==}
+    dependencies:
+      character-entities: 1.2.4
+      character-entities-legacy: 1.1.4
+      character-reference-invalid: 1.1.4
+      is-alphanumerical: 1.0.4
+      is-decimal: 1.0.4
+      is-hexadecimal: 1.0.4
+    dev: true
+
+  /parse-json@5.2.0:
+    resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
+    engines: {node: '>=8'}
+    dependencies:
+      '@babel/code-frame': 7.21.4
+      error-ex: 1.3.2
+      json-parse-even-better-errors: 2.3.1
+      lines-and-columns: 1.2.4
+    dev: true
+
+  /parseurl@1.3.3:
+    resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /path-exists@3.0.0:
+    resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /path-exists@4.0.0:
+    resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /path-is-absolute@1.0.1:
+    resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /path-key@3.1.1:
+    resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /path-parse@1.0.7:
+    resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
+    dev: true
+
+  /path-to-regexp@0.1.7:
+    resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==}
+    dev: true
+
+  /path-type@4.0.0:
+    resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /pathe@1.1.0:
+    resolution: {integrity: sha512-ODbEPR0KKHqECXW1GoxdDb+AZvULmXjVPy4rt+pGo2+TnjJTIPJQSVS6N63n8T2Ip+syHhbn52OewKicV0373w==}
+    dev: true
+
+  /peek-stream@1.1.3:
+    resolution: {integrity: sha512-FhJ+YbOSBb9/rIl2ZeE/QHEsWn7PqNYt8ARAY3kIgNGOk13g9FGyIY6JIl/xB/3TFRVoTv5as0l11weORrTekA==}
+    dependencies:
+      buffer-from: 1.1.2
+      duplexify: 3.7.1
+      through2: 2.0.5
+    dev: true
+
+  /pend@1.2.0:
+    resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==}
+    dev: true
+
+  /picocolors@1.0.0:
+    resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
+    dev: true
+
+  /picomatch@2.3.1:
+    resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
+    engines: {node: '>=8.6'}
+    dev: true
+
+  /pify@4.0.1:
+    resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /pirates@4.0.5:
+    resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==}
+    engines: {node: '>= 6'}
+    dev: true
+
+  /pkg-dir@3.0.0:
+    resolution: {integrity: sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==}
+    engines: {node: '>=6'}
+    dependencies:
+      find-up: 3.0.0
+    dev: true
+
+  /pkg-dir@4.2.0:
+    resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==}
+    engines: {node: '>=8'}
+    dependencies:
+      find-up: 4.1.0
+    dev: true
+
+  /pkg-dir@5.0.0:
+    resolution: {integrity: sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==}
+    engines: {node: '>=10'}
+    dependencies:
+      find-up: 5.0.0
+    dev: true
+
+  /polished@4.2.2:
+    resolution: {integrity: sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==}
+    engines: {node: '>=10'}
+    dependencies:
+      '@babel/runtime': 7.21.0
+    dev: true
+
+  /postcss@8.4.21:
+    resolution: {integrity: sha512-tP7u/Sn/dVxK2NnruI4H9BG+x+Wxz6oeZ1cJ8P6G/PZY0IKk4k/63TDsQf2kQq3+qoJeLm2kIBUNlZe3zgb4Zg==}
+    engines: {node: ^10 || ^12 || >=14}
+    dependencies:
+      nanoid: 3.3.6
+      picocolors: 1.0.0
+      source-map-js: 1.0.2
+    dev: true
+
+  /prelude-ls@1.1.2:
+    resolution: {integrity: sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==}
+    engines: {node: '>= 0.8.0'}
+    dev: true
+
+  /prettier@2.8.7:
+    resolution: {integrity: sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==}
+    engines: {node: '>=10.13.0'}
+    hasBin: true
+    dev: true
+
+  /pretty-hrtime@1.0.3:
+    resolution: {integrity: sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /prismjs@1.27.0:
+    resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /prismjs@1.29.0:
+    resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /process-nextick-args@2.0.1:
+    resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
+    dev: true
+
+  /process@0.11.10:
+    resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
+    engines: {node: '>= 0.6.0'}
+    dev: true
+
+  /progress@2.0.3:
+    resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==}
+    engines: {node: '>=0.4.0'}
+    dev: true
+
+  /prompts@2.4.2:
+    resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==}
+    engines: {node: '>= 6'}
+    dependencies:
+      kleur: 3.0.3
+      sisteransi: 1.0.5
+    dev: true
+
+  /prop-types@15.8.1:
+    resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
+    dependencies:
+      loose-envify: 1.4.0
+      object-assign: 4.1.1
+      react-is: 16.13.1
+    dev: true
+
+  /property-information@5.6.0:
+    resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==}
+    dependencies:
+      xtend: 4.0.2
+    dev: true
+
+  /proxy-addr@2.0.7:
+    resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
+    engines: {node: '>= 0.10'}
+    dependencies:
+      forwarded: 0.2.0
+      ipaddr.js: 1.9.1
+    dev: true
+
+  /proxy-from-env@1.1.0:
+    resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}
+    dev: true
+
+  /pump@2.0.1:
+    resolution: {integrity: sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==}
+    dependencies:
+      end-of-stream: 1.4.4
+      once: 1.4.0
+    dev: true
+
+  /pump@3.0.0:
+    resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==}
+    dependencies:
+      end-of-stream: 1.4.4
+      once: 1.4.0
+    dev: true
+
+  /pumpify@1.5.1:
+    resolution: {integrity: sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==}
+    dependencies:
+      duplexify: 3.7.1
+      inherits: 2.0.4
+      pump: 2.0.1
+    dev: true
+
+  /puppeteer-core@2.1.1:
+    resolution: {integrity: sha512-n13AWriBMPYxnpbb6bnaY5YoY6rGj8vPLrz6CZF3o0qJNEwlcfJVxBzYZ0NJsQ21UbdJoijPCDrM++SUVEz7+w==}
+    engines: {node: '>=8.16.0'}
+    dependencies:
+      '@types/mime-types': 2.1.1
+      debug: 4.3.4
+      extract-zip: 1.7.0
+      https-proxy-agent: 4.0.0
+      mime: 2.6.0
+      mime-types: 2.1.35
+      progress: 2.0.3
+      proxy-from-env: 1.1.0
+      rimraf: 2.7.1
+      ws: 6.2.2
+    transitivePeerDependencies:
+      - bufferutil
+      - supports-color
+      - utf-8-validate
+    dev: true
+
+  /qs@6.11.0:
+    resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
+    engines: {node: '>=0.6'}
+    dependencies:
+      side-channel: 1.0.4
+    dev: true
+
+  /qs@6.11.1:
+    resolution: {integrity: sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==}
+    engines: {node: '>=0.6'}
+    dependencies:
+      side-channel: 1.0.4
+
+  /queue-microtask@1.2.3:
+    resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
+    dev: true
+
+  /ramda@0.28.0:
+    resolution: {integrity: sha512-9QnLuG/kPVgWvMQ4aODhsBUFKOUmnbUnsSXACv+NCQZcHbeb+v8Lodp8OVxtRULN1/xOyYLLaL6npE6dMq5QTA==}
+
+  /range-parser@1.2.1:
+    resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
+    engines: {node: '>= 0.6'}
+    dev: true
+
+  /raw-body@2.5.1:
+    resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==}
+    engines: {node: '>= 0.8'}
+    dependencies:
+      bytes: 3.1.2
+      http-errors: 2.0.0
+      iconv-lite: 0.4.24
+      unpipe: 1.0.0
+    dev: true
+
+  /react-colorful@5.6.1(react-dom@18.2.0)(react@18.2.0):
+    resolution: {integrity: sha512-1exovf0uGTGyq5mXQT0zgQ80uvj2PCwvF8zY1RN9/vbJVSjSo3fsB/4L3ObbF7u70NduSiK4xu4Y6q1MHoUGEw==}
+    peerDependencies:
+      react: '>=16.8.0'
+      react-dom: '>=16.8.0'
+    dependencies:
+      react: 18.2.0
+      react-dom: 18.2.0(react@18.2.0)
+    dev: true
+
+  /react-docgen-typescript@2.2.2(typescript@5.0.4):
+    resolution: {integrity: sha512-tvg2ZtOpOi6QDwsb3GZhOjDkkX0h8Z2gipvTg6OVMUyoYoURhEiRNePT8NZItTVCDh39JJHnLdfCOkzoLbFnTg==}
+    peerDependencies:
+      typescript: '>= 4.3.x'
+    dependencies:
+      typescript: 5.0.4
+    dev: true
+
+  /react-docgen@6.0.0-alpha.3:
+    resolution: {integrity: sha512-DDLvB5EV9As1/zoUsct6Iz2Cupw9FObEGD3DMcIs3EDFIoSKyz8FZtoWj3Wj+oodrU4/NfidN0BL5yrapIcTSA==}
+    engines: {node: '>=12.0.0'}
+    hasBin: true
+    dependencies:
+      '@babel/core': 7.21.4
+      '@babel/generator': 7.21.4
+      ast-types: 0.14.2
+      commander: 2.20.3
+      doctrine: 3.0.0
+      estree-to-babel: 3.2.1
+      neo-async: 2.6.2
+      node-dir: 0.1.17
+      resolve: 1.22.2
+      strip-indent: 3.0.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /react-dom@18.2.0(react@18.2.0):
+    resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==}
+    peerDependencies:
+      react: ^18.2.0
+    dependencies:
+      loose-envify: 1.4.0
+      react: 18.2.0
+      scheduler: 0.23.0
+
+  /react-element-to-jsx-string@15.0.0(react-dom@18.2.0)(react@18.2.0):
+    resolution: {integrity: sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ==}
+    peerDependencies:
+      react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0
+      react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0
+    dependencies:
+      '@base2/pretty-print-object': 1.0.1
+      is-plain-object: 5.0.0
+      react: 18.2.0
+      react-dom: 18.2.0(react@18.2.0)
+      react-is: 18.1.0
+    dev: true
+
+  /react-inspector@6.0.1(react@18.2.0):
+    resolution: {integrity: sha512-cxKSeFTf7jpSSVddm66sKdolG90qURAX3g1roTeaN6x0YEbtWc8JpmFN9+yIqLNH2uEkYerWLtJZIXRIFuBKrg==}
+    peerDependencies:
+      react: ^16.8.4 || ^17.0.0 || ^18.0.0
+    dependencies:
+      react: 18.2.0
+    dev: true
+
+  /react-is@16.13.1:
+    resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==}
+    dev: true
+
+  /react-is@18.1.0:
+    resolution: {integrity: sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==}
+    dev: true
+
+  /react-refresh@0.14.0:
+    resolution: {integrity: sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /react-syntax-highlighter@15.5.0(react@18.2.0):
+    resolution: {integrity: sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg==}
+    peerDependencies:
+      react: '>= 0.14.0'
+    dependencies:
+      '@babel/runtime': 7.21.0
+      highlight.js: 10.7.3
+      lowlight: 1.20.0
+      prismjs: 1.29.0
+      react: 18.2.0
+      refractor: 3.6.0
+    dev: true
+
+  /react@18.2.0:
+    resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      loose-envify: 1.4.0
+
+  /read-pkg-up@7.0.1:
+    resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==}
+    engines: {node: '>=8'}
+    dependencies:
+      find-up: 4.1.0
+      read-pkg: 5.2.0
+      type-fest: 0.8.1
+    dev: true
+
+  /read-pkg@5.2.0:
+    resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==}
+    engines: {node: '>=8'}
+    dependencies:
+      '@types/normalize-package-data': 2.4.1
+      normalize-package-data: 2.5.0
+      parse-json: 5.2.0
+      type-fest: 0.6.0
+    dev: true
+
+  /readable-stream@2.3.8:
+    resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
+    dependencies:
+      core-util-is: 1.0.3
+      inherits: 2.0.4
+      isarray: 1.0.0
+      process-nextick-args: 2.0.1
+      safe-buffer: 5.1.2
+      string_decoder: 1.1.1
+      util-deprecate: 1.0.2
+    dev: true
+
+  /readable-stream@3.6.2:
+    resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
+    engines: {node: '>= 6'}
+    dependencies:
+      inherits: 2.0.4
+      string_decoder: 1.3.0
+      util-deprecate: 1.0.2
+    dev: true
+
+  /readdirp@3.6.0:
+    resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
+    engines: {node: '>=8.10.0'}
+    dependencies:
+      picomatch: 2.3.1
+    dev: true
+
+  /recast@0.21.5:
+    resolution: {integrity: sha512-hjMmLaUXAm1hIuTqOdeYObMslq/q+Xff6QE3Y2P+uoHAg2nmVlLBps2hzh1UJDdMtDTMXOFewK6ky51JQIeECg==}
+    engines: {node: '>= 4'}
+    dependencies:
+      ast-types: 0.15.2
+      esprima: 4.0.1
+      source-map: 0.6.1
+      tslib: 2.5.0
+    dev: true
+
+  /recast@0.23.1:
+    resolution: {integrity: sha512-RokaBcoxSjXUDzz1TXSZmZsSW6ZpLmlA3GGqJ8uuTrQ9hZhEz+4Tpsc+gRvYRJ2BU4H+ZyUlg91eSGDw7bwy7g==}
+    engines: {node: '>= 4'}
+    dependencies:
+      assert: 2.0.0
+      ast-types: 0.16.1
+      esprima: 4.0.1
+      source-map: 0.6.1
+      tslib: 2.5.0
+    dev: true
+
+  /rechoir@0.6.2:
+    resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==}
+    engines: {node: '>= 0.10'}
+    dependencies:
+      resolve: 1.22.2
+    dev: true
+
+  /refractor@3.6.0:
+    resolution: {integrity: sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==}
+    dependencies:
+      hastscript: 6.0.0
+      parse-entities: 2.0.0
+      prismjs: 1.27.0
+    dev: true
+
+  /regenerate-unicode-properties@10.1.0:
+    resolution: {integrity: sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==}
+    engines: {node: '>=4'}
+    dependencies:
+      regenerate: 1.4.2
+    dev: true
+
+  /regenerate@1.4.2:
+    resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==}
+    dev: true
+
+  /regenerator-runtime@0.13.11:
+    resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==}
+    dev: true
+
+  /regenerator-transform@0.15.1:
+    resolution: {integrity: sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==}
+    dependencies:
+      '@babel/runtime': 7.21.0
+    dev: true
+
+  /regexpu-core@5.3.2:
+    resolution: {integrity: sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==}
+    engines: {node: '>=4'}
+    dependencies:
+      '@babel/regjsgen': 0.8.0
+      regenerate: 1.4.2
+      regenerate-unicode-properties: 10.1.0
+      regjsparser: 0.9.1
+      unicode-match-property-ecmascript: 2.0.0
+      unicode-match-property-value-ecmascript: 2.1.0
+    dev: true
+
+  /regjsparser@0.9.1:
+    resolution: {integrity: sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==}
+    hasBin: true
+    dependencies:
+      jsesc: 0.5.0
+    dev: true
+
+  /remark-external-links@8.0.0:
+    resolution: {integrity: sha512-5vPSX0kHoSsqtdftSHhIYofVINC8qmp0nctkeU9YoJwV3YfiBRiI6cbFRJ0oI/1F9xS+bopXG0m2KS8VFscuKA==}
+    dependencies:
+      extend: 3.0.2
+      is-absolute-url: 3.0.3
+      mdast-util-definitions: 4.0.0
+      space-separated-tokens: 1.1.5
+      unist-util-visit: 2.0.3
+    dev: true
+
+  /remark-slug@6.1.0:
+    resolution: {integrity: sha512-oGCxDF9deA8phWvxFuyr3oSJsdyUAxMFbA0mZ7Y1Sas+emILtO+e5WutF9564gDsEN4IXaQXm5pFo6MLH+YmwQ==}
+    dependencies:
+      github-slugger: 1.5.0
+      mdast-util-to-string: 1.1.0
+      unist-util-visit: 2.0.3
+    dev: true
+
+  /require-directory@2.1.1:
+    resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /resolve-from@5.0.0:
+    resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /resolve@1.22.2:
+    resolution: {integrity: sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==}
+    hasBin: true
+    dependencies:
+      is-core-module: 2.12.0
+      path-parse: 1.0.7
+      supports-preserve-symlinks-flag: 1.0.0
+    dev: true
+
+  /reusify@1.0.4:
+    resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
+    engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
+    dev: true
+
+  /rimraf@2.6.3:
+    resolution: {integrity: sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==}
+    hasBin: true
+    dependencies:
+      glob: 7.2.3
+    dev: true
+
+  /rimraf@2.7.1:
+    resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==}
+    hasBin: true
+    dependencies:
+      glob: 7.2.3
+    dev: true
+
+  /rimraf@3.0.2:
+    resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
+    hasBin: true
+    dependencies:
+      glob: 7.2.3
+    dev: true
+
+  /rollup@3.20.2:
+    resolution: {integrity: sha512-3zwkBQl7Ai7MFYQE0y1MeQ15+9jsi7XxfrqwTb/9EK8D9C9+//EBR4M+CuA1KODRaNbFez/lWxA5vhEGZp4MUg==}
+    engines: {node: '>=14.18.0', npm: '>=8.0.0'}
+    hasBin: true
+    optionalDependencies:
+      fsevents: 2.3.2
+    dev: true
+
+  /run-parallel@1.2.0:
+    resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
+    dependencies:
+      queue-microtask: 1.2.3
+    dev: true
+
+  /rxjs@7.8.0:
+    resolution: {integrity: sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==}
+    dependencies:
+      tslib: 2.5.0
+    dev: false
+
+  /safe-buffer@5.1.1:
+    resolution: {integrity: sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==}
+    dev: true
+
+  /safe-buffer@5.1.2:
+    resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
+    dev: true
+
+  /safe-buffer@5.2.1:
+    resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
+    dev: true
+
+  /safer-buffer@2.1.2:
+    resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+    dev: true
+
+  /scheduler@0.23.0:
+    resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==}
+    dependencies:
+      loose-envify: 1.4.0
+
+  /semver@5.7.1:
+    resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==}
+    hasBin: true
+    dev: true
+
+  /semver@6.3.0:
+    resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==}
+    hasBin: true
+    dev: true
+
+  /semver@7.0.0:
+    resolution: {integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==}
+    hasBin: true
+    dev: true
+
+  /semver@7.4.0:
+    resolution: {integrity: sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw==}
+    engines: {node: '>=10'}
+    hasBin: true
+    dependencies:
+      lru-cache: 6.0.0
+
+  /send@0.18.0:
+    resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      debug: 2.6.9
+      depd: 2.0.0
+      destroy: 1.2.0
+      encodeurl: 1.0.2
+      escape-html: 1.0.3
+      etag: 1.8.1
+      fresh: 0.5.2
+      http-errors: 2.0.0
+      mime: 1.6.0
+      ms: 2.1.3
+      on-finished: 2.4.1
+      range-parser: 1.2.1
+      statuses: 2.0.1
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /serve-favicon@2.5.0:
+    resolution: {integrity: sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      etag: 1.8.1
+      fresh: 0.5.2
+      ms: 2.1.1
+      parseurl: 1.3.3
+      safe-buffer: 5.1.1
+    dev: true
+
+  /serve-static@1.15.0:
+    resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      encodeurl: 1.0.2
+      escape-html: 1.0.3
+      parseurl: 1.3.3
+      send: 0.18.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /set-blocking@2.0.0:
+    resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==}
+    dev: true
+
+  /setprototypeof@1.2.0:
+    resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
+    dev: true
+
+  /shallow-clone@3.0.1:
+    resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==}
+    engines: {node: '>=8'}
+    dependencies:
+      kind-of: 6.0.3
+    dev: true
+
+  /shebang-command@2.0.0:
+    resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
+    engines: {node: '>=8'}
+    dependencies:
+      shebang-regex: 3.0.0
+    dev: true
+
+  /shebang-regex@3.0.0:
+    resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /shelljs@0.8.5:
+    resolution: {integrity: sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==}
+    engines: {node: '>=4'}
+    hasBin: true
+    dependencies:
+      glob: 7.2.3
+      interpret: 1.4.0
+      rechoir: 0.6.2
+    dev: true
+
+  /side-channel@1.0.4:
+    resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==}
+    dependencies:
+      call-bind: 1.0.2
+      get-intrinsic: 1.2.0
+      object-inspect: 1.12.3
+
+  /signal-exit@3.0.7:
+    resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
+    dev: true
+
+  /simple-update-notifier@1.1.0:
+    resolution: {integrity: sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==}
+    engines: {node: '>=8.10.0'}
+    dependencies:
+      semver: 7.0.0
+    dev: true
+
+  /sisteransi@1.0.5:
+    resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==}
+    dev: true
+
+  /slash@3.0.0:
+    resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /source-map-js@1.0.2:
+    resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /source-map-support@0.5.21:
+    resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
+    dependencies:
+      buffer-from: 1.1.2
+      source-map: 0.6.1
+    dev: true
+
+  /source-map@0.6.1:
+    resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /space-separated-tokens@1.1.5:
+    resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==}
+    dev: true
+
+  /spdx-correct@3.2.0:
+    resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==}
+    dependencies:
+      spdx-expression-parse: 3.0.1
+      spdx-license-ids: 3.0.13
+    dev: true
+
+  /spdx-exceptions@2.3.0:
+    resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==}
+    dev: true
+
+  /spdx-expression-parse@3.0.1:
+    resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==}
+    dependencies:
+      spdx-exceptions: 2.3.0
+      spdx-license-ids: 3.0.13
+    dev: true
+
+  /spdx-license-ids@3.0.13:
+    resolution: {integrity: sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==}
+    dev: true
+
+  /sprintf-js@1.0.3:
+    resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
+    dev: true
+
+  /statuses@2.0.1:
+    resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /store2@2.14.2:
+    resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==}
+
+  /storybook-dark-mode@3.0.0(react-dom@18.2.0)(react@18.2.0):
+    resolution: {integrity: sha512-aeAvqP/mmdccEiCsvx6aw3M0i7mZSiXROsrAsEQN8vl1lAg3FZN+y3Xu/f+ye59wLMRuKJC/JBp7E3/H7vLBRQ==}
+    peerDependencies:
+      react: ^16.8.0 || ^17.0.0 || ^18.0.0
+      react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
+    peerDependenciesMeta:
+      react:
+        optional: true
+      react-dom:
+        optional: true
+    dependencies:
+      '@storybook/addons': 7.0.4(react-dom@18.2.0)(react@18.2.0)
+      '@storybook/api': 7.0.4(react-dom@18.2.0)(react@18.2.0)
+      '@storybook/components': 7.0.3(react-dom@18.2.0)(react@18.2.0)
+      '@storybook/core-events': 7.0.3
+      '@storybook/global': 5.0.0
+      '@storybook/theming': 7.0.3(react-dom@18.2.0)(react@18.2.0)
+      fast-deep-equal: 3.1.3
+      memoizerific: 1.11.3
+      react: 18.2.0
+      react-dom: 18.2.0(react@18.2.0)
+    dev: false
+
+  /storybook@7.0.2:
+    resolution: {integrity: sha512-/XBLhT9Vb14yNBcA9rlW15y+C6IsCA3kx5PKvK9kL10sKCi8invcY94UfCSisXe8HqsO3u6peumo2xpYucKMjw==}
+    hasBin: true
+    dependencies:
+      '@storybook/cli': 7.0.2
+    transitivePeerDependencies:
+      - bufferutil
+      - encoding
+      - supports-color
+      - utf-8-validate
+    dev: true
+
+  /stream-shift@1.0.1:
+    resolution: {integrity: sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==}
+    dev: true
+
+  /string-width@4.2.3:
+    resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
+    engines: {node: '>=8'}
+    dependencies:
+      emoji-regex: 8.0.0
+      is-fullwidth-code-point: 3.0.0
+      strip-ansi: 6.0.1
+    dev: true
+
+  /string_decoder@1.1.1:
+    resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
+    dependencies:
+      safe-buffer: 5.1.2
+    dev: true
+
+  /string_decoder@1.3.0:
+    resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
+    dependencies:
+      safe-buffer: 5.2.1
+    dev: true
+
+  /strip-ansi@6.0.1:
+    resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
+    engines: {node: '>=8'}
+    dependencies:
+      ansi-regex: 5.0.1
+    dev: true
+
+  /strip-final-newline@2.0.0:
+    resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /strip-indent@3.0.0:
+    resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==}
+    engines: {node: '>=8'}
+    dependencies:
+      min-indent: 1.0.1
+    dev: true
+
+  /strip-json-comments@3.1.1:
+    resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /supports-color@5.5.0:
+    resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
+    engines: {node: '>=4'}
+    dependencies:
+      has-flag: 3.0.0
+    dev: true
+
+  /supports-color@7.2.0:
+    resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
+    engines: {node: '>=8'}
+    dependencies:
+      has-flag: 4.0.0
+    dev: true
+
+  /supports-color@8.1.1:
+    resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
+    engines: {node: '>=10'}
+    dependencies:
+      has-flag: 4.0.0
+    dev: true
+
+  /supports-preserve-symlinks-flag@1.0.0:
+    resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /synchronous-promise@2.0.17:
+    resolution: {integrity: sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g==}
+
+  /tar-fs@2.1.1:
+    resolution: {integrity: sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==}
+    dependencies:
+      chownr: 1.1.4
+      mkdirp-classic: 0.5.3
+      pump: 3.0.0
+      tar-stream: 2.2.0
+    dev: true
+
+  /tar-stream@2.2.0:
+    resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
+    engines: {node: '>=6'}
+    dependencies:
+      bl: 4.1.0
+      end-of-stream: 1.4.4
+      fs-constants: 1.0.0
+      inherits: 2.0.4
+      readable-stream: 3.6.2
+    dev: true
+
+  /tar@6.1.13:
+    resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==}
+    engines: {node: '>=10'}
+    dependencies:
+      chownr: 2.0.0
+      fs-minipass: 2.1.0
+      minipass: 4.2.8
+      minizlib: 2.1.2
+      mkdirp: 1.0.4
+      yallist: 4.0.0
+    dev: true
+
+  /telejson@7.1.0:
+    resolution: {integrity: sha512-jFJO4P5gPebZAERPkJsqMAQ0IMA1Hi0AoSfxpnUaV6j6R2SZqlpkbS20U6dEUtA3RUYt2Ak/mTlkQzHH9Rv/hA==}
+    dependencies:
+      memoizerific: 1.11.3
+
+  /temp-dir@2.0.0:
+    resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /temp@0.8.4:
+    resolution: {integrity: sha512-s0ZZzd0BzYv5tLSptZooSjK8oj6C+c19p7Vqta9+6NPOf7r+fxq0cJe6/oN4LTC79sy5NY8ucOJNgwsKCSbfqg==}
+    engines: {node: '>=6.0.0'}
+    dependencies:
+      rimraf: 2.6.3
+    dev: true
+
+  /tempy@1.0.1:
+    resolution: {integrity: sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==}
+    engines: {node: '>=10'}
+    dependencies:
+      del: 6.1.1
+      is-stream: 2.0.1
+      temp-dir: 2.0.0
+      type-fest: 0.16.0
+      unique-string: 2.0.0
+    dev: true
+
+  /test-exclude@6.0.0:
+    resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==}
+    engines: {node: '>=8'}
+    dependencies:
+      '@istanbuljs/schema': 0.1.3
+      glob: 7.2.3
+      minimatch: 3.1.2
+    dev: true
+
+  /through2@2.0.5:
+    resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==}
+    dependencies:
+      readable-stream: 2.3.8
+      xtend: 4.0.2
+    dev: true
+
+  /tmpl@1.0.5:
+    resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==}
+    dev: true
+
+  /to-fast-properties@2.0.0:
+    resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==}
+    engines: {node: '>=4'}
+
+  /to-regex-range@5.0.1:
+    resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
+    engines: {node: '>=8.0'}
+    dependencies:
+      is-number: 7.0.0
+    dev: true
+
+  /toidentifier@1.0.1:
+    resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
+    engines: {node: '>=0.6'}
+    dev: true
+
+  /tr46@0.0.3:
+    resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
+    dev: true
+
+  /ts-dedent@2.2.0:
+    resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==}
+    engines: {node: '>=6.10'}
+
+  /tslib@2.5.0:
+    resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==}
+
+  /type-check@0.3.2:
+    resolution: {integrity: sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      prelude-ls: 1.1.2
+    dev: true
+
+  /type-fest@0.16.0:
+    resolution: {integrity: sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /type-fest@0.20.2:
+    resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /type-fest@0.6.0:
+    resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /type-fest@0.8.1:
+    resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /type-fest@2.19.0:
+    resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==}
+    engines: {node: '>=12.20'}
+
+  /type-is@1.6.18:
+    resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
+    engines: {node: '>= 0.6'}
+    dependencies:
+      media-typer: 0.3.0
+      mime-types: 2.1.35
+    dev: true
+
+  /typedarray@0.0.6:
+    resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==}
+    dev: true
+
+  /typescript@5.0.4:
+    resolution: {integrity: sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==}
+    engines: {node: '>=12.20'}
+    hasBin: true
+    dev: true
+
+  /uglify-js@3.17.4:
+    resolution: {integrity: sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==}
+    engines: {node: '>=0.8.0'}
+    hasBin: true
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /unfetch@4.2.0:
+    resolution: {integrity: sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==}
+    dev: true
+
+  /unicode-canonical-property-names-ecmascript@2.0.0:
+    resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /unicode-match-property-ecmascript@2.0.0:
+    resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==}
+    engines: {node: '>=4'}
+    dependencies:
+      unicode-canonical-property-names-ecmascript: 2.0.0
+      unicode-property-aliases-ecmascript: 2.1.0
+    dev: true
+
+  /unicode-match-property-value-ecmascript@2.1.0:
+    resolution: {integrity: sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /unicode-property-aliases-ecmascript@2.1.0:
+    resolution: {integrity: sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /unique-string@2.0.0:
+    resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==}
+    engines: {node: '>=8'}
+    dependencies:
+      crypto-random-string: 2.0.0
+    dev: true
+
+  /unist-util-is@4.1.0:
+    resolution: {integrity: sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==}
+    dev: true
+
+  /unist-util-visit-parents@3.1.1:
+    resolution: {integrity: sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==}
+    dependencies:
+      '@types/unist': 2.0.6
+      unist-util-is: 4.1.0
+    dev: true
+
+  /unist-util-visit@2.0.3:
+    resolution: {integrity: sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==}
+    dependencies:
+      '@types/unist': 2.0.6
+      unist-util-is: 4.1.0
+      unist-util-visit-parents: 3.1.1
+    dev: true
+
+  /universalify@2.0.0:
+    resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
+    engines: {node: '>= 10.0.0'}
+
+  /unpipe@1.0.0:
+    resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /unplugin@0.10.2:
+    resolution: {integrity: sha512-6rk7GUa4ICYjae5PrAllvcDeuT8pA9+j5J5EkxbMFaV+SalHhxZ7X2dohMzu6C3XzsMT+6jwR/+pwPNR3uK9MA==}
+    dependencies:
+      acorn: 8.8.2
+      chokidar: 3.5.3
+      webpack-sources: 3.2.3
+      webpack-virtual-modules: 0.4.6
+    dev: true
+
+  /untildify@4.0.0:
+    resolution: {integrity: sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /update-browserslist-db@1.0.10(browserslist@4.21.5):
+    resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==}
+    hasBin: true
+    peerDependencies:
+      browserslist: '>= 4.21.0'
+    dependencies:
+      browserslist: 4.21.5
+      escalade: 3.1.1
+      picocolors: 1.0.0
+    dev: true
+
+  /use-resize-observer@9.1.0(react-dom@18.2.0)(react@18.2.0):
+    resolution: {integrity: sha512-R25VqO9Wb3asSD4eqtcxk8sJalvIOYBqS8MNZlpDSQ4l4xMQxC/J7Id9HoTqPq8FwULIn0PVW+OAqF2dyYbjow==}
+    peerDependencies:
+      react: 16.8.0 - 18
+      react-dom: 16.8.0 - 18
+    dependencies:
+      '@juggle/resize-observer': 3.4.0
+      react: 18.2.0
+      react-dom: 18.2.0(react@18.2.0)
+
+  /util-deprecate@1.0.2:
+    resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
+
+  /util@0.12.5:
+    resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==}
+    dependencies:
+      inherits: 2.0.4
+      is-arguments: 1.1.1
+      is-generator-function: 1.0.10
+      is-typed-array: 1.1.10
+      which-typed-array: 1.1.9
+    dev: true
+
+  /utils-merge@1.0.1:
+    resolution: {integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=}
+    engines: {node: '>= 0.4.0'}
+    dev: true
+
+  /uuid-browser@3.1.0:
+    resolution: {integrity: sha512-dsNgbLaTrd6l3MMxTtouOCFw4CBFc/3a+GgYA2YyrJvyQ1u6q4pcu3ktLoUZ/VN/Aw9WsauazbgsgdfVWgAKQg==}
+    dev: true
+
+  /v8-to-istanbul@9.1.0:
+    resolution: {integrity: sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==}
+    engines: {node: '>=10.12.0'}
+    dependencies:
+      '@jridgewell/trace-mapping': 0.3.18
+      '@types/istanbul-lib-coverage': 2.0.4
+      convert-source-map: 1.9.0
+    dev: true
+
+  /validate-npm-package-license@3.0.4:
+    resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==}
+    dependencies:
+      spdx-correct: 3.2.0
+      spdx-expression-parse: 3.0.1
+    dev: true
+
+  /vary@1.1.2:
+    resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
+    engines: {node: '>= 0.8'}
+    dev: true
+
+  /vite-plugin-static-copy@0.13.1(vite@4.2.0):
+    resolution: {integrity: sha512-KwIcGBT1aOxSq+laK3VmSngoEa3HXWj/6ZEXdv+y59eZ7p/XSuPahoDo+CfYW22JjTdnstgeKWiX+78KNgDu6g==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    peerDependencies:
+      vite: ^3.0.0 || ^4.0.0
+    dependencies:
+      chokidar: 3.5.3
+      fast-glob: 3.2.12
+      fs-extra: 11.1.1
+      picocolors: 1.0.0
+      vite: 4.2.0
+    dev: true
+
+  /vite@4.2.0:
+    resolution: {integrity: sha512-AbDTyzzwuKoRtMIRLGNxhLRuv1FpRgdIw+1y6AQG73Q5+vtecmvzKo/yk8X/vrHDpETRTx01ABijqUHIzBXi0g==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    hasBin: true
+    peerDependencies:
+      '@types/node': '>= 14'
+      less: '*'
+      sass: '*'
+      stylus: '*'
+      sugarss: '*'
+      terser: ^5.4.0
+    peerDependenciesMeta:
+      '@types/node':
+        optional: true
+      less:
+        optional: true
+      sass:
+        optional: true
+      stylus:
+        optional: true
+      sugarss:
+        optional: true
+      terser:
+        optional: true
+    dependencies:
+      esbuild: 0.17.16
+      postcss: 8.4.21
+      resolve: 1.22.2
+      rollup: 3.20.2
+    optionalDependencies:
+      fsevents: 2.3.2
+    dev: true
+
+  /walker@1.0.8:
+    resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==}
+    dependencies:
+      makeerror: 1.0.12
+    dev: true
+
+  /watchpack@2.4.0:
+    resolution: {integrity: sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==}
+    engines: {node: '>=10.13.0'}
+    dependencies:
+      glob-to-regexp: 0.4.1
+      graceful-fs: 4.2.11
+    dev: true
+
+  /webidl-conversions@3.0.1:
+    resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
+    dev: true
+
+  /webpack-sources@3.2.3:
+    resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==}
+    engines: {node: '>=10.13.0'}
+    dev: true
+
+  /webpack-virtual-modules@0.4.6:
+    resolution: {integrity: sha512-5tyDlKLqPfMqjT3Q9TAqf2YqjwmnUleZwzJi1A5qXnlBCdj2AtOJ6wAWdglTIDOPgOiOrXeBeFcsQ8+aGQ6QbA==}
+    dev: true
+
+  /whatwg-url@5.0.0:
+    resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
+    dependencies:
+      tr46: 0.0.3
+      webidl-conversions: 3.0.1
+    dev: true
+
+  /which-typed-array@1.1.9:
+    resolution: {integrity: sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      available-typed-arrays: 1.0.5
+      call-bind: 1.0.2
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-tostringtag: 1.0.0
+      is-typed-array: 1.1.10
+    dev: true
+
+  /which@2.0.2:
+    resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
+    engines: {node: '>= 8'}
+    hasBin: true
+    dependencies:
+      isexe: 2.0.0
+    dev: true
+
+  /wide-align@1.1.5:
+    resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==}
+    dependencies:
+      string-width: 4.2.3
+    dev: true
+
+  /widest-line@3.1.0:
+    resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==}
+    engines: {node: '>=8'}
+    dependencies:
+      string-width: 4.2.3
+    dev: true
+
+  /word-wrap@1.2.3:
+    resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /wordwrap@1.0.0:
+    resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==}
+    dev: true
+
+  /wrap-ansi@7.0.0:
+    resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
+    engines: {node: '>=10'}
+    dependencies:
+      ansi-styles: 4.3.0
+      string-width: 4.2.3
+      strip-ansi: 6.0.1
+    dev: true
+
+  /wrappy@1.0.2:
+    resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
+    dev: true
+
+  /write-file-atomic@2.4.3:
+    resolution: {integrity: sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==}
+    dependencies:
+      graceful-fs: 4.2.11
+      imurmurhash: 0.1.4
+      signal-exit: 3.0.7
+    dev: true
+
+  /write-file-atomic@4.0.2:
+    resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==}
+    engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
+    dependencies:
+      imurmurhash: 0.1.4
+      signal-exit: 3.0.7
+    dev: true
+
+  /ws@6.2.2:
+    resolution: {integrity: sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==}
+    peerDependencies:
+      bufferutil: ^4.0.1
+      utf-8-validate: ^5.0.2
+    peerDependenciesMeta:
+      bufferutil:
+        optional: true
+      utf-8-validate:
+        optional: true
+    dependencies:
+      async-limiter: 1.0.1
+    dev: true
+
+  /ws@8.13.0:
+    resolution: {integrity: sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==}
+    engines: {node: '>=10.0.0'}
+    peerDependencies:
+      bufferutil: ^4.0.1
+      utf-8-validate: '>=5.0.2'
+    peerDependenciesMeta:
+      bufferutil:
+        optional: true
+      utf-8-validate:
+        optional: true
+    dev: true
+
+  /xtend@4.0.2:
+    resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==}
+    engines: {node: '>=0.4'}
+    dev: true
+
+  /y18n@5.0.8:
+    resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /yallist@3.1.1:
+    resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
+    dev: true
+
+  /yallist@4.0.0:
+    resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
+
+  /yargs-parser@20.2.9:
+    resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /yargs@16.2.0:
+    resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==}
+    engines: {node: '>=10'}
+    dependencies:
+      cliui: 7.0.4
+      escalade: 3.1.1
+      get-caller-file: 2.0.5
+      require-directory: 2.1.1
+      string-width: 4.2.3
+      y18n: 5.0.8
+      yargs-parser: 20.2.9
+    dev: true
+
+  /yauzl@2.10.0:
+    resolution: {integrity: sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==}
+    dependencies:
+      buffer-crc32: 0.2.13
+      fd-slicer: 1.1.0
+    dev: true
+
+  /yocto-queue@0.1.0:
+    resolution: {integrity: 
sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true diff --git a/dac/ui-lib/storybook/stories/Components/Avatar.stories.tsx b/dac/ui-lib/storybook/stories/Components/Avatar.stories.tsx new file mode 100644 index 0000000000..5843c330d9 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/Avatar.stories.tsx @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta } from "@storybook/react"; + +import { Avatar, AvatarGroup } from "../../../components"; + +export default { + title: "Components/Avatar", + component: Avatar, +} as Meta; + +const users = [ + "AB", + "CD", + "EF", + "GH", + "IJ", + "KL", + "MN", + "OP", + "QR", + "ST", + "UV", + "WX", + "YZ", +]; + +export const Default = () => { + return ( + + {users.map((user) => ( + + ))} + + ); +}; + +Default.storyName = "Avatar"; diff --git a/dac/ui-lib/src/stories/Button.stories.tsx b/dac/ui-lib/storybook/stories/Components/Button.stories.tsx similarity index 63% rename from dac/ui-lib/src/stories/Button.stories.tsx rename to dac/ui-lib/storybook/stories/Components/Button.stories.tsx index 9704c991fc..5396d77acb 100644 --- a/dac/ui-lib/src/stories/Button.stories.tsx +++ b/dac/ui-lib/storybook/stories/Components/Button.stories.tsx @@ -13,21 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import React from "react"; -import { ComponentStory, ComponentMeta } from "@storybook/react"; +import { StoryFn, Meta } from "@storybook/react"; -import { Button } from "../../components/Button/Button"; +import { Button } from "../../../components/Button/Button"; export default { title: "Components/Button", component: Button, -} as ComponentMeta; +} as Meta; -const Template: ComponentStory = (args) => ( - +const Template: StoryFn = (args) => ( + ); -export const Primary = Template.bind({}); -Primary.args = { +export const Default = Template.bind({}); +Default.args = { variant: "primary", + disabled: false, }; + +Default.storyName = "Button"; diff --git a/dac/ui-lib/storybook/stories/Components/ButtonGroup.stories.tsx b/dac/ui-lib/storybook/stories/Components/ButtonGroup.stories.tsx new file mode 100644 index 0000000000..8cd46e4472 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/ButtonGroup.stories.tsx @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Button } from "../../../components"; + +export default { + title: "Components/ButtonGroup", +}; + +export const Default = () => { + return ( +
        + + +
        + ); +}; + +Default.storyName = "ButtonGroup"; diff --git a/dac/ui-lib/themes/dremio-light/components/tag.scss b/dac/ui-lib/storybook/stories/Components/DescriptionList.stories.tsx similarity index 58% rename from dac/ui-lib/themes/dremio-light/components/tag.scss rename to dac/ui-lib/storybook/stories/Components/DescriptionList.stories.tsx index acfb6f01df..81030c5201 100644 --- a/dac/ui-lib/themes/dremio-light/components/tag.scss +++ b/dac/ui-lib/storybook/stories/Components/DescriptionList.stories.tsx @@ -14,20 +14,27 @@ * limitations under the License. */ -@use "../../base/components/tag"; +import { Avatar } from "../../../components"; -.dremio-tag { - --dremio--tag--background: var(--dremio--color--neutral--150); - --dremio--tag--border: none; - --dremio--tag--border-radius: var(--dremio--radius--1); - --dremio--tag--color: var(--dremio--color--text--main); - --dremio--tag--font-size: 13px; - --dremio--tag--font-weight: 500; - --dremio--tag--gap: 2px; - --dremio--tag--padding: var(--dremio--spacing--05); +export default { + title: "Components/DescriptionList", +}; - &--state-success { - background: var(--dremio--color--primary--200); - color: var(--dremio--color--primary--800); - } -} +export const Default = () => { + return ( +
        +
        Created by
        +
        +  Some User +
        +
        Cloud
        +
        aws-test-1
        +
        Created on
        +
        January 1, 2023, 12:00 PM
        +
        Engines
        +
        3
        +
        + ); +}; + +Default.storyName = "DescriptionList"; diff --git a/dac/ui-lib/src/stories/CopyButton.stories.tsx b/dac/ui-lib/storybook/stories/Components/IconButton.stories.tsx similarity index 59% rename from dac/ui-lib/src/stories/CopyButton.stories.tsx rename to dac/ui-lib/storybook/stories/Components/IconButton.stories.tsx index 2403a767be..c7785e2995 100644 --- a/dac/ui-lib/src/stories/CopyButton.stories.tsx +++ b/dac/ui-lib/storybook/stories/Components/IconButton.stories.tsx @@ -13,20 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import React from "react"; -import { ComponentStory, ComponentMeta } from "@storybook/react"; -import { CopyButton } from "../../components/CopyButton"; +import { StoryFn, Meta } from "@storybook/react"; +import { IconButton } from "../../../components"; export default { - title: "Components/Copy Button", - component: CopyButton, -} as ComponentMeta; + title: "Components/IconButton", + component: IconButton, +} as Meta; -const Template: ComponentStory = (args) => ( - -); - -export const Primary = Template.bind({}); -Primary.args = { - contents: "Hello world", +export const Default: StoryFn = () => { + return ( + + + + ); }; + +Default.storyName = "IconButton"; diff --git a/dac/ui-lib/storybook/stories/Components/ModalContainer.stories.tsx b/dac/ui-lib/storybook/stories/Components/ModalContainer.stories.tsx new file mode 100644 index 0000000000..bc4254cd12 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/ModalContainer.stories.tsx @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { + Button, + DialogContent, + ModalContainer, + useModalContainer, +} from "../../../components"; + +export default { + title: "Components/ModalContainer", + component: ModalContainer, +} as Meta; + +export const Default: StoryFn = () => { + const myModal = useModalContainer(); + return ( +
        + + + + Close + + } + > +
        +

        + {" "} + Lorem ipsum dolor sit amet, consectetur + adipiscing elit. Nam neque ante, porttitor vel convallis in, + ullamcorper sed arcu. In ultrices magna nec auctor feugiat. + Quisque aliquam, nulla et scelerisque condimentum, magna quam + condimentum erat, non ultrices est arcu in lorem. Vivamus nec mi + auctor, ornare dolor vitae, feugiat mi. Nam sodales metus sed + tortor iaculis, quis convallis tellus ornare. Phasellus ac + faucibus arcu. Suspendisse nec ipsum augue. Nullam tempus tellus a + enim luctus luctus. Vestibulum eu nibh et velit varius tincidunt + quis vitae lectus. Cras cursus turpis arcu, quis facilisis sem + eleifend ac. Suspendisse aliquet, lacus eu auctor pellentesque, + lorem odio venenatis tortor, quis mollis libero ipsum vitae massa. + Sed ullamcorper imperdiet felis, id dignissim nunc elementum ut. + Pellentesque tincidunt felis vitae pulvinar varius. Nunc a erat + congue orci tristique malesuada. Nullam dictum facilisis pretium. + Duis ligula mauris, aliquam ut tortor elementum, euismod euismod + mi. +

        +

        + Sed nec dui magna. Donec ultricies feugiat est. Duis finibus nunc + lectus, at placerat purus venenatis vitae. Fusce eu purus in ante + elementum interdum. Proin posuere non erat a rutrum. Nullam auctor + tortor a hendrerit consequat. Duis tempor volutpat luctus. Nam + ipsum lorem, ornare sit amet justo quis, pharetra dapibus nulla. +

        +

        + Sed feugiat justo a placerat vulputate. Class aptent taciti + sociosqu ad litora torquent per conubia nostra, per inceptos + himenaeos. Sed at auctor arcu. Curabitur quis condimentum justo. + Nulla eu vehicula nibh. Praesent vel congue dolor. Aenean mattis + erat et arcu vehicula, id interdum nulla cursus. Donec efficitur + ultricies mi quis vestibulum. Aenean tincidunt tellus sit amet + urna hendrerit condimentum. Aenean metus tellus, efficitur + sagittis volutpat at, imperdiet vitae nisi. Cras dignissim lacus + lectus, a pellentesque tellus varius sed. Nam ligula turpis, + porttitor eu mollis et, tristique ut tellus. Mauris convallis + libero sed ligula malesuada convallis. +

        +

        + Mauris ut ultrices risus. Nullam tincidunt ex eget ligula rhoncus, + sit amet tempus lectus pellentesque. Donec consequat porta orci, + sed cursus justo suscipit quis. Donec risus ligula, sodales sit + amet vehicula non, bibendum et justo. Mauris scelerisque + ullamcorper pretium. Nunc ipsum ex, varius eu ullamcorper ut, + molestie a lacus. Nunc id porta nulla, non rutrum massa. Maecenas + vel ullamcorper neque. In et enim sagittis, laoreet augue eget, + maximus sapien. Maecenas sed ipsum sollicitudin, dapibus nisi + tristique, imperdiet nisl. Proin pulvinar tincidunt odio, eu + posuere nulla congue non. +

        +

        + Vivamus non aliquet neque. Ut dignissim ligula felis, dapibus + molestie nulla lacinia ac. Praesent id vehicula metus. Duis + posuere convallis erat, eget euismod turpis imperdiet eget. Cras + pharetra rutrum risus eget sollicitudin. Integer ornare sit amet + risus et facilisis. Morbi eu mollis erat, ac dictum nibh. + Phasellus nisl libero, lacinia eget consequat vel, rutrum eu leo. + Class aptent taciti sociosqu ad litora torquent per conubia + nostra, per inceptos himenaeos. Pellentesque condimentum aliquet + bibendum. +

        +
        +
        +
        +
        + ); +}; + +Default.storyName = "ModalContainer"; diff --git a/dac/ui-lib/storybook/stories/Components/Skeleton.stories.tsx b/dac/ui-lib/storybook/stories/Components/Skeleton.stories.tsx new file mode 100644 index 0000000000..c320da3389 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/Skeleton.stories.tsx @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta } from "@storybook/react"; + +import { Skeleton } from "../../../components"; + +export default { + title: "Components/Skeleton", + component: Skeleton, +} as Meta; + +export const Default = () => { + return ( +
        +

        + +

        +

        + +

        + + + +
        + ); +}; + +Default.storyName = "Skeleton"; diff --git a/dac/ui-lib/src/stories/Avatar.stories.tsx b/dac/ui-lib/storybook/stories/Components/SpinnerOverlay.stories.tsx similarity index 61% rename from dac/ui-lib/src/stories/Avatar.stories.tsx rename to dac/ui-lib/storybook/stories/Components/SpinnerOverlay.stories.tsx index 525effdfee..88490e9420 100644 --- a/dac/ui-lib/src/stories/Avatar.stories.tsx +++ b/dac/ui-lib/storybook/stories/Components/SpinnerOverlay.stories.tsx @@ -13,19 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import React from "react"; -import { ComponentStory, ComponentMeta } from "@storybook/react"; -import { Avatar } from "../../components/Avatar"; +import { Meta } from "@storybook/react"; + +import { SpinnerOverlay } from "../../../components"; export default { - title: "Components/Avatar", - component: Avatar, -} as ComponentMeta; + title: "Components/SpinnerOverlay", + component: SpinnerOverlay, +} as Meta; -const Template: ComponentStory = (args) => ; +export const Default = (args: any) => { + return ( +
        + +
        + ); +}; -export const WithInitials = Template.bind({}); -WithInitials.args = { - initials: "YF", +Default.args = { + in: true, }; + +Default.storyName = "SpinnerOverlay"; diff --git a/dac/ui-lib/storybook/stories/Components/Table.stories.tsx b/dac/ui-lib/storybook/stories/Components/Table.stories.tsx new file mode 100644 index 0000000000..467d09f013 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/Table.stories.tsx @@ -0,0 +1,271 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { createTable } from "leantable/core"; +import { Column, columnSorting, Table } from "leantable/react"; + +const defaultTable = createTable([columnSorting()]); + +export default { + title: "Components/Table", +}; + +const mockData = [ + { + id: "fa9fc823f800", + user: "sampleuser@dremio.com", + dataset: "Unavailable", + queryType: "JDBC Client", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from Samples."http://samples.dremio.com"."NYC-taxi-trips-iceberg"`, + cost: "1", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "dba4d144b300", + user: "sampleuser@dremio.com", + dataset: "trips_pickupdate", + queryType: "JDBC Client", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from S3.Taxi."trips_pickupdate"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "423fd4154c00", + user: "sampleuser@dremio.com", + dataset: "NYC-taxi-trips-iceberg", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from Samples."samples.dremio.com"."NYC-taxi-trips-iceberg"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "d32f6fb63200", + user: "sampleuser@dremio.com", + dataset: "returns_report", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select r_reason_desc, count(1) num_returns, sum(cr_return_amount) return_amt from catalog_returns cr inner join reason r on cr.cr_reason_fk = r.r_reason_sk group by 1 order by 2 desc`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "8eb73052b100", + user: "sampleuser@dremio.com", + dataset: "trips_pickupdate", + queryType: "JDBC Client", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from S3.Taxi."trips_pickupdate"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "b587c070f300", + user: "sampleuser@dremio.com", + dataset: "NYC-taxi-trips-iceberg", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from Samples."samples.dremio.com"."NYC-taxi-trips-iceberg"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "2b5b347b5500", + user: "sampleuser@dremio.com", + dataset: 
"returns_report", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select r_reason_desc, count(1) num_returns, sum(cr_return_amount) return_amt from catalog_returns cr inner join reason r on cr.cr_reason_fk = r.r_reason_sk group by 1 order by 2 desc`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "51666df1f800", + user: "sampleuser@dremio.com", + dataset: "Unavailable", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from Samples."http://samples.dremio.com"."NYC-taxi-trips-iceberg"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "6bd306e61100", + user: "sampleuser@dremio.com", + dataset: "NYC-taxi-trips-iceberg", + queryType: "UI (run)", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select * from Samples."samples.dremio.com"."NYC-taxi-trips-iceberg"`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, + { + id: "dbe47b75ce00", + user: "sampleuser@dremio.com", + dataset: "returns_report", + queryType: "JDBC Client", + engine: "engine1", + startTime: new Date().toDateString(), + duration: "00:00:08", + sql: `select r_reason_desc, count(1) num_returns, sum(cr_return_amount) return_amt from catalog_returns cr inner join reason r on cr.cr_reason_fk = r.r_reason_sk group by 1 order by 2 desc`, + cost: "20K", + time: "<1s", + scanned: 100, + returned: 100, + }, +]; + +const columns: Column[] = [ + { + id: "id", + class: "leantable-sticky-column leantable-sticky-column--left", + renderHeaderCell: () => "Job ID", + renderCell: (row) => {row.data.id}, + sortable: true, + }, + { + id: "user", + renderHeaderCell: () => "User", + renderCell: (row) => row.data.user, + sortable: true, + }, + { + id: "dataset", + renderHeaderCell: () => "Dataset", + renderCell: (row) => row.data.dataset, + }, + { + id: "queryType", + renderHeaderCell: () => "Query Type", + renderCell: (row) => row.data.queryType, + }, + { + id: "engine", + renderHeaderCell: () => "Engine", + renderCell: (row) => row.data.engine, + }, + { + id: "startTime", + renderHeaderCell: () => "Start Time", + renderCell: (row) => row.data.startTime, + }, + { + id: "duration", + renderHeaderCell: () => "Duration", + renderCell: (row) => row.data.duration, + }, + { + id: "sql", + renderHeaderCell: () => "SQL", + renderCell: (row) => ( + + {row.data.sql} + + ), + }, + { + id: "cost", + renderHeaderCell: () => "Planner Cost Estimate", + renderCell: (row) => row.data.cost, + }, + { + id: "time", + renderHeaderCell: () => "Planning Time", + renderCell: (row) => row.data.time, + }, + { + id: "scanned", + renderHeaderCell: () => "Rows Scanned", + renderCell: (row) => row.data.scanned, + }, + { + id: "returned", + renderHeaderCell: () => "Rows Returned", + renderCell: (row) => row.data.returned, + }, +]; + +const getRow = (i: number) => { + const data = mockData[i]; + return { + id: data.id, + data, + }; +}; + +export const Default = () => { + return ( +
        +
        + + ); +}; + +Default.storyName = "Table"; diff --git a/dac/ui-lib/storybook/stories/Components/Tag.stories.tsx b/dac/ui-lib/storybook/stories/Components/Tag.stories.tsx new file mode 100644 index 0000000000..c5cd5754e0 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/Tag.stories.tsx @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { IconButton, Tag } from "../../../components"; + +export default { + title: "Components/Tag", + component: Tag, +} as Meta; + +export const Default: StoryFn = () => { + return ( +
        +
        + default + brand-subtle + success-subtle + info-subtle + danger-subtle + warning-subtle + warning-bold + danger-bold + info-bold{" "} + success-bold + brand-bold +
        +
        + + default{" "} + alert("Deleted!")} + > + + + + + default{" "} + alert("Deleted!")} + > + + + +
        +
        + ); +}; + +Default.storyName = "Tag"; diff --git a/dac/ui-lib/storybook/stories/Components/Tooltip.stories.tsx b/dac/ui-lib/storybook/stories/Components/Tooltip.stories.tsx new file mode 100644 index 0000000000..cf9b1f1cf8 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Components/Tooltip.stories.tsx @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { Tooltip } from "../../../components"; + +export default { + title: "Components/Tooltip", + component: Tooltip, +} as Meta; + +export const Default: StoryFn = () => { + return ( + + Lorem ipsum dolor sit amet, consectetur adipiscing + elit. Nam neque ante, porttitor vel convallis in, ullamcorper sed + arcu. In ultrices magna nec auctor feugiat. Quisque aliquam, nulla et + scelerisque condimentum, magna quam condimentum erat, non ultrices est + arcu in lorem. Vivamus nec mi auctor, ornare dolor vitae, feugiat mi. + Nam sodales metus sed tortor iaculis, quis convallis tellus ornare. + Phasellus ac faucibus arcu. Suspendisse nec ipsum augue. Nullam tempus + tellus a enim luctus luctus. Vestibulum eu nibh et velit varius + tincidunt quis vitae lectus. Cras cursus turpis arcu, quis facilisis + sem eleifend ac. Suspendisse aliquet, lacus eu auctor pellentesque, + lorem odio venenatis tortor, quis mollis libero ipsum vitae massa. Sed + ullamcorper imperdiet felis, id dignissim nunc elementum ut. + Pellentesque tincidunt felis vitae pulvinar varius. Nunc a erat congue + orci tristique malesuada. Nullam dictum facilisis pretium. Duis ligula + mauris, aliquam ut tortor elementum, euismod euismod mi. +

        + } + interactive + > + Hover me +
        + ); +}; + +Default.storyName = "Tooltip"; diff --git a/dac/ui-lib/storybook/stories/Containers/Card.stories.tsx b/dac/ui-lib/storybook/stories/Containers/Card.stories.tsx new file mode 100644 index 0000000000..2ad83bb964 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Containers/Card.stories.tsx @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta } from "@storybook/react"; + +import { Card } from "../../../components"; + +export default { + title: "Containers/Card", + component: Card, +} as Meta; + +export const Default = () => { + return ( + +

        + Lorem ipsum dolor sit amet, consectetur adipiscing + elit. Nam neque ante, porttitor vel convallis in, ullamcorper sed arcu. + In ultrices magna nec auctor feugiat. Quisque aliquam, nulla et + scelerisque condimentum, magna quam condimentum erat, non ultrices est + arcu in lorem. Vivamus nec mi auctor, ornare dolor vitae, feugiat mi. + Nam sodales metus sed tortor iaculis, quis convallis tellus ornare. + Phasellus ac faucibus arcu. Suspendisse nec ipsum augue. Nullam tempus + tellus a enim luctus luctus. Vestibulum eu nibh et velit varius + tincidunt quis vitae lectus. Cras cursus turpis arcu, quis facilisis sem + eleifend ac. Suspendisse aliquet, lacus eu auctor pellentesque, lorem + odio venenatis tortor, quis mollis libero ipsum vitae massa. Sed + ullamcorper imperdiet felis, id dignissim nunc elementum ut. + Pellentesque tincidunt felis vitae pulvinar varius. Nunc a erat congue + orci tristique malesuada. Nullam dictum facilisis pretium. Duis ligula + mauris, aliquam ut tortor elementum, euismod euismod mi. +

        +
        + ); +}; + +Default.storyName = "Card"; diff --git a/dac/ui-lib/storybook/stories/Containers/DialogContent.stories.tsx b/dac/ui-lib/storybook/stories/Containers/DialogContent.stories.tsx new file mode 100644 index 0000000000..d55ef79ae3 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Containers/DialogContent.stories.tsx @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { Button, DialogContent } from "../../../components"; + +export default { + title: "Containers/DialogContent", + component: DialogContent, +} as Meta; + +export const Default: StoryFn = () => { + return ( + Close} + > +
        +

        + {" "} + Lorem ipsum dolor sit amet, consectetur adipiscing + elit. Nam neque ante, porttitor vel convallis in, ullamcorper sed + arcu. In ultrices magna nec auctor feugiat. Quisque aliquam, nulla et + scelerisque condimentum, magna quam condimentum erat, non ultrices est + arcu in lorem. Vivamus nec mi auctor, ornare dolor vitae, feugiat mi. + Nam sodales metus sed tortor iaculis, quis convallis tellus ornare. + Phasellus ac faucibus arcu. Suspendisse nec ipsum augue. Nullam tempus + tellus a enim luctus luctus. Vestibulum eu nibh et velit varius + tincidunt quis vitae lectus. Cras cursus turpis arcu, quis facilisis + sem eleifend ac. Suspendisse aliquet, lacus eu auctor pellentesque, + lorem odio venenatis tortor, quis mollis libero ipsum vitae massa. Sed + ullamcorper imperdiet felis, id dignissim nunc elementum ut. + Pellentesque tincidunt felis vitae pulvinar varius. Nunc a erat congue + orci tristique malesuada. Nullam dictum facilisis pretium. Duis ligula + mauris, aliquam ut tortor elementum, euismod euismod mi. +

        +

        + Sed nec dui magna. Donec ultricies feugiat est. Duis finibus nunc + lectus, at placerat purus venenatis vitae. Fusce eu purus in ante + elementum interdum. Proin posuere non erat a rutrum. Nullam auctor + tortor a hendrerit consequat. Duis tempor volutpat luctus. Nam ipsum + lorem, ornare sit amet justo quis, pharetra dapibus nulla. +

        +

        + Sed feugiat justo a placerat vulputate. Class aptent taciti sociosqu + ad litora torquent per conubia nostra, per inceptos himenaeos. Sed at + auctor arcu. Curabitur quis condimentum justo. Nulla eu vehicula nibh. + Praesent vel congue dolor. Aenean mattis erat et arcu vehicula, id + interdum nulla cursus. Donec efficitur ultricies mi quis vestibulum. + Aenean tincidunt tellus sit amet urna hendrerit condimentum. Aenean + metus tellus, efficitur sagittis volutpat at, imperdiet vitae nisi. + Cras dignissim lacus lectus, a pellentesque tellus varius sed. Nam + ligula turpis, porttitor eu mollis et, tristique ut tellus. Mauris + convallis libero sed ligula malesuada convallis. +

        +

        + Mauris ut ultrices risus. Nullam tincidunt ex eget ligula rhoncus, sit + amet tempus lectus pellentesque. Donec consequat porta orci, sed + cursus justo suscipit quis. Donec risus ligula, sodales sit amet + vehicula non, bibendum et justo. Mauris scelerisque ullamcorper + pretium. Nunc ipsum ex, varius eu ullamcorper ut, molestie a lacus. + Nunc id porta nulla, non rutrum massa. Maecenas vel ullamcorper neque. + In et enim sagittis, laoreet augue eget, maximus sapien. Maecenas sed + ipsum sollicitudin, dapibus nisi tristique, imperdiet nisl. Proin + pulvinar tincidunt odio, eu posuere nulla congue non. +

        +

        + Vivamus non aliquet neque. Ut dignissim ligula felis, dapibus molestie + nulla lacinia ac. Praesent id vehicula metus. Duis posuere convallis + erat, eget euismod turpis imperdiet eget. Cras pharetra rutrum risus + eget sollicitudin. Integer ornare sit amet risus et facilisis. Morbi + eu mollis erat, ac dictum nibh. Phasellus nisl libero, lacinia eget + consequat vel, rutrum eu leo. Class aptent taciti sociosqu ad litora + torquent per conubia nostra, per inceptos himenaeos. Pellentesque + condimentum aliquet bibendum. +

        +
        +
        + ); +}; + +Default.storyName = "DialogContent"; diff --git a/dac/ui-lib/storybook/stories/Containers/ErrorDisplay.stories.tsx b/dac/ui-lib/storybook/stories/Containers/ErrorDisplay.stories.tsx new file mode 100644 index 0000000000..f9cd0d1715 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Containers/ErrorDisplay.stories.tsx @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryObj } from "@storybook/react"; + +import { ErrorDisplay } from "../../../components"; + +export default { + title: "Containers/ErrorDisplay", + component: ErrorDisplay, +} as Meta; + +export const Default: StoryObj = { + args: { + error: new Error("fetch: Internal Server Error"), + title: "Something went wrong when we tried to fetch the list of jobs.", + production: true, + supportMessage: + "If the problem persists after refreshing this page, please contact Dremio support.", + renderSupportInfo: () =>
<>Session ID: a89ef98g</>
        , + }, +}; + +Default.storyName = "ErrorDisplay"; diff --git a/dac/ui-lib/storybook/stories/Containers/SectionMessage.stories.tsx b/dac/ui-lib/storybook/stories/Containers/SectionMessage.stories.tsx new file mode 100644 index 0000000000..4ca9cbf1d6 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Containers/SectionMessage.stories.tsx @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta } from "@storybook/react"; + +import { SectionMessage } from "../../../components"; +import { MessageDetails } from "../../../components"; + +export default { + title: "Containers/SectionMessage", + component: SectionMessage, +} as Meta; + +export const Default = () => { + return ( +
        + + This is an informational message + + + This is a success message + + + This is a warning message + + + This is a danger message + + + + +
        + ); +}; + +Default.storyName = "SectionMessage"; diff --git a/dac/ui-lib/storybook/stories/Icons.stories.tsx b/dac/ui-lib/storybook/stories/Icons.stories.tsx new file mode 100644 index 0000000000..321872e452 --- /dev/null +++ b/dac/ui-lib/storybook/stories/Icons.stories.tsx @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import icons from "../../iconmanifest.json"; + +export default { + title: "Icons", +}; + +export const Icons = (args) => ( +
          + {icons.map((icon) => ( +
+ + + {icon.name} + +
+ ))} +
        +); diff --git a/dac/ui-lib/storybook/stories/InputControls/Checkbox.stories.tsx b/dac/ui-lib/storybook/stories/InputControls/Checkbox.stories.tsx new file mode 100644 index 0000000000..969a0dbc0b --- /dev/null +++ b/dac/ui-lib/storybook/stories/InputControls/Checkbox.stories.tsx @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryObj } from "@storybook/react"; + +import { Checkbox } from "../../../components"; + +export default { + title: "Input Controls/Checkbox", + component: Checkbox, +} as Meta; + +export const Default: StoryObj = { + args: { + label: "Never expire", + }, +}; + +Default.storyName = "Checkbox"; diff --git a/dac/ui-lib/src/stories/DialogContent.stories.tsx b/dac/ui-lib/storybook/stories/InputControls/Input.stories.tsx similarity index 57% rename from dac/ui-lib/src/stories/DialogContent.stories.tsx rename to dac/ui-lib/storybook/stories/InputControls/Input.stories.tsx index 79bdd08276..4cb0155128 100644 --- a/dac/ui-lib/src/stories/DialogContent.stories.tsx +++ b/dac/ui-lib/storybook/stories/InputControls/Input.stories.tsx @@ -13,21 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import React from "react"; -import { ComponentStory, ComponentMeta } from "@storybook/react"; -import { DialogContent } from "../../components/Dialog/DialogContent"; +import { Meta, StoryFn } from "@storybook/react"; -export default { - title: "Components/DialogContent", - component: DialogContent, -} as ComponentMeta; +import { Input } from "../../../components"; -const Template: ComponentStory = (args) => ( - -); +export default { + title: "Input Controls/Input", + component: Input, +} as Meta; -export const Example = Template.bind({}); -Example.args = { - children:
<>I am a child</>
        , +export const Default: StoryFn = (args: any) => { + return ( +
        +
        + + +
        +
        + ); }; + +Default.storyName = "Input"; diff --git a/dac/ui-lib/.storybook/main.js b/dac/ui-lib/storybook/stories/InputControls/PasswordInput.stories.tsx similarity index 54% rename from dac/ui-lib/.storybook/main.js rename to dac/ui-lib/storybook/stories/InputControls/PasswordInput.stories.tsx index ff5b2f2a41..d98afd4338 100644 --- a/dac/ui-lib/.storybook/main.js +++ b/dac/ui-lib/storybook/stories/InputControls/PasswordInput.stories.tsx @@ -13,22 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -module.exports = { - stories: ["../src/**/*.stories.mdx", "../src/**/*.stories.@(js|jsx|ts|tsx)"], - addons: [ - "storybook-addon-sass-postcss", - { - name: "storybook-addon-sass-postcss", - options: { - rule: { - test: /\.(scss|sass)$/i, - }, - }, - }, - "@storybook/addon-links", - "@storybook/addon-essentials", - "@storybook/addon-interactions", - "storybook-dark-mode", - ], - framework: "@storybook/react", + +import { Meta, StoryFn } from "@storybook/react"; + +import { PasswordInput } from "../../../components"; + +export default { + title: "Input Controls/PasswordInput", + component: PasswordInput, +} as Meta; + +export const Default: StoryFn = (args: any) => { + return ( +
        +
        + + +
        +
        + ); }; + +Default.storyName = "PasswordInput"; diff --git a/dac/ui-lib/storybook/stories/InputControls/Radio.stories.tsx b/dac/ui-lib/storybook/stories/InputControls/Radio.stories.tsx new file mode 100644 index 0000000000..8ddd5d13f4 --- /dev/null +++ b/dac/ui-lib/storybook/stories/InputControls/Radio.stories.tsx @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryObj } from "@storybook/react"; + +import { Radio } from "../../../components"; + +export default { + title: "Input Controls/Radio", + component: Radio, +} as Meta; + +export const Default: StoryObj = { + args: { + label: "Project Data Credentials", + }, +}; + +Default.storyName = "Radio"; diff --git a/dac/ui-lib/storybook/stories/InputControls/SegmentedControl.stories.tsx b/dac/ui-lib/storybook/stories/InputControls/SegmentedControl.stories.tsx new file mode 100644 index 0000000000..697ac8c604 --- /dev/null +++ b/dac/ui-lib/storybook/stories/InputControls/SegmentedControl.stories.tsx @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { + SegmentedControl, + SegmentedControlOption, + useSegmentedControl, +} from "../../../components"; + +export default { + title: "Input Controls/SegmentedControl", + component: SegmentedControl, +} as Meta; + +export const Default: StoryFn = () => { + const segmentedControl1 = useSegmentedControl("1"); + const segmentedControl2 = useSegmentedControl("1"); + return ( +
        + + + Option 1 + + + Option 2 + + + Option 3 + + +
        + + + + + + + + + + + +
        + ); +}; + +Default.storyName = "SegmentedControl"; diff --git a/dac/ui-lib/storybook/stories/InputControls/Select.stories.tsx b/dac/ui-lib/storybook/stories/InputControls/Select.stories.tsx new file mode 100644 index 0000000000..5fa7533852 --- /dev/null +++ b/dac/ui-lib/storybook/stories/InputControls/Select.stories.tsx @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Meta, StoryFn } from "@storybook/react"; + +import { Select, useSelect } from "../../../components"; + +export default { + title: "Input Controls/Select", + component: Select, +} as Meta; + +const regions = [ + { + id: "us-west-1", + title: "US West (N. California)", + }, + { + id: "us-west-2", + title: "US West (Oregon)", + }, + { + id: "us-east-1", + title: "US East (N. Virginia)", + }, + { + id: "us-east-2", + title: "US East (Ohio)", + }, + { + id: "ca-central-1", + title: "Canada (Central)", + }, +] as const; + +const options = [ + "us-west-1", + "us-west-2", + "us-east-1", + "us-east-2", + "ca-central-1", +] as const; + +export const Default: StoryFn = () => { + const regionSelection = useSelect(); + const renderRegion = (id: typeof options[number]) => ( + + {regions.find((region) => region.id === id).title} {id} + + ); + return ( +
        +
        + + setValue(e.target.value)} onOpen={refetchOnOpen} />
        diff --git a/dac/ui/src/components/BreadcrumbBlocks/Custom/CatalogSelector/CatalogSelector.tsx b/dac/ui/src/components/BreadcrumbBlocks/Custom/CatalogSelector/CatalogSelector.tsx index 0719a78c8f..22eafaba7e 100644 --- a/dac/ui/src/components/BreadcrumbBlocks/Custom/CatalogSelector/CatalogSelector.tsx +++ b/dac/ui/src/components/BreadcrumbBlocks/Custom/CatalogSelector/CatalogSelector.tsx @@ -30,7 +30,6 @@ import { ArcticCatalogsResource } from "@app/exports/resources/ArcticCatalogsRes import { useResourceSnapshot } from "smart-resource/react"; import { useDispatch } from "react-redux"; import { resetNessieState } from "@app/actions/nessie/nessie"; -import { handleSonarProjectChange } from "@app/utils/projects"; import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; import { rmProjectBase } from "dremio-ui-common/utilities/projectBase.js"; @@ -45,15 +44,13 @@ const CatalogSelector = () => { const [arcticCatalogs] = useResourceSnapshot(ArcticCatalogsResource); const [sonarProjects] = useResourceSnapshot(SonarProjectsResource); - // fetch if projects/catalogs are null useEffect(() => { - if (!sonarProjects && !isArctic) { + if (!isArctic) { SonarProjectsResource.fetch(); - } - if (!arcticCatalogs && isArctic) { + } else { ArcticCatalogsResource.fetch(); } - }, [sonarProjects, arcticCatalogs, isArctic]); + }, [isArctic]); const refetchOnOpen = () => { isArctic ? ArcticCatalogsResource.fetch() : SonarProjectsResource.fetch(); @@ -73,12 +70,10 @@ const CatalogSelector = () => { if (sonarProjectId === newProject?.id) { return; } + }; - handleSonarProjectChange(newProject, () => - browserHistory.push( - commonPaths.projectBase.link({ projectId: newProject.id }) - ) - ); + const getToRoute = (newProject: Record) => { + return commonPaths.projectBase.link({ projectId: newProject.id }); }; const iconName = isArctic @@ -97,16 +92,24 @@ const CatalogSelector = () => { (project: Record) => project.state === PROJECT_STATES.ACTIVE || project.state === PROJECT_STATES.INACTIVE || + project.state === PROJECT_STATES.DEACTIVATING || project.state === PROJECT_STATES.ACTIVATING ) .map((project: Record) => { return { - label: , - onClick: () => changeProject(project), + label: ( + + ), value: project.id, + onClick: () => changeProject(project), }; }) ); + // eslint-disable-next-line react-hooks/exhaustive-deps }, [projectsToShow]); if (Array.isArray(projectOptions)) { diff --git a/dac/ui/src/components/BreadcrumbBlocks/Custom/CurrentLocCrumb/CurrentLocCrumb.tsx b/dac/ui/src/components/BreadcrumbBlocks/Custom/CurrentLocCrumb/CurrentLocCrumb.tsx index b3449cc0a3..da7eb4922e 100644 --- a/dac/ui/src/components/BreadcrumbBlocks/Custom/CurrentLocCrumb/CurrentLocCrumb.tsx +++ b/dac/ui/src/components/BreadcrumbBlocks/Custom/CurrentLocCrumb/CurrentLocCrumb.tsx @@ -129,6 +129,13 @@ const CurrentLocCrumb = (props: WithRouterProps & CurrentLocCrumbProps) => { iconName = "interface/settings"; } + // Catalog Settings path + if (splitPath[2] === "jobs") { + text = formatMessage({ id: "SideNav.Jobs" }); + to = PATHS.arcticCatalogJobs({ arcticCatalogId: splitPath[1] }); + iconName = "brand/arctic-jobs"; + } + return ( text && to && diff --git a/dac/ui/src/components/Buttons/SettingsBtn.less b/dac/ui/src/components/Buttons/SettingsBtn.less index 26016f7526..ee6e939bd8 100644 --- a/dac/ui/src/components/Buttons/SettingsBtn.less +++ b/dac/ui/src/components/Buttons/SettingsBtn.less @@ -57,7 +57,7 @@ .settings-icon { height: 24px; width: 24px; - color: var(--dremio--color--neutral--600); + color: 
var(--color--neutral--600); } } .drop-down { diff --git a/dac/ui/src/components/Dataset/DatasetItemLabel.js b/dac/ui/src/components/Dataset/DatasetItemLabel.js index 953e814367..dc27161b0d 100644 --- a/dac/ui/src/components/Dataset/DatasetItemLabel.js +++ b/dac/ui/src/components/Dataset/DatasetItemLabel.js @@ -30,6 +30,11 @@ import { stopPropagation } from "@app/utils/reactEventUtils"; import { IconButton, Tooltip } from "dremio-ui-lib"; import DatasetOverlayContent from "./DatasetOverlayContent"; import DatasetSummaryOverlay from "./DatasetSummaryOverlay"; +import WikiDrawerWrapper from "@app/components/WikiDrawerWrapper"; +import { FeatureSwitch } from "@app/exports/components/FeatureSwitch/FeatureSwitch"; +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; +import { getCommonWikiDrawerTitle } from "@app/utils/WikiDrawerUtils"; + import "./DatasetItemLabel.less"; export class DatasetItemLabel extends PureComponent { @@ -58,6 +63,8 @@ export class DatasetItemLabel extends PureComponent { isStarredLimitReached: PropTypes.bool, isSearchItem: PropTypes.bool, showSummaryOverlay: PropTypes.bool, + versionContext: PropTypes.object, + tooltipPlacement: PropTypes.string, }; static defaultProps = { @@ -72,6 +79,8 @@ export class DatasetItemLabel extends PureComponent { isIconHovered: false, isDragInProgress: false, isLoadingData: false, + drawerIsOpen: false, + datasetDetails: Immutable.fromJS({}), }; setOpenOverlay = () => this.setState({ isOpenOverlay: true }); @@ -144,6 +153,33 @@ export class DatasetItemLabel extends PureComponent { ); } + openWikiDrawer = (dataset) => { + this.setState({ + datasetDetails: dataset, + drawerIsOpen: true, + }); + }; + + closeWikiDrawer = (e) => { + e.stopPropagation(); + e.preventDefault(); + this.setState({ + datasetDetails: Immutable.fromJS({}), + drawerIsOpen: false, + }); + }; + + wikiDrawerTitle = () => { + const { fullPath } = this.props; + const { datasetDetails } = this.state; + + return getCommonWikiDrawerTitle( + datasetDetails, + fullPath, + this.closeWikiDrawer + ); + }; + render() { const { fullPath, @@ -161,9 +197,11 @@ export class DatasetItemLabel extends PureComponent { unstarNode, isStarredLimitReached, showSummaryOverlay = true, + versionContext, + tooltipPlacement, } = this.props; - const { isOpenOverlay } = this.state; + const { isOpenOverlay, drawerIsOpen, datasetDetails } = this.state; const iconStyle = iconSize === "LARGE" ? styles.largeIcon : {}; const labelTypeIcon = iconSize === "LARGE" ? `${typeIcon}Large` : typeIcon; @@ -226,18 +264,29 @@ export class DatasetItemLabel extends PureComponent { {showSummaryOverlay && labelTypeIcon !== "Script" && labelTypeIcon !== "FileEmpty" ? ( - - } - > - {renderDataItemLabel()} - + <> + + } + > + {renderDataItemLabel()} + + + ) : ( renderDataItemLabel() )} @@ -251,48 +300,54 @@ export class DatasetItemLabel extends PureComponent { > - {nodeId && ( - { - if (!isStarred && !isStarredLimitReached) { - starNode(nodeId); - } else if (isStarred) { - unstarNode(nodeId); - } - }} - className={ - isStarred - ? "datasetItemLabel-item__starIcon datasetItemLabel-item--starred" - : `datasetItemLabel-item__starIcon resourceTreeNode${ - isStarredLimitReached - ? "--limitReached" - : "--unstarred" - }` - } - > - - - )} + null} + renderDisabled={() => + nodeId && ( + { + if (!isStarred && !isStarredLimitReached) { + starNode(nodeId); + } else if (isStarred) { + unstarNode(nodeId); + } + }} + className={ + isStarred + ? 
"datasetItemLabel-item__starIcon datasetItemLabel-item--starred" + : `datasetItemLabel-item__starIcon resourceTreeNode${ + isStarredLimitReached + ? "--limitReached" + : "--unstarred" + }` + } + > + + + ) + } + /> )}
        diff --git a/dac/ui/src/components/Dataset/DatasetItemLabel.less b/dac/ui/src/components/Dataset/DatasetItemLabel.less index efae7fb923..38fce0c40a 100644 --- a/dac/ui/src/components/Dataset/DatasetItemLabel.less +++ b/dac/ui/src/components/Dataset/DatasetItemLabel.less @@ -25,7 +25,7 @@ justify-content: space-between; &:hover { - border-radius: 0.4rem; + border-radius: 4px; .datasetItemLabel-item__add { opacity: 1; @@ -38,17 +38,17 @@ &__iconBlock { display: flex; align-items: center; - margin-right: 0.5rem; - gap: 0.4rem; + margin-right: 5px; + gap: 4px; } &__add { - height: 2.4rem; - width: 2.4rem; + height: 24px; + width: 24px; opacity: 0; transition: 300ms ease all; padding: 0; - color: var(--dremio--color--neutral--600) !important; + color: var(--color--neutral--600) !important; &:hover { color: var(--dremio--color--link) !important; @@ -56,13 +56,13 @@ } &__starIcon { - height: 2.4rem; - width: 2.4rem; - margin-right: 0.5rem; + height: 24px; + width: 24px; + margin-right: 5px; transition: 300ms ease all; opacity: 0; padding: 0; - color: var(--dremio--color--neutral--600) !important; + color: var(--color--neutral--600) !important; &:hover { color: var(--dremio--color--link) !important; @@ -98,7 +98,7 @@ } .font-icon { - min-width: 2.4rem; + min-width: 24px; justify-content: center; } .searchHeading { @@ -117,7 +117,7 @@ } .resourceTree .datasetItemLabel-item__content.--isNotExpandable { - margin-left: 2.4rem; + margin-left: 24px; } .dependantItem { @@ -154,7 +154,7 @@ } .EllipsedText { - margin-left: 0.5rem; + margin-left: 5px; } } @@ -162,7 +162,7 @@ // .ColumnMenuItem__content margin-left: 21px; margin-right: 0; - padding: 0 0.4rem; + padding: 0 4px; width: 100%; } @@ -174,7 +174,7 @@ .search_draggable-row { // .ColumnMenuItem__content .draggable-row { - width: calc(100% - 21px - 0.5rem); + width: calc(100% - 21px - 5px); } } } diff --git a/dac/ui/src/components/Dataset/DatasetSummaryOverlay.tsx b/dac/ui/src/components/Dataset/DatasetSummaryOverlay.tsx index 108af1a0b9..cbc7e602be 100644 --- a/dac/ui/src/components/Dataset/DatasetSummaryOverlay.tsx +++ b/dac/ui/src/components/Dataset/DatasetSummaryOverlay.tsx @@ -14,7 +14,7 @@ * limitations under the License. 
*/ import Immutable from "immutable"; -import { useEffect } from "react"; +import { ReactNode, useEffect } from "react"; import { connect } from "react-redux"; import { withRouter, WithRouterProps } from "react-router"; import { loadSummaryDataset } from "actions/resources/dataset"; @@ -23,6 +23,7 @@ import { getViewState } from "selectors/resources"; import { constructFullPath } from "@app/utils/pathUtils"; import DatasetSummary from "../DatasetSummary/DatasetSummary"; import DatasetSummaryNotFound from "../DatasetSummaryNotFound/DatasetSummaryNotFound"; +import { VersionContextType } from "dremio-ui-common/components/VersionContext.js"; const VIEW_ID = "SummaryDataset"; @@ -33,6 +34,12 @@ type DatasetSummaryOverlayProps = { inheritedTitle?: string; viewState: Immutable.Map; loadSummaryDataset: typeof loadSummaryDataset; + detailsView?: boolean; + tagsComponent?: ReactNode; + openWikiDrawer: (dataset: any) => void; + showColumns?: boolean; + hideSqlEditorIcon?: boolean; + versionContext?: VersionContextType; }; const DatasetSummaryOverlay = ( @@ -46,13 +53,30 @@ const DatasetSummaryOverlay = ( inheritedTitle, location, loadSummaryDataset: dispatchLoadSummaryDataset, + detailsView, + tagsComponent, + openWikiDrawer, + showColumns, + hideSqlEditorIcon, + versionContext, } = props; + const { type: contextType, value: contextValue } = versionContext ?? {}; + useEffect(() => { - dispatchLoadSummaryDataset(fullPath.join("/"), VIEW_ID); - }, [dispatchLoadSummaryDataset, fullPath]); + dispatchLoadSummaryDataset( + fullPath?.join("/"), + VIEW_ID, + undefined, + undefined, + undefined, + contextType && contextValue + ? { value: contextValue, type: contextType } + : undefined + ); + }, [dispatchLoadSummaryDataset, fullPath, contextType, contextValue]); - const title = fullPath.get(fullPath.size - 1); + const title = fullPath?.get(fullPath.size - 1); const constructedFullPath = constructFullPath(fullPath); const showDeletedDatasetSummary = viewState.get("isFailed"); const disableActionButtons = viewState.get("isInProgress"); @@ -70,6 +94,12 @@ const DatasetSummaryOverlay = ( disableActionButtons={disableActionButtons} fullPath={constructedFullPath} dataset={summaryDataset} + detailsView={detailsView} + tagsComponent={tagsComponent} + openWikiDrawer={openWikiDrawer} + showColumns={showColumns} + hideSqlEditorIcon={hideSqlEditorIcon} + versionContext={versionContext} /> ); }; @@ -78,7 +108,7 @@ const mapStateToProps = ( state: Record, props: { fullPath: string[] } ) => { - const fullPath = props.fullPath.join(","); + const fullPath = props.fullPath?.join(","); return { summaryDataset: getSummaryDataset(state, fullPath), viewState: getViewState(state, VIEW_ID), diff --git a/dac/ui/src/components/DatasetList/DatasetList.module.less b/dac/ui/src/components/DatasetList/DatasetList.module.less index bf6b44c220..ad79c9ae28 100644 --- a/dac/ui/src/components/DatasetList/DatasetList.module.less +++ b/dac/ui/src/components/DatasetList/DatasetList.module.less @@ -15,7 +15,7 @@ */ .dataSetsList { - background: var(--dremio--color--neutral--000); + background: white; max-height: 50vh; overflow: auto; box-shadow: rgb(0 0 0 / 10%) 0px 0px 8px 0px; diff --git a/dac/ui/src/components/DatasetList/SearchDatasetsPopover.js b/dac/ui/src/components/DatasetList/SearchDatasetsPopover.js index 7d66cbc678..fd9b75b65d 100644 --- a/dac/ui/src/components/DatasetList/SearchDatasetsPopover.js +++ b/dac/ui/src/components/DatasetList/SearchDatasetsPopover.js @@ -89,7 +89,6 @@ class SearchDatasetsPopover extends PureComponent { 
handleSearchHide = () => { this.setState({ searchVisible: false, closeVisible: false }); - this.input.focus(); }; startSearch(text) { @@ -124,15 +123,15 @@ class SearchDatasetsPopover extends PureComponent { onFocus = () => { this.searchDatasetsPopover.className = "searchDatasetsPopover --focused"; - } + }; onBlur = () => { this.searchDatasetsPopover.className = "searchDatasetsPopover"; - } + }; onFocusRef = (div) => { - this.searchDatasetsPopover = div - } + this.searchDatasetsPopover = div; + }; render() { const value = this.state.filter; diff --git a/dac/ui/src/components/DatasetList/SearchDatasetsPopover.less b/dac/ui/src/components/DatasetList/SearchDatasetsPopover.less index 8dd67cf2c4..5dc8bf6def 100644 --- a/dac/ui/src/components/DatasetList/SearchDatasetsPopover.less +++ b/dac/ui/src/components/DatasetList/SearchDatasetsPopover.less @@ -19,14 +19,14 @@ .searchDatasetsPopover { align-items: center; display: flex; - margin: 0.8rem 1rem; - border: 0.1rem solid #d2d6da; + margin: 8px 10px; + border: 1px solid #d2d6da; box-sizing: border-box; - border-radius: 0.4rem; - padding-left: 0.8rem; + border-radius: 4px; + padding-left: 8px; &.--focused { - border-color: var(--dremio--color--primary--500); + border-color: var(--color--brand--300); } .searchInput { @@ -36,7 +36,7 @@ font-weight: 400; font-size: 14px; line-height: 20px; - padding: 0.8rem; + padding: 8px; margin-top: 2px; outline: none; width: 100%; diff --git a/dac/ui/src/components/DatasetSummary/DatasetSummary.module.less b/dac/ui/src/components/DatasetSummary/DatasetSummary.module.less index f181d4d71a..0ae0c3d7d6 100644 --- a/dac/ui/src/components/DatasetSummary/DatasetSummary.module.less +++ b/dac/ui/src/components/DatasetSummary/DatasetSummary.module.less @@ -18,12 +18,85 @@ &-container { display: flex; flex-direction: column; - font-size: 14px; + font-size: var(--dremio--font-size--default); width: 300px; } + &-wiki-container { + display: flex; + flex-direction: column; + font-size: var(--dremio--font-size--default); + width: 368px; + padding-top: var(--dremio--spacing--1); + padding-left: var(--dremio--spacing--1); + padding-bottom: 0; + } &-top-section { - padding: 0 16px; - margin-bottom: 16px; - border-bottom: 1px solid var(--dremio--color--neutral--150); + padding: 0 var(--dremio--spacing--2); + border-bottom: 1px solid var(--color--neutral--50); + } + &-sqleditor { + border-bottom: none; + } + &-tagsWrapper { + width: 100%; + white-space: nowrap; + } + &-tags-stats { + display: flex; + flex-direction: column; + padding-bottom: var(--dremio--spacing--2); + } + &-button-wrapper { + display: flex; + align-items: center; + width: 100%; + gap: var(--dremio--spacing--1); + } + &-open-details-button { + display: flex; + width: 100%; + font-style: normal; + font-weight: 500; + font-size: var(--dremio--font-size--default); + line-height: 20px; + text-decoration: none !important; + color: var(--dremio--color--link); + align-items: center; + justify-content: center; + cursor: pointer; + padding: var(--dremio--spacing--05); + border-radius: var(--dremio--radius--1); + &:hover { + background-color: var(--color--brand--25); + } + } + &-open-details-icon { + height: 24px; + width: 24px; + padding: 2px; + } + + &-dataset-path { + display: flex; + align-items: center; + + .hidden-button { + height: 0; + width: 0; + padding: 0; + border: 0; + opacity: 0; + } + + .copy-button { + margin-bottom: var(--dremio--spacing--1); + opacity: 0; + } + + &:hover { + .copy-button { + opacity: 1; + } + } } } diff --git 
a/dac/ui/src/components/DatasetSummary/DatasetSummary.tsx b/dac/ui/src/components/DatasetSummary/DatasetSummary.tsx index 64ac5aea8e..fbfdb4a1b4 100644 --- a/dac/ui/src/components/DatasetSummary/DatasetSummary.tsx +++ b/dac/ui/src/components/DatasetSummary/DatasetSummary.tsx @@ -14,14 +14,22 @@ * limitations under the License. */ +import { Link } from "react-router"; +import { useIntl } from "react-intl"; import Immutable from "immutable"; import SummaryItemLabel from "./components/SummaryItemLabel/SummaryItemLabel"; import SummarySubHeader from "./components/SummarySubHeader/SummarySubHeader"; import SummaryStats from "./components/SummaryStats/SummaryStats"; import SummaryColumns from "./components/SummaryColumns/SummaryColumns"; -import exploreUtils from "@app/utils/explore/exploreUtils"; import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; import * as classes from "./DatasetSummary.module.less"; +import clsx from "clsx"; +import { ReactNode } from "react"; +// @ts-ignore +import { TagList } from "dremio-ui-lib"; +import CopyButton from "@app/components/Buttons/CopyButton"; +import { VersionContextType } from "dremio-ui-common/components/VersionContext.js"; +import { getEdition } from "@inject/utils/versionUtils"; type DatasetSummaryProps = { title: string; @@ -29,6 +37,12 @@ type DatasetSummaryProps = { fullPath: string; disableActionButtons: boolean; location: Record; + detailsView?: boolean; + tagsComponent?: ReactNode; + openWikiDrawer: (dataset: any) => void; + showColumns?: boolean; + hideSqlEditorIcon?: boolean; + versionContext?: VersionContextType; }; const DatasetSummary = ({ @@ -37,8 +51,20 @@ const DatasetSummary = ({ title, disableActionButtons, location, + detailsView, + tagsComponent, + openWikiDrawer, + showColumns, + hideSqlEditorIcon, + versionContext, }: DatasetSummaryProps) => { + const { formatMessage } = useIntl(); const datasetType = dataset.get("datasetType"); + const createdAt = dataset.get("createdAt"); + const hasReflection = dataset.get("hasReflection"); + const ownerEmail = dataset.get("ownerName"); + const lastModifyUserEmail = dataset.get("lastModifyingUserEmail"); + const lastModified = dataset.get("lastModified"); const selfLink = dataset.getIn(["links", "query"]); const editLink = dataset.getIn(["links", "edit"]); const jobsLink = wrapBackendLink(dataset.getIn(["links", "jobs"])); @@ -48,33 +74,135 @@ const DatasetSummary = ({ const canAlter = dataset.getIn(["permissions", "canAlter"]); const fieldsCount = fields && fields.size; const resourceId = dataset.getIn(["fullPath", 0]); - const isSqlEditorTab = exploreUtils.isSqlEditorTab(location); + const tags = dataset.get("tags"); + const canSeeDatasetGraph = dataset.getIn([ + "permissions", + "canExploreDatasetGraph", + ]); + + const shouldRenderLineageButton = + canSeeDatasetGraph && getEdition() !== "Community Edition"; + return (
        e.stopPropagation()} - className={classes["dataset-summary-container"]} + className={ + detailsView + ? classes["dataset-summary-wiki-container"] + : classes["dataset-summary-container"] + } > -
        - - +
        + {!detailsView && ( + + )} + {!detailsView ? ( + + ) : ( +
        + {/* need to render an invisible button here since opening the drawer will auto-select the button */} +
        + )} + {detailsView ? ( +
        + {tagsComponent} +
        + ) : tags?.size > 0 ? ( +
        + +
        + ) : ( + <> + )} + {!detailsView && ( +
        +
        { + openWikiDrawer(dataset); + }} + > + + {formatMessage({ id: "Wiki.OpenDetails" })} +
        + {shouldRenderLineageButton && ( + + + {formatMessage({ id: "Dataset.Summary.Lineage" })} + + )} +
        + )}
        - + {!detailsView && ( + + )}
        ); }; diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.module.less b/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.module.less index 9c5358eed5..908384b945 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.module.less +++ b/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.module.less @@ -15,39 +15,48 @@ */ .summary { + &-columns-container { + padding-top: var(--dremio--spacing--2); + } &-columns-title { - padding: 0px 16px; - margin-bottom: 16px; + padding: 0px var(--dremio--spacing--2); + margin-bottom: var(--dremio--spacing--2); font-weight: 600; } + &-rows { overflow: auto; display: flex; flex-direction: column; - font-size: 12px; + font-size: var(--dremio--font-size--default); max-height: 270px; } + &-column-loader { - padding-left: 16px; + padding-left: var(--dremio--spacing--2); width: 122px; } + &-row { - padding: 0px 16px; + padding: 0px var(--dremio--spacing--2); min-height: 32px; + :global { .draggable-row { margin-left: 0; padding: 0; } + .font-icon { display: flex; align-items: center; - width: 16px !important; - height: 16px !important; + width: 18px !important; + height: 18px !important; margin-right: 4px; + .icon-type { - width: 16px !important; - height: 16px !important; + width: 18px !important; + height: 18px !important; } } } diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.tsx b/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.tsx index f6eb3d754b..e7fa6a804c 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.tsx +++ b/dac/ui/src/components/DatasetSummary/components/SummaryColumns/SummaryColumns.tsx @@ -26,39 +26,49 @@ type SummaryColumnsProps = { isSorted: boolean; }[]; fieldsCount: number; + showColumns?: boolean; }; -const SummaryColumns = ({ fields, fieldsCount }: SummaryColumnsProps) => { +const SummaryColumns = ({ + fields, + fieldsCount, + showColumns, +}: SummaryColumnsProps) => { const { formatMessage } = intl; return ( -
        -
        - {formatMessage({ id: "Common.Columns" })} - {fieldsCount && ` (${fieldsCount})`} -
        -
        - {fields ? ( - fields.map((field: any, i: number) => { - return ( - + {showColumns && ( +
        +
        + {formatMessage({ id: "Common.Columns" })} + {fieldsCount && ` (${fieldsCount})`} +
        +
        + {fields ? ( + fields.map((field: any, i: number) => { + return ( + + ); + }) + ) : ( + - ); - }) - ) : ( - - )} -
        -
        + )} +
        +
        + )} + ); }; diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.module.less b/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.module.less index 3782b1a022..4250bd4352 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.module.less +++ b/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.module.less @@ -37,11 +37,17 @@ block-size: 32px; min-width: 32px; } + &-reflection { + block-size: 16px; + min-width: 16px; + margin-right: var(--dremio--spacing--1); + margin-left: var(--dremio--spacing--05); + } &-empty-icon { width: 32px; height: 32px; border-radius: 2px; - background-color: var(--dremio--color--neutral--100); + background-color: var(--color--neutral--25); } &-action { &-container { diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.tsx b/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.tsx index dd78a1a945..299bdbd93a 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.tsx +++ b/dac/ui/src/components/DatasetSummary/components/SummaryItemLabel/SummaryItemLabel.tsx @@ -13,82 +13,60 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + import { useRef, useState } from "react"; // @ts-ignore import { Tooltip, IconButton } from "dremio-ui-lib"; import LinkWithHref from "@app/components/LinkWithRef/LinkWithRef"; -import { - getIconType, - addTooltip, - openInNewTab, -} from "../../datasetSummaryUtils"; -import * as sqlPaths from "dremio-ui-common/paths/sqlEditor.js"; - -import * as classes from "./SummaryItemLabel.module.less"; +import { getIconType, addTooltip } from "../../datasetSummaryUtils"; import LinkWithRef from "@app/components/LinkWithRef/LinkWithRef"; -import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; +// @ts-ignore import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; +import { getIconPath } from "@app/utils/getIconPath"; +import QueryDataset from "@app/components/QueryDataset/QueryDataset"; -const DATASET_PATH_FROM_OVERLAY = "datasetPathFromOverlay"; +import * as classes from "./SummaryItemLabel.module.less"; type DatasetSummaryItemLabelProps = { disableActionButtons: boolean; datasetType: string; title: string; - editLink?: string; - selfLink?: string; canAlter: boolean; resourceId: string; fullPath: string; - isSqlEditorTab: boolean; + hasReflection: boolean; + editLink?: string; + selfLink?: string; + hideSqlEditorIcon?: boolean; }; const SummaryItemLabel = (props: DatasetSummaryItemLabelProps) => { - const projectId = getSonarContext()?.getSelectedProjectId?.(); const [showTooltip, setShowTooltip] = useState(false); const { title, canAlter, editLink, selfLink, - isSqlEditorTab, resourceId, datasetType, fullPath, disableActionButtons, + hideSqlEditorIcon, + hasReflection, } = props; - const newQueryLink = sqlPaths.sqlEditor.link({ projectId }); const titleRef = useRef(null); const iconName = getIconType(datasetType); - const newQueryUrlParams = "?context=" + encodeURIComponent(resourceId); - const newTabLink = sqlPaths.newQuery.link({ projectId, resourceId }); const disable = disableActionButtons ? classes["dataset-item-header-disable-action-buttons"] : ""; - const queryButtonProps = isSqlEditorTab - ? 
{ - onClick: () => - openInNewTab(newTabLink, fullPath, DATASET_PATH_FROM_OVERLAY), - } - : { - as: LinkWithHref, - onClick: () => - sessionStorage.setItem(DATASET_PATH_FROM_OVERLAY, fullPath), - to: { - pathname: newQueryLink, - search: newQueryUrlParams, - }, - }; - const toLink = canAlter && editLink ? editLink : selfLink; - + const isView = iconName && iconName === "dataset-view"; return (
        {iconName ? ( - // @ts-ignore { {title}

        )} + {hasReflection && ( + + )}
        - - - + )} - - - + {iconName && + (isView ? ( + + + + ) : ( + + + + ))}
        ); diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.module.less b/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.module.less new file mode 100644 index 0000000000..24b8ccc691 --- /dev/null +++ b/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.module.less @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.summary-stats { + display: flex; + align-items: center; + justify-content: space-between; + white-space: nowrap; + height: 28px; + margin-bottom: var(--dremio--spacing--05); + .data { + margin-left: var(--dremio--spacing--1); + line-height: 24px; + &-container { + overflow: hidden; + white-space: nowrap; + max-width: 53.5%; + text-overflow: ellipsis; + } + } +} diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.tsx b/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.tsx new file mode 100644 index 0000000000..02ae1cbfe6 --- /dev/null +++ b/dac/ui/src/components/DatasetSummary/components/SummaryStats/RenderStat.tsx @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { useState } from "react"; +import { useIntl } from "react-intl"; +import LoadingBar from "@app/components/LoadingBar/LoadingBar"; +import { Avatar, Tooltip } from "dremio-ui-lib/components"; +import * as classes from "./RenderStat.module.less"; +import { nameToInitials } from "@app/exports/utilities/nameToInitials"; + +const RenderData = ({ + data, + showAvatar, +}: { + data: any; + showAvatar?: boolean; +}) => { + if (showAvatar) { + const initials = nameToInitials(data); + return ( + <> + + {data} + + ); + } else { + return data; + } +}; + +const RenderStat = ({ + title, + data, + showAvatar, + wrapContent, +}: { + title: string; + data: any; + style?: any; + showAvatar?: boolean; + wrapContent?: boolean; +}) => { + const { formatMessage } = useIntl(); + const [showTooltip, setShowTooltip] = useState(false); + + return data === undefined ? null : ( +
        +
        +
        {formatMessage({ id: title })}
        + {showTooltip ? ( + +
        + {data || data === 0 ? ( + + ) : ( + + )} +
        +
        + ) : ( +
        { + if (elem?.offsetWidth < elem?.scrollWidth) { + setShowTooltip(true); + } + }} + > + {data || data === 0 ? ( + + ) : ( + + )} +
        + )} +
        +
        + ); +}; + +export default RenderStat; diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.module.less b/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.module.less index 45b5b3c328..172346d3c3 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.module.less +++ b/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.module.less @@ -14,15 +14,37 @@ * limitations under the License. */ -.summary-stats { +.summary-stats-container { display: flex; - align-items: center; - justify-content: space-between; - height: 24px; - &-container { + flex-direction: column; + margin-bottom: var(--dremio--spacing--2); + font-size: var(--dremio--font-size--default); + + .analyze-container { display: flex; - flex-direction: column; - margin-bottom: 16px; - font-size: 12px; + align-items: center; + gap: var(--dremio--spacing--1); + + .analyze-button { + display: flex; + align-items: center; + justify-content: center; + height: 28px; + width: 36px; + padding: var(--dremio--spacing--1); + border: 1px solid var(--color--neutral--100); + border-radius: var(--dremio--radius--1); + transition: 300ms ease all; + + &__icon { + height: 20px; + width: 20px; + } + + &:hover { + cursor: pointer; + background-color: var(--color--neutral--25); + } + } } } diff --git a/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.tsx b/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.tsx index 59fec7194b..10f2b3fa0a 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.tsx +++ b/dac/ui/src/components/DatasetSummary/components/SummaryStats/SummaryStats.tsx @@ -14,24 +14,46 @@ * limitations under the License. 
*/ import * as classes from "./SummaryStats.module.less"; -import { intl } from "@app/utils/intl"; +import Immutable from "immutable"; import LinkWithHref from "@app/components/LinkWithRef/LinkWithRef"; -import LoadingBar from "@app/components/LoadingBar/LoadingBar"; +import { useDispatch } from "react-redux"; +//@ts-ignore +import { Tooltip } from "dremio-ui-lib"; +//@ts-ignore +import { formatDateTimestampShortNoTZ } from "dremio-ui-common/utilities/formatDate.js"; +import { fetchStatusOfAnalyzeTools } from "@app/utils/analyzeToolsUtils"; +import { getIconPath } from "@app/utils/getIconPath"; +import { openTableau, openPowerBI } from "actions/explore/download"; +import RenderStat from "./RenderStat"; +import { getVersionContextFromId } from "dremio-ui-common/utilities/datasetReference.js"; +import { useIsBIToolsEnabled } from "@app/utils/arsUtils"; type SummaryStatsProps = { + dataset: Immutable.Map; jobsLink?: string; jobCount: number; descendantsCount: number; location: Record; + createdAt?: Date; + ownerEmail?: string; + lastModifyUserEmail?: string; + lastModified?: Date; + detailsView?: boolean; }; const SummaryStats = ({ + dataset, jobsLink, jobCount, descendantsCount, + createdAt, + ownerEmail, + // lastModifyUserEmail, + lastModified, location, + detailsView, }: SummaryStatsProps) => { - const { formatMessage } = intl; + const dispatch = useDispatch(); const currentRoute = location.pathname + location?.search; // reloads the page if the jobs link is the same as the current location const onJobClick = () => { @@ -40,28 +62,104 @@ const SummaryStats = ({ } }; + const versionContext = getVersionContextFromId(dataset.get("entityId")); + const isBIToolsEnabled = useIsBIToolsEnabled(versionContext); + + const canAnalyzeWithBITools = dataset.getIn([ + "permissions", + "canAnalyzeWithBITools", + ]); + + const analyzeButtonsConfig = fetchStatusOfAnalyzeTools(); + + // The analyze buttons have three requirements to render + // 1) Must not be in details view + // 2) The user must have the "canAnalyzeWithBITools" permission for the dataset + // 3) At least one of the analyze support keys must be enabled + const shouldShowAnalyzeButtons = + !detailsView && + canAnalyzeWithBITools && + (analyzeButtonsConfig["client.tools.tableau"] || + analyzeButtonsConfig["client.tools.powerbi"]); + return (
        -
        - {formatMessage({ id: "Job.Jobs" })} - {jobCount || jobCount === 0 ? ( - // @ts-ignore - - {jobCount} - - ) : ( - - )} -
        - -
        - {formatMessage({ id: "Dataset.Descendants" })} - {descendantsCount || descendantsCount === 0 ? ( - descendantsCount - ) : ( - - )} -
        + + {jobCount} + + ) + } + /> + + + + + {/* */} + {shouldShowAnalyzeButtons && isBIToolsEnabled && ( + s instead of
        s, but buttons cause issues when + // rendered and clicked inside of MUI Tooltips + + {analyzeButtonsConfig["client.tools.tableau"] && ( + +
        dispatch(openTableau(dataset))} + > + +
        +
        + )} + {analyzeButtonsConfig["client.tools.powerbi"] && ( + +
        dispatch(openPowerBI(dataset))} + > + +
        +
        + )} +
        + } + /> + )}
        ); }; diff --git a/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.module.less b/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.module.less index d042170d8c..5b47ae4d87 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.module.less +++ b/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.module.less @@ -14,7 +14,8 @@ * limitations under the License. */ -.summary-subHeader { +.summary-subHeader, +.summary-secondarySubHeader { width: 100%; overflow: hidden; white-space: nowrap; @@ -23,4 +24,12 @@ &-container { margin: 8px 0px 16px 0px; } + + &-container__details { + width: 93%; + } +} + +.summary-secondarySubHeader { + padding-top: 4px; } diff --git a/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.tsx b/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.tsx index 0f4c820e7b..f554992d91 100644 --- a/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.tsx +++ b/dac/ui/src/components/DatasetSummary/components/SummarySubHeader/SummarySubHeader.tsx @@ -17,14 +17,30 @@ import { useRef, useState } from "react"; // @ts-ignore import { Tooltip } from "dremio-ui-lib"; import { addTooltip } from "../../datasetSummaryUtils"; +import VersionContext, { + VersionContextType, +} from "dremio-ui-common/components/VersionContext.js"; + import * as classes from "./SummarySubHeader.module.less"; -const SummarySubHeader = ({ subTitle }: { subTitle: string }) => { +const SummarySubHeader = ({ + subTitle, + versionContext, + detailsView, +}: { + subTitle: string; + versionContext?: VersionContextType; + detailsView?: boolean; +}) => { const [showTooltip, setShowTooltip] = useState(false); const subTitleRef = useRef(null); return ( -
        +
        {showTooltip ? (

        {subTitle}

        @@ -38,6 +54,13 @@ const SummarySubHeader = ({ subTitle }: { subTitle: string }) => { {subTitle}

        )} + {versionContext && ( + + )}
        ); }; diff --git a/dac/ui/src/components/DatasetsSearch-spec.js b/dac/ui/src/components/DatasetsSearch-spec.js index d1ef5eb340..97e70131a3 100644 --- a/dac/ui/src/components/DatasetsSearch-spec.js +++ b/dac/ui/src/components/DatasetsSearch-spec.js @@ -15,6 +15,8 @@ */ import { shallow } from "enzyme"; import Immutable from "immutable"; +import * as sqlPaths from "dremio-ui-common/paths/sqlEditor.js"; +import { constructFullPath } from "@app/utils/pathUtils"; import DatasetsSearch from "./DatasetsSearch"; @@ -93,19 +95,36 @@ describe("DatasetsSearch-spec", () => { const dataset = wrapper.find(".dataset-wrapper").find(".dataset"); const mainSettingsBtn = dataset.at(1).find(".main-settings-btn"); - expect(dataset).have.length(2); - - expect(dataset.at(0).prop("to")).equal( - commonProps.searchData.get(0).getIn(["links", "self"]) + const firstFullPath = constructFullPath( + commonProps.searchData.get(0).get("fullPath") ); - expect(dataset.at(1).prop("to")).equal( - commonProps.searchData.get(1).getIn(["links", "self"]) + const secondFullPath = constructFullPath( + commonProps.searchData.get(1).get("fullPath") ); + + const firstTo = { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + "@test_user" + )}"&queryPath=${firstFullPath}`, + }; + + const secondTo = { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + "@test_user" + )}"&queryPath=${secondFullPath}`, + }; + + expect(dataset).have.length(2); + + expect(dataset.at(0).prop("to")).deep.equal(firstTo); + expect(dataset.at(1).prop("to")).deep.equal(secondTo); expect(mainSettingsBtn.childAt(0).prop("to")).equal( commonProps.searchData.get(1).getIn(["links", "edit"]) ); - expect(mainSettingsBtn.childAt(1).prop("to")).equal( - commonProps.searchData.get(1).getIn(["links", "self"]) + expect(mainSettingsBtn.childAt(1).prop("fullPath")).equal( + constructFullPath(commonProps.searchData.get(1).get("fullPath")) ); }); }); diff --git a/dac/ui/src/components/DatasetsSearch.js b/dac/ui/src/components/DatasetsSearch.js index aea85ea693..659381a0d9 100644 --- a/dac/ui/src/components/DatasetsSearch.js +++ b/dac/ui/src/components/DatasetsSearch.js @@ -18,15 +18,24 @@ import Immutable from "immutable"; import PropTypes from "prop-types"; import { Link } from "react-router"; import ViewStateWrapper from "components/ViewStateWrapper"; +import { IconButton } from "dremio-ui-lib/components"; import { injectIntl } from "react-intl"; import { getIconDataTypeFromDatasetType } from "utils/iconUtils"; -import { Tooltip, TagList } from "dremio-ui-lib"; +import { TagList } from "dremio-ui-lib"; +import { shouldUseNewDatasetNavigation } from "@app/utils/datasetNavigationUtils"; import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; +import { constructFullPath } from "@app/utils/pathUtils"; +import * as sqlPaths from "dremio-ui-common/paths/sqlEditor.js"; import { bodySmall } from "uiTheme/radium/typography"; import { PALE_NAVY } from "uiTheme/radium/colors"; import DatasetItemLabel from "./Dataset/DatasetItemLabel"; +import QueryDataset from "@app/components/QueryDataset/QueryDataset"; import "./DatasetsSearch.less"; +import { + DATASET_TYPES_TO_ICON_TYPES, + VIRTUAL_DATASET, +} from "@app/constants/datasetTypes"; const emptyList = new Immutable.List(); @@ -46,8 +55,23 @@ class DatasetsSearch extends PureComponent { getDatasetsList(searchData, inputValue) { const { globalSearch } = this.props; + return searchData.map((value, key) => { + const resourceId = 
value.getIn(["fullPath", 0]); + const newFullPath = constructFullPath(value.get("fullPath")); const name = value.getIn(["fullPath", -1]); + + const href = { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + resourceId + )}"&queryPath=${newFullPath}`, + }; + + const toLink = shouldUseNewDatasetNavigation() + ? href + : wrapBackendLink(value.getIn(["links", "self"])); + const datasetItem = (
        ); + return globalSearch ? ( {datasetItem} @@ -95,32 +120,41 @@ class DatasetsSearch extends PureComponent { const { intl: { formatMessage }, } = this.props; + const datasetIconType = getIconDataTypeFromDatasetType( + dataset.get("datasetType") + ); + return ( {dataset.getIn(["links", "edit"]) && ( - - - - - + e.stopPropagation()} + > + + )} - - - - - + {datasetIconType !== DATASET_TYPES_TO_ICON_TYPES[VIRTUAL_DATASET] && ( + e.stopPropagation()} + > + + + )} + ); } diff --git a/dac/ui/src/components/DateRangePicker/DateRangePicker.module.less b/dac/ui/src/components/DateRangePicker/DateRangePicker.module.less index 0a36f88554..d550ab9a08 100644 --- a/dac/ui/src/components/DateRangePicker/DateRangePicker.module.less +++ b/dac/ui/src/components/DateRangePicker/DateRangePicker.module.less @@ -17,14 +17,14 @@ :global(.rdrNextPrevButton) { background: transparent; &:hover { - background: var(--dremio--color--neutral--150); + background: var(--color--neutral--50); } } :global(.rdrMonthPicker select), :global(.rdrYearPicker select) { font-weight: 600; - color: var(--dremio--color--neutral--850); + color: var(--color--neutral--900); } :global(.rdrDateDisplayWrapper), @@ -37,7 +37,7 @@ } :global(.rdrDayToday .rdrDayNumber span:after) { - background: var(--dremio--color--neutral--850) !important; + background: var(--color--neutral--900) !important; height: 1px; width: 13px; bottom: 5px; @@ -47,7 +47,7 @@ :global(.rdrDay:not(.rdrDayPassive) .rdrStartEdge ~ .rdrDayNumber span), :global(.rdrDay:not(.rdrDayPassive) .rdrEndEdge ~ .rdrDayNumber span), :global(.rdrDay:not(.rdrDayPassive) .rdrSelected ~ .rdrDayNumber span) { - color: var(--dremio--color--neutral--850) !important; + color: var(--color--neutral--900) !important; } :global(.rdrMonth) { diff --git a/dac/ui/src/components/DragComponents/ColumnMenuItem.js b/dac/ui/src/components/DragComponents/ColumnMenuItem.js index c16a5af5ea..af97fdf3df 100644 --- a/dac/ui/src/components/DragComponents/ColumnMenuItem.js +++ b/dac/ui/src/components/DragComponents/ColumnMenuItem.js @@ -54,9 +54,11 @@ class ColumnMenuItem extends PureComponent { shouldAllowAdd: PropTypes.bool, addtoEditor: PropTypes.func, draggableRowClassName: PropTypes.string, + showReadonlyTooltip: PropTypes.bool }; static defaultProps = { fullPath: Immutable.List(), + showReadonlyTooltip: true }; checkThatDragAvailable = (e) => { @@ -89,6 +91,7 @@ class ColumnMenuItem extends PureComponent { shouldAllowAdd, addtoEditor, draggableRowClassName, + showReadonlyTooltip, intl: { formatMessage }, } = this.props; const markAsDisabled = preventDrag || disabled; @@ -96,7 +99,7 @@ class ColumnMenuItem extends PureComponent { // full paths are not yet supported by dremio in SELECT clauses, so force this to always be the simple name for now const idForDrag = true || // eslint-disable-line no-constant-condition - isGroupBy + isGroupBy ? item.get("name") : constructFullPath(this.props.fullPath.concat(item.get("name"))); return ( @@ -138,8 +141,7 @@ class ColumnMenuItem extends PureComponent { } text={item.get("name")} title={ - preventDrag - ? formatMessage({ id: "Read.Only" }) + preventDrag && showReadonlyTooltip ? 
formatMessage({ id: "Read.Only" }) : item.get("name") } > diff --git a/dac/ui/src/components/DragComponents/DragSource.js b/dac/ui/src/components/DragComponents/DragSource.js index 80bd8fa899..5019cb25a3 100644 --- a/dac/ui/src/components/DragComponents/DragSource.js +++ b/dac/ui/src/components/DragComponents/DragSource.js @@ -48,6 +48,7 @@ const source = { @DragSource((props) => props.dragType, source, (connect, monitor) => ({ connectDragSource: connect.dragSource(), isDragging: monitor.isDragging(), + betterIsDragging: !!monitor.getItem(), })) export default class DragSourceWrap extends Component { static propTypes = { @@ -55,7 +56,9 @@ export default class DragSourceWrap extends Component { nativeDragData: PropTypes.object, isFromAnother: PropTypes.bool, isDragging: PropTypes.bool, + betterIsDragging: PropTypes.bool, preventDrag: PropTypes.bool, + onDrag: PropTypes.func, connectDragSource: PropTypes.func, index: PropTypes.number, args: PropTypes.string, @@ -70,6 +73,14 @@ export default class DragSourceWrap extends Component { this.onDragStart = this.onDragStart.bind(this); } + componentDidUpdate(prevProps) { + const { betterIsDragging, onDrag: setDrag } = this.props; + + if (prevProps.betterIsDragging !== betterIsDragging) { + setDrag?.(betterIsDragging); + } + } + onDragStart(ev) { if (this.props.nativeDragData) { // dataType must be 'text' for IE diff --git a/dac/ui/src/components/ErrorBoundary/ErrorBoundary.tsx b/dac/ui/src/components/ErrorBoundary/ErrorBoundary.tsx index 9460ca910b..99cd1af8e6 100644 --- a/dac/ui/src/components/ErrorBoundary/ErrorBoundary.tsx +++ b/dac/ui/src/components/ErrorBoundary/ErrorBoundary.tsx @@ -22,7 +22,7 @@ import sentryUtil from "@app/utils/sentryUtil"; type ErrorBoundaryProps = { className?: string; children: ReactNode; - title: string; + title: string | (() => string); }; type ErrorBoundaryState = | { diff --git a/dac/ui/src/components/ErrorBoundary/ErrorDisplay.tsx b/dac/ui/src/components/ErrorBoundary/ErrorDisplay.tsx index 353efcfa42..cc224178b4 100644 --- a/dac/ui/src/components/ErrorBoundary/ErrorDisplay.tsx +++ b/dac/ui/src/components/ErrorBoundary/ErrorDisplay.tsx @@ -22,7 +22,7 @@ import { intl } from "@app/utils/intl"; type Props = { className?: string; - title: string; + title: string | (() => string); error: Error; errorInfo?: ErrorInfo | null; }; @@ -42,7 +42,13 @@ export const ErrorDisplay = (props: Props): JSX.Element => { production={process.env.NODE_ENV !== "development"} renderSupportInfo={() => } supportMessage={intl.formatMessage({ id: "Support.contact" })} - title={props.title || "An unexpected error occurred"} + title={ + props.title + ? typeof props.title === "function" + ? 
props.title() + : props.title + : "An unexpected error occurred" + } /> ); }; diff --git a/dac/ui/src/components/ErrorBoundary/SupportInfo.module.less b/dac/ui/src/components/ErrorBoundary/SupportInfo.module.less index a3846a1513..3d7ad4a638 100644 --- a/dac/ui/src/components/ErrorBoundary/SupportInfo.module.less +++ b/dac/ui/src/components/ErrorBoundary/SupportInfo.module.less @@ -15,7 +15,7 @@ */ .support-info { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); display: flex; gap: var(--dremio--spacing--3); justify-content: center; diff --git a/dac/ui/src/components/ErrorBoundary/withErrorBoundary.tsx b/dac/ui/src/components/ErrorBoundary/withErrorBoundary.tsx index 79457a58d6..6ea3a0600c 100644 --- a/dac/ui/src/components/ErrorBoundary/withErrorBoundary.tsx +++ b/dac/ui/src/components/ErrorBoundary/withErrorBoundary.tsx @@ -18,8 +18,7 @@ import * as React from "react"; import { ErrorBoundary } from "./ErrorBoundary"; type WithErrorBoundaryConfig = { - title: string; - wrapperClass?: string; + title: string | (() => string); }; export const withErrorBoundary = @@ -27,10 +26,12 @@ export const withErrorBoundary = (Component: T) => (props: any) => { return ( -
        - - - -
        + + + ); }; diff --git a/dac/ui/src/components/ExpandIcon.js b/dac/ui/src/components/ExpandIcon.js index b32c8434d4..86ecc0e47b 100644 --- a/dac/ui/src/components/ExpandIcon.js +++ b/dac/ui/src/components/ExpandIcon.js @@ -45,7 +45,7 @@ const styles = { icon: { height: 18, width: 18, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }, }; diff --git a/dac/ui/src/components/Fields/Checkbox.js b/dac/ui/src/components/Fields/Checkbox.js index 95639a153e..9d1af571e7 100644 --- a/dac/ui/src/components/Fields/Checkbox.js +++ b/dac/ui/src/components/Fields/Checkbox.js @@ -122,7 +122,7 @@ export default class Checkbox extends PureComponent { height: 10, width: 12, marginBottom: 3, - color: "var(--dremio--color--neutral--000)", + color: "white", }} /> ) : ( diff --git a/dac/ui/src/components/Fields/FieldList.module.less b/dac/ui/src/components/Fields/FieldList.module.less index c0e3074d69..5507e37363 100644 --- a/dac/ui/src/components/Fields/FieldList.module.less +++ b/dac/ui/src/components/Fields/FieldList.module.less @@ -31,7 +31,7 @@ width: fit-content; margin-top: 16px; &:hover { - background: var(--dremio--color--primary--100); + background: var(--color--brand--25); } } } diff --git a/dac/ui/src/components/Fields/FilterSelectMenu.js b/dac/ui/src/components/Fields/FilterSelectMenu.js index 1a9d6f070a..af0a250eb3 100644 --- a/dac/ui/src/components/Fields/FilterSelectMenu.js +++ b/dac/ui/src/components/Fields/FilterSelectMenu.js @@ -131,7 +131,13 @@ export function FilterSelectMenuItem({ label={[ item.icon && (isJobStatus ? ( - + ) : ( this.onChange(e.target.value)} onKeyDown={this.handleKeyDown} onClick={this.props.onClick} + onBlur={this.props.onBlur} + autoFocus={this.props.autoFocus} /> {showCloseIcon && ( {buttonLabel} } disabled={disabled} - className={classNames(["field", button, className])} + className={classNames(["field", button, className, "pointerCursor"])} style={style} listClass={classNames([listCls, listClass])} listStyle={listStyle ? 
{...listStyle} : { width: 388 }} diff --git a/dac/ui/src/components/Fields/SelectView.less b/dac/ui/src/components/Fields/SelectView.less index 5ce3fe5035..88985fe8cd 100644 --- a/dac/ui/src/components/Fields/SelectView.less +++ b/dac/ui/src/components/Fields/SelectView.less @@ -39,7 +39,7 @@ } .disabled-icon { - color: var(--dremio--color--neutral--300); + color: var(--color--neutral--200); } .showHideBackgroundColor { diff --git a/dac/ui/src/components/Fields/TextField.module.less b/dac/ui/src/components/Fields/TextField.module.less index 2c54ad8cac..5c7fa6b4cc 100644 --- a/dac/ui/src/components/Fields/TextField.module.less +++ b/dac/ui/src/components/Fields/TextField.module.less @@ -17,10 +17,11 @@ .container { border-radius: 4px; height: 32px; + width: 100%; font-size: 14px; line-height: 20px; display: flex; - background-color: var(--dremio--color--neutral--000); + background-color: white; input { border-top-left-radius: 0px; @@ -29,7 +30,7 @@ } .prefix { - background-color: var(--dremio--color--neutral--200); + background-color: var(--color--neutral--100); border-top-left-radius: 4px; border-bottom-left-radius: 4px; padding: 0px 16px; diff --git a/dac/ui/src/components/Fields/TimePicker.module.less b/dac/ui/src/components/Fields/TimePicker.module.less index 1e6893dcad..aca3e1a80b 100644 --- a/dac/ui/src/components/Fields/TimePicker.module.less +++ b/dac/ui/src/components/Fields/TimePicker.module.less @@ -25,7 +25,7 @@ &__formatLabel { margin-left: var(--dremio--spacing--1); - color: var(--dremio--color--neutral--300); + color: var(--color--neutral--200); font-style: italic; user-select: none; } @@ -33,11 +33,11 @@ input { height: 32px; padding: 0 var(--dremio--spacing--1); - border: 1px solid var(--dremio--color--neutral--200); + border: 1px solid var(--color--neutral--100); border-radius: 2px; &:focus { outline: none; - border-color: var(--dremio--color--primary--500); + border-color: var(--color--brand--300); } } } diff --git a/dac/ui/src/components/FinderNav.less b/dac/ui/src/components/FinderNav.less index 998b662716..da1445899e 100644 --- a/dac/ui/src/components/FinderNav.less +++ b/dac/ui/src/components/FinderNav.less @@ -84,7 +84,7 @@ cursor: pointer; &:hover { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } a { @@ -101,7 +101,7 @@ &.active { font-weight: 600; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } } @@ -115,7 +115,7 @@ .add-space-icon { width: 24px; height: 24px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } &:hover { diff --git a/dac/ui/src/components/FinderNavItem.js b/dac/ui/src/components/FinderNavItem.js index f87687b921..e87a751920 100644 --- a/dac/ui/src/components/FinderNavItem.js +++ b/dac/ui/src/components/FinderNavItem.js @@ -39,6 +39,8 @@ import ResourcePin from "./ResourcePin"; import EllipsedText from "./EllipsedText"; import "./FinderNavItem.less"; +import { FeatureSwitch } from "@app/exports/components/FeatureSwitch/FeatureSwitch"; +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; const mapStateToPropsV3 = (state, { entityId }) => { const type = getRootEntityTypeByIdV3(state, entityId); @@ -78,7 +80,11 @@ export class FinderNavItemV3 extends PureComponent { style={{ marginRight: 5, width: "191px" }} /> - {entityId && } + null} + renderDisabled={() => entityId && } + /> ); } @@ -208,7 +214,11 @@ class FinderNavItem extends Component { count={numberOfDatasets} 
isBounded={datasetCountBounded} /> - {id && } + null} + renderDisabled={() => id && } + /> )} diff --git a/dac/ui/src/components/FinderNavItem.less b/dac/ui/src/components/FinderNavItem.less index 47a9b5a288..8cccd56581 100644 --- a/dac/ui/src/components/FinderNavItem.less +++ b/dac/ui/src/components/FinderNavItem.less @@ -22,7 +22,7 @@ .finder-nav-item { &:hover, &:focus { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } .pin { @@ -72,7 +72,7 @@ } &.active { font-weight: 600; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } } diff --git a/dac/ui/src/components/Forms/DataFreshnessSection.js b/dac/ui/src/components/Forms/DataFreshnessSection.js index dd9186b8cb..92c5182a7a 100644 --- a/dac/ui/src/components/Forms/DataFreshnessSection.js +++ b/dac/ui/src/components/Forms/DataFreshnessSection.js @@ -21,17 +21,17 @@ import { formDefault } from "uiTheme/radium/typography"; import DurationField from "components/Fields/DurationField"; import FieldWithError from "components/Fields/FieldWithError"; import Checkbox from "components/Fields/Checkbox"; -import { Button } from "dremio-ui-lib"; -import * as ButtonTypes from "components/Buttons/ButtonTypes"; +import { Button } from "dremio-ui-lib/components"; import ApiUtils from "utils/apiUtils/apiUtils"; import NotificationSystem from "react-notification-system"; import Message from "components/Message"; -import { isCME } from "dyn-load/utils/versionUtils"; +import { isCME, isNotSoftware } from "dyn-load/utils/versionUtils"; import { intl } from "@app/utils/intl"; -import { HoverHelp } from "dremio-ui-lib"; +import { getSupportFlag } from "@app/exports/endpoints/SupportFlags/getSupportFlag"; +import { SUBHOUR_ACCELERATION_POLICY } from "@app/exports/endpoints/SupportFlags/supportFlagConstants"; const DURATION_ONE_HOUR = 3600000; -const MIN_DURATION = config.subhourAccelerationPoliciesEnabled +window.subhourMinDuration = config.subhourAccelerationPoliciesEnabled ? 60 * 1000 : DURATION_ONE_HOUR; // when changed, must update validation error text @@ -67,30 +67,30 @@ class DataFreshnessSection extends Component { if ( values.accelerationRefreshPeriod === 0 || - values.accelerationRefreshPeriod < MIN_DURATION + values.accelerationRefreshPeriod < window.subhourMinDuration ) { - if (config.subhourAccelerationPoliciesEnabled) { + if (window.subhourMinDuration === DURATION_ONE_HOUR) { errors.accelerationRefreshPeriod = la( - "Reflection refresh must be at least 1 minute." + "Reflection refresh must be at least 1 hour." ); } else { errors.accelerationRefreshPeriod = la( - "Reflection refresh must be at least 1 hour." + "Reflection refresh must be at least 1 minute." ); } } if ( values.accelerationGracePeriod && - values.accelerationGracePeriod < MIN_DURATION + values.accelerationGracePeriod < window.subhourMinDuration ) { - if (config.subhourAccelerationPoliciesEnabled) { + if (window.subhourMinDuration === DURATION_ONE_HOUR) { errors.accelerationGracePeriod = la( - "Reflection expiry must be at least 1 minute." + "Reflection expiry must be at least 1 hour." ); } else { errors.accelerationGracePeriod = la( - "Reflection expiry must be at least 1 hour." + "Reflection expiry must be at least 1 minute." 
); } } else if ( @@ -111,9 +111,24 @@ class DataFreshnessSection extends Component { this.notificationSystemRef = createRef(); this.state = { refreshingReflections: false, + minDuration: window.subhourMinDuration, }; } + async componentDidMount() { + if (isNotSoftware?.()) { + try { + const res = await getSupportFlag(SUBHOUR_ACCELERATION_POLICY); + this.setState({ + minDuration: res?.value ? 60 * 1000 : DURATION_ONE_HOUR, + }); + window.subhourMinDuration = res?.value ? 60 * 1000 : DURATION_ONE_HOUR; + } catch (e) { + // + } + } + } + refreshAll = () => { ApiUtils.fetch( `catalog/${encodeURIComponent(this.props.datasetId)}/refresh`, @@ -159,7 +174,6 @@ class DataFreshnessSection extends Component { render() { const { entityType, - elementConfig, editing, fields: { accelerationRefreshPeriod, @@ -188,21 +202,16 @@ class DataFreshnessSection extends Component { ); } - // do not show Refresh Policy header and tooltip in configurable forms with elementConfig return ( -
        +
        - {!elementConfig && ( - - {la("Refresh Policy")} - - - )} + {la("Refresh Policy")} +
        {helpContent}
        @@ -228,25 +237,23 @@ class DataFreshnessSection extends Component {
        {this.isRefreshAllowed() && entityType === "dataset" && ( )}
        @@ -274,7 +281,7 @@ class DataFreshnessSection extends Component { > @@ -296,6 +303,13 @@ class DataFreshnessSection extends Component { } const styles = { + container: { + marginTop: 6, + }, + info: { + maxWidth: 525, + marginBottom: 26, + }, section: { display: "flex", marginBottom: 6, @@ -307,11 +321,11 @@ const styles = { }, label: { fontSize: "18px", - fontWeight: 300, + fontWeight: 600, margin: "0 0 8px 0px", - color: "#555555", display: "flex", alignItems: "center", + color: "var(--color--neutral--900)", }, inputLabel: { ...formDefault, diff --git a/dac/ui/src/components/Forms/FormSection.js b/dac/ui/src/components/Forms/FormSection.js index ffa6f035e9..6c49028955 100644 --- a/dac/ui/src/components/Forms/FormSection.js +++ b/dac/ui/src/components/Forms/FormSection.js @@ -261,7 +261,7 @@ const styles = { height: 12, marginRight: 8, marginTop: -3, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }, sourceIcon: { width: 40, diff --git a/dac/ui/src/components/Forms/MetadataRefresh.js b/dac/ui/src/components/Forms/MetadataRefresh.js index 489543af27..8e8ea89310 100644 --- a/dac/ui/src/components/Forms/MetadataRefresh.js +++ b/dac/ui/src/components/Forms/MetadataRefresh.js @@ -32,10 +32,7 @@ const DETAILS_TOOLTIP = `Metadata Dremio needs for query planning such as inform Fetch Modes: Only Queried Datasets: -Dremio updates details for previously queried objects in a source. This mode increases query performance as less work needs to be done at query time for these datasets. - -All Datasets (deprecated): -Dremio updates details for all datasets in a source. This mode increases query performance as less work needs to be done at query time.`; +Dremio updates details for previously queried objects in a source. This mode increases query performance as less work needs to be done at query time for these datasets.`; const AUTHORIZATION_TOOLTIP = "When impersonation is enabled, maximum amount of time Dremio will cache authorization information."; @@ -126,7 +123,6 @@ export default class MetadataRefresh extends Component { refreshModeOptions = [ { label: la("Only Queried Datasets"), option: "PREFETCH_QUERIED" }, - { label: la("All Datasets (deprecated)"), option: "PREFETCH" }, ]; render() { diff --git a/dac/ui/src/components/Forms/Wrappers/RepoSelector.tsx b/dac/ui/src/components/Forms/Wrappers/RepoSelector.tsx deleted file mode 100644 index cc885b2f36..0000000000 --- a/dac/ui/src/components/Forms/Wrappers/RepoSelector.tsx +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { compose } from "redux"; -import { connect } from "react-redux"; -import { useState, useEffect, useMemo } from "react"; -// @ts-ignore -import { Select } from "dremio-ui-lib"; - -import FieldWithError from "components/Fields/FieldWithError"; -import * as ProjectSelectors from "@inject/selectors/projects"; -import { useIntl } from "react-intl"; - -//@ts-ignore -const getNessieProjects = ProjectSelectors?.getNessieProjects || (() => []); //Stub for OSS/Enterprise - -type RepoSelectorProps = { - defaultValue?: string | null; - onChange?: (value: string) => void; - nessieProjects: any; -}; - -const CUSTOM = "customRepo"; - -const RepoSelector = (props: RepoSelectorProps) => { - const intl = useIntl(); - const { defaultValue = CUSTOM, onChange, nessieProjects } = props; - const [project, setProject] = useState(CUSTOM); - const projectsList = useMemo( - () => [ - ...nessieProjects.map((item: { name: string; id: string }) => { - return { - label: item.name, - value: item.id, - }; - }), - { - label: intl.formatMessage({ id: "Nessie.CustomRepository" }), - value: CUSTOM, - }, - ], - [nessieProjects, intl] - ); - - const doChange = (value: any) => { - setProject(value); - onChange && onChange(value); - }; - - const onRepoChange = (e: { target: { value: any } }) => { - const value = e.target.value; - doChange(value); - }; - - //Set initial value on mount - useEffect(() => { - if (!defaultValue && projectsList.length > 0) { - doChange(projectsList[0].value); - } else { - const found = projectsList.find((cur) => cur.value === defaultValue); - if (found) doChange(defaultValue); - else doChange(CUSTOM); - } - }, [projectsList]); //eslint-disable-line - - return ( -
        - -
        -
        { !item.disabledClick && onClick && @@ -492,7 +492,7 @@ export class DeferredRenderer extends Component { render() { const { index } = this.props; if (this.state.initial) return null; - return
        {this.props.render(index)}
        ; + return this.props.render(index); } } export default VirtualizedTableViewer; diff --git a/dac/ui/src/components/WikiDrawerWrapper.tsx b/dac/ui/src/components/WikiDrawerWrapper.tsx new file mode 100644 index 0000000000..e40327676e --- /dev/null +++ b/dac/ui/src/components/WikiDrawerWrapper.tsx @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DialogContent, Drawer } from "dremio-ui-lib/components"; +import WikiLanding from "@app/pages/ExplorePage/components/Wiki/WikiLanding"; +import React, { useEffect } from "react"; + +interface WikiDrawerWrapperProps { + wikiDrawerTitle: JSX.Element; + datasetDetails: any; + drawerIsOpen: boolean; +} + +const WikiDrawerWrapper = ({ + wikiDrawerTitle, + datasetDetails, + drawerIsOpen, +}: WikiDrawerWrapperProps) => { + useEffect(() => { + // to stop stacking of drawers + if (drawerIsOpen) { + const existingDrawerLength = + document.getElementsByClassName("wiki-drawer").length; + const existingDrawer = + existingDrawerLength > 1 + ? document.getElementsByClassName("wiki-drawer")[0] + : null; + if (existingDrawer) document.getElementById("close-wiki-drawer")?.click(); + } + }, [drawerIsOpen]); + return ( + ) => { + e.preventDefault(); + e.stopPropagation(); + }} + className="wiki-drawer" + > + + + + + ); +}; + +export default WikiDrawerWrapper; diff --git a/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.js b/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.js index edd26e85f4..d51050da65 100644 --- a/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.js +++ b/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.js @@ -19,7 +19,7 @@ import PropTypes from "prop-types"; import { getExploreState } from "@app/selectors/explore"; import SqlAutoComplete from "pages/ExplorePage/components/SqlEditor/SqlAutoComplete"; -import FunctionsHelpPanel from "@app/pages/ExplorePage/components/SqlEditor/FunctionsHelpPanel"; +import SQLFunctionsPanel from "@app/pages/ExplorePage/components/SqlEditor/SQLFunctionsPanel"; import "./AddFieldEditor.less"; @@ -62,12 +62,13 @@ export class AddFieldEditor extends PureComponent { getFunctionsPanel() { return ( - + this.state.funcHelpPanel && ( + + ) ); } diff --git a/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.less b/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.less index 8b0401dc11..0e16ef81df 100644 --- a/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.less +++ b/dac/ui/src/components/Wizards/DetailsWizard/components/AddFieldEditor.less @@ -18,11 +18,11 @@ .add-field-editor { .sql-help-panel { - background: #f4f4f4; - left: 50%; + position: absolute; top: 0; - right: 0; - width: 49%; + right: 10px; + width: 48%; + overflow: hidden; } } } diff --git a/dac/ui/src/components/Wizards/components/StepWizard.js b/dac/ui/src/components/Wizards/components/StepWizard.js index 
65695ef8e8..7fbd2a36d5 100644 --- a/dac/ui/src/components/Wizards/components/StepWizard.js +++ b/dac/ui/src/components/Wizards/components/StepWizard.js @@ -17,8 +17,7 @@ import { PureComponent } from "react"; import PropTypes from "prop-types"; -import { Button } from "dremio-ui-lib"; -import * as ButtonTypes from "components/Buttons/ButtonTypes"; +import { Button } from "dremio-ui-lib/components"; import WizardFooter from "./WizardFooter"; import { intl } from "@app/utils/intl"; @@ -38,20 +37,19 @@ class StepWizard extends PureComponent { disabled={!this.props.hasActiveDataset} onMouseDown={this.props.changeFormType.bind(this, "apply")} onClick={this.props.onNextClick} - color={ButtonTypes.UI_LIB_PRIMARY} - style={{ marginBottom: 0 }} - disableMargin + variant="primary" key="details-wizard-next" - text={intl.formatMessage({ id: "Common.Next" })} - /> + > + {intl.formatMessage({ id: "Common.Next" })} + ); } diff --git a/dac/ui/src/components/formsForAddData/ExistingForm.js b/dac/ui/src/components/formsForAddData/ExistingForm.js index 9092ea36b0..8ad5d305d1 100644 --- a/dac/ui/src/components/formsForAddData/ExistingForm.js +++ b/dac/ui/src/components/formsForAddData/ExistingForm.js @@ -89,6 +89,7 @@ export class ExistingForm extends Component { showDataSets showSources style={{ flex: 1, minHeight: 0, height: 210 }} + shouldShowOverlay={false} /> ), Search: ( @@ -112,11 +113,19 @@ export class ExistingForm extends Component { showDataSets showSources style={{ flex: 1, minHeight: 0, height: 210 }} + shouldShowOverlay={false} /> - ) + ), }); - return ; + return ( + + ); } } diff --git a/dac/ui/src/components/oldDragComponents/DragAreaColumn.module.less b/dac/ui/src/components/oldDragComponents/DragAreaColumn.module.less index 889cf34102..4380c24bea 100644 --- a/dac/ui/src/components/oldDragComponents/DragAreaColumn.module.less +++ b/dac/ui/src/components/oldDragComponents/DragAreaColumn.module.less @@ -84,7 +84,7 @@ height: 26px; background-color: @ACTIVE_DRAG_AREA; &--empty { - background-color: var(--dremio--color--neutral--000); + background-color: white; } } .empty { diff --git a/dac/ui/src/components/tableRowButtons/DeleteButton.js b/dac/ui/src/components/tableRowButtons/DeleteButton.js index ad9896d2f5..1e79527cf7 100644 --- a/dac/ui/src/components/tableRowButtons/DeleteButton.js +++ b/dac/ui/src/components/tableRowButtons/DeleteButton.js @@ -16,6 +16,7 @@ import { Component } from "react"; import PropTypes from "prop-types"; import { actionBtn, actionIcon } from "./actionButtons.less"; +import { IconButton } from "dremio-ui-lib/components"; export class DeleteButton extends Component { static propTypes = { @@ -33,10 +34,10 @@ export class DeleteButton extends Component { const { title, onClick, dataQa } = this.props; return ( - + ); } } diff --git a/dac/ui/src/components/tableRowButtons/EditButton.js b/dac/ui/src/components/tableRowButtons/EditButton.js index 0869d8adfe..99076108d0 100644 --- a/dac/ui/src/components/tableRowButtons/EditButton.js +++ b/dac/ui/src/components/tableRowButtons/EditButton.js @@ -16,6 +16,7 @@ import { Component } from "react"; import PropTypes from "prop-types"; import { actionBtn, actionIcon } from "./actionButtons.less"; +import { IconButton } from "dremio-ui-lib/components"; export class EditButton extends Component { static propTypes = { @@ -33,14 +34,14 @@ export class EditButton extends Component { const { title, onClick, dataQa } = this.props; return ( - + ); } } diff --git a/dac/ui/src/components/tableRowButtons/actionButtons.less 
b/dac/ui/src/components/tableRowButtons/actionButtons.less index e0fb4f7b91..95be4c459e 100644 --- a/dac/ui/src/components/tableRowButtons/actionButtons.less +++ b/dac/ui/src/components/tableRowButtons/actionButtons.less @@ -21,13 +21,13 @@ border: 0; border-radius: 2px; box-shadow: rgb(194, 194, 194) 0px 1px 1px; - background: @action-btn-bg; + background: @action-btn-bg !important; overflow: hidden; display: inline-flex; align-items: center; justify-content: center; &:hover { - background: @action-btn-hover; + background: @action-btn-hover !important; } } diff --git a/dac/ui/src/constants/Api.js b/dac/ui/src/constants/Api.js index 208be5a0be..53f2090ec8 100644 --- a/dac/ui/src/constants/Api.js +++ b/dac/ui/src/constants/Api.js @@ -22,6 +22,8 @@ export const API_V2 = "apiv2"; export const API_URL_V3 = `//${host}/api/v3`; export const API_V3 = "api/v3"; export const WEB_SOCKET_URL = `ws${isSecure ? "s" : ""}:${API_URL_V2}/socket`; +export const NESSIE_PROXY_URL_V2 = `//${host}/nessie-proxy/v2`; + class Api { toString() { console.warn( diff --git a/dac/ui/src/constants/datasetTypes.js b/dac/ui/src/constants/datasetTypes.js index a9b062060a..885b52f18f 100644 --- a/dac/ui/src/constants/datasetTypes.js +++ b/dac/ui/src/constants/datasetTypes.js @@ -32,6 +32,11 @@ export const DATASET_TYPES_TO_ICON_TYPES = { // [PHYSICAL_DATASET_HOME_FOLDER]: 'FolderData' }; +export const DATASET_TYPES_TO_ICEBERG_TYPES = { + [PHYSICAL_DATASET]: "IcebergTable", + [VIRTUAL_DATASET]: "IcebergView", +}; + export const datasetTypeToEntityType = { [VIRTUAL_DATASET]: "dataset", [PHYSICAL_DATASET]: "physicalDataset", diff --git a/dac/ui/src/constants/sourceTypes.js b/dac/ui/src/constants/sourceTypes.js index 0cffafbe7e..2a93db9461 100644 --- a/dac/ui/src/constants/sourceTypes.js +++ b/dac/ui/src/constants/sourceTypes.js @@ -36,9 +36,11 @@ export const AMAZONELASTIC = "AMAZONELASTIC"; export const AZURE_STORAGE = "AZURE_STORAGE"; export const SYNAPSE = "SYNAPSE"; export const ADX = "ADX"; -export const SNOWFLAKE = "SNOWFLAKE"; export const MSACCESS = "MSAccess"; export const SPARK = "SPARK"; +export const SNOWFLAKE = "SNOWFLAKE"; +export const DREMIOTODREMIO = "DREMIOTODREMIO"; +export const AZURE_SAMPLE_SOURCE = "SAMPLE_SOURCE"; // These are not implemented in the backend yet. 
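A compact sketch of the classification rule that the sourceTypes.js hunk continuing below encodes: the new AZURE_SAMPLE_SOURCE joins the object-storage map, so isDatabaseType must exclude it alongside the data-plane and data-lake lookups. The map contents here are trimmed stand-ins for illustration, not the full tables from sourceTypes.js.

```typescript
// Trimmed stand-in maps; the real tables live in sourceTypes.js.
const AZURE_SAMPLE_SOURCE = "SAMPLE_SOURCE";
const dataLakeSourceType: Record<string, boolean> = { S3: true, GCS: true };
const dataPlaneSources: Record<string, boolean> = { NESSIE: true };

// A source counts as a database only when it is none of: a data-plane
// source, a data-lake source, or the sample source.
const isDatabaseType = (sourceType: string): boolean =>
  !dataPlaneSources[sourceType] &&
  !dataLakeSourceType[sourceType] &&
  sourceType !== AZURE_SAMPLE_SOURCE;

// isDatabaseType("ORACLE") === true; isDatabaseType("SAMPLE_SOURCE") === false
```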
export const CASSANDRA = "CASSANDRA"; @@ -72,7 +74,8 @@ export const sourceProperties = [ { label: "Google Cloud Storage", sourceType: GCS, beta: true }, { label: "Microsoft Azure Synapse Analytics", sourceType: SYNAPSE }, { label: "Microsoft Azure Data Explorer", sourceType: ADX }, - { label: "Snowflake", sourceType: SNOWFLAKE } + { label: "Snowflake", sourceType: SNOWFLAKE }, + { label: "Dremio to Dremio", sourceType: DREMIOTODREMIO }, ]; export const metastoresSourceType = { @@ -90,6 +93,7 @@ export const objectStorageSourceType = { [NAS]: true, [GCS]: true, [HISTORYTABLES]: true, + [AZURE_SAMPLE_SOURCE]: true, }; export const dataLakeSourceType = { @@ -112,7 +116,11 @@ const dataPlaneSources = { }; export const isDatabaseType = (sourceType) => { - return !dataLakeSourceType[sourceType] && !dataPlaneSources[sourceType]; + return ( + !dataPlaneSources[sourceType] && + !dataLakeSourceType[sourceType] && + sourceType !== AZURE_SAMPLE_SOURCE + ); }; export const isDataLakeSourceType = (sourceType) => { diff --git a/dac/ui/src/containers/App-spec.js b/dac/ui/src/containers/App-spec.js index 3a235a7e7c..ecce0f3ee3 100644 --- a/dac/ui/src/containers/App-spec.js +++ b/dac/ui/src/containers/App-spec.js @@ -48,47 +48,6 @@ describe("App-spec", () => { }); }); - describe("#handleGlobalError()", () => { - it("should ignore when url origin is different than window.location.origin", () => { - sinon.stub(instance, "displayError"); - sinon.stub(instance, "_getWindowOrigin").returns("http://localhost:4000"); - instance.handleGlobalError(undefined, "message", "https://foo"); - expect(instance.displayError).to.not.be.called; - instance.handleGlobalError( - undefined, - "message", - instance._getWindowOrigin() - ); - expect(instance.displayError).to.be.called; - }); - - it('should ignore when url is ""', () => { - sinon.stub(instance, "displayError"); - instance.handleGlobalError(undefined, "message", ""); - expect(instance.displayError).to.not.be.called; - }); - - it("should pass error || message to displayError", () => { - sinon.stub(instance, "displayError"); - sinon.stub(instance, "_getWindowOrigin").returns("http://localhost:4000"); - instance.handleGlobalError( - undefined, - "message", - instance._getWindowOrigin(), - null, - null, - "error" - ); - expect(instance.displayError).to.be.calledWith("error"); - }); - - it("should call previous onerror if defined", () => { - const prevOnerror = sinon.stub(); - instance.handleGlobalError(prevOnerror, "message", 1, 2, 3, "error"); - expect(prevOnerror).to.be.calledWith("message", 1, 2, 3, "error"); - }); - }); - describe("#_shouldIgnoreExternalStack()", () => { it("returns true when stackOrigin is not current origin", () => { expect(instance._shouldIgnoreExternalStack()).to.be.false; diff --git a/dac/ui/src/containers/App.js b/dac/ui/src/containers/App.js index c95ac04b91..7fcf5c3168 100644 --- a/dac/ui/src/containers/App.js +++ b/dac/ui/src/containers/App.js @@ -25,11 +25,9 @@ import { } from "@mui/material/styles"; import { replace } from "react-router-redux"; import DocumentTitle from "react-document-title"; -import urlParse from "url-parse"; import { showAppError } from "@app/actions/prodError"; import { DnDContextDecorator } from "@app/components/DragComponents/DnDContextDecorator"; -import { ErrorBoundary } from "@app/components/OldErrorBoundary"; import { Suspense } from "@app/components/Lazy"; import socket from "@inject/utils/socket"; @@ -48,7 +46,7 @@ import ConfirmationContainer from "@app/containers/Confirmation"; import ProdErrorContainer from 
"@app/containers/ProdError"; import { LocationProvider } from "@app/containers/dremioLocation"; import { withHookProvider } from "@app/containers/RouteLeave"; -import { isDcsEdition } from "dyn-load/utils/versionUtils"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; import { themeStyles } from "dremio-ui-lib"; import "../uiTheme/css/react-datepicker.css"; @@ -104,7 +102,7 @@ export class App extends Component { }; static redirectForServerStatus(props) { - if (!isDcsEdition()) { + if (!isNotSoftware()) { const { location } = props; if ( props.serverStatus.get("status") !== SERVER_STATUS_OK && @@ -125,9 +123,6 @@ export class App extends Component { constructor(props) { super(props); - // use window.onerror here instead of addEventListener('error') because ErrorEvent.error is - // experimental according to mdn. Can get both file url and error from either. - window.onerror = this.handleGlobalError.bind(this, window.onerror); window.onunhandledrejection = this.handleUnhandledRejection; } @@ -148,29 +143,13 @@ export class App extends Component { App.redirectForServerStatus(props); } - handleGlobalError = (prevOnerror, msg, url, lineNo, columnNo, error) => { - prevOnerror && prevOnerror.call(window, msg, url, lineNo, columnNo, error); - - // there is no URL for external scripts (at least in Chrome) - if (!url || urlParse(url).origin !== this._getWindowOrigin()) return; - - console.error("Uncaught Error", error || msg); - this.displayError(error || msg); - }; - handleUnhandledRejection = (rejectionEvent) => { const error = rejectionEvent.reason; if (!error) return; if (error.stack && this._shouldIgnoreExternalStack(error.stack)) return; - //By default, Raven.js does not capture unhandled promise rejections. sentryUtil.logException(error); - - console.error("UnhandledRejection", error); - if (error.status !== 401) { - this.displayError(error); - } }; displayError(error) { @@ -213,22 +192,20 @@ export class App extends Component { const { children } = this.props; return ( - - - - - {children} - - - - - - -
        -
        - - - + + + + {children} + + + + + + +
        +
        + + ); diff --git a/dac/ui/src/containers/Confirmation.js b/dac/ui/src/containers/Confirmation.js index 76b1eae260..0354d53950 100644 --- a/dac/ui/src/containers/Confirmation.js +++ b/dac/ui/src/containers/Confirmation.js @@ -28,11 +28,25 @@ export class ConfirmationContainer extends Component { hideConfirmationDialog: PropTypes.func, }; - onConfirm = (promptValue) => { - this.props.hideConfirmationDialog(); - const { confirm } = this.props.confirmation; - if (confirm && typeof confirm === "function") { - confirm(promptValue); + state = { + submitting: false, + }; + + onConfirm = async (promptValue) => { + const { confirm, isAsyncAction } = this.props.confirmation; + const isFunction = confirm && typeof confirm === "function"; + if (!isAsyncAction) { + this.props.hideConfirmationDialog(); + if (isFunction) { + confirm(promptValue); + } + } else { + if (isFunction) { + this.setState({ submitting: true }); + await confirm(promptValue); + } + this.setState({ submitting: false }); + this.props.hideConfirmationDialog(); } }; @@ -94,6 +108,7 @@ export class ConfirmationContainer extends Component { className={className} headerIcon={headerIcon} size={size} + asyncSubmitting={this.state.submitting} /> ); } diff --git a/dac/ui/src/containers/Notification.js b/dac/ui/src/containers/Notification.js index 59de9bdb03..d0f75808f0 100644 --- a/dac/ui/src/containers/Notification.js +++ b/dac/ui/src/containers/Notification.js @@ -21,6 +21,17 @@ import deepEqual from "deep-equal"; import Message from "components/Message"; +import { + CLOUD_CFT_NOTIFICATION_UID, + CLOUD_CFT_NOTIFICATION_ERROR_UID, + CLOUD_CFT_NOTIFICATION_SUCCESS_UID, +} from "@inject/components/AddCloudModal/constants"; +import { + PROJECT_CFT_NOTIFICATION_UID, + PROJECT_CFT_NOTIFICATION_ERROR_UID, + PROJECT_CFT_NOTIFICATION_SUCCESS_UID, +} from "@inject/components/AddProjectModal/constants"; + export class NotificationContainer extends Component { static propTypes = { notification: PropTypes.object, @@ -37,11 +48,34 @@ export class NotificationContainer extends Component { } componentWillReceiveProps(newProps) { - const { message, level, autoDismiss, removeMessageType, detailsStyle } = - newProps.notification; + const { + uid, + level, + message, + autoDismiss, + detailsStyle, + removeMessageType, + options: { messageAction } = {}, + } = newProps.notification; if (removeMessageType) { - this.removeMessages(removeMessageType); + this.removeMessages(removeMessageType, uid); + } else if ( + // See DX-61151 + [ + CLOUD_CFT_NOTIFICATION_ERROR_UID, + CLOUD_CFT_NOTIFICATION_SUCCESS_UID, + ].includes(uid) + ) { + this.removeMessages("info", CLOUD_CFT_NOTIFICATION_UID); + } else if ( + [ + PROJECT_CFT_NOTIFICATION_ERROR_UID, + PROJECT_CFT_NOTIFICATION_SUCCESS_UID, + ].includes(uid) + ) { + this.removeMessages("info", PROJECT_CFT_NOTIFICATION_UID); } + const handleDismiss = () => { this.notificationSystem.removeNotification(notification); return false; @@ -54,18 +88,20 @@ export class NotificationContainer extends Component { this.notificationSystem.addNotification({ children: ( ), // message, dismissible: false, level, position: "tc", - // see https://dremio.atlassian.net/browse/DX-5316 for commentary + // see DX-5316 for commentary autoDismiss: autoDismiss || (level === "success" ? 5 : 0), + uid: uid, }); if (notification) { // if the notification is the same as last then remove the previous one instead of stack. 
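The Notification.js hunk above begins tagging notifications with a uid, and the hunk that follows reworks removeMessages so that a uid, when present, removes exactly one notification while a messageType still clears every match. A standalone sketch of that registry behavior; TrackedNotification and NotificationRegistry are illustrative shapes, not Dremio's actual classes.

```typescript
// Illustrative shapes only; the real component tracks react-notification-system handles.
type TrackedNotification = { uid?: string; messageType?: string; handle: unknown };

class NotificationRegistry {
  private added: TrackedNotification[] = [];

  add(entry: TrackedNotification): void {
    this.added.push(entry);
  }

  // uid takes precedence: it identifies exactly one notification. Without a
  // uid, every notification of the given messageType is cleared instead.
  remove(messageType?: string, uid?: string): TrackedNotification[] {
    let removed: TrackedNotification[] = [];
    if (uid) {
      removed = this.added.filter((n) => n.uid === uid);
      this.added = this.added.filter((n) => n.uid !== uid);
    } else if (messageType) {
      removed = this.added.filter((n) => n.messageType === messageType);
      this.added = this.added.filter((n) => n.messageType !== messageType);
    }
    return removed; // caller hands these to notificationSystem.removeNotification
  }
}
```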
@@ -75,23 +111,30 @@ export class NotificationContainer extends Component { // message is defined if notification is truthy; if message has type, store it in the local list const messageType = message.messageType || (message.get && message.get("messageType")); - if (messageType) { - this.addedNotifications.push({ messageType, notification }); + if (messageType || uid) { + this.addedNotifications.push({ messageType, notification, uid }); } } } - removeMessages = (messageType) => { - // remove messages of the given type from notification system and hence from the screen - this.addedNotifications.forEach((entry) => { - if (entry.messageType === messageType) { - this.notificationSystem.removeNotification(entry.notification); - } - }); - // remove messages of the given type from local array - this.addedNotifications = this.addedNotifications.filter( - (entry) => entry.messageType !== messageType - ); + removeMessages = (messageType, uid) => { + if (uid) { + this.notificationSystem.removeNotification(uid); + this.addedNotifications = this.addedNotifications.filter( + (notification) => notification.uid !== uid + ); + } else if (messageType) { + // remove messages of the given type from notification system and hence from the screen + this.addedNotifications.forEach((entry) => { + if (entry.messageType === messageType) { + this.notificationSystem.removeNotification(entry.notification); + } + }); + // remove messages of the given type from local array + this.addedNotifications = this.addedNotifications.filter( + (entry) => entry.messageType !== messageType + ); + } }; render() { diff --git a/dac/ui/src/containers/Root.js b/dac/ui/src/containers/Root.js index 73a0f38f7b..2826128f9b 100644 --- a/dac/ui/src/containers/Root.js +++ b/dac/ui/src/containers/Root.js @@ -21,7 +21,6 @@ import { Provider } from "react-redux"; import { Router, browserHistory } from "react-router"; import { syncHistoryWithStore } from "react-router-redux"; -import { isDataPlaneEnabled } from "@inject/utils/dataPlaneUtils"; import { useProjectContext } from "@inject/utils/storageUtils/localStorageUtils"; import routes from "routes"; @@ -30,7 +29,12 @@ import { add } from "utils/storageUtils/localStorageListener"; import { setUserState } from "@app/actions/account"; import { intl } from "@app/utils/intl"; import { MantineProvider } from "@mantine/core"; -import { mantineTheme } from "dremio-ui-lib/dist-esm/mantineTheme"; +import { mantineTheme } from "dremio-ui-lib/components"; + +import { NetworkConnectivityBanner } from "dremio-ui-common/components/NetworkConnectivityBanner.js"; +import { ErrorBoundary } from "@app/components/ErrorBoundary/ErrorBoundary"; + +import { getIntlContext } from "dremio-ui-common/contexts/IntlContext.js"; function Root({ store }) { const history = syncHistoryWithStore(browserHistory, store); @@ -54,15 +58,20 @@ function Root({ store }) { if (localeLoading) return null; return ( - - - - - {routes(store.dispatch, projectContext, isDataPlaneEnabled)} - - - - + + + + + + + {routes(store.dispatch, projectContext)} + + + + + ); } Root.propTypes = { diff --git a/dac/ui/src/containers/RouteLeave-spec.js b/dac/ui/src/containers/RouteLeave-spec.js index 048cf2b5b5..b30ecf1f6f 100644 --- a/dac/ui/src/containers/RouteLeave-spec.js +++ b/dac/ui/src/containers/RouteLeave-spec.js @@ -155,7 +155,7 @@ describe("RouteLeave.js", () => { changeValue(true); //check transitions - // Todo: Fix the below UT (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix the below UT (DX-30942) // changeValue(false); // true -> 
false changeValue(true); // false -> true }); diff --git a/dac/ui/src/containers/RouteLeaveComponent.tsx b/dac/ui/src/containers/RouteLeaveComponent.tsx new file mode 100644 index 0000000000..a0ea35d460 --- /dev/null +++ b/dac/ui/src/containers/RouteLeaveComponent.tsx @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { LeaveContextProvider } from "dremio-ui-common/components/LeaveContext/LeaveContext.js"; +import { browserHistory } from "react-router"; + +export const RouteLeaveComponent = ({ + children, +}: { + children: JSX.Element; +}) => { + return ( + { + browserHistory.push(nextLocation); + }} + > + {children} + + ); +}; diff --git a/dac/ui/src/containers/SettingPage/settingPage.less b/dac/ui/src/containers/SettingPage/settingPage.less index 8794115d5f..8a7e7f8ae6 100644 --- a/dac/ui/src/containers/SettingPage/settingPage.less +++ b/dac/ui/src/containers/SettingPage/settingPage.less @@ -42,7 +42,7 @@ } &.grey-icon { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } } diff --git a/dac/ui/src/contexts/ApiContext.ts b/dac/ui/src/contexts/ApiContext.ts index 66cd3795ee..cbe8e44958 100644 --- a/dac/ui/src/contexts/ApiContext.ts +++ b/dac/ui/src/contexts/ApiContext.ts @@ -14,82 +14,25 @@ * limitations under the License. 
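The ApiContext.ts diff that continues below drops a hand-rolled retry loop in favor of the shared appFetch helper from dremio-ui-common. For reference, a compact sketch of the deleted policy: gateway-style failures back off exponentially at 2^attempt seconds, up to MAX_RETRIES attempts. fetchWithBackoff and isRetryable are illustrative names, and checking raw status codes stands in for the narrowHttpError error classes the removed code used; only the backoff formula is copied verbatim.

```typescript
// Sketch of the removed retry policy, under the assumptions noted above.
const MAX_RETRIES = 4;

// The removed code retried BadGateway/ServiceUnavailable/GatewayTimeout errors.
const isRetryable = (status: number): boolean => [502, 503, 504].includes(status);

// Mirrors the removed getRetryDuration: 1s, 2s, 4s, 8s across retries.
const getRetryDurationMs = (triesRemaining: number): number =>
  2 ** (MAX_RETRIES - triesRemaining) * 1000;

async function fetchWithBackoff(
  input: RequestInfo | URL,
  init: RequestInit = {},
  triesRemaining: number = MAX_RETRIES
): Promise<Response> {
  const res = await fetch(input, init);
  if (!res.ok && isRetryable(res.status) && triesRemaining > 0) {
    await new Promise((resolve) =>
      setTimeout(resolve, getRetryDurationMs(triesRemaining))
    );
    return fetchWithBackoff(input, init, triesRemaining - 1);
  }
  return res;
}
```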
*/ -import { UnauthorizedError } from "dremio-ui-common/errors/UnauthorizedError"; import { setApiContext } from "dremio-ui-common/contexts/ApiContext.js"; -import { HttpError } from "dremio-ui-common/errors/HttpError"; -import { narrowHttpError } from "dremio-ui-common/errors/narrowHttpError"; -import { getLoggingContext } from "dremio-ui-common/contexts/LoggingContext.js"; -import { getSessionContext } from "./SessionContext"; -import { BadGatewayError } from "dremio-ui-common/errors/BadGatewayError"; -import { ServiceUnavailableError } from "dremio-ui-common/errors/ServiceUnavailableError"; -import { GatewayTimeoutError } from "dremio-ui-common/errors/GatewayTimeoutError"; - -const MAX_RETRIES = 4; - -const getRetryDuration = (error: HttpError, triesRemaining: number): number => { - if (error.res.headers.get("retry-after")) { - // ignore for now - } - return 2 ** (MAX_RETRIES - triesRemaining) * 1000; -}; - -const logger = getLoggingContext().createLogger("ApiContext"); -const sessionContext = getSessionContext(); - -const isRetryableError = (error: HttpError) => { - return ( - error instanceof BadGatewayError || - error instanceof ServiceUnavailableError || - error instanceof GatewayTimeoutError - ); -}; +import { getSessionContext } from "dremio-ui-common/contexts/SessionContext.js"; +import { appFetch } from "dremio-ui-common/utilities/appFetch.js"; setApiContext({ - fetch: (input, init = {}) => { - const sessionIdentifier = sessionContext.getSessionIdentifier(); + fetch: (input: RequestInfo | URL, init: RequestInit = {}) => { + const sessionIdentifier = getSessionContext().getSessionIdentifier(); if (!sessionIdentifier) { - logger.debug("Canceling request because session is invalid", input); - sessionContext.handleInvalidSession(); + getSessionContext().handleInvalidSession(); return new Promise(() => {}); } - const startFetch = ( - triesRemaining: number - ): ReturnType => { - return fetch(input, { - ...init, - headers: { - Authorization: `Bearer ${sessionIdentifier}`, - ...init.headers, - }, - }) - .then(async (res) => { - if (!res.ok) { - const error = await narrowHttpError(new HttpError(res)); - if (isRetryableError(error) && triesRemaining) { - await new Promise((resolve) => - setTimeout(resolve, getRetryDuration(error, triesRemaining)) - ); - return startFetch(triesRemaining - 1); - } - throw error; - } - return res; - }) - .catch((err) => { - if (err instanceof TypeError) { - logger.error(`Failed to establish connection to ${input}`); - } - - if (err instanceof UnauthorizedError) { - // sessionContext.handleInvalidSession(); - return new Promise(() => {}); - } - - throw err; - }); - }; - - return startFetch(MAX_RETRIES); + return appFetch(input, { + ...init, + headers: { + Authorization: `Bearer ${sessionIdentifier}`, + ...init.headers, + }, + }); }, + doubleEncodeJsonParam: false, }); diff --git a/dac/ui/src/contexts/SessionContext.ts b/dac/ui/src/contexts/SessionContext.ts index 4190cdd874..2c0133e8ba 100644 --- a/dac/ui/src/contexts/SessionContext.ts +++ b/dac/ui/src/contexts/SessionContext.ts @@ -57,7 +57,7 @@ const sessionContext = { getSessionIdentifier, sessionIsValid, handleInvalidSession: () => { - handleLogout(); + // handleLogout(); // Disabled until we stop making feature flag calls in OSS }, handleLogout, }; diff --git a/dac/ui/src/endpoints/SQLFunctions/listSQLFunctions.ts b/dac/ui/src/endpoints/SQLFunctions/listSQLFunctions.ts index e51be7ca90..ade4506861 100644 --- a/dac/ui/src/endpoints/SQLFunctions/listSQLFunctions.ts +++ 
b/dac/ui/src/endpoints/SQLFunctions/listSQLFunctions.ts @@ -15,13 +15,25 @@ */ import { APIV2Call } from "@app/core/APICall"; -import { FunctionSignature, ModelFunction } from "@app/types/sqlFunctions"; +import { + FunctionSignature, + ModelFunction, + Parameter, + ParameterKindEnum, +} from "@app/types/sqlFunctions"; import localStorageUtils from "@inject/utils/storageUtils/localStorageUtils"; import { cloneDeep } from "lodash"; +// @ts-ignore +import { getDocsLink } from "@inject/utils/versionUtils"; +import { FunctionCategoryLabels } from "@app/utils/sqlFunctionUtils"; export type ModifiedSQLFunction = ModelFunction & { key: string; signature: FunctionSignature; + link: string; + label: string; + tags: any[]; + snippet: string; }; const listSqlFunctionsURL = new APIV2Call().paths("sql/functions").toString(); @@ -30,6 +42,14 @@ function isLetter(c: string) { return c.toLowerCase() != c.toUpperCase(); } +function constructParamName(label: string, isOptional: boolean) { + return isOptional ? `[${label}]` : `${label}`; +} + +function constructParamNameWithComma(label: string, isOptional: boolean) { + return isOptional ? ` [,${label}]` : `, ${label}`; +} + export const listSqlFunctions = (): Promise => fetch(listSqlFunctionsURL, { headers: { @@ -38,24 +58,71 @@ export const listSqlFunctions = (): Promise => }) .then((res: any) => res.json()) .then((res: any) => { - const nonAlphabetFunctions = (res?.functions ?? [])?.filter( + const documentedFunctions = (res?.functions ?? []).filter( + (fn: ModelFunction) => fn.description != null + ); + const nonAlphabetFunctions = documentedFunctions.filter( (fn: ModelFunction) => !isLetter(fn?.name[0]) ); - const sortedFunctions = (res?.functions ?? []) - ?.filter((fn: ModelFunction) => isLetter(fn?.name[0])) + const sortedFunctions = documentedFunctions + .filter((fn: ModelFunction) => isLetter(fn?.name[0])) .sort((a: ModelFunction, b: ModelFunction) => { if (a.name.toLowerCase() > b.name.toLowerCase()) return 1; else if (b.name.toLowerCase() > a.name.toLowerCase()) return -1; else return 0; }); const allSortedFunctions = sortedFunctions.concat(nonAlphabetFunctions); + allSortedFunctions.forEach((fn: ModelFunction) => { + fn.functionCategories = fn.functionCategories?.sort(); + }); return allSortedFunctions.flatMap((fn: ModelFunction) => { return fn?.signatures?.map( (signature: FunctionSignature, idx: number) => { + const { parameters = [], returnType, snippetOverride } = signature; + + let params = ""; + if (parameters.length > 0) { + parameters.forEach((param: Parameter, idx: number) => { + const name = `${param.type}${ + param?.name ? ` ${param.name}` : "" + }`; + if (idx === 0) { + params += constructParamName( + name, + param?.kind === ParameterKindEnum.OPTIONAL + ); + } else { + params += constructParamNameWithComma( + name, + param?.kind === ParameterKindEnum.OPTIONAL + ); + } + }); + } + + let snippet = "($1)"; + if (snippetOverride) { + // BE response snippet is `()`, and monaco only reads `()` + snippet = snippetOverride.substring( + fn.name.length, + snippetOverride.length + ); + } + + const label = `(${params}) → ${returnType}`; + const tags = + fn.functionCategories?.map( + (cat) => FunctionCategoryLabels[cat] + ) ?? 
[]; + const newFunction = { ...cloneDeep(fn), signature: signature, key: fn.name + idx, + link: `${getDocsLink?.()}/${fn.name}/`, + snippet: snippet, + label: label, + tags: tags, }; delete newFunction.signatures; return newFunction; diff --git a/dac/ui/src/exports/components/AddToSonarDialog/AddToSonarButton.tsx b/dac/ui/src/exports/components/AddToSonarDialog/AddToSonarButton.tsx index ee16c79cf5..6c47ef1c22 100644 --- a/dac/ui/src/exports/components/AddToSonarDialog/AddToSonarButton.tsx +++ b/dac/ui/src/exports/components/AddToSonarDialog/AddToSonarButton.tsx @@ -22,7 +22,7 @@ import { Button, ModalContainer, useModalContainer, -} from "dremio-ui-lib/dist-esm"; +} from "dremio-ui-lib/components"; import AddToSonarDialogContent from "./components/AddToSonarDialogContent/AddToSonarDialogContent"; import { ArcticCatalog } from "@app/exports/endpoints/ArcticCatalogs/ArcticCatalog.type"; import { SonarProject } from "@app/exports/endpoints/SonarProjects/listSonarProjects"; diff --git a/dac/ui/src/exports/components/AddToSonarDialog/components/AddToSonarDialogContent/AddToSonarDialogContent.tsx b/dac/ui/src/exports/components/AddToSonarDialog/components/AddToSonarDialogContent/AddToSonarDialogContent.tsx index 4c3e5e8e67..cc8b0b22dd 100644 --- a/dac/ui/src/exports/components/AddToSonarDialog/components/AddToSonarDialogContent/AddToSonarDialogContent.tsx +++ b/dac/ui/src/exports/components/AddToSonarDialog/components/AddToSonarDialogContent/AddToSonarDialogContent.tsx @@ -20,7 +20,7 @@ import { usePromise } from "react-smart-promise"; import { zodResolver } from "@hookform/resolvers/zod"; import { FormattedMessage } from "react-intl"; -import { Button, DialogContent } from "dremio-ui-lib/dist-esm"; +import { Button, DialogContent } from "dremio-ui-lib/components"; import { intl } from "@app/utils/intl"; import { InvalidParamsError } from "dremio-ui-common/errors/InvalidParamsError"; import { SonarProjectsResource } from "@app/exports/resources/SonarProjectsResource"; @@ -35,8 +35,6 @@ import { } from "@app/exports/utilities/setHookFormErrorsFromInvalidParamsError"; import { SonarProject } from "@app/exports/endpoints/SonarProjects/listSonarProjects"; -import CLOUD_VENDORS from "@inject/constants/vendors"; - import * as classes from "./AddToSonarDialogContent.module.less"; type AddToSonarDialogProps = { @@ -77,14 +75,6 @@ function AddToSonarDialogContent({ ), [sonarProjects, submittedData] ); - const nonAzureSonarProjects = useMemo( - () => - sonarProjects?.filter( - // @ts-ignore - ({ cloudType }) => cloudType !== CLOUD_VENDORS.AZURE - ) || [], - [sonarProjects] - ); const { handleSubmit, setValue, control, formState, setError } = useForm({ resolver: zodResolver(addToSonarSchema), diff --git a/dac/ui/src/exports/components/AddToSonarDialog/components/SonarProjectSelect/SonarProjectSelect.tsx b/dac/ui/src/exports/components/AddToSonarDialog/components/SonarProjectSelect/SonarProjectSelect.tsx index fa0ae01b0a..7b367f3c40 100644 --- a/dac/ui/src/exports/components/AddToSonarDialog/components/SonarProjectSelect/SonarProjectSelect.tsx +++ b/dac/ui/src/exports/components/AddToSonarDialog/components/SonarProjectSelect/SonarProjectSelect.tsx @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
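Back in the listSQLFunctions.ts change above, each signature's display label is assembled from constructParamName and constructParamNameWithComma: optional parameters are bracketed, and every parameter after the first is comma-prefixed. A condensed sketch of that labeling; buildParamLabel and the Param shape are illustrative rather than the file's actual helpers.

```typescript
// Condensed restatement of the label-building loop in listSQLFunctions.ts.
type Param = { type: string; name?: string; optional: boolean };

function buildParamLabel(parameters: Param[]): string {
  return parameters
    .map((p, idx) => {
      const name = `${p.type}${p.name ? ` ${p.name}` : ""}`;
      if (idx === 0) return p.optional ? `[${name}]` : name;
      return p.optional ? ` [,${name}]` : `, ${name}`;
    })
    .join("");
}

// buildParamLabel([
//   { type: "INT", name: "a", optional: false },
//   { type: "INT", name: "b", optional: true },
// ]) === "INT a [,INT b]", rendered as "(INT a [,INT b]) → INT"
```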
*/ -import { Skeleton } from "@app/../../ui-lib/dist-esm/Skeleton"; +import { Skeleton } from "dremio-ui-lib/components"; import { intl } from "@app/utils/intl"; import { Select } from "@mantine/core"; import { orderBy } from "lodash"; diff --git a/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCard.tsx b/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCard.tsx index 185bfb8768..c8c614cdd7 100644 --- a/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCard.tsx +++ b/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCard.tsx @@ -15,7 +15,7 @@ */ import { type FunctionComponent } from "react"; -import { Avatar, Card, IconButton } from "dremio-ui-lib/dist-esm"; +import { Avatar, Card, IconButton } from "dremio-ui-lib/components"; import { intl } from "@app/utils/intl"; import type { ArcticCatalog } from "../../endpoints/ArcticCatalogs/ArcticCatalog.type"; import classes from "./ArcticCatalogCard.less"; diff --git a/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCardSkeleton.tsx b/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCardSkeleton.tsx index 1c2bfaf690..49ca668a59 100644 --- a/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCardSkeleton.tsx +++ b/dac/ui/src/exports/components/ArcticCatalogCard/ArcticCatalogCardSkeleton.tsx @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Card, Skeleton } from "dremio-ui-lib/dist-esm"; +import { Card, Skeleton } from "dremio-ui-lib/components"; import classes from "./ArcticCatalogCard.less"; export const ArcticCatalogCardSkeleton = () => { return ( diff --git a/dac/ui/src/exports/components/ArcticCatalogsTable/ArcticCatalogsTable.tsx b/dac/ui/src/exports/components/ArcticCatalogsTable/ArcticCatalogsTable.tsx index 425232aae8..5b291755ab 100644 --- a/dac/ui/src/exports/components/ArcticCatalogsTable/ArcticCatalogsTable.tsx +++ b/dac/ui/src/exports/components/ArcticCatalogsTable/ArcticCatalogsTable.tsx @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -import { Avatar } from "dremio-ui-lib/dist-esm"; -import { createTable, type Columns } from "leantable/core"; +import { Avatar } from "dremio-ui-lib/components"; +import { createTable, type Columns } from "leantable2/core"; import { useMemo } from "react"; import { useIntl } from "react-intl"; import { Link } from "react-router"; diff --git a/dac/ui/src/exports/components/ArcticTableHeader/ArcticTableHeader.module.less b/dac/ui/src/exports/components/ArcticTableHeader/ArcticTableHeader.module.less index 509099399f..86d95dc024 100644 --- a/dac/ui/src/exports/components/ArcticTableHeader/ArcticTableHeader.module.less +++ b/dac/ui/src/exports/components/ArcticTableHeader/ArcticTableHeader.module.less @@ -41,22 +41,18 @@ input { &:hover { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } &:focus { - border-color: var(--dremio--color--primary--500); - background-color: var(--dremio--color--neutral--000); + border-color: var(--color--brand--300); + background-color: white; } } } &__header-button { - min-width: auto; - span { - color: var(--dremio--color--icon--main); - font-size: 16px; - font-weight: 500; - } + font-size: var(--dremio--font-size--lg); + font-weight: 500; } } diff --git a/dac/ui/src/exports/components/CardLoadingState/CardLoadingState.tsx b/dac/ui/src/exports/components/CardLoadingState/CardLoadingState.tsx index 053aa5e67e..055b8d1c8e 100644 --- a/dac/ui/src/exports/components/CardLoadingState/CardLoadingState.tsx +++ b/dac/ui/src/exports/components/CardLoadingState/CardLoadingState.tsx @@ -13,8 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { Spinner } from "dremio-ui-lib/dist-esm"; +import { Spinner } from "dremio-ui-lib/components"; import * as classes from "./CardLoadingState.module.less"; +import clsx from "clsx"; const CardLoadingState = ({ loadingText = "Loading", @@ -26,7 +27,9 @@ const CardLoadingState = ({ return (
        - {loadingText} + + {loadingText} +
        ); }; diff --git a/dac/ui/src/exports/components/DeleteArcticCatalogDialog/DeleteArcticCatalogDialog.tsx b/dac/ui/src/exports/components/DeleteArcticCatalogDialog/DeleteArcticCatalogDialog.tsx index 2aba28282a..f9f02bc8db 100644 --- a/dac/ui/src/exports/components/DeleteArcticCatalogDialog/DeleteArcticCatalogDialog.tsx +++ b/dac/ui/src/exports/components/DeleteArcticCatalogDialog/DeleteArcticCatalogDialog.tsx @@ -14,7 +14,7 @@ * limitations under the License. */ -import { Button, DialogContent } from "dremio-ui-lib/dist-esm"; +import { Button, DialogContent } from "dremio-ui-lib/components"; import { TextInput } from "@mantine/core"; import { useForm } from "react-hook-form"; import { usePromise } from "react-smart-promise"; @@ -63,6 +63,7 @@ export const DeleteArcticCatalogDialog = (props: Props) => { onSuccess(); } }, [onSuccess, createStatus]); + const errorMessage = (err as any)?.responseBody?.errorMessage; return (
        { !(err instanceof InvalidParamsError) && ( ) } title={ <> - Delete{" "} + Delete{" "} {props.catalogName}? } diff --git a/dac/ui/src/exports/components/ErrorViews/NoAccess.tsx b/dac/ui/src/exports/components/ErrorViews/NoAccess.tsx new file mode 100644 index 0000000000..bc11eaf864 --- /dev/null +++ b/dac/ui/src/exports/components/ErrorViews/NoAccess.tsx @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { Link } from "react-router"; +import { NotFound } from "./NotFound"; +import { Button } from "dremio-ui-lib/components"; +import { useIntl } from "react-intl"; + +export const NoAccess = ({ + button: { to, text }, +}: { + button: { to: string; text: string }; +}) => { + const { formatMessage } = useIntl(); + return ( +
        + + {formatMessage({ id: text })} + + } + /> +
        + ); +}; diff --git a/dac/ui/src/exports/components/ErrorViews/NotFound.tsx b/dac/ui/src/exports/components/ErrorViews/NotFound.tsx index d5d0ca105a..966aa428b7 100644 --- a/dac/ui/src/exports/components/ErrorViews/NotFound.tsx +++ b/dac/ui/src/exports/components/ErrorViews/NotFound.tsx @@ -15,7 +15,7 @@ */ import { Link } from "react-router"; -import { Button } from "dremio-ui-lib/dist-esm/index"; +import { Button } from "dremio-ui-lib/components"; import { intl } from "@app/utils/intl"; import { ErrorView } from "./ErrorView"; import * as orgPaths from "dremio-ui-common/paths/organization.js"; @@ -23,47 +23,47 @@ import * as commonPaths from "dremio-ui-common/paths/common.js"; //@ts-ignore import narwhal404 from "dremio-ui-lib/icons/dremio/narwhal/narwhal-404.svg"; import { FeatureSwitch } from "../FeatureSwitch/FeatureSwitch"; -import { ORGANIZATION_LANDING } from "../../flags/ORGANIZATION_LANDING"; import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; +import { getSessionContext } from "dremio-ui-common/contexts/SessionContext.js"; export const NotFound = ({ title, action, + img, }: { title?: string; action?: any; + img?: any; }) => { const { formatMessage } = intl; const projectId = getSonarContext()?.getSelectedProjectId?.(); + const organizationLanding = + typeof getSessionContext().getOrganizationId === "function"; return ( } + image={img ?? } action={ <> - - action ?? ( - - ) - } - renderDisabled={() => ( + {organizationLanding ? ( + action ?? ( - )} - /> + ) + ) : ( + + )} } /> diff --git a/dac/ui/src/exports/components/ErrorViews/SonarProject404.tsx b/dac/ui/src/exports/components/ErrorViews/SonarProject404.tsx index fe505250b6..3f7f408f4e 100644 --- a/dac/ui/src/exports/components/ErrorViews/SonarProject404.tsx +++ b/dac/ui/src/exports/components/ErrorViews/SonarProject404.tsx @@ -17,7 +17,7 @@ import { useIntl } from "react-intl"; import { NotFound } from "./NotFound"; import { Link } from "react-router"; -import { Button } from "dremio-ui-lib/dist-esm/index"; +import { Button } from "dremio-ui-lib/components"; import * as commonPaths from "dremio-ui-common/paths/common.js"; export const SonarProject404 = () => { diff --git a/dac/ui/src/exports/components/GenericServerSectionMessage/GenericServerSectionMessage.tsx b/dac/ui/src/exports/components/GenericServerSectionMessage/GenericServerSectionMessage.tsx index d0dd953972..82c5eab417 100644 --- a/dac/ui/src/exports/components/GenericServerSectionMessage/GenericServerSectionMessage.tsx +++ b/dac/ui/src/exports/components/GenericServerSectionMessage/GenericServerSectionMessage.tsx @@ -15,7 +15,7 @@ */ import { HttpError } from "../../errors/HttpError"; -import { SectionMessage } from "dremio-ui-lib/dist-esm"; +import { SectionMessage } from "dremio-ui-lib/components"; import { HttpErrorSupportInfo } from "../SupportInfo/SupportInfo"; const DEFAULT_MESSAGE = "An unexpected error occured."; diff --git a/dac/ui/src/exports/components/NewArcticCatalogDialog/NewArcticCatalogDialog.tsx b/dac/ui/src/exports/components/NewArcticCatalogDialog/NewArcticCatalogDialog.tsx index 7f71832999..64588b7163 100644 --- a/dac/ui/src/exports/components/NewArcticCatalogDialog/NewArcticCatalogDialog.tsx +++ b/dac/ui/src/exports/components/NewArcticCatalogDialog/NewArcticCatalogDialog.tsx @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { Button, DialogContent } from "dremio-ui-lib/dist-esm"; +import { Button, DialogContent } from "dremio-ui-lib/components"; import { TextInput } from "@mantine/core"; import { useForm } from "react-hook-form"; import { zodResolver } from "@hookform/resolvers/zod"; diff --git a/dac/ui/src/exports/components/PageHeaders/SimplePageHeader.less b/dac/ui/src/exports/components/PageHeaders/SimplePageHeader.less index cf08046f47..b5fa01df77 100644 --- a/dac/ui/src/exports/components/PageHeaders/SimplePageHeader.less +++ b/dac/ui/src/exports/components/PageHeaders/SimplePageHeader.less @@ -15,6 +15,7 @@ */ .simple-page-header { + width: 100%; display: flex; flex-direction: row; align-items: center; @@ -22,6 +23,7 @@ border-block-end: 1px solid #eeeff1; &__title { + width: 100%; align-items: center; display: flex; flex-direction: row; diff --git a/dac/ui/src/exports/components/PrivilegeSwitch/PrivilegeSwitch.tsx b/dac/ui/src/exports/components/PrivilegeSwitch/PrivilegeSwitch.tsx new file mode 100644 index 0000000000..78cdf9388a --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegeSwitch/PrivilegeSwitch.tsx @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useSelector } from "react-redux"; + +type Props = { + privilege: string | string[]; + renderEnabled: () => JSX.Element; + renderDisabled?: () => JSX.Element; + renderPending?: () => JSX.Element; +}; + +const nullRender = () => null; + +/** + * Conditionally renders children based on the provided org privilege + */ +export const PrivilegeSwitch = (props: Props): JSX.Element | null => { + const { + renderDisabled = nullRender, + renderEnabled, + renderPending = nullRender, + } = props; + const result = useSelector((state: Record<string, any>) => { + const orgPrivileges = state.privileges.organization; + if (typeof props.privilege === "string") { + return orgPrivileges[props.privilege]; + } else { + return orgPrivileges[props.privilege[0]]?.[props.privilege[1]]; + } + }); + + if (result === undefined) { + return renderPending(); + } + + if (!result) { + return renderDisabled(); + } + + return renderEnabled(); +}; diff --git a/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.module.less b/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.module.less new file mode 100644 index 0000000000..d5493c042a --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.module.less @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.privileges { + display: flex; + flex-direction: column; + height: 100%; + + &__page { + display: flex; + flex-direction: column; + margin-top: 0px; + height: 100%; + } + + &__tableLoading { + height: 100%; + display: flex; + justify-content: center; + align-items: center; + + dremio-icon { + height: 28px; + width: 28px; + } + } + + &__form { + margin-top: 24px; + flex-grow: 1; + display: flex; + justify-content: space-between; + flex-direction: column; + } + + &__tableWrapper { + overflow: auto; + height: 100%; + } + + &__nameCell { + display: flex; + align-items: center; + justify-content: space-between; + width: 100%; + padding: var(--dremio--spacing--1); + + &__rightSide, + &__leftSide { + display: flex; + align-items: center; + } + + &__leftSide { + overflow: hidden; + } + + &__leftIcon { + margin-right: var(--dremio--spacing--05); + inline-size: 24px; + block-size: 24px; + + &__menuItem { + margin: 0px var(--dremio--spacing--05); + } + } + + &__name { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } + + &__ownerIcon { + color: var(--dremio--color--status--warning--foreground); + } + + &__moreActions { + margin-left: 4px; + + &-disabled { + color: var(--dremio--color--text--disabled) !important; + cursor: default; + } + + &-remove { + color: var(--dremio--color--status--error--foreground); + } + } + } + + :global { + .leantable__table { + border: 1px solid var(--color--neutral--50); + } + + .leantable__body-cell { + &:first-child { + container-type: inline-size; + max-width: min(28cqw, 500px); + min-width: 280px; + border-right: 2px solid var(--color--neutral--50); + } + } + + .leantable__header-row { + border-bottom: 2px solid var(--color--neutral--50); + } + .leantable__header-cell { + min-width: 14ch; + height: 60px; + font-weight: normal; + padding-block: 0; + padding-inline: var(--dremio--spacing--1); + white-space: normal; + text-align: center; + &::after { + height: 43px !important; + top: 8px !important; + } + &:first-child { + min-width: 280px; + max-width: 500px; + border-right: 2px solid var(--color--neutral--50); + } + &:nth-child(2)::after { + width: 0px !important; + } + &:last-child { + width: 100%; + min-width: 0px; + padding-inline: 0px; + } + } + } + + &__tableCell { + display: flex; + justify-content: center; + align-items: center; + position: relative; + width: fit-content; + margin: auto; + + &__dirtyIndicator { + height: 6px; + width: 6px; + border-radius: 3px; + background-color: var(--color--brand--300); + position: absolute; + top: -7px; + right: -8px; + } + + :global { + input[type="checkbox"] { + margin: 0; + } + } + } + + &__label { + display: flex; + height: 100%; + width: 100%; + &:hover { + cursor: pointer; + } + } + + &__dialog { + width: 600px; + + :global { + .dremio-dialog-content__main { + height: 128px; + } + } + } +} diff --git a/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.tsx b/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.tsx new file mode 100644 index 0000000000..ffa8b30416 --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/PrivilegesPage.tsx @@ -0,0 +1,316 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { FormProvider, useForm } from "react-hook-form"; +import { compose } from "redux"; + +import FormUnsavedRouteLeave from "@app/components/Forms/FormUnsavedRouteLeave"; +import { FormSubmit } from "@app/exports/pages/ArcticCatalog/components/ArcticCatalogDataItemSettings/ArcticTableOptimization/components/FormSubmit/FormSubmit"; +// @ts-ignore +import GrantOwnershipDialog from "dremio-ui-common/components/PrivilegesTable/components/GrantOwnershipDialog/GrantOwnershipDialog.js"; +// @ts-ignore +import RemoveGranteeDialog from "dremio-ui-common/components/PrivilegesTable/components/RemoveGranteeDialog/RemoveGranteeDialog.js"; +// @ts-ignore +import { PrivilegesTable } from "dremio-ui-common/components/PrivilegesTable/PrivilegesTable.js"; +import AddToPrivileges, { + UserOrRole, +} from "./components/AddToPrivileges/AddToPrivileges"; +import { + getPrivilegesColumns, + GrantActions, + GrantActionsType, +} from "./privileges-page-utils"; +import { + Grant, + GrantsResponse, + OutputGrant, + Privilege as OrgPrivilege, +} from "@app/exports/endpoints/Grants/Grants.types"; +import { + GrantForGet, + GrantForSet, + InlineResponse200, + Privilege as CatalogPrivilege, +} from "@app/exports/endpoints/ArcticCatalogGrants/ArcticCatalogGrants.types"; +import EmptyPrivileges from "./components/EmptyPrivileges/EmptyPrivileges"; +import { useUserDetails } from "@app/exports/providers/useUserDetails"; +import { useDispatch } from "react-redux"; +import { addNotification } from "@app/actions/notification"; + +import * as classes from "./PrivilegesPage.module.less"; + +export type GrantObject = GrantForGet | GrantForSet | Grant | OutputGrant; + +export type GranteeData = InlineResponse200 | GrantsResponse | null; + +type Privilege = OrgPrivilege | CatalogPrivilege; + +type PrivilegesPageProps = { + granteeData: GranteeData; + isLoading: boolean; + nameColumnLabel: string; + privilegeTooltipIds: any; + entityOwnerId: string; + addedDefaultGrantValues?: any; + handleUpdateOwnership: (id: string) => void; + onSubmit: ( + formValues: any, + tableItems: GrantObject[], + deletedTableItems: GrantObject[], + cachedData: any, + setDirtyStateForLeaving: () => void + ) => void; + setChildDirtyState: any; + granteeError?: string; +}; + +const INITIAL_DIALOG_STATE = { + openDialog: false, + granteeId: undefined, +}; + +const PRIVILEGES_KEY = "PRIVILEGES"; + +const PrivilegesPage = ({ + granteeData, + isLoading, + nameColumnLabel, + privilegeTooltipIds, + entityOwnerId, + addedDefaultGrantValues, + handleUpdateOwnership, + onSubmit, + setChildDirtyState, + granteeError, +}: PrivilegesPageProps) => { + const dispatch = useDispatch(); + const addPrivilegesRef = useRef(null); + const [ownershipDialogState, setOwnershipDialogState] = + useState(INITIAL_DIALOG_STATE); + const [removeDialogState, setRemoveDialogState] = + useState(INITIAL_DIALOG_STATE); + const [user] = useUserDetails(); + const currentRolesAndUserIds = useMemo(() => { + // Get the user id and roles' ids that the user is a member of + if (user) { + const 
ids = [user.id]; + (user.roles ?? []).forEach((role: any) => ids.push(role.id)); + return ids; + } else return []; + }, [user]); + const isCurrentUserAnOwner = currentRolesAndUserIds.includes( + entityOwnerId ?? "" + ); + + const [tableItems, setTableItems] = useState([]); + const [deletedTableItems, setDeletedTableItems] = useState([]); + + // Create default values based on GET data + const { defaultValues, cachedData }: any = useMemo(() => { + if (!granteeData) return {}; + + const cached: any = {}; + const initFormValues: any = {}; + granteeData?.grants?.forEach((grantee: GrantObject) => { + granteeData?.availablePrivileges?.forEach((priv: Privilege) => { + initFormValues[`${grantee.granteeId}-${priv}`] = + grantee.privileges.includes(priv as any); + }); + cached[grantee.granteeId] = true; + }); + + return { defaultValues: initFormValues, cachedData: cached }; + }, [granteeData]); + + const methods = useForm({ + mode: "onChange", + defaultValues: defaultValues, + }); + const { formState, handleSubmit, reset, setValue } = methods; + + const handleReset = useCallback(() => { + if (granteeData?.grants) { + setTableItems(granteeData.grants); + setDeletedTableItems([]); + // reset the form with default values after data is retrieved + reset(defaultValues); + setChildDirtyState(PRIVILEGES_KEY)(false); + } + }, [granteeData, reset, defaultValues, setChildDirtyState]); + + // Whenever data is retrieved, the form should reset + useEffect(() => { + handleReset(); + }, [handleReset]); + + useEffect(() => { + if (formState.isDirty) { + setChildDirtyState(PRIVILEGES_KEY)(true); + } + }, [formState.isDirty, setChildDirtyState]); + + useEffect(() => { + if (granteeError) { + dispatch(addNotification(granteeError, "error")); + } + }, [dispatch, granteeError]); + + const handleAddTableItems = (displayValues: UserOrRole[]) => { + setTableItems((items: GrantObject[]) => { + return [ + ...displayValues.map((item: UserOrRole) => ({ + granteeId: item.id, + privileges: [], + name: item.value, + granteeType: item.type, + ...(addedDefaultGrantValues ?? 
{}), + })), + ...items, + ]; + }); + // Need to clear from delete if adding back + setDeletedTableItems((items: GrantObject[]) => + items.filter( + (deleteItem) => + displayValues.find( + (updateItem) => updateItem.id === deleteItem.granteeId + ) === undefined + ) + ); + }; + + const handleDeleteTableItem = (id: string) => { + if (id in cachedData) { + // Add to deletion array if it exists from cached results + const deleteItem = tableItems.find((item) => item.granteeId === id); + setDeletedTableItems((prevItems: any) => [...prevItems, deleteItem]); + } + + // Remove from the UI + granteeData?.availablePrivileges?.forEach((priv: Privilege) => { + setValue(`${id}-${priv}`, false, { shouldDirty: true }); + }); + setTableItems((items) => items.filter((item) => item.granteeId !== id)); + }; + + const handleOpenDialog = useCallback( + (type: GrantActionsType, dialogState: any) => { + if (type === GrantActions.GRANT_OWNERSHIP) { + setOwnershipDialogState(dialogState); + } else { + setRemoveDialogState(dialogState); + } + }, + [] + ); + + const closeDialog = () => { + setOwnershipDialogState(INITIAL_DIALOG_STATE); + setRemoveDialogState(INITIAL_DIALOG_STATE); + }; + + const columns = useMemo( + () => + getPrivilegesColumns( + granteeData, + handleOpenDialog, + nameColumnLabel, + privilegeTooltipIds, + entityOwnerId, + isCurrentUserAnOwner + ), + [ + granteeData, + handleOpenDialog, + nameColumnLabel, + privilegeTooltipIds, + entityOwnerId, + isCurrentUserAnOwner, + ] + ); + + const disableSubmitButtons = + isLoading || !formState.isDirty || !!granteeError; + + return ( +
        +
        + + + + onSubmit( + formValues, + tableItems, + deletedTableItems, + cachedData, + () => setChildDirtyState(PRIVILEGES_KEY)(false) + ) + )} + className={classes["privileges__form"]} + > +
        + { + const data = tableItems[i]; + return { + id: data?.granteeId || i, + data: data ? data : null, + }; + }} + rowCount={tableItems?.length} + /> + {!isLoading && !granteeError && tableItems.length === 0 && ( + (addPrivilegesRef.current as any)?.click?.()} + /> + )} +
        + + +
        +
        + + item.granteeId === removeDialogState?.granteeId + )} + className={classes["privileges__dialog"]} + onRemove={handleDeleteTableItem} + /> +
        + ); +}; + +export default compose(FormUnsavedRouteLeave)(PrivilegesPage); diff --git a/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.module.less b/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.module.less new file mode 100644 index 0000000000..99720be6de --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.module.less @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.addPrivileges { + &__addSection { + display: flex; + flex-direction: column; + } + + &__addLabel { + margin: 16px 0px 4px 0; + } + + &__addActions { + display: flex; + flex-direction: row; + align-items: center; + gap: 16px; + } + + &__addButton { + margin: 0px !important; + } +} diff --git a/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.tsx b/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.tsx new file mode 100644 index 0000000000..b1c9ee40ec --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/components/AddToPrivileges/AddToPrivileges.tsx @@ -0,0 +1,168 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { useState, useMemo, useCallback, useEffect, forwardRef } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import { debounce } from "lodash"; + +import { intl } from "@app/utils/intl"; +// @ts-ignore +import { MultiSelect, Label } from "dremio-ui-lib"; +import { Button } from "dremio-ui-lib/components"; +import { searchRoles } from "@inject/actions/roles"; +import { searchUsers } from "@inject/actions/user"; +import { getFilteredRolesAndUsers } from "@inject/selectors/roles"; +import { getCustomChipIcon } from "../../privileges-page-utils"; +import { GrantObject } from "../../PrivilegesPage"; +import { GranteeType as CatalogGranteeType } from "@app/exports/endpoints/ArcticCatalogGrants/ArcticCatalogGrants.types"; +import { GranteeType as OrgGranteeType } from "@app/exports/endpoints/Grants/Grants.types"; + +import * as classes from "./AddToPrivileges.module.less"; + +export type UserOrRole = { + id: string; + value: string; + type: CatalogGranteeType | OrgGranteeType; + label: string; + name: string; + roles: any[]; +}; + +type AddToPrivilegesProps = { + innerRef?: any; + tableItems: GrantObject[]; + handleAddTableItems: any; + disabled: boolean; +}; + +const AddToPrivilegesComponent = ({ + innerRef, + tableItems, + handleAddTableItems, + disabled, +}: AddToPrivilegesProps) => { + const dispatch = useDispatch(); + const [selectedValues, setSelectedValues] = useState([]); + const [displayValues, setDisplayValues] = useState([]); + + const { + searchedOptions, + orgPrivileges, + }: { searchedOptions: UserOrRole[]; orgPrivileges: any } = useSelector( + (state: any) => ({ + searchedOptions: getFilteredRolesAndUsers( + state + ) as unknown as UserOrRole[], + orgPrivileges: state.privileges.organization, + }) + ); + + useEffect(() => { + const { roles, users } = orgPrivileges; + if (roles?.canView) dispatch(searchRoles("") as any); + if (users?.canView) dispatch(searchUsers("") as any); + }, [dispatch, orgPrivileges]); + + const filteredSearchOptions = useMemo(() => { + return searchedOptions.filter( + (member: UserOrRole) => + !tableItems.find((grant: GrantObject) => grant.granteeId === member.id) + ); + }, [tableItems, searchedOptions]); + + const handleValuesChange = useCallback( + (value: string[]) => { + const newSelectedValues: any[] = []; + value.forEach((val: string) => { + const element = filteredSearchOptions.find( + ({ label }: UserOrRole) => label === val + ); + if (element) { + newSelectedValues.push(element); + } else { + const isElementFound = displayValues.find( + ({ label }) => label === val + ); + if (isElementFound) { + newSelectedValues.push(isElementFound); + } + } + }); + setSelectedValues(value); + setDisplayValues(newSelectedValues); + }, + [filteredSearchOptions, displayValues] + ); + + const handleAddSelectedMembers = () => { + handleAddTableItems(displayValues); + setSelectedValues([]); + setDisplayValues([]); + }; + + const handleSearchKeyChange = debounce((value) => { + const { roles, users } = orgPrivileges; + if (roles?.canView) dispatch(searchRoles(value) as any); + if (users?.canView) dispatch(searchUsers(value) as any); + }, 300); + + return ( + <> +
        +
        + + ); +}; + +const AddToPrivileges = forwardRef((props: AddToPrivilegesProps, ref) => { + return ; +}); +export default AddToPrivileges; diff --git a/services/nessie/src/main/java/com/dremio/service/nessie/NessieAttachmentsStoreBuilder.java b/dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.module.less similarity index 71% rename from services/nessie/src/main/java/com/dremio/service/nessie/NessieAttachmentsStoreBuilder.java rename to dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.module.less index fb1112fb58..02a94fc85d 100644 --- a/services/nessie/src/main/java/com/dremio/service/nessie/NessieAttachmentsStoreBuilder.java +++ b/dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.module.less @@ -13,13 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.service.nessie; -/** - * Creates the KV store for Nessie Attachments. - */ -public class NessieAttachmentsStoreBuilder extends AbstractNessieStoreBuilder { - public NessieAttachmentsStoreBuilder() { - super("nessie_attachments"); +.emptyContainer { + &__action { + position: absolute; + left: 50%; + margin-top: var(--dremio--spacing--4); + } + &__button { + color: var(--dremio--color--link); + &:hover { + cursor: pointer; + text-decoration: underline; + } } } diff --git a/dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.tsx b/dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.tsx new file mode 100644 index 0000000000..fd6cc84d52 --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/components/EmptyPrivileges/EmptyPrivileges.tsx @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import EmptyStateContainer from "@app/pages/HomePage/components/EmptyStateContainer"; +import { useIntl } from "react-intl"; +import * as classes from "./EmptyPrivileges.module.less"; + +const EmptyPrivileges = ({ onClick }: { onClick: () => void }) => { + const { formatMessage } = useIntl(); + return ( +
        + + + {formatMessage({ id: "Admin.Privileges.EmptyState.Action" })} + + +
        + ); +}; + +export default EmptyPrivileges; diff --git a/dac/ui/src/exports/components/PrivilegesPage/privileges-page-utils.tsx b/dac/ui/src/exports/components/PrivilegesPage/privileges-page-utils.tsx new file mode 100644 index 0000000000..fd31e241f0 --- /dev/null +++ b/dac/ui/src/exports/components/PrivilegesPage/privileges-page-utils.tsx @@ -0,0 +1,199 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Checkbox } from "@mantine/core"; +// @ts-ignore +import { renderPrivilegesColumns } from "dremio-ui-common/components/PrivilegesTable/privilegesTableColumns.js"; +import { Controller } from "react-hook-form"; +import { Avatar } from "dremio-ui-lib/components"; +// @ts-ignore +import { Tooltip } from "dremio-ui-lib"; +import { nameToInitials } from "@app/exports/utilities/nameToInitials"; +import { intl } from "@app/utils/intl"; +import SettingsBtn from "@app/components/Buttons/SettingsBtn"; +import Menu from "@app/components/Menus/Menu"; +import MenuItem from "@app/components/Menus/MenuItem"; +import { GranteeData } from "./PrivilegesPage"; + +import * as classes from "./PrivilegesPage.module.less"; + +export enum GrantActions { + GRANT_OWNERSHIP = "GRANT_OWNERSHIP", + DELETE = "DELETE", +} +export type GrantActionsType = + | GrantActions.DELETE + | GrantActions.GRANT_OWNERSHIP; + +export const getPrivilegesColumns = ( + granteeData: GranteeData, + openDialog: (type: any, dialogState: any) => void, + nameColumnLabel: string, + privilegeTooltipIds: any, + owner?: string, + isCurrentUserOrOwner?: boolean +) => { + return renderPrivilegesColumns({ + nameColumnLabel: nameColumnLabel, + availablePrivileges: granteeData?.availablePrivileges, + renderPrivilegeTooltip: (privilege: string) => + intl.formatMessage({ id: privilegeTooltipIds[privilege] }), + renderNameCell: (data: any) => + nameCell(data, openDialog, owner, isCurrentUserOrOwner), + renderCheckboxCell: (data: any, privilege: string) => ( + { + const isDirty = fieldState.isDirty; + return ( +
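The checkbox cells built in privileges-page-utils.tsx register every privilege under a composite react-hook-form key of the form `${granteeId}-${privilege}`, the same format handleDeleteTableItem clears through setValue. Hypothetical helpers for that convention (not part of the patch); note that grantee ids can themselves contain dashes, so parsing has to split on the last one:

const privilegeFieldKey = (granteeId: string, privilege: string) =>
  `${granteeId}-${privilege}`;

function parsePrivilegeFieldKey(key: string) {
  const i = key.lastIndexOf("-");
  return { granteeId: key.slice(0, i), privilege: key.slice(i + 1) };
}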
        @@ -194,4 +256,4 @@ function mapStateToProps(state) { }; } -export default connect(mapStateToProps)(ClusterListView); +export default connect(mapStateToProps)(StoreSubscriber(ClusterListView)); diff --git a/dac/ui/src/pages/AdminPage/subpages/Provisioning/ClusterListViewMixin.js b/dac/ui/src/pages/AdminPage/subpages/Provisioning/ClusterListViewMixin.js index 6fc508756d..e8fdcffa85 100644 --- a/dac/ui/src/pages/AdminPage/subpages/Provisioning/ClusterListViewMixin.js +++ b/dac/ui/src/pages/AdminPage/subpages/Provisioning/ClusterListViewMixin.js @@ -13,15 +13,113 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { ENGINE_COLUMNS_CONFIG } from "@app/constants/provisioningPage/provisioningConstants"; import { isYarn } from "@app/pages/AdminPage/subpages/Provisioning/provisioningUtils"; import { EngineActionCell } from "@app/pages/AdminPage/subpages/Provisioning/components/EngineActionCell"; +import EngineStatus from "@app/pages/AdminPage/subpages/Provisioning/components/EngineStatus"; +import { ClickableCell } from "dremio-ui-common/components/TableCells/ClickableCell.js"; +import { SortableHeaderCell } from "dremio-ui-common/components/TableCells/SortableHeaderCell.js"; +import { intl } from "@app/utils/intl"; export default function (input) { + + const originalGetEngineSize = input.prototype.getEngineSize; + const originalGetRunningNodes = input.prototype.getRunningNodes; + const originalGetEngineName = input.prototype.getEngineName; + const originalCPUCores = input.prototype.getClusterCPUCores; + const originalClusterRAM = input.prototype.getClusterRAM; + const originalClusterIP = input.prototype.getClusterIp; + Object.assign(input.prototype, { // eslint-disable-line no-restricted-properties - getTableColumns() { - return ENGINE_COLUMNS_CONFIG; + + getClusterCPUCores(engine) { + return originalCPUCores.call(this, engine) + }, + + getClusterRAM(engine) { + return originalClusterRAM.call(this, engine) + }, + + getClusterIp(engine) { + return originalClusterIP.call(this, engine) + }, + + getEngineColumnConfig(statusViewState, onRowClick, sort) { + return [ + { + id: "status", + class: "leantable-sticky-column leantable-sticky-column--left", + renderHeaderCell: () => "", + renderCell: (row) => { + return ; + }, + }, + { + id: "engine", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.EngineName" })}, + renderCell: (row) => { + return ( + onRowClick(row.id)}> + {originalGetEngineName.call(this, row.data)} + + ) + }, + sortable: true + }, + { + id: "size", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.Size" })}, + renderCell: (row) => { + return originalGetEngineSize.call(this, row.data); + }, + sortable: true + }, + { + id: "cores", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.Cores" })}, + renderCell: (row) => { + return this.getClusterCPUCores(row.data); + }, + sortable: true + }, + { + id: "memory", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.Memory" })}, + renderCell: (row) => { + return this.getClusterRAM(row.data); + }, + sortable: true + }, + { + id: "ip", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.IP" })}, + renderCell: (row) => { + return this.getClusterIp(row.data); + }, + sortable: true + }, + { + id: "nodes", + renderHeaderCell: () => {intl.formatMessage({ id: "Engine.OnlineNodes" })}, + renderCell: (row) => { + return originalGetRunningNodes.call(this, row.data); + }, + sortable: true + }, + { + id: "action", + class: + 
"leantable-row-hover-visibility leantable-sticky-column leantable-sticky-column--right", + renderHeaderCell: () => "", + renderCell: (row) => { + return this.getAction(row.data); + }, + }, + ] + }, + + getTableColumns({statusViewState: statusViewState, onRowClick: onRowClick, sort}) { + return this.getEngineColumnConfig(statusViewState, onRowClick, sort); }, getAction(entity) { diff --git a/dac/ui/src/pages/AdminPage/subpages/Provisioning/ProvisioningPage.js b/dac/ui/src/pages/AdminPage/subpages/Provisioning/ProvisioningPage.js index 63ee36a945..d09a838a6d 100644 --- a/dac/ui/src/pages/AdminPage/subpages/Provisioning/ProvisioningPage.js +++ b/dac/ui/src/pages/AdminPage/subpages/Provisioning/ProvisioningPage.js @@ -365,7 +365,6 @@ export class ProvisioningPage extends Component {
        { + return function WrappedComponent(props: any) { + const enginesTable = useMemo(() => { + return createTable([ + columnSorting(), + (config: any) => ({ + ...config, + getRowProps: (props: any) => { + return { + ...props + }; + }, + }), + ]); + }, []); + const { scrolledDirections, scrollContainerRef } = useDetectScroll(["left"]); + const sortedColumns = useExternalStoreState(enginesTable.store, (state: any) => state.sortedColumns); + return ( + + ); + } +} diff --git a/dac/ui/src/pages/AdminPage/subpages/Provisioning/components/EngineActionCell.js b/dac/ui/src/pages/AdminPage/subpages/Provisioning/components/EngineActionCell.js index 075572ebf2..e9f88ebc3a 100644 --- a/dac/ui/src/pages/AdminPage/subpages/Provisioning/components/EngineActionCell.js +++ b/dac/ui/src/pages/AdminPage/subpages/Provisioning/components/EngineActionCell.js @@ -146,7 +146,7 @@ const styles = { height: 32, width: 100, marginTop: 5, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }, settingsButton: { display: "flex", @@ -164,6 +164,6 @@ const styles = { buttonIcon: { height: 20, width: 20, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }, }; diff --git a/dac/ui/src/pages/AdminPage/subpages/SqlEditor.js b/dac/ui/src/pages/AdminPage/subpages/SqlEditor.js deleted file mode 100644 index 023a45099c..0000000000 --- a/dac/ui/src/pages/AdminPage/subpages/SqlEditor.js +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { useState, useEffect } from "react"; -import { connect } from "react-redux"; -import PropTypes from "prop-types"; -import Immutable from "immutable"; -import SettingHeader from "@app/components/SettingHeader"; -import ViewStateWrapper from "@app/components/ViewStateWrapper"; -import Toggle from "@app/components/Fields/Toggle"; -import { fetchSupportFlags, saveSupportFlag } from "@app/actions/supportFlags"; -import { FormattedMessage } from "react-intl"; -import { getViewState } from "selectors/resources"; -import { getSupportFlags } from "@app/selectors/supportFlags"; -import authorize from "@inject/containers/authorize"; -import config from "@inject/utils/config"; -import { isEnterprise, isCommunity } from "dyn-load/utils/versionUtils"; -import { compose } from "redux"; - -import "./SqlEditor.less"; - -export const VIEW_ID = "SUPPORT_SETTINGS_VIEW_ID"; - -const SqlEditor = (props) => { - const { - dispatchFetchSupportFlags, - viewState, - dispatchSaveSupportFlag, - supportFlags, - } = props; - const viewStateWithoutError = viewState.set("isFailed", false); - const isAutoCompleteEnabled = - (supportFlags && supportFlags["ui.autocomplete.allow"]) || - config.autoComplete; - const [isEnabled, setIsEnabled] = useState(isAutoCompleteEnabled); - - useEffect(() => { - dispatchFetchSupportFlags("ui.autocomplete.allow").then((res) => { - setIsEnabled(res.payload.value); - }); - }, [dispatchFetchSupportFlags]); - - useEffect(() => { - const isEnterpriseFlag = isEnterprise && isEnterprise(); - const isCommunityFlag = isCommunity && isCommunity(); - - if (isEnterpriseFlag || isCommunityFlag) { - return; - } - if (supportFlags && supportFlags["ui.autocomplete.allow"] !== undefined) { - setIsEnabled(supportFlags["ui.autocomplete.allow"]); - } - }, [supportFlags]); - - const handleChange = () => { - const saveObj = { - type: "BOOLEAN", - id: "ui.autocomplete.allow", - value: !isEnabled, - }; - dispatchSaveSupportFlag("ui.autocomplete.allow", saveObj); - setIsEnabled(!isEnabled); - }; - - return ( -
        - - - -
        - -
        - - - -
        -
        - - - -
        -
        -
        - - - -
        -
        - -
        -
        -
        -
        -
        -
        - ); -}; - -const mapStateToProps = (state) => { - return { - viewState: getViewState(state, VIEW_ID), - supportFlags: getSupportFlags(state), - }; -}; - -const mapDispatchToProps = { - dispatchFetchSupportFlags: fetchSupportFlags, - dispatchSaveSupportFlag: saveSupportFlag, -}; - -SqlEditor.propTypes = { - dispatchFetchSupportFlags: PropTypes.func, - dispatchSaveSupportFlag: PropTypes.func, - viewState: PropTypes.instanceOf(Immutable.Map).isRequired, - supportFlags: PropTypes.object, -}; - -export default compose( - authorize("SqlEditor"), - connect(mapStateToProps, mapDispatchToProps) -)(SqlEditor); diff --git a/dac/ui/src/pages/AdminPage/subpages/Support.js b/dac/ui/src/pages/AdminPage/subpages/Support.js index 9fc6ffe26d..8543182185 100644 --- a/dac/ui/src/pages/AdminPage/subpages/Support.js +++ b/dac/ui/src/pages/AdminPage/subpages/Support.js @@ -19,6 +19,7 @@ import PropTypes from "prop-types"; import { compose } from "redux"; import { connect } from "react-redux"; +import { FormattedMessage } from "react-intl"; import authorize from "@inject/containers/authorize"; @@ -136,6 +137,17 @@ export class Support extends PureComponent { const valueEle = {value}; + if (value?.startsWith("%") && value.length < 3) { + this.props.addNotification( + , + "error" + ); + return; + } + if (this.getShownSettings().some((e) => e.id === value)) { this.props.addNotification( Setting “{valueEle}” already shown., // todo: loc substitution engine @@ -236,7 +248,7 @@ export class Support extends PureComponent { // SettingsMicroForm has a logic for error display. We should not duplicate it in the viewState const viewStateWithoutError = this.props.viewState.set("isFailed", false); const advancedForm = ( -
        + { const loc = rmProjectBase(location.pathname); const isNewQuery = loc === newQuery(); const { query } = location || {}; + const explorePageState = getExploreState(state); let datasetSql = ""; @@ -47,7 +48,8 @@ const mapStateToProp = (state, ownProps) => { return { datasetSql, history: getHistoryFromLocation(state, location), - currentSql: getExploreState(state).view.currentSql, + currentSql: explorePageState.view.currentSql, + customDefaultSql: explorePageState.view.customDefaultSql || "", }; }; @@ -59,17 +61,22 @@ const mapStateToProp = (state, ownProps) => { export class DatasetChangesView extends Component { static propTypes = { currentSql: PropTypes.string, + customDefaultSql: PropTypes.string, datasetSql: PropTypes.string, history: PropTypes.instanceOf(Immutable.Map), childComp: PropTypes.any, }; hasChanges = () => { - const { datasetSql, currentSql, history } = this.props; + const { datasetSql, currentSql, customDefaultSql, history } = this.props; return { // leaving modified sql? // currentSql === null means sql is unchanged. - sqlChanged: isSqlChanged(datasetSql, currentSql), + + // if viewing a dataset, compare currentSql to datasetSql + // if querying a dataset, compare currentSql to pre-populated "SELECT * FROM ..." + // otherwise compare currentSql to an empty string + sqlChanged: isSqlChanged(datasetSql || customDefaultSql, currentSql), historyChanged: history ? history.get("isEdited") : false, }; }; diff --git a/dac/ui/src/pages/ExplorePage/ExplorePageController.js b/dac/ui/src/pages/ExplorePage/ExplorePageController.js index 375be452ae..6f8fa1834c 100644 --- a/dac/ui/src/pages/ExplorePage/ExplorePageController.js +++ b/dac/ui/src/pages/ExplorePage/ExplorePageController.js @@ -51,8 +51,7 @@ import ExplorePage from "./ExplorePage"; import * as commonPaths from "dremio-ui-common/paths/common.js"; import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; import { rmProjectBase } from "dremio-ui-common/utilities/projectBase.js"; -import { newQuery } from "@app/exports/paths"; -import { isDcsEdition } from "dyn-load/utils/versionUtils"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; import { fetchFeatureFlag } from "@inject/actions/featureFlag"; import { SQL_JOB_STATUS } from "@app/exports/flags/SQL_JOB_STATUS"; @@ -111,7 +110,7 @@ export class ExplorePageControllerComponent extends Component { addHasChangesHook(this.shouldShowUnsavedChangesPopup); } - isDcsEdition() && this.props.fetchFeatureFlag(SQL_JOB_STATUS); + isNotSoftware() && this.props.fetchFeatureFlag(SQL_JOB_STATUS); } componentWillReceiveProps(nextProps) { @@ -304,8 +303,7 @@ export class ExplorePageControllerComponent extends Component { } function mapStateToProps(state, ownProps) { - const { location, routeParams } = ownProps; - const isNewQuery = location.pathname.includes(newQuery()); + const { routeParams } = ownProps; const dataset = getExplorePageDataset(state); const explorePageState = getExploreState(state); const sqlHeight = Math.min( @@ -318,7 +316,7 @@ function mapStateToProps(state, ownProps) { dataset, history: getHistory(state, dataset.get("tipVersion")), // in New Query, force sql open, but don't change state in localStorage - sqlState: explorePageState.ui.get("sqlState") || isNewQuery, + sqlState: explorePageState.ui.get("sqlState"), sqlSize: sqlHeight, isResizeInProgress: explorePageState.ui.get("isResizeInProgress"), rightTreeVisible: state.ui.get("rightTreeVisible"), diff --git a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.js 
b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.js index cdd0659b4c..3ea8fb1910 100644 --- a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.js +++ b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.js @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { PureComponent } from "react"; +import { Component } from "react"; import PropTypes from "prop-types"; import classNames from "clsx"; import { formatMessage } from "utils/locale"; @@ -21,33 +21,48 @@ import { typeToIconType, typeToFormatMessageId, } from "@app/constants/DataTypes"; -import { name as nameCls, icon as iconCls, wrapper } from "./DataColumn.less"; +import { + name as nameCls, + icon as iconCls, + wrapper, + wrapperHover, + nameWiki, + iconWiki, +} from "./DataColumn.less"; + +import HighlightedColumnName from "./HighlightedColumnName"; export const columnPropTypes = { type: PropTypes.string, //see constants/DataTypes for the list of available types name: PropTypes.string, }; -export class DataColumn extends PureComponent { +export class DataColumn extends Component { static propTypes = { ...columnPropTypes, className: PropTypes.string, + detailsView: PropTypes.bool, + searchTerm: PropTypes.string, }; render() { - const { type, name, className } = this.props; + const { type, name, className, detailsView, searchTerm } = this.props; const label = `data-types/${typeToIconType[type]}`; const alt = typeToFormatMessageId?.[type] ?? typeToFormatMessageId["ANY"]; return ( -
        +
        -
        {name}
        +
        + +
        ); } diff --git a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.less b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.less index 1527a252f8..a5c1790216 100644 --- a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.less +++ b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumn.less @@ -15,7 +15,7 @@ */ @import "./DataColumn.variables.less"; -.wrapper { +.wrapperHover { composes: flex-container from "~@app/uiTheme/less/layout.less"; composes: form-label from "~@app/uiTheme/less/typography.less"; @@ -28,14 +28,46 @@ } } +.wrapper { + composes: flex-container from "~@app/uiTheme/less/layout.less"; + composes: form-label from "~@app/uiTheme/less/typography.less"; + + align-items: center; + height: 30px; +} + .icon, .name { display: inline-block; } +.nameWiki { + display: inline-block; + font-size: var(--dremio--font-size--default); + font-weight: 400; +} + .icon { height: 22px; width: 29.33px; margin-left: -5px; // needed for vertial alignment of icon with column title margin-right: 5px; // controls spacing between column name and type icon } + +.iconWiki { + height: 18px; + width: 18px; + margin-right: 4px; +} + +.columnHighlight { + font-weight: 600; + color: var(--color--brand--300); +} + +.columnNameContent { + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + max-width: 350px; +} diff --git a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumnList.less b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumnList.less index 19e287560a..59320b60d1 100644 --- a/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumnList.less +++ b/dac/ui/src/pages/ExplorePage/components/DataColumns/DataColumnList.less @@ -24,6 +24,7 @@ .list { overflow-y: auto; + // padding: 16px; > .column:not(:first-child) { margin-top: 4px; @@ -34,3 +35,10 @@ // padding should not be used here margin-left: @left-padding; } + +.columnTitle { + padding: 8px; + font-weight: 600; + font-size: 14px; + line-height: 20px; +} diff --git a/dac/ui/src/pages/ExplorePage/components/DataColumns/HighlightedColumnName.tsx b/dac/ui/src/pages/ExplorePage/components/DataColumns/HighlightedColumnName.tsx new file mode 100644 index 0000000000..3002541aa2 --- /dev/null +++ b/dac/ui/src/pages/ExplorePage/components/DataColumns/HighlightedColumnName.tsx @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+import { columnHighlight, columnNameContent } from "./DataColumn.less";
+
+type HighlightedColumnNameProps = {
+  searchTerm: string;
+  columnName: string;
+};
+
+const HighlightedColumnName = ({
+  columnName,
+  searchTerm,
+}: HighlightedColumnNameProps) => {
+  const renderHighlightedName = () => {
+    let highlightName: string | JSX.Element = columnName;
+    if (
+      searchTerm &&
+      columnName.toLowerCase().includes(searchTerm.toLowerCase())
+    ) {
+      const firstPartIndex = columnName
+        .toLowerCase()
+        .indexOf(searchTerm.toLowerCase());
+      const firstPart = columnName.slice(0, firstPartIndex);
+      const highlightPart = columnName.slice(
+        firstPartIndex,
+        firstPartIndex + searchTerm.length
+      );
+      const lastPart = columnName.slice(firstPartIndex + searchTerm.length);
+      highlightName = (
+        <>
+          {firstPart}
+          {highlightPart}
+          {lastPart}
+
+      );
+    }
+    return highlightName;
+  };
+
+  return
        {renderHighlightedName()}
        ; +}; + +export default HighlightedColumnName; diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.js b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.js index ac6df210ed..4eadb3c24d 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.js +++ b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.js @@ -136,6 +136,7 @@ export class ExploreHeader extends PureComponent { getSelectedSql: PropTypes.func, statusesArray: PropTypes.array, resetSqlTabs: PropTypes.func, + toggleSqlPaneDisplay: PropTypes.func, // connected history: PropTypes.instanceOf(Immutable.Map), @@ -238,6 +239,7 @@ export class ExploreHeader extends PureComponent { case "saveView": return this.handleSaveView(); case "saveViewAs": + this.setState({ actionState: "savingAs" }); return this.handleSaveViewAs(); case "saveScript": return this.handleSaveScript(); @@ -604,7 +606,8 @@ export class ExploreHeader extends PureComponent { const disableEnginePickMenu = isCancellable && (this.state.actionState === "run" || - this.state.actionState === "preview"); + this.state.actionState === "preview" || + this.state.actionState === "savingAs"); const cancelText = intl.formatMessage({ id: "Common.Cancel" }); const runText = intl.formatMessage({ id: "Common.Run" }); const previewText = intl.formatMessage({ id: "Common.Preview" }); @@ -738,6 +741,7 @@ export class ExploreHeader extends PureComponent { )}
        + {this.renderShowHideSQLPane()} {this.renderPrivilegesIconButton()} {this.renderAnalyzeButtons()} {this.renderSaveButton()} @@ -893,6 +897,34 @@ export class ExploreHeader extends PureComponent { ); }; + renderShowHideSQLPane = () => { + const { intl, sqlState, toggleSqlPaneDisplay } = this.props; + const message = intl.formatMessage({ + id: `SQL.SQLEditor.${sqlState ? "Hide" : "Show"}SQLPane`, + }); + return ( + +
        + + {message} +
        +
        + ); + }; + getDefaultSaveButton = () => { const { location, activeScript, numberOfMineScripts, intl } = this.props; const isUntitledScript = !activeScript.id; @@ -988,7 +1020,7 @@ export class ExploreHeader extends PureComponent { } render() { - const { dataset, location } = this.props; + const { dataset, location, router } = this.props; const { isSaveAsModalOpen } = this.state; const isDatasetPage = exploreUtils.isExploreDatasetPage(location); const projectId = getSonarContext()?.getSelectedProjectId?.(); @@ -1014,13 +1046,18 @@ export class ExploreHeader extends PureComponent { fetchAllAndMineScripts(this.props.fetchScripts, null); this.props.setActiveScript({ script: payload }); }} - {...(isDatasetPage && { - push: () => - this.props.router.push({ - pathname: sqlPaths.sqlEditor.link({ projectId }), - state: { renderScriptTab: true }, - }), - })} + push={(payload) => { + isDatasetPage + ? router.push({ + pathname: sqlPaths.sqlEditor.link({ projectId }), + query: { scriptId: payload.id }, + state: { renderScriptTab: true }, + }) + : router.push({ + pathname: location.pathname, + query: { ...location.query, scriptId: payload.id }, + }); + }} /> )}
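The push callback above encodes the new routing rule for saved scripts: from a dataset page, navigate to the SQL editor route with scriptId in the query string and renderScriptTab in the route state; otherwise stay on the current path and only add scriptId. The same rule as a standalone function, with a simplified Location type:

type Location = { pathname: string; query: Record<string, unknown> };

function scriptSaveTarget(
  isDatasetPage: boolean,
  location: Location,
  sqlEditorPath: string,
  scriptId: string
): Location & { state?: { renderScriptTab: boolean } } {
  return isDatasetPage
    ? {
        pathname: sqlEditorPath,
        query: { scriptId },
        state: { renderScriptTab: true },
      }
    : { pathname: location.pathname, query: { ...location.query, scriptId } };
}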
        diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.less b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.less index 015bdb5ebb..7f68165564 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.less +++ b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.less @@ -28,7 +28,7 @@ display: flex; flex-direction: row; justify-content: space-between; - padding: 0.4rem 1rem; + padding: 4px 10px; .ExploreHeader__left, .ExploreHeader__right { @@ -41,14 +41,41 @@ .preview-btn.--outlined, .discard-btn.--outlined, .edit-btn.--outlined { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } .run-btn.--disabled, .preview-btn.--disabled, .discard-btn.--disabled, .edit-btn.--disabled { - color: var(--dremio--color--neutral--300); + color: var(--color--neutral--200); + } + } + + .ExploreHeader__right { + .show-hide-sql-btn { + display: flex; + align-items: center; + border-radius: 4px; + border: none; + height: 32px; + font-size: 14px; + padding: 8px 7px; + font-weight: 500; + cursor: pointer; + text-decoration: none !important; + color: var(--dremio--color--link); + gap: 4px; + + &:hover { + background: var(--color--brand--25); + } + + &__icon { + height: 20px; + width: 20px; + color: var(--dremio--color--link); + } } } } @@ -66,7 +93,7 @@ margin-left: 6px; padding: 2px 8px; border-radius: 8px; - background-color: var(--dremio--color--neutral--600); + background-color: var(--color--neutral--600); } } } diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.module.less b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.module.less index 20af8e330d..38e5518508 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreHeader.module.less +++ b/dac/ui/src/pages/ExplorePage/components/ExploreHeader.module.less @@ -16,7 +16,7 @@ @import "../../../components/colors.less"; // contains only variables .privilegesIcon { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); inline-size: 24px; block-size: 24px; cursor: pointer; @@ -25,7 +25,7 @@ cursor: default; inline-size: 24px; block-size: 24px; - color: var(--dremio--color--neutral--200); + color: var(--color--neutral--100); } } @@ -33,16 +33,16 @@ height: 24px; width: 24px; margin-right: var(--dremio--spacing--1); - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } .run-icon { height: 20px; width: 20px; - color: var(--dremio--color--neutral--000); + color: white; &--disabled { - color: var(--dremio--color--neutral--200); + color: var(--color--neutral--100); } } @@ -50,10 +50,10 @@ .discard-icon { height: 20px; width: 20px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); &--disabled { - color: var(--dremio--color--neutral--200); + color: var(--color--neutral--100); } } diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreInfoHeader.js b/dac/ui/src/pages/ExplorePage/components/ExploreInfoHeader.js index b41f4df65b..0c3acf0a6c 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreInfoHeader.js +++ b/dac/ui/src/pages/ExplorePage/components/ExploreInfoHeader.js @@ -64,7 +64,12 @@ import FontIcon from "components/Icon/FontIcon"; import { DatasetItemLabel } from "components/Dataset/DatasetItemLabel"; // {} for testing purposes since store is not needed here import { checkTypeToShowOverlay } from "utils/datasetUtils"; -import { getIconDataTypeFromDatasetType } from "utils/iconUtils"; +import { + getIconDataTypeFromDatasetType, + getIcebergIconDataTypeFromDatasetType, +} 
from "utils/iconUtils"; + +import { getVersionContextFromId } from "dremio-ui-common/utilities/datasetReference.js"; import { getHistory, @@ -72,6 +77,7 @@ import { getExploreState, } from "selectors/explore"; import { getActiveScript } from "@app/selectors/scripts"; +import { TagContent } from "@app/pages/HomePage/components/BranchPicker/components/BranchPickerTag/BranchPickerTag"; import "./ExploreInfoHeader.less"; @@ -91,6 +97,7 @@ export class ExploreInfoHeader extends PureComponent { location: PropTypes.object, exploreViewState: PropTypes.instanceOf(Immutable.Map).isRequired, intl: PropTypes.object.isRequired, + nessieState: PropTypes.object, // connected history: PropTypes.instanceOf(Immutable.Map), @@ -348,7 +355,8 @@ export class ExploreInfoHeader extends PureComponent { } renderDatasetLabel(dataset) { - const { activeScript, location, intl, currentSql } = this.props; + const { activeScript, location, intl, currentSql, nessieState } = + this.props; const nameForDisplay = ExploreInfoHeader.getNameForDisplay( dataset, activeScript, @@ -367,10 +375,14 @@ export class ExploreInfoHeader extends PureComponent { const edited = intl.formatMessage({ id: isSqlEditorTab ? "NewQuery.Unsaved" : "Dataset.Edited", }); - const typeIcon = getIconDataTypeFromDatasetType( - !isSqlEditorTab ? dataset.get("datasetType") : SCRIPT - ); - const showOverlay = checkTypeToShowOverlay(dataset.get("datasetType")); + const datasetType = dataset.get("datasetType"); + const versionContext = getVersionContextFromId(dataset.get("entityId")); + const typeIcon = isSqlEditorTab + ? getIconDataTypeFromDatasetType(SCRIPT) + : versionContext != null + ? getIcebergIconDataTypeFromDatasetType(datasetType) + : getIconDataTypeFromDatasetType(datasetType); + const showOverlay = checkTypeToShowOverlay(datasetType); const isUntitledScript = isSqlEditorTab && !this.props.activeScript.name; const labelText = `${nameForDisplay}${ @@ -388,6 +400,8 @@ export class ExploreInfoHeader extends PureComponent { ); + const nessieStateRef = + nessieState?.reference ?? nessieState?.defaultReference; return ( {fullPath && ( - + <> + + {nessieStateRef && ( +
        + +
        + )} + )} { index === columnKey ); - // https://dremio.atlassian.net/browse/DX-5848 + // DX-5848 // https://github.com/facebook/fixed-data-table/issues/401 // https://github.com/facebook/fixed-data-table/issues/415 $(".fixedDataTableColumnResizerLineLayout_main").addClass( @@ -540,10 +538,13 @@ export class ExploreTableView extends PureComponent { message={intl.formatMessage({ id: messageId })} viewState={viewState} dataIsNotAvailable={this.shouldShowNoData(viewState)} + offsetHeight={this.nodeRef.current?.offsetHeight} customStyle={{ + flexDirection: "column-reverse", + gap: 16, + fontSize: 16, + lineHeight: "normal", bottom: tableHeight / 2, - position: "absolute", - height: 0, }} />
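The typeIcon ternary above resolves to a script icon on the SQL editor tab, to an Iceberg variant when getVersionContextFromId finds a version context on the dataset's entity id, and to the standard dataset icon otherwise. The decision table in isolation; the icon helpers are injected here because the real ones live in utils/iconUtils:

function pickTypeIcon(
  isSqlEditorTab: boolean,
  hasVersionContext: boolean,
  datasetType: string,
  icons: {
    script: () => string;
    iceberg: (type: string) => string;
    standard: (type: string) => string;
  }
): string {
  if (isSqlEditorTab) return icons.script();
  return hasVersionContext
    ? icons.iceberg(datasetType)
    : icons.standard(datasetType);
}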
        diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTable.less b/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTable.less index e97826cfb3..376c545206 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTable.less +++ b/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTable.less @@ -232,7 +232,7 @@ .action-menu-icon { height: 16px; width: 16px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); cursor: pointer; } } diff --git a/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTableController.js b/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTableController.js index a21ef6c0fd..043dd37905 100644 --- a/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTableController.js +++ b/dac/ui/src/pages/ExplorePage/components/ExploreTable/ExploreTableController.js @@ -78,7 +78,6 @@ import apiUtils from "@app/utils/apiUtils/apiUtils"; import DropdownForSelectedText from "./DropdownForSelectedText"; import ExploreCellLargeOverlay from "./ExploreCellLargeOverlay"; import ExploreTable from "./ExploreTable"; -import ExploreTableJobDiagram from "./ExploreTableJobDiagram"; export class ExploreTableController extends PureComponent { static propTypes = { @@ -99,8 +98,6 @@ export class ExploreTableController extends PureComponent { height: PropTypes.number, widthScale: PropTypes.number, rightTreeVisible: PropTypes.bool, - sqlSize: PropTypes.number, - sqlState: PropTypes.bool, isResizeInProgress: PropTypes.bool, children: PropTypes.node, getTableHeight: PropTypes.func, @@ -631,22 +628,12 @@ export class ExploreTableController extends PureComponent { style={{ padding: "0 8px" }} /> - ) : Object.keys(currentTab).length === 1 && currentTab.showGraph ? ( - // if job was submitted but is not in the RUNNING state, only show the diagram -
        - -
        ) : ( - // if a job was submitted and is in the RUNNING state, show graph + button
        - {currentTab.buttonText === "Cancel Job" ? ( - - ) : ( -
        - {currentTab.renderIcon && renderJobStatus(currentTab.renderIcon)} - {currentTab.text} -
        - )} +
        + {currentTab.renderIcon && renderJobStatus(currentTab.renderIcon)} + {currentTab.text} +
        {currentTab.buttonFunc && ( + + + } + title={intl.formatMessage({ id: "Common.UnsavedWarning" })} + > +
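The confirmation dialog wired up here backs the same unsaved-changes guard that WikiModalWithSave (below) implements with wikiChanged and openChildModal: while the editor is dirty, cancel is intercepted and a nested dialog asks before discarding. The guard pattern in isolation, a sketch rather than the component's code:

function makeCancelGuard(opts: {
  isDirty: () => boolean;
  openConfirm: () => void;
  discard: () => void;
}) {
  return (): boolean => {
    if (opts.isDirty()) {
      opts.openConfirm(); // keep the modal open and ask first
      return false;
    }
    opts.discard();
    return true;
  };
}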
        + +
        + +
+
+  );
+};
+
+export default WikiModalView;
diff --git a/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiModalWithSave.tsx b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiModalWithSave.tsx
new file mode 100644
index 0000000000..0df6e7571f
--- /dev/null
+++ b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiModalWithSave.tsx
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import ApiUtils from "@app/utils/apiUtils/apiUtils";
+import { showUnsavedChangesConfirmDialog } from "@app/actions/confirmation";
+import { connect } from "react-redux";
+import { Map, fromJS } from "immutable";
+import { useEffect, useState } from "react";
+import WikiModalView from "./WikiModal";
+import { useIntl } from "react-intl";
+
+type SaveProps = {
+  saveVal: { text: string; version: string };
+};
+
+interface WikiModalWithSaveProps {
+  entityId: string;
+  isOpen: boolean;
+  wikiValue: string;
+  wikiVersion: number;
+  isReadMode: boolean;
+  // topSectionButtons: typeof SectionTitle.propTypes.buttons;
+  onChange: () => void;
+  save: (saveVal: SaveProps) => void;
+  cancel: () => void;
+}
+
+const WikiModalWithSave = ({
+  wikiVersion,
+  entityId,
+  save,
+  onChange,
+  isOpen,
+  wikiValue,
+  isReadMode = false,
+  // topSectionButtons,
+  cancel: cancelProp,
+}: WikiModalWithSaveProps) => {
+  const [wikiViewState, setWikiViewState] = useState(new Map());
+  const [wikiChanged, setWikiChanged] = useState(false);
+  const [wikiVal, setWikiVal] = useState(wikiValue);
+  const [openChildModal, setOpenChildModal] = useState(false);
+  const intl = useIntl();
+
+  useEffect(() => {
+    setWikiVal(wikiValue);
+  }, [wikiValue]);
+
+  const saveWiki = (newValue: string) => {
+    return ApiUtils.fetch(
+      `catalog/${entityId}/collaboration/wiki`,
+      {
+        method: "POST",
+        body: JSON.stringify({
+          text: newValue,
+          version: wikiVersion,
+        }),
+      },
+      3
+    ).then(
+      (response: { json: () => Promise<any> }) => {
+        setWikiChanged(false);
+        return response.json().then(save, () => {}); // ignore json parsing errors, but if save is not called, the wiki stays in edit mode
+      },
+      async (response: any) => {
+        setWikiViewState(
+          fromJS({
+            isFailed: true,
+            error: {
+              message: await ApiUtils.getErrorMessage(
+                intl.formatMessage({ id: "Wiki.NotSaved" }),
+                response
+              ),
+              id: "" + Math.random(),
+            },
+          })
+        );
+      }
+    );
+  };
+  const cancelHandler = () => {
+    setWikiChanged(false);
+    setWikiVal(wikiValue);
+    setOpenChildModal(false);
+    cancelProp();
+  };
+
+  const cancel = () => {
+    // reset value to the original wiki if not saved
+    if (wikiChanged) {
+      setOpenChildModal(true);
+      return false;
+    }
+    cancelHandler();
+  };
+
+  const stay = () => {
+    setOpenChildModal(false);
+  };
+
+  const leave = () => {
+    cancelHandler();
+  };
+
+  const onChangeVal = (val: string) => {
+    setWikiChanged(true);
+
+    if (onChange) {
+      onChange();
+      setWikiVal(val);
+    }
+  };
+
+  const props = {
+    isOpen,
+    wikiValue: wikiVal,
+    isReadMode,
+    // 
topSectionButtons, + onChange: onChangeVal, + save: saveWiki, + cancel, + wikiViewState, + stay, + leave, + openChildModal, + }; + + return ; +}; + +const mapDispatchToProps = { + confirmUnsavedChanges: showUnsavedChangesConfirmDialog, +}; +export default connect(null, mapDispatchToProps)(WikiModalWithSave); diff --git a/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.module.less b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.module.less new file mode 100644 index 0000000000..66eb21244e --- /dev/null +++ b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.module.less @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.layout { + display: flex; + flex: 1; + align-items: stretch; +} + +.left-column { + border-right: 1px solid var(--color--neutral--50); + width: 100%; +} + +.right-column { + width: 500px; +} + +.dataset-wrapper { + padding: 0 var(--dremio--spacing--1); + + & > div { + padding-right: var(--dremio--spacing--1); + } +} + +.dataset-wrapper-overlay { + & > div > div > div:nth-child(3) { + margin-bottom: var(--dremio--spacing--1); + } +} + +.sections-container .section-item { + max-height: calc(100vh - 200px); +} + +.collapsed-right-section { + margin-top: var(--dremio--spacing--105); + min-width: 36px; +} + +.section-title { + display: flex; + align-items: center; + justify-content: space-between; + font-weight: 600; + font-size: var(--dremio--font-size--lg); + padding: var(--dremio--spacing--105); + padding-right: 0; + border-bottom: 1px solid var(--color--neutral--50); +} + +.tags { + display: flex; + &:focus-visible { + outline: none; + } +} + +.editor { + max-height: 550px; + overflow-y: scroll; +} + +.datasetOverviewContainer { + border-bottom: 1px solid var(--color--neutral--50); +} + +// overlay view +.collapsibleBodyOverview { + max-height: none; + min-height: fit-content; +} +.collapsibleBodyOverlay { + overflow-y: auto; + max-height: 26.5vh; + min-height: 15vh; +} + +.noColumns { + align-items: center; + justify-content: center; + display: flex; + font-size: var(--dremio--font-size--default); + color: var(--dremio--color--text--faded); +} + +.noColumnsOverlayHeight { + max-height: 30vh; + min-height: 20vh; +} + +.noColumnsHeight { + max-height: 40vh; + min-height: 36vh; +} + +:global { + .dremioContent__main .view-state-wrapper .CodeMirror .editor-preview p { + max-width: none; + } +} diff --git a/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.tsx b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.tsx new file mode 100644 index 0000000000..704aa6963c --- /dev/null +++ b/dac/ui/src/pages/ExplorePage/components/Wiki/WikiModal/WikiWrapper.tsx @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import classNames from "clsx"; +import TagsView from "../Labels/Labels"; +import ViewStateWrapper from "@app/components/ViewStateWrapper"; +import DataColumns from "../DataColumns/DataColumns"; +import WikiModalWithSave from "./WikiModalWithSave"; +import Collapsible from "../Collapsible/Collapsible"; +import * as classes from "./WikiWrapper.module.less"; +import { ReactNode } from "react"; +import DatasetSummaryOverlay from "@app/components/Dataset/DatasetSummaryOverlay"; +import { useIntl } from "react-intl"; +//@ts-ignore +import ImmutablePropTypes from "react-immutable-proptypes"; + +type SaveProps = { + saveVal: { text: string; version: string }; +}; + +type toolbarProps = { + component: ReactNode; + componentClass: string; + name: string; + onClickHandle: () => void; + tooltip: string; +}; +interface WikiWrapperProps { + getLoadViewState: (showLoadMask: boolean) => void; + showLoadMask: boolean; + showWikiContent: boolean; + extClassName: string; + wikiViewState: ImmutablePropTypes.map; + wrapperStylesFix: { height: string }; + messageStyle: { top: number }; + columnDetails: ImmutablePropTypes.list; + columnToolbar: toolbarProps[]; + wikiToolbar: toolbarProps[]; + renderWikiContent: () => null | ReactNode; + isWikiInEditMode: boolean; + entityId: string; + onChange: () => void; + wiki: string; + wikiVersion: number; + saveWiki: (saveVal: SaveProps) => void; + cancelWikiEdit: () => void; + sidebarCollapsed: boolean; + tagsViewState: ImmutablePropTypes.map; + getTags: (tags: string) => void; + tags: ImmutablePropTypes.orderedMap; + isEditAllowed: boolean; + addTag?: (tagName: string) => void; + removeTag?: (tagName: string) => void; + startSearch: () => void; + showTags: boolean; + renderCollapseIcon: () => ReactNode; + isReadMode: boolean; + dataset: ImmutablePropTypes.map; + overlay?: boolean; + searchTerm?: string; +} + +const WikiWrapper = ({ + getLoadViewState, + showLoadMask, + showWikiContent, + extClassName, + wikiViewState, + wrapperStylesFix, + messageStyle, + columnDetails, + columnToolbar, + wikiToolbar, + renderWikiContent, + isWikiInEditMode, + entityId, + onChange, + wiki, + wikiVersion, + saveWiki, + cancelWikiEdit, + sidebarCollapsed, + tagsViewState, + getTags, + tags, + isEditAllowed, + addTag, + removeTag, + startSearch, + showTags, + renderCollapseIcon, + isReadMode = false, + dataset, + overlay = false, + searchTerm, +}: WikiWrapperProps) => { + const intl = useIntl(); + + const datasetOverviewComponent = () => { + let fullPath = dataset?.get("fullPath"); + if (dataset?.getIn(["fullPath", "0"]) === "tmp") { + fullPath = dataset?.get("displayFullPath"); + } + return ( +
        + + + + ) : null + } + /> +
        + ); + }; + + const dataColumnsComponent = () => { + return ( + 0 ? ( + + ) : ( +
        + {intl.formatMessage({ id: "Wiki.NoColumn" })} +
        + ) + } + bodyClass={overlay ? classes["collapsibleBodyOverlay"] : ""} + bodyStyle={overlay ? undefined : { height: "100%" }} + /> + ); + }; + + const wikiDetailsComponent = () => { + return ( + {renderWikiContent()}} + bodyClass={overlay ? classes["collapsibleBodyOverlay"] : ""} + bodyStyle={overlay ? undefined : { height: "100%" }} + /> + ); + }; + + return ( + <> + {!overlay ? ( + +
        + {showWikiContent && ( +
        + +
        + {dataColumnsComponent()} +
        +
        + {wikiDetailsComponent()} +
        +
        +
        + )} + {!sidebarCollapsed ? ( +
        + +
        + {intl.formatMessage({ id: "Wiki.DatasetOverview" })} + {renderCollapseIcon()} +
        + {datasetOverviewComponent()} +
        +
        + ) : ( +
        + {renderCollapseIcon()} +
        + )} +
        +
        + ) : ( + + + {dataColumnsComponent()} + {wikiDetailsComponent()} + + )} + + + ); +}; + +export default WikiWrapper; diff --git a/dac/ui/src/pages/ExplorePage/components/forms/SelectContextForm.js b/dac/ui/src/pages/ExplorePage/components/forms/SelectContextForm.js index 1b9ce40893..635f8fb437 100644 --- a/dac/ui/src/pages/ExplorePage/components/forms/SelectContextForm.js +++ b/dac/ui/src/pages/ExplorePage/components/forms/SelectContextForm.js @@ -20,6 +20,8 @@ import { connectComplexForm } from "components/Forms/connectComplexForm"; import ResourceTreeContainer from "components/Tree/ResourceTreeContainer"; import { TextField, FieldWithError } from "components/Fields"; +import * as classes from "./SelectContextForm.module.less"; + export const FIELDS = ["context"]; export class SelectContextForm extends Component { @@ -53,7 +55,10 @@ export class SelectContextForm extends Component { > -
        +
        { const column = item[0]; + if (!column) return; + if (column.contentRect.width <= 1000) { column.target.classList.add("--minimal"); } else { @@ -184,6 +192,7 @@ export class ExplorePageContentWrapper extends PureComponent { setPreviousAndCurrentSql: PropTypes.func, selectedSql: PropTypes.string, setSelectedSql: PropTypes.func, + setCustomDefaultSql: PropTypes.func, isMultiQueryRunning: PropTypes.bool, queryTabNumber: PropTypes.number, setQueryTabNumber: PropTypes.func, @@ -193,6 +202,9 @@ export class ExplorePageContentWrapper extends PureComponent { isNessieOrArcticSource: PropTypes.bool, fetchFilteredJobsList: PropTypes.func, resetFilteredJobsList: PropTypes.func, + setResizeProgressState: PropTypes.func, + toggleExploreSql: PropTypes.func, + nessieState: PropTypes.object, }; static contextTypes = { @@ -240,18 +252,19 @@ export class ExplorePageContentWrapper extends PureComponent { this.headerRef = createRef(); this.dremioSideBarRef = createRef(); this.dremioSideBarDrag = createRef(); - this.explorePageRef = createRef(); } + timeoutRef = undefined; componentDidMount() { // fetch support flags here for powerbi and tableau only if its not enterprise - const isEnterpriseFlag = isEnterprise && isEnterprise(); - const isCommunityFlag = isCommunity && isCommunity(); - if (!(isEnterpriseFlag || isCommunityFlag)) { + if (!(isEnterprise?.() || isCommunity?.())) { this.props.fetchSupportFlags("client.tools.tableau"); this.props.fetchSupportFlags("client.tools.powerbi"); } + if (!isCommunity?.()) { + this.props.fetchSupportFlags(REFLECTION_ARCTIC_ENABLED); + } Mousetrap.bind(["mod+enter", "mod+shift+enter"], this.kbdShorthand); this.getUserOperatingSystem(); @@ -270,9 +283,26 @@ export class ExplorePageContentWrapper extends PureComponent { // Observing content column resizeColumn.observe(document.querySelector(".dremioContent__content")); - const { location, setPreviousAndCurrentSql } = this.props; + const { location, router, setCustomDefaultSql, setPreviousAndCurrentSql } = + this.props; if (location.query?.create) { setPreviousAndCurrentSql({ sql: CREATE_NEW_QUERY }); + } else if (location.query?.queryPath) { + // executes when navigating to the editor to query a dataset + const sql = exploreUtils.createNewQueryFromDatasetOverlay( + location.query.queryPath + ); + + setPreviousAndCurrentSql({ sql }); + setCustomDefaultSql({ sql }); + + router.replace({ + ...location, + query: { + ...location.query, + queryPath: undefined, + }, + }); } if (sessionStorage.getItem(DATASET_PATH_FROM_ONBOARDING)) { @@ -284,14 +314,6 @@ export class ExplorePageContentWrapper extends PureComponent { ), }); sessionStorage.removeItem(DATASET_PATH_FROM_ONBOARDING); - } else if (sessionStorage.getItem(DATASET_PATH_FROM_OVERLAY)) { - // updates the sql when coming from a dataset's overlay query button - setPreviousAndCurrentSql({ - sql: exploreUtils.createNewQueryFromDatasetOverlay( - sessionStorage.getItem(DATASET_PATH_FROM_OVERLAY) - ), - }); - sessionStorage.removeItem(DATASET_PATH_FROM_OVERLAY); } } @@ -301,6 +323,7 @@ export class ExplorePageContentWrapper extends PureComponent { document.removeEventListener("mousemove", this.sidebarMouseMove); window.removeEventListener("resize", this.handleResize); resizeColumn.unobserve(document.querySelector(".dremioContent__content")); + clearTimeout(this.timeoutRef); // Sidebar resizing if (this.dremioSideBarDrag.current) { @@ -330,20 +353,26 @@ export class ExplorePageContentWrapper extends PureComponent { isMultiQueryRunning, jobDetails, location, + router, } = 
this.props; const loc = rmProjectBase(location.pathname); // This is specific to using the overlay query button from the dataset page. if ( loc === newQueryLink && - sessionStorage.getItem(DATASET_PATH_FROM_OVERLAY) && + location.query?.queryPath && this.getMonacoEditorInstance() ) { this.insertFullPathAtCursor( - exploreUtils.createNewQueryFromDatasetOverlay( - sessionStorage.getItem(DATASET_PATH_FROM_OVERLAY) - ) + exploreUtils.createNewQueryFromDatasetOverlay(location.query.queryPath) ); - sessionStorage.removeItem(DATASET_PATH_FROM_OVERLAY); + + router.replace({ + ...location, + query: { + ...location.query, + queryPath: undefined, + }, + }); } // if a single job, force the explore table to be in view @@ -659,6 +688,16 @@ export class ExplorePageContentWrapper extends PureComponent { } }; + toggleSqlPaneDisplay = () => { + clearTimeout(this.timeoutRef); + this.props.setResizeProgressState(true); + this.props.toggleExploreSql?.(); + this.timeoutRef = setTimeout( + () => this.props.setResizeProgressState(false), + 500 + ); // ref for calculating height isn't auto updated + }; + getBottomContent() { const { dataset, @@ -695,7 +734,6 @@ export class ExplorePageContentWrapper extends PureComponent { pageType={pageType} dataset={dataset} location={location} - sqlSize={this.props.sqlSize} rightTreeVisible={this.props.rightTreeVisible} exploreViewState={this.props.exploreViewState} /> @@ -709,7 +747,6 @@ export class ExplorePageContentWrapper extends PureComponent { ) @@ -744,8 +781,6 @@ export class ExplorePageContentWrapper extends PureComponent { dataset={dataset} dragType={EXPLORE_DRAG_TYPE} location={location} - sqlSize={this.props.sqlSize} - sqlState={this.props.sqlState} rightTreeVisible={this.props.rightTreeVisible} exploreViewState={this.props.exploreViewState} canSelect={canSelect} @@ -768,9 +803,7 @@ export class ExplorePageContentWrapper extends PureComponent { getControlsBlock() { const { dataset, - sqlSize, location, - sqlState, rightTreeVisible, exploreViewState, pageType, @@ -798,10 +831,8 @@ export class ExplorePageContentWrapper extends PureComponent { return ( ) ); @@ -885,6 +918,7 @@ export class ExplorePageContentWrapper extends PureComponent { statusesArray={this.props.statusesArray} resetSqlTabs={this.resetTabToJobList} supportFlagsObj={this.props.supportFlagsObj} + toggleSqlPaneDisplay={this.toggleSqlPaneDisplay} /> ); default: @@ -1114,6 +1148,7 @@ export class ExplorePageContentWrapper extends PureComponent { toggleRightTree={this.props.toggleRightTree} rightTreeVisible={this.props.rightTreeVisible} exploreViewState={this.props.exploreViewState} + nessieState={this.props.nessieState} />
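toggleSqlPaneDisplay above brackets the pane toggle with a resize-in-progress flag that a 500 ms timer clears, because the ref used for height calculation does not update synchronously after the toggle. The same timing pattern as a reusable factory, sketched with simplified types:

function makePaneToggle(
  setResizing: (inProgress: boolean) => void,
  togglePane: () => void,
  settleMs = 500
) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return () => {
    clearTimeout(timer); // restart the settle window on rapid toggles
    setResizing(true);
    togglePane();
    timer = setTimeout(() => setResizing(false), settleMs);
  };
}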
@@ -1152,7 +1187,11 @@ export class ExplorePageContentWrapper extends PureComponent {
         {this.getTabsBlock()}
-
        +
        {this.getControlsBlock()} 0) { const sourceId = loc.split("/")[2]; const source = (sources || []).find( @@ -1185,6 +1225,7 @@ function mapStateToProps(state, ownProps) { ); if (source && isVersionedSource(source.get("type"))) { isNessieOrArcticSource = true; + nessieState = selectState(state.nessie, `ref/${source.get("name")}`); } } const entityId = getDatasetEntityId(state, location); @@ -1265,6 +1306,7 @@ function mapStateToProps(state, ownProps) { supportFlagsObj, jobDetails, isNessieOrArcticSource, + nessieState, }; } @@ -1282,10 +1324,13 @@ export default withRouter( setQuerySelections, setPreviousAndCurrentSql, setSelectedSql, + setCustomDefaultSql, setQueryTabNumber, fetchSupportFlags, fetchFilteredJobsList, resetFilteredJobsList, + toggleExploreSql, + setResizeProgressState, }, null, { forwardRef: true } diff --git a/dac/ui/src/pages/ExplorePage/subpages/ExplorePageContentWrapper.less b/dac/ui/src/pages/ExplorePage/subpages/ExplorePageContentWrapper.less index f25baf7ad5..603b0e057c 100644 --- a/dac/ui/src/pages/ExplorePage/subpages/ExplorePageContentWrapper.less +++ b/dac/ui/src/pages/ExplorePage/subpages/ExplorePageContentWrapper.less @@ -23,7 +23,7 @@ &.--collpase { .dremioSidebar { background-color: @SECONDARY_150; - width: 3.6rem; + width: 36px; } .dremioSidebar__drag { @@ -65,11 +65,11 @@ width: 100%; &__header { - border-left: 1px solid var(--dremio--color--neutral--150); - border-bottom: 1px solid var(--dremio--color--neutral--150); + border-left: 1px solid var(--color--neutral--50); + border-bottom: 1px solid var(--color--neutral--50); align-items: center; display: flex; - gap: 1rem; + gap: 10px; height: 54px; justify-content: space-between; @@ -78,7 +78,7 @@ } .input__container { - font-size: 1.5rem; + font-size: 15px; border: none; > input::placeholder { @@ -88,20 +88,20 @@ } .validationError { - margin-left: 0.8rem; + margin-left: 8px; } } &__actions { display: flex; justify-content: space-between; - margin: 1.5rem 1rem 0; - padding-bottom: 1rem; + margin: 15px 10px 0; + padding-bottom: 10px; } // Catalog, Graph, Reflections content area &__content { - border-left: 1px solid var(--dremio--color--neutral--150); + border-left: 1px solid var(--color--neutral--50); height: 100%; overflow: auto; } @@ -113,6 +113,10 @@ overflow-y: hidden; } + &__table.fullHeight { + height: 100%; + } + &__main { overflow: auto; } @@ -129,15 +133,22 @@ .ExploreHeader__left, .dremioContent__table .left-controls { .btn.--outlined { - min-width: 3.6rem !important; + min-width: 36px !important; text-indent: -99999px; // stopped working for Safari 16 - width: 3.6rem !important; + width: 36px !important; .btn__content { gap: 0; } } } + .ExploreHeader__right { + .show-hide-sql-btn { + min-width: 32px !important; + text-indent: -99999px; + width: 32px !important; + } + } } // Data content area @@ -174,7 +185,7 @@ .dremioContent-rightCol { align-items: center; display: flex; - margin-right: 0.5rem; + margin-right: 5px; &__time { height: 100%; @@ -203,13 +214,13 @@ align-items: center; flex: 1 1 auto; display: flex; - margin: 0 1.2rem 1.6rem; + margin: 0 12px 16px; justify-content: space-between; transition: 300ms ease margin; &-nav { flex: 1 1 auto; - max-width: 25rem; + max-width: 250px; opacity: 1; transition: 300ms ease-out max-width, 300ms ease opacity; visibility: visible; @@ -218,14 +229,14 @@ position: relative; text-transform: uppercase; width: 50%; - padding: 2rem 0; + padding: 20px 0; text-align: center; &::after { background-color: @ELM; bottom: 0; content: ""; - height: 0.3rem; + 
height: 3px; left: 0; position: absolute; width: 100%; @@ -242,36 +253,36 @@ min-width: 260px; &__heading { text-transform: uppercase; - font-size: 1.4rem; - padding: 1rem; + font-size: 14px; + padding: 10px; } .divider { background-color: @SECONDARY_200; - height: 0.1rem; + height: 1px; width: 100%; } &__list { - padding: 1rem; + padding: 10px; min-width: 160px; li { display: flex; - font-size: 1.4rem; + font-size: 14px; justify-content: space-between; align-items: center; &:not(:last-child) { - margin-bottom: 1rem; + margin-bottom: 10px; } span { background-color: @SECONDARY_200; - border-radius: 0.8rem; + border-radius: 8px; display: inline-block; - font-size: 1.4rem; - padding: 0.3rem 0.8rem; - height: 2.4rem; + font-size: 14px; + padding: 3px 8px; + height: 24px; } } } diff --git a/dac/ui/src/pages/ExplorePage/subpages/HistoryPage/HistoryPage.tsx b/dac/ui/src/pages/ExplorePage/subpages/HistoryPage/HistoryPage.tsx index 0c446acf35..6cb6889bc4 100644 --- a/dac/ui/src/pages/ExplorePage/subpages/HistoryPage/HistoryPage.tsx +++ b/dac/ui/src/pages/ExplorePage/subpages/HistoryPage/HistoryPage.tsx @@ -24,13 +24,16 @@ import { getSortedSources } from "@app/selectors/home"; import { withRouter, WithRouterProps } from "react-router"; import { getTableAndNamespace } from "./utils"; import { - getEndpointFromSourceConfig, + getEndpointFromSource, getSourceByName, + isArcticCatalogConfig, } from "@app/utils/nessieUtils"; import TableHistoryContent from "@app/pages/NessieHomePage/components/TableDetailsPage/components/TableHistoryContent/TableHistoryContent"; import { fetchDefaultReferenceIfNeeded as fetchDefaultReferenceAction } from "@app/actions/nessie/nessie"; import { useEffect, useMemo } from "react"; import { rmProjectBase } from "dremio-ui-common/utilities/projectBase.js"; +import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; +import * as commonPaths from "dremio-ui-common/paths/common.js"; import "./HistoryPage.less"; @@ -48,28 +51,38 @@ function HistoryPage({ nessie, namespace, fetchDefaultReference, + tableName, }: ConnectedProps & WithRouterProps) { const config = source?.config; - const endpoint = getEndpointFromSourceConfig(config); + const endpoint = getEndpointFromSource(source as any); + const isArcticConfig = isArcticCatalogConfig(config); + const context = useMemo( () => createNessieContext( { id: source?.id, name: source?.name, endpoint }, - nessie + nessie, + undefined, + isArcticConfig + ? 
commonPaths.arcticSource.link({ + sourceName: source?.name, + projectId: getSonarContext().getSelectedProjectId?.(), + }) + : undefined ), - [endpoint, nessie, source?.name, source?.id] + [endpoint, nessie, source?.name, source?.id, isArcticConfig] ); useEffect(() => { - fetchDefaultReference(source?.name, context.api); - }, [fetchDefaultReference, source?.name, context.api]); + fetchDefaultReference(source?.name, context.apiV2); + }, [fetchDefaultReference, source?.name, context.apiV2]); if (!source) return null; return ( - + ); } @@ -79,13 +92,14 @@ const mapStateToProps = (state: any, { location }: WithRouterProps) => { rmProjectBase(location.pathname) ); const namespace = (namespaceString || "").split("."); - namespace.pop(); + const tableName = namespace.pop(); const sources = getSortedSources(state); const source = getSourceByName(sourceName, sources.toJS()); return { nessie: state.nessie, namespace, source, + tableName, }; }; diff --git a/dac/ui/src/pages/ExplorePage/subpages/utils.tsx b/dac/ui/src/pages/ExplorePage/subpages/utils.tsx index 74c6a774cf..6c4b250378 100644 --- a/dac/ui/src/pages/ExplorePage/subpages/utils.tsx +++ b/dac/ui/src/pages/ExplorePage/subpages/utils.tsx @@ -31,7 +31,6 @@ type StatusObjectType = { buttonAlt?: string; ranJob?: boolean; error?: any; - showGraph?: boolean; }; export const assemblePendingOrRunningTabContent = ( @@ -78,7 +77,8 @@ export const assemblePendingOrRunningTabContent = ( JOB_STATUS.running, ].includes(statusesArray[index]) ) { - obj.showGraph = true; + obj.renderIcon = "RUNNING"; + obj.text = ; } else { return; } diff --git a/dac/ui/src/pages/HomePage/HomePage.js b/dac/ui/src/pages/HomePage/HomePage.js index af4393340a..f778e5e282 100644 --- a/dac/ui/src/pages/HomePage/HomePage.js +++ b/dac/ui/src/pages/HomePage/HomePage.js @@ -23,7 +23,7 @@ import ApiUtils from "@app/utils/apiUtils/apiUtils"; import { sourceTypesIncludeS3 } from "@app/utils/sourceUtils"; import { loadSourceListData } from "@app/actions/resources/sources"; -import { isDcsEdition } from "dyn-load/utils/versionUtils"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; import { getViewState } from "@app/selectors/resources"; import { fetchFeatureFlag } from "@inject/actions/featureFlag"; import { page } from "@app/uiTheme/radium/general"; @@ -43,7 +43,6 @@ import "./HomePage.less"; import HomePageActivating from "@inject/pages/HomePage/HomePageActivating"; import { intl } from "@app/utils/intl"; import { ErrorBoundary } from "@app/components/ErrorBoundary/ErrorBoundary"; -import { ORGANIZATION_LANDING } from "@app/exports/flags/ORGANIZATION_LANDING"; import { isSonarUrlabilityEnabled } from "@app/exports/utilities/featureFlags"; const PROJECT_CONTEXT = "projectContext"; @@ -61,7 +60,6 @@ class HomePage extends Component { children: PropTypes.node, style: PropTypes.object, isProjectInactive: PropTypes.bool, - orgLandingFlag: PropTypes.bool, }; state = { @@ -73,8 +71,7 @@ class HomePage extends Component { } componentDidMount() { - isDcsEdition() && this.props.fetchFeatureFlag(DATA_OPTIMIZATION); - isDcsEdition() && this.props.fetchFeatureFlag(ORGANIZATION_LANDING); + isNotSoftware() && this.props.fetchFeatureFlag(DATA_OPTIMIZATION); this.setStateWithSourceTypesFromServer(); } @@ -116,13 +113,12 @@ class HomePage extends Component { // Note were are getting the "ref" to the SearchBar React object. render() { - const { isProjectInactive, orgLandingFlag } = this.props; + const { isProjectInactive } = this.props; const homePageSearchClass = showHomePageTop() ? 
" --withSearch" : " --withoutSearch"; - const homePageNavCrumbClass = - showNavCrumbs && orgLandingFlag ? " --withNavCrumbs" : ""; + const homePageNavCrumbClass = showNavCrumbs ? " --withNavCrumbs" : ""; const storage = isSonarUrlabilityEnabled() ? sessionStorage : localStorage; const projectName = @@ -136,7 +132,7 @@ class HomePage extends Component { {!isProjectInactive && (
-          {isDcsEdition() && }
+          {isNotSoftware() && }
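// Editor's sketch: the isDcsEdition() checks above give way to edition guards from
// dyn-load. Because a dyn-load build may omit an export entirely, the patch favors
// optional-call syntax: `fn?.()` evaluates to undefined (falsy) instead of throwing
// when `fn` is missing. A minimal illustration (fetchEditionFlags is hypothetical):
function fetchEditionFlags({ isEnterprise, isCommunity, fetchSupportFlags }) {
  // Client-tool flags are fetched only when neither edition check passes.
  if (!(isEnterprise?.() || isCommunity?.())) {
    fetchSupportFlags("client.tools.tableau");
    fetchSupportFlags("client.tools.powerbi");
  }
}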
        {}, }: BranchPickerProps & ConnectedProps & { router?: any }) { const { ref } = useBranchPickerContext(); - const { state, api, stateKey } = useNessieContext(); + const { state, apiV2, stateKey } = useNessieContext(); const [refState, setRefState] = useState({ reference: state.reference, hash: state.hash, @@ -93,6 +96,14 @@ function BranchPicker({ }); const toggleProps = bindToggle(popupState); + useEffect(() => { + setRefState({ + reference: state.reference, + hash: state.hash, + date: state.date, + }); + }, [state]); + useImperativeHandle(ref, () => popupState); function submitForm() { @@ -149,11 +160,11 @@ function BranchPicker({ state.reference, newDate, stateKey, - api + apiV2 ); if (resultState) { setRefState(resultState as any); - // Picker will be replaced in https://dremio.atlassian.net/browse/DX-53586 + // Picker will be replaced in DX-53586 if (!e) hidePicker(); // Workaround to close picker after time select: event is undefined when clicking time } } @@ -295,7 +306,7 @@ function BranchPicker({ branch={reference} onClick={setHash} selectedHash={hash} - api={api} + api={apiV2} /> } diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.less b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.less index 879cf806a5..0071139c8d 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.less +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.less @@ -67,7 +67,7 @@ } .MuiListItem-root.Mui-selected { - background-color: var(--dremio--color--primary--100) !important; + background-color: var(--color--brand--25) !important; font-weight: 600 !important; border-radius: 4px; } diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.tsx b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.tsx index 888e7f4e11..dd70cc4de1 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.tsx +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchList/BranchList.tsx @@ -45,10 +45,13 @@ function BranchList({ }: BranchListProps) { const intl = useIntl(); const ref = useRef(null); - const { api } = useNessieContext(); + const { apiV2 } = useNessieContext(); const [, data] = usePromise( - useCallback(() => api.getAllReferences({ maxRecords: 1000000 }), [api]) + useCallback( + () => apiV2.getAllReferencesV2({ maxRecords: 1000000 }), + [apiV2] + ) ); const branchList = useMemo(() => { if (!data) return []; diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchPickerTag/BranchPickerTag.less b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchPickerTag/BranchPickerTag.less index 1bc36a2d0d..7af8428e50 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchPickerTag/BranchPickerTag.less +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/BranchPickerTag/BranchPickerTag.less @@ -21,11 +21,12 @@ height: 24px; max-width: 250px; font-size: 14px; + font-weight: 400; .flex-center; padding: 2px 4px 2px 6.5px; border-radius: 4px; - background-color: var(--dremio--color--neutral--150); + background-color: var(--color--neutral--50); transition: background-color 100ms linear; &.isOpen, diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/CommitBrowser.tsx 
b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/CommitBrowser.tsx index 86296d2cb6..ad82d155e7 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/CommitBrowser.tsx +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/CommitBrowser.tsx @@ -17,10 +17,10 @@ import debounce from "lodash/debounce"; import classNames from "clsx"; import { SearchField } from "@app/components/Fields"; import { - DefaultApi, + V2BetaApi, FetchOption, - LogEntry, - LogResponse, + LogEntryV2 as LogEntry, + LogResponseV2 as LogResponse, } from "@app/services/nessie/client"; import { useCallback, useEffect, useMemo, useReducer } from "react"; import InfiniteScroller from "../InfiniteScroller/InfiniteScroller"; @@ -42,17 +42,19 @@ function CommitBrowser({ selectedHash, disabled, pageSize = PAGE_SIZE, - api, + api: apiV2, + tableName, }: { branch: Reference; path?: string[]; + tableName?: string; hasSearch?: boolean; onDataChange?: (arg: LogResponse | undefined) => void; onClick?: (arg: LogEntry) => void; selectedHash?: string | null; disabled?: boolean; pageSize?: number; - api: DefaultApi; + api: V2BetaApi; }) { const [{ search, data, numRows }, dispatch] = useReducer( CommitBrowserReducer, @@ -78,16 +80,17 @@ function CommitBrowser({ const loadMoreRows = useCallback( async function () { - const value = await api.getCommitLog({ + const value = await apiV2.getCommitLogV2({ ref: branch.name, pageToken: token, maxRecords: pageSize, - filter: formatQuery(search, path), - fetch: !path?.length ? FetchOption.Minimal : FetchOption.All, + filter: formatQuery(search, path, tableName), + fetch: + !path?.length && !tableName ? FetchOption.Minimal : FetchOption.All, }); dispatch({ type: "SET_DATA", value }); }, - [branch.name, token, search, path, pageSize, api] + [branch.name, token, search, path, pageSize, apiV2, tableName] ); return ( diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.less b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.less index 41ffc1b660..bca021ff9d 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.less +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.less @@ -106,5 +106,8 @@ .flex-center; line-height: normal; } + &-userName { + margin-left: var(--dremio--spacing--05); + } } } diff --git a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.tsx b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.tsx index d24c8fd2a7..7dfb4b546b 100644 --- a/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.tsx +++ b/dac/ui/src/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitEntry/CommitEntry.tsx @@ -14,7 +14,10 @@ * limitations under the License. 
*/ import { useIntl } from "react-intl"; -import { CommitMeta, LogEntry } from "@app/services/nessie/client"; +import { + CommitMetaV2 as CommitMeta, + LogEntryV2 as LogEntry, +} from "@app/services/nessie/client"; import { DEFAULT_FORMAT_WITH_TIME, formatDate, @@ -23,10 +26,10 @@ import { import { Tooltip } from "@mui/material"; import classNames from "clsx"; import { useRef } from "react"; - +import { Avatar } from "dremio-ui-lib/components"; import CommitHash from "../CommitHash/CommitHash"; -import UserIcon from "../UserIcon/UserIcon"; import { Reference } from "@app/types/nessie"; +import { nameToInitials } from "@app/exports/utilities/nameToInitials"; import "./CommitEntry.less"; @@ -61,7 +64,7 @@ function CommitEntryTooltip({
        - + @@ -95,7 +98,7 @@ function CommitEntry({ if (!commit) return null; - const user = commit.author || ""; + const user = commit.authors?.[0] || ""; return (
         {!disabled && (
@@ -116,7 +119,7 @@
        {commit.message}
-          {user && }
+          {user && }
           {user}
           {commit.hash && (
*/ -import { LogResponse } from "@app/services/nessie/client"; +import { LogResponseV2 as LogResponse } from "@app/services/nessie/client"; type CommitBrowserState = { search?: string; @@ -57,7 +57,8 @@ export function CommitBrowserReducer( export function formatQuery( search: string | undefined, - path: string[] | undefined + path: string[] | undefined, + tableName: string | undefined ) { let clauses: string[] = []; if (search) { @@ -70,10 +71,21 @@ export function formatQuery( ]; } + const opClauses = []; if (path?.length) { - //TODO Check with nessie team on escaping this namespace const namespace = path.map((c) => decodeURIComponent(c)).join("."); - clauses.push(`operations.exists(op, op.namespace == '${namespace}')`); + opClauses.push(`op.namespace == '${namespace}'`); + } else { + //Only add this filter clause if tableName is provided since we're looking for a specific one + if (tableName) opClauses.push("size(op.namespaceElements) == 0"); + } + + if (tableName) { + opClauses.push(`op.name == '${tableName}'`); + } + + if (opClauses.length) { + clauses.push(`operations.exists(op, ${opClauses.join(" && ")})`); } return clauses.join(" || "); diff --git a/dac/ui/src/pages/HomePage/components/BrowseTable.js b/dac/ui/src/pages/HomePage/components/BrowseTable.js index d4c94c6e26..6621de1333 100644 --- a/dac/ui/src/pages/HomePage/components/BrowseTable.js +++ b/dac/ui/src/pages/HomePage/components/BrowseTable.js @@ -141,7 +141,7 @@ export default class BrowseTable extends Component { const resetScrollTop = Boolean( window.navigator.userAgent.toLowerCase().includes("firefox") && this.state.filter - ); //it's needed for https://dremio.atlassian.net/browse/DX-7140 + ); //it's needed for DX-7140 if (tableData.size) { passAlongProps.noDataText = intl.formatMessage( diff --git a/dac/ui/src/pages/HomePage/components/BrowseTable.less b/dac/ui/src/pages/HomePage/components/BrowseTable.less index e28b36a86f..dc300fdd2b 100644 --- a/dac/ui/src/pages/HomePage/components/BrowseTable.less +++ b/dac/ui/src/pages/HomePage/components/BrowseTable.less @@ -21,12 +21,12 @@ .search-field { input { &:hover { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } &:focus { - border-color: var(--dremio--color--primary--500); - background-color: var(--dremio--color--neutral--000); + border-color: var(--color--brand--300); + background-color: white; } } } @@ -36,7 +36,7 @@ width: 257px !important; // accounts for 240px input width, 16px padding, 1px border margin-right: 8px !important; padding-right: 16px; - border-right: 1px solid var(--dremio--color--neutral--150); + border-right: 1px solid var(--color--neutral--50); } .active-wiki-button { @@ -216,7 +216,7 @@ overflow: hidden; height: 48px; flex-shrink: 0; - border-bottom: 1px solid var(--dremio--color--neutral--150); + border-bottom: 1px solid var(--color--neutral--50); .copy-button { display: none; } diff --git a/dac/ui/src/pages/HomePage/components/DataPlaneSection/DataPlaneSection.tsx b/dac/ui/src/pages/HomePage/components/DataPlaneSection/DataPlaneSection.tsx index 4a9b37b560..f5d2ad0065 100644 --- a/dac/ui/src/pages/HomePage/components/DataPlaneSection/DataPlaneSection.tsx +++ b/dac/ui/src/pages/HomePage/components/DataPlaneSection/DataPlaneSection.tsx @@ -14,14 +14,13 @@ * limitations under the License. 
*/ import { useIntl } from "react-intl"; -// @ts-ignore -import { isDataPlaneEnabled } from "@inject/utils/dataPlaneUtils"; import FinderNav from "@app/components/FinderNav"; import ViewStateWrapper from "@app/components/ViewStateWrapper"; import SourceBranchPicker from "../SourceBranchPicker/SourceBranchPicker"; import { spacesSourcesListSpinnerStyleFinderNav } from "@app/pages/HomePage/HomePageConstants"; import * as commonPaths from "dremio-ui-common/paths/common.js"; import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; type DataPlaneSectionProps = { dataPlaneSources: any; @@ -45,8 +44,6 @@ function DataPlaneSection({ isCollapsible = false, }: DataPlaneSectionProps) { const intl = useIntl(); - if (!isDataPlaneEnabled) return null; - return (
        diff --git a/dac/ui/src/pages/HomePage/components/LeftTree.js b/dac/ui/src/pages/HomePage/components/LeftTree.js index b139567559..2d16785805 100644 --- a/dac/ui/src/pages/HomePage/components/LeftTree.js +++ b/dac/ui/src/pages/HomePage/components/LeftTree.js @@ -14,12 +14,12 @@ * limitations under the License. */ -import { Component } from "react"; +import { Component, createRef } from "react"; import { connect } from "react-redux"; import classNames from "clsx"; import { FormattedMessage, injectIntl } from "react-intl"; import Immutable from "immutable"; - +import { Tooltip } from "dremio-ui-lib"; import PropTypes from "prop-types"; import FinderNav from "components/FinderNav"; import FinderNavItem from "components/FinderNavItem"; @@ -63,6 +63,10 @@ import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; import "./LeftTree.less"; @injectIntl export class LeftTree extends Component { + constructor(props) { + super(props); + this.headerRef = createRef(); + } state = { isAddingSampleSource: false, }; @@ -205,14 +209,20 @@ export class LeftTree extends Component { return (

-          {currentProject ? (
-            currentProject
+          {this.headerRef?.current?.offsetWidth <
+          this.headerRef?.current?.scrollWidth ? (
+            
+              {currentProject ?? }
+            
+          ) : (
-            
+            currentProject ?? 
           )}
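// Editor's sketch of the truncation check used above: the tooltip wrapper is only
// rendered when the header text actually overflows, detected by comparing the
// element's visible width (offsetWidth) against its full content width
// (scrollWidth). isTextTruncated is a hypothetical extraction of that predicate.
function isTextTruncated(el) {
  // scrollWidth exceeds offsetWidth only when CSS truncation (ellipsis) kicked in.
  return !!el && el.offsetWidth < el.scrollWidth;
}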

        this.onScroll(e)}> diff --git a/dac/ui/src/pages/HomePage/components/LeftTree.less b/dac/ui/src/pages/HomePage/components/LeftTree.less index 33488132af..2dbaddb0c4 100644 --- a/dac/ui/src/pages/HomePage/components/LeftTree.less +++ b/dac/ui/src/pages/HomePage/components/LeftTree.less @@ -29,7 +29,7 @@ .left-tree-holder { width: 318px; overflow: hidden; - border-right: 1px solid var(--dremio--color--neutral--150); + border-right: 1px solid var(--color--neutral--50); height: 100%; max-height: 100%; @@ -54,9 +54,10 @@ .header-viewer { font-weight: 600; - display: flex; - align-items: center; - border-bottom: 1px solid var(--dremio--color--neutral--150); + overflow: hidden; + text-overflow: ellipsis; + line-height: 24px; + border-bottom: 1px solid var(--color--neutral--50); height: 48px; padding: 11.5px 8px; } @@ -88,10 +89,10 @@ bottom: 0; font-weight: 600; z-index: 999; - background-color: var(--dremio--color--neutral--000); + background-color: white; display: flex; align-items: center; - border-top: 1px solid var(--dremio--color--neutral--150); + border-top: 1px solid var(--color--neutral--50); height: 64px; padding: 11.5px 8px; @@ -106,7 +107,7 @@ border-radius: 4px; } .add-source:hover { - background: var(--dremio--color--primary--100); + background: var(--color--brand--25); } } diff --git a/dac/ui/src/pages/HomePage/components/MainInfo.js b/dac/ui/src/pages/HomePage/components/MainInfo.js index b0847a23f5..6a028df1ce 100644 --- a/dac/ui/src/pages/HomePage/components/MainInfo.js +++ b/dac/ui/src/pages/HomePage/components/MainInfo.js @@ -19,7 +19,7 @@ import { Link } from "react-router"; import Immutable from "immutable"; import PropTypes from "prop-types"; import DocumentTitle from "react-document-title"; -import { FormattedMessage, injectIntl } from "react-intl"; +import { injectIntl } from "react-intl"; import urlParse from "url-parse"; import MainInfoMixin from "dyn-load/pages/HomePage/components/MainInfoMixin"; @@ -37,6 +37,7 @@ import { IconButton } from "dremio-ui-lib"; import { TagsAlert } from "@app/pages/HomePage/components/TagsAlert"; import { NESSIE, ARCTIC } from "@app/constants/sourceTypes"; +import { NEW_DATASET_NAVIGATION } from "@app/exports/endpoints/SupportFlags/supportFlagConstants"; import { tableStyles } from "../tableStyles"; import BrowseTable from "./BrowseTable"; import { HeaderButtons } from "./HeaderButtons"; @@ -51,11 +52,15 @@ import { constructArcticUrl } from "@app/exports/pages/ArcticCatalog/arctic-cata import { isVersionedSource as checkIsVersionedSource } from "@app/utils/sourceUtils"; import { fetchSupportFlagsDispatch } from "@inject/actions/supportFlags"; import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; +import WikiDrawerWrapper from "@app/components/WikiDrawerWrapper"; +import { getVersionContextFromId } from "dremio-ui-common/utilities/datasetReference.js"; +import { getCommonWikiDrawerTitle } from "@app/utils/WikiDrawerUtils"; const folderPath = "/folder/"; const shortcutBtnTypes = { edit: "edit", + goToTable: "goToTable", settings: "settings", }; @@ -92,11 +97,13 @@ export class MainInfoView extends Component { state = { isWikiShown: localStorageUtils.getWikiVisibleState(), + datasetDetails: Immutable.Map({}), + isDrawerOpen: false, }; componentDidMount() { this.fetchWiki(); - this.fetchUploadFlag(); + this.fetchSupportFlags(); } componentDidUpdate(prevProps) { @@ -111,10 +118,13 @@ export class MainInfoView extends Component { } } - fetchUploadFlag() { + fetchSupportFlags() { const { 
dispatchFetchSupportFlags } = this.props; dispatchFetchSupportFlags?.("ui.upload.allow"); + dispatchFetchSupportFlags?.("client.tools.tableau"); + dispatchFetchSupportFlags?.("client.tools.powerbi"); + dispatchFetchSupportFlags?.(NEW_DATASET_NAVIGATION); } getActionCell(item) { @@ -224,7 +234,6 @@ export class MainInfoView extends Component { // this method is targeted for dataset like entities: PDS, VDS and queriable files getShortcutButtons(item, entityType) { - const { isVersionedSource } = this.props; const allBtns = this.getShortcutButtonsData( item, entityType, @@ -251,7 +260,7 @@ export class MainInfoView extends Component { , item ), @@ -289,13 +298,18 @@ export class MainInfoView extends Component { const [name, jobs, action] = this.getTableColumns(); const jobsCount = item.get("jobCount") || item.getIn(["extendedConfig", "jobCount"]) || 0; - const isArcticSource = this.isArctic(); + const versionContext = getVersionContextFromId(item.get("id")); + const isNeitherNessieOrArctic = this.isNeitherNessieOrArctic(); return { rowClassName: item.get("name"), data: { [name.key]: { node: () => ( - + ), value: item.get("name"), }, @@ -424,10 +438,11 @@ export class MainInfoView extends Component { return !this.isNessie() && !this.isArctic(); }; - constructArcticSourceLink = () => { + constructVersionSourceLink = () => { const { source, nessieState = {} } = this.props; const { hash, reference } = nessieState; const { pathname } = this.context.location; + const versionBase = this.isArctic() ? "arctic" : "nessie"; let namespace = reference?.name || ""; if (pathname.includes(folderPath)) { namespace = pathname.substring( @@ -440,7 +455,7 @@ export class MainInfoView extends Component { } return constructArcticUrl({ type: "source", - baseUrl: `/sources/arctic/${source.get("name")}`, + baseUrl: `/sources/${versionBase}/${source.get("name")}`, tab: "commits", namespace: namespace, hash: hash ? `?hash=${hash}` : "", @@ -448,18 +463,11 @@ export class MainInfoView extends Component { }; renderExternalLink = () => { - const { source } = this.props; if (this.isNeitherNessieOrArctic()) return null; - else if (this.isNessie()) { - return ( - - - - ); - } else { + else { return ( @@ -474,6 +482,44 @@ export class MainInfoView extends Component { return ; }; + openWikiDrawer = (dataset) => { + this.setState({ + datasetDetails: dataset, + isDrawerOpen: true, + }); + }; + + openDatasetInNewTab = () => { + const { datasetDetails } = this.state; + + const selfLink = datasetDetails.getIn(["links", "query"]); + const editLink = datasetDetails.getIn(["links", "edit"]); + const canAlter = datasetDetails.getIn(["permissions", "canAlter"]); + const toLink = canAlter && editLink ? 
editLink : selfLink; + const urldetails = new URL(window.location.origin + toLink); + const pathname = urldetails.pathname + "/wiki" + urldetails.search; + window.open(wrapBackendLink(pathname), "_blank"); + }; + + closeWikiDrawer = (e) => { + e.stopPropagation(); + e.preventDefault(); + this.setState({ + datasetDetails: Immutable.fromJS({}), + isDrawerOpen: false, + }); + }; + + wikiDrawerTitle = () => { + const { datasetDetails } = this.state; + + return getCommonWikiDrawerTitle( + datasetDetails, + datasetDetails?.get("fullPath"), + this.closeWikiDrawer + ); + }; + render() { const { canUploadFile, @@ -482,6 +528,7 @@ export class MainInfoView extends Component { isVersionedSource, rootEntityType, } = this.props; + const { datasetDetails, isWikiShown, isDrawerOpen } = this.state; const { pathname } = this.context.location; const showWiki = entity && !entity.get("fileSystemFolder") && !this.isArctic(); // should be removed when DX-13804 would be fixed @@ -498,37 +545,45 @@ export class MainInfoView extends Component { ); return ( - - ) - } - buttons={buttons} - key={pathname} /* trick to clear out the searchbox on navigation */ - columns={this.getTableColumns()} - rightSidebar={showWiki ? : null} - rightSidebarExpanded={this.state.isWikiShown} - toggleSidebar={this.toggleWikiShow} - tableData={this.getTableData()} - viewState={viewState} - renderExternalLink={this.renderExternalLink} - renderTitleExtraContent={this.renderTitleExtraContent} - disableZebraStripes - rowHeight={40} - > - + + ) } + buttons={buttons} + key={pathname} /* trick to clear out the searchbox on navigation */ + columns={this.getTableColumns()} + rightSidebar={showWiki ? : null} + rightSidebarExpanded={isWikiShown} + toggleSidebar={this.toggleWikiShow} + tableData={this.getTableData()} + viewState={viewState} + renderExternalLink={this.renderExternalLink} + disableZebraStripes + rowHeight={40} + > + + + - + ); } } diff --git a/dac/ui/src/pages/HomePage/components/MainInfoItemName-spec.js b/dac/ui/src/pages/HomePage/components/MainInfoItemName-spec.js index c8d77ef8b6..d528e575e8 100644 --- a/dac/ui/src/pages/HomePage/components/MainInfoItemName-spec.js +++ b/dac/ui/src/pages/HomePage/components/MainInfoItemName-spec.js @@ -15,7 +15,7 @@ */ import { shallow } from "enzyme"; -import MainInfoItemName from "./MainInfoItemName"; +import { MainInfoItemName } from "./MainInfoItemName"; describe("MainInfoItemName", () => { const commonProps = { diff --git a/dac/ui/src/pages/HomePage/components/MainInfoItemName.js b/dac/ui/src/pages/HomePage/components/MainInfoItemName.js index 53d96974db..694f7b005e 100644 --- a/dac/ui/src/pages/HomePage/components/MainInfoItemName.js +++ b/dac/ui/src/pages/HomePage/components/MainInfoItemName.js @@ -14,6 +14,7 @@ * limitations under the License. 
*/ import { Component } from "react"; +import { connect } from "react-redux"; import { Link, location } from "react-router"; import PropTypes from "prop-types"; import Immutable from "immutable"; @@ -30,14 +31,20 @@ import { checkIfUserShouldGetDeadLink, getHref, } from "@inject/utils/mainInfoUtils/mainInfoNameUtil"; +import { newGetHref } from "@inject/utils/mainInfoUtils/newMainInfoNameUtil"; +import { getRefQueryParams } from "@app/utils/nessieUtils"; +import { shouldUseNewDatasetNavigation } from "@app/utils/datasetNavigationUtils"; -class MainInfoItemName extends Component { +export class MainInfoItemName extends Component { static propTypes = { item: PropTypes.instanceOf(Immutable.Map).isRequired, intl: PropTypes.object.isRequired, entity: PropTypes.object, onMount: PropTypes.func, // takes width parameter isIceberg: PropTypes.bool, + showMetadataCard: PropTypes.bool, + refType: PropTypes.string, + refValue: PropTypes.string, }; static contextTypes = { @@ -65,7 +72,7 @@ class MainInfoItemName extends Component { } renderDatasetItemLabel(shouldGetADeadLink) { - const { item, isIceberg } = this.props; + const { item, isIceberg, showMetadataCard } = this.props; const type = item.get("entityType"); const typeIcon = isIceberg ? getIcebergIconTypeFromEntity(item) @@ -91,13 +98,14 @@ class MainInfoItemName extends Component { ); } @@ -114,9 +122,27 @@ class MainInfoItemName extends Component { } render() { - const { item } = this.props; + const { item, refType, refValue } = this.props; const fileType = item.get("fileType"); - const href = getHref(item, this.context); + + let tempHref; + if (shouldUseNewDatasetNavigation()) { + tempHref = newGetHref(item, this.context); + } else { + tempHref = getHref(item, this.context); + } + + let href; + + if (typeof tempHref === "string") { + href = + tempHref?.includes?.("mode=edit") && refType && refValue + ? tempHref + `&refType=${refType}&refValue=${refValue}` + : tempHref; + } else { + href = tempHref?.href ? tempHref.href : tempHref; + } + const shouldGetADeadLink = checkIfUserShouldGetDeadLink(item); const linkStyle = fileType === "folder" && !item.get("queryable") @@ -158,4 +184,15 @@ const styles = { color: "#333", }, }; -export default injectIntl(MainInfoItemName); + +const mapStateToProps = (state, ownProps) => { + const { isIceberg, item } = ownProps; + + if (!isIceberg) return {}; + + const pathList = item?.toJS()?.fullPathList ?? []; + const sourceName = pathList[0]; + return getRefQueryParams(state.nessie, sourceName); +}; + +export default injectIntl(connect(mapStateToProps)(MainInfoItemName)); diff --git a/dac/ui/src/pages/HomePage/components/MainInfoItemNameAndTag.js b/dac/ui/src/pages/HomePage/components/MainInfoItemNameAndTag.js index b53b7b6467..17975b5ce6 100644 --- a/dac/ui/src/pages/HomePage/components/MainInfoItemNameAndTag.js +++ b/dac/ui/src/pages/HomePage/components/MainInfoItemNameAndTag.js @@ -31,6 +31,7 @@ class MainInfoItemNameAndTag extends Component { intl: PropTypes.object.isRequired, startSearch: PropTypes.func, // (textToSearch) => {} isIceberg: PropTypes.bool, + showMetadataCard: PropTypes.bool, }; constructor() { @@ -54,12 +55,16 @@ class MainInfoItemNameAndTag extends Component { }; render() { - const { item, intl, isIceberg } = this.props; + const { item, intl, isIceberg, showMetadataCard } = this.props; const tagsFromItem = item.get("tags"); const fullPath = constructFullPath(getFullPathListFromEntity(item)); return (
        - + {fullPath && ( @@ -76,7 +79,7 @@ function SourceBranchPicker({ ); const stateKey = `${prefix}${source.name}`; - const apiRef = useRef(context.api); + const apiRef = useRef(context.apiV2); useEffect(() => { fetchDefaultReferenceIfNeeded(stateKey, apiRef.current); }, [fetchDefaultReferenceIfNeeded, stateKey]); diff --git a/dac/ui/src/pages/HomePage/components/TagList.js b/dac/ui/src/pages/HomePage/components/TagList.js index af958cf4a2..75c47ef922 100644 --- a/dac/ui/src/pages/HomePage/components/TagList.js +++ b/dac/ui/src/pages/HomePage/components/TagList.js @@ -17,7 +17,7 @@ import { useRef } from "react"; import TagsModal from "pages/HomePage/components/modals/TagsModal/TagsModal"; import { Tag } from "@app/pages/ExplorePage/components/TagsEditor/Tag"; -import { useOverflowIndex } from "dremio-ui-lib/dist-esm"; +import { useOverflowIndex } from "dremio-ui-lib/components"; import clsx from "clsx"; import classes from "./TagList.less"; diff --git a/dac/ui/src/pages/HomePage/components/WikiButton.less b/dac/ui/src/pages/HomePage/components/WikiButton.less index 4457030e27..a3d28435ff 100644 --- a/dac/ui/src/pages/HomePage/components/WikiButton.less +++ b/dac/ui/src/pages/HomePage/components/WikiButton.less @@ -15,7 +15,7 @@ */ .wiki-button { - color: var(--dremio--color--neutral--600) !important; + color: var(--color--neutral--600) !important; &:hover { color: var(--dremio--color--link) !important; diff --git a/dac/ui/src/pages/HomePage/components/WikiView.js b/dac/ui/src/pages/HomePage/components/WikiView.js index d84654ddb1..7226966193 100644 --- a/dac/ui/src/pages/HomePage/components/WikiView.js +++ b/dac/ui/src/pages/HomePage/components/WikiView.js @@ -161,7 +161,7 @@ export default class WikiView extends Component { styles: { width: 17, height: 18, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }, }); } diff --git a/dac/ui/src/pages/HomePage/components/forms/FileFormatForm.js b/dac/ui/src/pages/HomePage/components/forms/FileFormatForm.js index 688fdd6d23..87d3392d47 100644 --- a/dac/ui/src/pages/HomePage/components/forms/FileFormatForm.js +++ b/dac/ui/src/pages/HomePage/components/forms/FileFormatForm.js @@ -40,7 +40,7 @@ function validate(values, props) { errors[values.type] = {}; for (const key in curType) { // "sheetName" field on XLS form can be empty - // https://dremio.atlassian.net/browse/DX-7497 + // DX-7497 if (!curType[key] && curType[key] !== false && key !== "sheetName") { errors[values.type][key] = intl.formatMessage({ id: "Error.NotEmptyField", diff --git a/dac/ui/src/pages/HomePage/components/modals/AboutModal/AboutModal.js b/dac/ui/src/pages/HomePage/components/modals/AboutModal/AboutModal.js index c897753294..9676f3711a 100644 --- a/dac/ui/src/pages/HomePage/components/modals/AboutModal/AboutModal.js +++ b/dac/ui/src/pages/HomePage/components/modals/AboutModal/AboutModal.js @@ -31,6 +31,8 @@ import { getEdition, getAboutMode } from "@inject/utils/versionUtils"; import timeUtils from "utils/timeUtils"; import { TabsNavigationItem } from "dremio-ui-lib"; +import config from "@inject/utils/config"; + import { clusterData, dremioLogo } from "./AboutModal.less"; const numDaysBack = 7; @@ -287,7 +289,10 @@ export default class AboutModal extends Component { render() { const { isOpen, hide, intl } = this.props; const { activeTab } = this.state; - const betaStyles = isBeta ? { background: "rgb(52, 66, 83)" } : {}; + const betaStyles = + isBeta && config.whiteLabelUrl === "dremio" + ? 
{ background: "rgb(52, 66, 83)" } + : {}; return ( { const wrapper = shallow(); expect(wrapper).to.have.length(1); }); - - it("should render SelectConnectionButton for each source + sample source", () => { - const wrapper = shallow(); - expect(wrapper.find("SelectConnectionButton")).to.have.length( - // + 1 for sample source - sourceProperties.length + 1 - ); - }); }); diff --git a/dac/ui/src/pages/HomePage/components/modals/AddSourceModal/SelectSourceType.js b/dac/ui/src/pages/HomePage/components/modals/AddSourceModal/SelectSourceType.js index c85f55bc17..f02dd4d72d 100644 --- a/dac/ui/src/pages/HomePage/components/modals/AddSourceModal/SelectSourceType.js +++ b/dac/ui/src/pages/HomePage/components/modals/AddSourceModal/SelectSourceType.js @@ -20,14 +20,18 @@ import PropTypes from "prop-types"; import SelectConnectionButton from "components/SelectConnectionButton"; import { sourceTypesIncludeS3 } from "utils/sourceUtils"; +import { sourceTypesIncludeSampleSource } from "utils/sourceUtils"; import { isDatabaseType, isMetastoreSourceType, isDataPlaneSourceType, + AZURE_SAMPLE_SOURCE, } from "@app/constants/sourceTypes.js"; -import { isDataPlaneEnabled } from "@inject/utils/dataPlaneUtils"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; import "pages/HomePage/components/modals/AddSourceModal/SelectSourceType.less"; import SearchSource from "./SearchSource"; +import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; +import VENDORS from "@inject/constants/vendors"; @injectIntl export default class SelectSourceType extends Component { @@ -128,10 +132,13 @@ export default class SelectSourceType extends Component { const { sourceTypes, intl } = this.props; const { filteredSourceTypes } = this.state; const sources = filteredSourceTypes || sourceTypes; + const isAzureProject = + getSonarContext()?.getProjectVendorType?.() === VENDORS.AZURE; const externalSources = sources.filter( (source) => isDatabaseType(source.sourceType) && - !isDataPlaneSourceType(source.sourceType) + !isDataPlaneSourceType(source.sourceType) && + !(isAzureProject && source.sourceType === AZURE_SAMPLE_SOURCE) ); return ( externalSources.length > 0 && ( @@ -169,7 +176,11 @@ export default class SelectSourceType extends Component {
-            {intl.formatMessage({ id: "Source.Nessie" })}
+            {intl.formatMessage({
+              id: isNotSoftware() ? "Source.ArcticCatalogs" : "Source.NessieCatalogs",
+            })}
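// Editor's sketch for the SelectSourceType changes in this file: on Azure-hosted
// projects the Azure sample source is filtered out of the regular source grids
// (see the isAzureProject checks above and below) so it surfaces only through the
// dedicated sample-source tile. filterOutVendorSample is a hypothetical
// condensation of that predicate, shown for illustration only.
function filterOutVendorSample(sources, isAzureProject, azureSampleType) {
  return sources.filter(
    (s) => !(isAzureProject && s.sourceType === azureSampleType)
  );
}
// e.g. filterOutVendorSample(sourceTypes, true, AZURE_SAMPLE_SOURCE) drops the tile.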
        {this.renderSourceTypes( this.getEnabledSourceTypes(dataPlaneSources) @@ -188,11 +199,15 @@ export default class SelectSourceType extends Component { const { sourceTypes, intl } = this.props; const { filteredSourceTypes, searchString } = this.state; const sources = filteredSourceTypes || sourceTypes; + const isAzureProject = isNotSoftware() + ? getSonarContext()?.getProjectVendorType?.() === VENDORS?.AZURE + : false; const fileStoreSources = sources.filter( (source) => !isDatabaseType(source.sourceType) && !isMetastoreSourceType(source.sourceType) && - !isDataPlaneSourceType(source.sourceType) + !isDataPlaneSourceType(source.sourceType) && + !(isAzureProject && source.sourceType === AZURE_SAMPLE_SOURCE) ); const tableStoreSources = sources.filter((source) => isMetastoreSourceType(source.sourceType) @@ -201,6 +216,9 @@ export default class SelectSourceType extends Component { const renderSampleSource = filteredSourceTypes ? sampleSource.toLowerCase().indexOf(searchString) > -1 : true; + const isSampleSourceIncludedInSources = isAzureProject + ? sourceTypesIncludeSampleSource(sourceTypes) + : sourceTypesIncludeS3(sourceTypes); return ( (fileStoreSources.length > 0 || @@ -229,7 +247,7 @@ export default class SelectSourceType extends Component { {this.renderSourceTypes( this.getEnabledSourceTypes(fileStoreSources) )} - {sourceTypesIncludeS3(sourceTypes) && + {isSampleSourceIncludedInSources && renderSampleSource && this.renderSampleSource()} {this.renderSourceTypes( @@ -247,7 +265,7 @@ export default class SelectSourceType extends Component { return ( <> {this.renderSearchBox()} - {isDataPlaneEnabled && this.renderDataPlanSources()} + {this.renderDataPlanSources()} {this.renderDataLakeSources()} {this.renderExternalSources()} diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesController.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesController.js index 4db93cacca..68033ce7c3 100644 --- a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesController.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesController.js @@ -33,7 +33,7 @@ import { formatMessage } from "@app/utils/locale"; import { INCREMENTAL_TYPES } from "@app/constants/columnTypeGroups"; import { getCurrentFormatUrl } from "@app/selectors/home"; import { loadFileFormat } from "@app/actions/modals/addFileModal"; - +import { getVersionContextFromId } from "dremio-ui-common/utilities/datasetReference.js"; import AccelerationUpdatesForm from "./AccelerationUpdatesForm"; const VIEW_ID = "AccelerationUpdatesController"; @@ -100,6 +100,7 @@ export class AccelerationUpdatesController extends Component { loadDataset(id, entity) { const updateVS = this.props.updateViewState; + const versionContext = getVersionContextFromId(id); // We fetch to the full schema using the v3 catalog api here so we can filter out types. v2 collapses types // by display types instead of returning the actual type. 
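// Editor's sketch: versioned (Nessie/Arctic) dataset ids embed their table-version
// reference, and this controller now derives it once via getVersionContextFromId
// (a real import added by the patch) and threads it through the acceleration-
// settings load/update actions. loadWithVersionContext is a hypothetical wrapper
// shown only to illustrate the flow.
import { getVersionContextFromId } from "dremio-ui-common/utilities/datasetReference.js";

function loadWithVersionContext(id, fullPathList, loadSettings, viewId) {
  // undefined for non-versioned datasets, so existing callers are unaffected.
  const versionContext = getVersionContextFromId(id);
  return loadSettings(fullPathList, viewId, versionContext);
}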
@@ -111,7 +112,8 @@ export class AccelerationUpdatesController extends Component { this.setState({ dataset: json }); this.props.loadDatasetAccelerationSettings( entity.get("fullPathList"), - VIEW_ID + VIEW_ID, + versionContext ); }, (error) => { @@ -153,8 +155,13 @@ export class AccelerationUpdatesController extends Component { submit = (form) => { const fullPathList = this.props.entity.get("fullPathList"); + const versionContext = getVersionContextFromId(this.props.entity.get("id")); return ApiUtils.attachFormSubmitHandlers( - this.props.updateDatasetAccelerationSettings(fullPathList, form) + this.props.updateDatasetAccelerationSettings( + fullPathList, + form, + versionContext + ) ).then(() => { this.props.clearDataSetAccelerationSettings(fullPathList); this.props.onDone(null, true); diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesForm.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesForm.js index 4ba7e9147e..699dfb403f 100644 --- a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesForm.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/AccelerationUpdates/AccelerationUpdatesForm.js @@ -17,12 +17,7 @@ import { Component } from "react"; import Immutable from "immutable"; import PropTypes from "prop-types"; import { connectComplexForm } from "components/Forms/connectComplexForm"; -import { - FormBody, - FormTitle, - ModalForm, - modalFormProps, -} from "components/Forms"; +import { FormBody, ModalForm, modalFormProps } from "components/Forms"; import { label, section } from "uiTheme/radium/forms"; import { FieldSelect, Radio } from "components/Fields"; import DataFreshnessSection from "components/Forms/DataFreshnessSection"; @@ -127,7 +122,7 @@ export class AccelerationUpdatesForm extends Component { : formatMessage({ id: "Incremental.Update" }); return (
        -
        +
        {formatMessage({ id: "Refresh.Method" })} @@ -175,19 +170,13 @@ export class AccelerationUpdatesForm extends Component { render() { const { handleSubmit, onCancel } = this.props; - const { formatMessage } = intl; return ( - - - {formatMessage({ id: "Acceleration.RefreshPolicy" })} - - {this.renderContent()} - + {this.renderContent()} ); } @@ -214,8 +203,9 @@ const styles = { display: "flex", alignItems: "center", fontSize: 18, - fontWeight: 300, - marginBottom: 10, + fontWeight: 600, + marginBottom: 16, + color: "var(--color--neutral--900)", }, }; diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetOverviewForm.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetOverviewForm.js index 05b5e21f01..002e13083f 100644 --- a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetOverviewForm.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetOverviewForm.js @@ -27,7 +27,7 @@ import { getIconDataTypeFromEntity, getIcebergIconTypeFromEntity, } from "utils/iconUtils"; -import { isArcticSource } from "@app/utils/sourceUtils"; +import { isArcticSource, isVersionedSource } from "@app/utils/sourceUtils"; import DatasetOverviewFormMixin from "dyn-load/pages/HomePage/components/modals/DatasetSettings/DatasetOverviewFormMixin"; // eslint-disable-line max-len @@ -49,20 +49,19 @@ export default class DatasetOverviewForm extends PureComponent { const typeIcon = isArcticSource(source?.type) ? getIcebergIconTypeFromEntity(entity) : getIconDataTypeFromEntity(entity); - // todo: if a real form likely want wrapped in ModalForm like siblings? return ( -
        +
        diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettings.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettings.js index 70de10cda0..3b481365ae 100644 --- a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettings.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettings.js @@ -34,6 +34,7 @@ import AccelerationController from "components/Acceleration/AccelerationControll import DatasetSettingsMixin from "dyn-load/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsMixin"; import { showUnsavedChangesConfirmDialog } from "actions/confirmation"; +import { fetchSupportFlags } from "@app/actions/supportFlags"; import NavPanel from "components/Nav/NavPanel"; import FileFormatController from "./FileFormatController"; import AccelerationUpdatesController from "./AccelerationUpdates/AccelerationUpdatesController"; @@ -52,6 +53,9 @@ import { } from "@inject/pages/HomePage/components/modals/DatasetSettings/compactionUtils"; import { NESSIE } from "@app/constants/sourceTypes"; import { rmProjectBase } from "dremio-ui-common/utilities/projectBase.js"; +import { isCommunity } from "dyn-load/utils/versionUtils"; +import config from "@inject/utils/config"; +import { REFLECTION_ARCTIC_ENABLED } from "@app/exports/endpoints/SupportFlags/supportFlagConstants"; const COMPACTION = "COMPACTION"; const DATASET_SETTINGS_VIEW_ID = "DATASET_SETTINGS_VIEW_ID"; @@ -86,6 +90,7 @@ export class DatasetSettings extends PureComponent { compactionTasks: PropTypes.array, enableCompaction: PropTypes.bool, isAdmin: PropTypes.bool, + fetchSupportFlags: PropTypes.func, }; state = { @@ -132,8 +137,13 @@ export class DatasetSettings extends PureComponent { source, isAdmin, enableCompaction, + fetchSupportFlags, } = this.props; + if (!isCommunity?.()) { + fetchSupportFlags?.(REFLECTION_ARCTIC_ENABLED); + } + if (enableCompaction && isAdmin && source?.type === NESSIE && entity) { const activeBranch = getActiveBranch(source?.name); const arcticProjectId = getArcticProjectId(source); @@ -355,6 +365,14 @@ const mapStateToProps = (state, { isHomePage }) => { // We need support both options. At this moment an only place where entity is stored in resources // is explore page ExploreSettingsButton const finalEntitySelector = isHomePage ? 
getHomeEntityOrChild : getEntity; + + let supportFlags = state.supportFlags; + if (isCommunity?.()) { + supportFlags = { + [REFLECTION_ARCTIC_ENABLED]: config.arcticReflectionsEnabled, + }; + } + return { source, compactionTasks, @@ -363,12 +381,14 @@ const mapStateToProps = (state, { isHomePage }) => { enableCompaction, entity: entityId && finalEntitySelector(state, entityId, entityType), viewState: getViewState(state, DATASET_SETTINGS_VIEW_ID), + supportFlags, }; }; export default connect(mapStateToProps, { loadDatasetForDatasetType, showUnsavedChangesConfirmDialog, + fetchSupportFlags, startCompaction, postCompactionData, getAllCompactionData, diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsMixin.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsMixin.js index 319c79ea4a..e84894acb5 100644 --- a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsMixin.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsMixin.js @@ -17,6 +17,7 @@ import { abilities } from "utils/datasetUtils"; import datasetSettingsConfig from "@inject/pages/HomePage/components/modals/DatasetSettings/datasetSettingsConfig"; import { isVersionedSource } from "@app/utils/sourceUtils"; import { NESSIE } from "@app/constants/sourceTypes"; +import { REFLECTION_ARCTIC_ENABLED } from "@app/exports/endpoints/SupportFlags/supportFlagConstants"; export default function (input) { Object.assign(input.prototype, { @@ -24,14 +25,10 @@ export default function (input) { extendContentRenderers(contentRenderers) { return contentRenderers; }, - isReflectionsFullPage() { - const { - location: { pathname }, - } = this.props; - return pathname && pathname.endsWith("/reflections"); - }, + getTabs() { - const { entity, intl, source, isAdmin, enableCompaction } = this.props; + const { entity, intl, source, isAdmin, enableCompaction, supportFlags } = + this.props; if (!entity) { return new Immutable.OrderedMap(); @@ -44,28 +41,30 @@ export default function (input) { entity.get("entityType") ); + const arcticReflectionsEnabled = + supportFlags?.[REFLECTION_ARCTIC_ENABLED]; const { showFormatTab } = datasetSettingsConfig; const format = showFormatTab && canEditFormat && ["format", intl.formatMessage({ id: "File.Format" })]; // If a file or folder has not been converted to a dataset, hide all other tabs - // https://dremio.atlassian.net/browse/DX-3178 + // DX-3178 if (canEditFormat && !entity.get("queryable")) { map.push(format); return new Immutable.OrderedMap(map); } - const isReflectionsPage = this.isReflectionsFullPage(); const isVersioned = isVersionedSource(source?.type); + const showAccelerationTabs = + !isVersioned || (isVersioned && arcticReflectionsEnabled); map.push( ["overview", intl.formatMessage({ id: "Common.Overview" })], format, - !isVersioned && - !isReflectionsPage && [ - "acceleration", - intl.formatMessage({ id: "Reflection.Reflections" }), - ], - !isVersioned && + showAccelerationTabs && [ + "acceleration", + intl.formatMessage({ id: "Reflection.Reflections" }), + ], + showAccelerationTabs && canSetAccelerationUpdates && [ "accelerationUpdates", intl.formatMessage({ id: "Acceleration.RefreshPolicy" }), diff --git a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsModal.js b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsModal.js index 6c2263642b..aae34a68e4 100644 --- 
a/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsModal.js +++ b/dac/ui/src/pages/HomePage/components/modals/DatasetSettings/DatasetSettingsModal.js @@ -50,9 +50,12 @@ export class DatasetSettingsModal extends Component { } = this.props; const modalTitle = entityName - ? `${intl.formatMessage({ - id: "Dataset.Settings.for", - })} ${entityName}` + ? intl.formatMessage( + { + id: "Dataset.Settings.for", + }, + { title: entityName } + ) : intl.formatMessage({ id: "Dataset.Settings", }); diff --git a/dac/ui/src/pages/HomePage/components/modals/NessieDatasetModal/NessieDatasetSettingsModal.tsx b/dac/ui/src/pages/HomePage/components/modals/NessieDatasetModal/NessieDatasetSettingsModal.tsx index 699af8393e..7cace8abce 100644 --- a/dac/ui/src/pages/HomePage/components/modals/NessieDatasetModal/NessieDatasetSettingsModal.tsx +++ b/dac/ui/src/pages/HomePage/components/modals/NessieDatasetModal/NessieDatasetSettingsModal.tsx @@ -39,9 +39,12 @@ class NessieDatasetSettingsModal extends Component< return ( diff --git a/dac/ui/src/pages/HomePage/subpages/AllSources/AllSources.js b/dac/ui/src/pages/HomePage/subpages/AllSources/AllSources.js index 0de86e3a22..d61a9be24d 100644 --- a/dac/ui/src/pages/HomePage/subpages/AllSources/AllSources.js +++ b/dac/ui/src/pages/HomePage/subpages/AllSources/AllSources.js @@ -32,6 +32,7 @@ import { isDataPlaneSourceType, } from "@app/constants/sourceTypes"; import AllSourcesView from "./AllSourcesView.js"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; @injectIntl export class AllSources extends PureComponent { @@ -63,7 +64,9 @@ export class AllSources extends PureComponent { ? "Source.AllObjectStorage" : isMetastoreSource ? "Source.AllMetastores" - : "Source.AllDataPlanes"; + : isNotSoftware() + ? "Source.AllArcticCatalogs" + : "Source.AllNessieCatalogs"; const title = intl.formatMessage({ id: headerId }); const metastoreSource = sources.filter((source) => diff --git a/dac/ui/src/pages/HomePage/subpages/AllSources/AllSourcesView.js b/dac/ui/src/pages/HomePage/subpages/AllSources/AllSourcesView.js index e09a7714b5..cc241b32b0 100644 --- a/dac/ui/src/pages/HomePage/subpages/AllSources/AllSourcesView.js +++ b/dac/ui/src/pages/HomePage/subpages/AllSources/AllSourcesView.js @@ -38,9 +38,12 @@ import { tableStyles } from "../../tableStyles"; import { getSettingsLocation } from "components/Menus/HomePage/AllSourcesMenu"; import LinkWithRef from "@app/components/LinkWithRef/LinkWithRef"; import { IconButton } from "dremio-ui-lib"; +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; +import { FeatureSwitch } from "@app/exports/components/FeatureSwitch/FeatureSwitch"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; const btnTypes = { - settings: "settings" + settings: "settings", }; class AllSourcesView extends PureComponent { @@ -83,7 +86,13 @@ class AllSourcesView extends PureComponent { - + null} + renderDisabled={() => ( + + )} + />
        ), value(sortDirection = null) { @@ -107,7 +116,7 @@ class AllSourcesView extends PureComponent { value: new Date(item.get("ctime")), }, [action.key]: { - node: () => this.getActionCell(item) + node: () => this.getActionCell(item), }, }, }; @@ -115,16 +124,18 @@ class AllSourcesView extends PureComponent { } getActionCell(item) { - return {this.getActionCellButtons(item)} + return {this.getActionCellButtons(item)}; } getActionCellButtons(item) { - const allBtns = [{ - label: this.getInlineIcon("interface/settings"), - tooltip: "Common.Settings", - link: getSettingsLocation(this.context.location, item), - type: btnTypes.settings - }] + const allBtns = [ + { + label: this.getInlineIcon("interface/settings"), + tooltip: "Common.Settings", + link: getSettingsLocation(this.context.location, item), + type: btnTypes.settings, + }, + ]; return [ ...allBtns // return rendered link buttons @@ -140,12 +151,7 @@ class AllSourcesView extends PureComponent { {btnType.label} )), - this.getSettingsBtnByType( - , - item - ), + this.getSettingsBtnByType(, item), ]; } @@ -179,14 +185,21 @@ class AllSourcesView extends PureComponent { } getTableColumns() { - const { intl } = this.props; + const { intl, isDataPlaneSource } = this.props; return [ { key: "name", label: intl.formatMessage({ id: "Common.Name" }), flexGrow: 1, }, - { key: "datasets", label: intl.formatMessage({ id: "Common.Datasets" }) }, + { + key: "datasets", + label: intl.formatMessage({ id: "Common.Datasets" }), + headerStyle: isDataPlaneSource ? { ...tableStyles.hidden } : {}, + style: { + display: this.props.isDataPlaneSource ? "none" : "block", + }, + }, { key: "created", label: intl.formatMessage({ id: "Common.Created" }) }, { key: "action", @@ -206,7 +219,9 @@ class AllSourcesView extends PureComponent { const headerId = isExternalSource ? "Source.AddDatabaseSource" : isDataPlaneSource - ? "Source.AddDataPlane" + ? isNotSoftware() + ? "Source.AddArcticCatalog" + : "Source.AddNessieCatalog" : isObjectStorageSource ? "Source.Add.Object.Storage" : "Source.Add.Metastore"; diff --git a/dac/ui/src/pages/HomePage/subpages/AllSpaces/AllSpacesView.js b/dac/ui/src/pages/HomePage/subpages/AllSpaces/AllSpacesView.js index 291b09f98c..f268decd0e 100644 --- a/dac/ui/src/pages/HomePage/subpages/AllSpaces/AllSpacesView.js +++ b/dac/ui/src/pages/HomePage/subpages/AllSpaces/AllSpacesView.js @@ -43,13 +43,15 @@ import BrowseTable from "../../components/BrowseTable"; import { tableStyles } from "../../tableStyles"; import { getSettingsLocation } from "components/Menus/HomePage/AllSpacesMenu"; import LinkWithRef from "@app/components/LinkWithRef/LinkWithRef"; +import { FeatureSwitch } from "@app/exports/components/FeatureSwitch/FeatureSwitch"; +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; const mapStateToProps = (state) => ({ spaces: getSpaces(state), }); const btnTypes = { - settings: "settings" + settings: "settings", }; export class AllSpacesView extends PureComponent { @@ -80,7 +82,11 @@ export class AllSpacesView extends PureComponent { - + null} + renderDisabled={() => } + />
        ), value(sortDirection = null) { @@ -105,7 +111,7 @@ export class AllSpacesView extends PureComponent { item.get("createdAt") ? new Date(item.get("createdAt")) : "", }, [action.key]: { - node: () => this.getActionCell(item) + node: () => this.getActionCell(item), }, }, }; @@ -113,16 +119,18 @@ export class AllSpacesView extends PureComponent { }; getActionCell(item) { - return {this.getActionCellButtons(item)} + return {this.getActionCellButtons(item)}; } getActionCellButtons(item) { - const allBtns = [{ - label: this.getInlineIcon("interface/settings"), - tooltip: "Common.Settings", - link: getSettingsLocation(this.context.location, item.get("id")), - type: btnTypes.settings - }] + const allBtns = [ + { + label: this.getInlineIcon("interface/settings"), + tooltip: "Common.Settings", + link: getSettingsLocation(this.context.location, item.get("id")), + type: btnTypes.settings, + }, + ]; return [ ...allBtns // return rendered link buttons @@ -139,9 +147,7 @@ export class AllSpacesView extends PureComponent { )), this.getSettingsBtnByType( - , + , item ), ]; @@ -184,9 +190,9 @@ export class AllSpacesView extends PureComponent { label: intl.formatMessage({ id: "Common.Name" }), flexGrow: 1, }, - { - key: "created", - label: intl.formatMessage({ id: "Common.Created" }) + { + key: "created", + label: intl.formatMessage({ id: "Common.Created" }), }, { key: "action", diff --git a/dac/ui/src/pages/HomePage/tableStyles.js b/dac/ui/src/pages/HomePage/tableStyles.js index 7dcba25f31..a58972969f 100644 --- a/dac/ui/src/pages/HomePage/tableStyles.js +++ b/dac/ui/src/pages/HomePage/tableStyles.js @@ -18,6 +18,9 @@ const digitColumn = { }; export const tableStyles = { + hidden: { + display: "none", + }, actionColumn: { height: "100%", display: "flex", diff --git a/dac/ui/src/pages/JobDetailsPageNew/JobDetailsPage.js b/dac/ui/src/pages/JobDetailsPageNew/JobDetailsPage.js index 2e329fead7..d9ced95df0 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/JobDetailsPage.js +++ b/dac/ui/src/pages/JobDetailsPageNew/JobDetailsPage.js @@ -95,7 +95,7 @@ const JobDetailsPage = (props) => { // TODO: Revisit this to fetch the info from socket instead of making multiple calls to get job details useEffect(() => { - if (GetIsSocketForSingleJob() && jobDetailsFromStore) { + if (GetIsSocketForSingleJob()) { const { query: { attempts = 1 } = {} } = location || {}; const skipStartAction = diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/OverView/OverView.js b/dac/ui/src/pages/JobDetailsPageNew/components/OverView/OverView.js index ac13cdbabc..318c698275 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/OverView/OverView.js +++ b/dac/ui/src/pages/JobDetailsPageNew/components/OverView/OverView.js @@ -151,7 +151,7 @@ const OverView = (props) => { height: 16, width: 16, marginLeft: 4, - color: "var(--dremio--color--neutral--600)", + color: "var(--color--neutral--600)", }} > diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/DatasetGraph.less b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/DatasetGraph.less index 0a9f572fea..c8eddb2284 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/DatasetGraph.less +++ b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/DatasetGraph.less @@ -18,7 +18,7 @@ .datasetGraph { flex-grow: 1; position: absolute; - height: 110vh; + height: 100%; width: 100%; overflow-x: scroll !important; &__errorDisplay { diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.js 
b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.js index 10312d6841..1ed51ebeef 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.js +++ b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.js @@ -18,6 +18,8 @@ import PropTypes from "prop-types"; import SQL from "../SQL/SQL"; import DatasetGraph from "./DatasetGraph"; import Dataset from "./Dataset"; +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; +import { FeatureSwitch } from "@app/exports/components/FeatureSwitch/FeatureSwitch"; import "./SQLTab.less"; const SQLTab = ({ @@ -39,19 +41,27 @@ const SQLTab = ({ sqlClass="sqlTab__SQLBody" title={formatMessage({ id: "SubmittedSQL" })} /> - - {formatMessage({ id: "DataSetGraph" })} - -
        - {exceptionCheck.length && exceptionCheck[0].description ? ( - - ) : ( - + null} + renderDisabled={() => ( + <> + + {formatMessage({ id: "DataSetGraph" })} + +
        + {exceptionCheck.length && exceptionCheck[0].description ? ( + + ) : ( + + )} +
        + )} -
        + />
        ); }; diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.less b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.less index 1d67e2ed4f..943aeffeba 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.less +++ b/dac/ui/src/pages/JobDetailsPageNew/components/SQLTab/SQLTab.less @@ -38,9 +38,10 @@ } &__SQLQueryVisualizer { position: absolute; - width: 95%; - height: 100vh; - left: 50px; + width: 100%; + height: 92vh; + top: 340px; + right: 0px; } } @@ -86,7 +87,7 @@ height: 24px; width: 24px; margin-right: 8px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } } } diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/Summary/Summary.less b/dac/ui/src/pages/JobDetailsPageNew/components/Summary/Summary.less index 1e70dc3701..c61276eff5 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/Summary/Summary.less +++ b/dac/ui/src/pages/JobDetailsPageNew/components/Summary/Summary.less @@ -46,7 +46,7 @@ color: @liteSlate-grey; &.outputLimited { - color: var(--dremio--color--neutral--300); + color: var(--color--neutral--200); font-size: 12px; } } diff --git a/dac/ui/src/pages/JobDetailsPageNew/components/TopPanel/TopPanel.less b/dac/ui/src/pages/JobDetailsPageNew/components/TopPanel/TopPanel.less index dfdc637985..42ea52b5b8 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/components/TopPanel/TopPanel.less +++ b/dac/ui/src/pages/JobDetailsPageNew/components/TopPanel/TopPanel.less @@ -75,7 +75,7 @@ cursor: pointer; height: 15px; padding-right: 5px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); &.--selected { filter: invert(14%) sepia(80%) saturate(7282%) hue-rotate(181deg) brightness(90%) contrast(80%); diff --git a/dac/ui/src/pages/JobDetailsPageNew/utils.js b/dac/ui/src/pages/JobDetailsPageNew/utils.js index b439dddc04..4772cf0d6d 100644 --- a/dac/ui/src/pages/JobDetailsPageNew/utils.js +++ b/dac/ui/src/pages/JobDetailsPageNew/utils.js @@ -38,6 +38,8 @@ export const getFormatMessageIdForQueryType = (jobDetails) => { return "Job.ODBCExecute"; } return "Job.ODBCClient"; + case "D2D": + return "Job.D2DClient"; case "JDBC": if (isPrepareCreate) { return "Job.JDBCCreate"; diff --git a/dac/ui/src/pages/JobPage/components/JobDetails/JobErrorLog.js b/dac/ui/src/pages/JobPage/components/JobDetails/JobErrorLog.js index 6f0878097a..8eb1a88de1 100644 --- a/dac/ui/src/pages/JobPage/components/JobDetails/JobErrorLog.js +++ b/dac/ui/src/pages/JobPage/components/JobDetails/JobErrorLog.js @@ -86,7 +86,7 @@ const styles = { WebkitUserSelect: "text", UserSelect: "text", position: "relative", - fontFamily: "Menlo, monospace", + fontFamily: "var(--dremio--font-family--monospace)", fontWeight: 400, fontSize: 12, color: "rgb(51, 51, 51)", diff --git a/dac/ui/src/pages/JobPage/components/JobDetails/OverviewContent.js b/dac/ui/src/pages/JobPage/components/JobDetails/OverviewContent.js index e5950823f5..03ae0876f7 100644 --- a/dac/ui/src/pages/JobPage/components/JobDetails/OverviewContent.js +++ b/dac/ui/src/pages/JobPage/components/JobDetails/OverviewContent.js @@ -94,6 +94,8 @@ class OverviewContent extends PureComponent { return "Job.ODBCMetadataRequest"; } return "Job.ODBCClient"; + case "D2D": + return "Job.D2DClient"; case "JDBC": if (isPrepareCreate) { return "Job.JDBCCreate"; diff --git a/dac/ui/src/pages/JobPage/components/JobsFilters/ContainsText.less b/dac/ui/src/pages/JobPage/components/JobsFilters/ContainsText.less index 0458fc2038..78babc2ddc 100644 --- 
a/dac/ui/src/pages/JobPage/components/JobsFilters/ContainsText.less +++ b/dac/ui/src/pages/JobPage/components/JobsFilters/ContainsText.less @@ -17,7 +17,7 @@ :global { .containsText { display: flex; - background-color: var(--dremio--color--neutral--000); + background-color: white; height: 24px; width: 300px; border-radius: 4px; @@ -29,11 +29,11 @@ } &:hover { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } &.--focused { - border-color: var(--dremio--color--primary--500); + border-color: var(--color--brand--300); } } } diff --git a/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/DateTimeInput.module.less b/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/DateTimeInput.module.less index 842cfb1b10..20e62965db 100644 --- a/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/DateTimeInput.module.less +++ b/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/DateTimeInput.module.less @@ -28,7 +28,7 @@ display: flex; align-items: center; background: transparent; - border: 1px solid var(--dremio--color--neutral--200); + border: 1px solid var(--color--neutral--100); border-radius: 4px; margin-right: var(--dremio--spacing--05); } diff --git a/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/LeftPanel.module.less b/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/LeftPanel.module.less index 5cb020c66a..a6e14e0673 100644 --- a/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/LeftPanel.module.less +++ b/dac/ui/src/pages/JobPage/components/JobsFilters/StartTimeSelect/LeftPanel.module.less @@ -17,7 +17,7 @@ width: 160px; flex-shrink: 0; display: inline-block; - border-right: 1px solid var(--dremio--color--neutral--150); + border-right: 1px solid var(--color--neutral--50); overflow: auto; user-select: none; @@ -26,20 +26,20 @@ cursor: pointer; &:hover { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } &--activeFilter { /* ,&.--activeInteval */ &, &:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } } } &__interval-separator { height: 1px; - background-color: var(--dremio--color--neutral--150); + background-color: var(--color--neutral--50); } } diff --git a/dac/ui/src/pages/JobPage/components/modals/JobProfileModal.tsx b/dac/ui/src/pages/JobPage/components/modals/JobProfileModal.tsx index 8e86b0ae78..615d19dc4d 100644 --- a/dac/ui/src/pages/JobPage/components/modals/JobProfileModal.tsx +++ b/dac/ui/src/pages/JobPage/components/modals/JobProfileModal.tsx @@ -18,7 +18,7 @@ import { useIntl } from "react-intl"; //@ts-ignore import { IconButton } from "dremio-ui-lib"; import Keys from "@app/constants/Keys.json"; -import { DialogContent, ModalContainer } from "dremio-ui-lib/dist-esm"; +import { DialogContent, ModalContainer } from "dremio-ui-lib/components"; import * as classes from "./JobProfileModal.module.less"; diff --git a/dac/ui/src/pages/JobPageNew/AdditionalJobPageColumns.js b/dac/ui/src/pages/JobPageNew/AdditionalJobPageColumns.js index cbd77d2328..7d1d02f4db 100644 --- a/dac/ui/src/pages/JobPageNew/AdditionalJobPageColumns.js +++ b/dac/ui/src/pages/JobPageNew/AdditionalJobPageColumns.js @@ -33,14 +33,14 @@ export const additionalColumns = [ }, ]; -const renderColumn = (data, isNumeric) => ( - -); +const renderColumn = (data, isNumeric, className) => { + return ; +}; export const additionalColumnName = (job) => [ { qn: { - node: () => 
renderColumn(job.get("wlmQueue")), + node: () => renderColumn(job.get("wlmQueue"), false, "fullHeight"), value: job.get("wlmQueue"), }, }, diff --git a/dac/ui/src/pages/JobPageNew/JobPageNew.less b/dac/ui/src/pages/JobPageNew/JobPageNew.less index 895479b984..8039c29389 100644 --- a/dac/ui/src/pages/JobPageNew/JobPageNew.less +++ b/dac/ui/src/pages/JobPageNew/JobPageNew.less @@ -41,6 +41,7 @@ width: 100%; display: flex; flex-direction: column; + z-index: 9; // Below tooltip zindex of 10 } } diff --git a/dac/ui/src/pages/JobPageNew/components/DatasetCell.js b/dac/ui/src/pages/JobPageNew/components/DatasetCell.js index 7da46dcdc6..54a3088675 100644 --- a/dac/ui/src/pages/JobPageNew/components/DatasetCell.js +++ b/dac/ui/src/pages/JobPageNew/components/DatasetCell.js @@ -15,18 +15,21 @@ */ import { useState, useRef, useEffect } from "react"; import PropTypes from "prop-types"; - +import Immutable from "immutable"; import { Tooltip } from "@app/components/Tooltip"; import FontIcon from "components/Icon/FontIcon"; import { getIconByEntityType } from "utils/iconUtils"; import DatasetSummaryOverlay from "components/Dataset/DatasetSummaryOverlay"; import { checkTypeToShowOverlay } from "utils/datasetUtils"; import { Tooltip as DremioTooltip } from "dremio-ui-lib"; - +import WikiDrawerWrapper from "@app/components/WikiDrawerWrapper"; +import { getCommonWikiDrawerTitle } from "@app/utils/WikiDrawerUtils"; import "./JobsContent.less"; const DatasetCell = ({ job, isContextMenuOpen }) => { const [tooltipOpen, setTooltipOpen] = useState(false); + const [datasetDetails, setDatasetDetails] = useState(Immutable.fromJS({})); + const [drawerIsOpen, setDrawerIsOpen] = useState(false); const TooltipInnerStyle = { width: "auto", @@ -49,11 +52,32 @@ const DatasetCell = ({ job, isContextMenuOpen }) => { setTooltipOpen(false); }; + const openWikiDrawer = (dataset) => { + setDatasetDetails(dataset); + setDrawerIsOpen(true); + }; + + const closeWikiDrawer = (e) => { + e.stopPropagation(); + e.preventDefault(); + setDatasetDetails(Immutable.fromJS({})); + setDrawerIsOpen(false); + }; + + const wikiDrawerTitle = () => { + return getCommonWikiDrawerTitle( + datasetDetails, + datasetDetails?.get("fullPath"), + closeWikiDrawer + ); + }; + const datasetRef = useRef(null); const datasetArray = job.get("queriedDatasets"); const isInternalQuery = job.get("queryType") && job.get("queryType") === "UI_INITIAL_PREVIEW"; const datasetType = datasetArray.getIn([0, "datasetType"]); + const versionContextObj = datasetArray.getIn([0, "versionContext"]); if (!datasetArray) { return null; @@ -61,7 +85,8 @@ const DatasetCell = ({ job, isContextMenuOpen }) => { const iconType = getIconByEntityType(isInternalQuery ? "OTHER" : datasetType); const datasetTitle = datasetArray.getIn([0, "datasetName"]); - const showOverlay = checkTypeToShowOverlay(datasetType); + const showOverlay = + checkTypeToShowOverlay(datasetType) || !!versionContextObj; const renderNameAndIcon = () => { return ( @@ -83,75 +108,91 @@ const DatasetCell = ({ job, isContextMenuOpen }) => { }; return ( -
        - {showOverlay && !isContextMenuOpen ? ( - - } - > - {renderNameAndIcon()} - - ) : ( - renderNameAndIcon() - )} - {!showOverlay && ( - (tooltipOpen ? datasetRef.current : null)} - placement="bottom-start" - type="custom" - className="jobsContent-dataset__tooltip" - tooltipArrowClass="textWithHelp__tooltipArrow --light" - tooltipInnerStyle={TooltipInnerStyle} - > - {datasetArray.map((dataset, index) => { - const datasetName = dataset.get("datasetName"); - const datasetPath = dataset.get("datasetPath"); - const queryText = job.get("queryText"); - const description = job.get("description"); - const datasetDescription = - !queryText || queryText === "NA" ? description : datasetPath; - const datasetType = dataset.get("datasetType"); - return ( -
        - +
        + {showOverlay && !isContextMenuOpen ? ( + <> + -
        -
        - {datasetName} -
        -
        - {datasetDescription} + } + > + {renderNameAndIcon()} + + + + ) : ( + renderNameAndIcon() + )} + {!showOverlay && ( + (tooltipOpen ? datasetRef.current : null)} + placement="bottom-start" + type="custom" + className="jobsContent-dataset__tooltip" + tooltipArrowClass="textWithHelp__tooltipArrow --light" + tooltipInnerStyle={TooltipInnerStyle} + > + {datasetArray.map((dataset, index) => { + const datasetName = dataset.get("datasetName"); + const datasetPath = dataset.get("datasetPath"); + const queryText = job.get("queryText"); + const description = job.get("description"); + const datasetDescription = + !queryText || queryText === "NA" ? description : datasetPath; + const datasetType = dataset.get("datasetType"); + return ( +
        + +
        +
        + {datasetName} +
        +
        + {datasetDescription} +
        -
        - ); - })} - - )} -
        + ); + })} + + )} +
        + ); }; diff --git a/dac/ui/src/pages/JobPageNew/components/JobContextMenu/JobContextMenu.module.less b/dac/ui/src/pages/JobPageNew/components/JobContextMenu/JobContextMenu.module.less index b8c854135e..f02a4a6385 100644 --- a/dac/ui/src/pages/JobPageNew/components/JobContextMenu/JobContextMenu.module.less +++ b/dac/ui/src/pages/JobPageNew/components/JobContextMenu/JobContextMenu.module.less @@ -19,7 +19,7 @@ justify-content: space-between; &__cmd { - background-color: var(--dremio--color--neutral--150); + background-color: var(--color--neutral--50); border-radius: 8px; font-size: 14px; padding: 1px 8px; diff --git a/dac/ui/src/pages/JobPageNew/components/JobsContent.js b/dac/ui/src/pages/JobPageNew/components/JobsContent.js index 6634cf602f..6bc4c879a6 100644 --- a/dac/ui/src/pages/JobPageNew/components/JobsContent.js +++ b/dac/ui/src/pages/JobPageNew/components/JobsContent.js @@ -230,9 +230,7 @@ export class JobsContent extends PureComponent { const renderColumn = (data, isNumeric, className) => ( ); - const renderSQL = (sql) => ( - - ); + const renderSQL = (sql) => ; const renderDataset = (job) => ( ); @@ -298,9 +296,12 @@ export class JobsContent extends PureComponent { node: () => renderJobStatus(job.get("state")), value: job.get("state"), }, - job: { node: () => renderColumn(jobIdForMap), value: jobIdForMap }, + job: { + node: () => renderColumn(jobIdForMap, false, "fullHeight"), + value: jobIdForMap, + }, usr: { - node: () => renderColumn(job.get("user")), + node: () => renderColumn(job.get("user"), false, "fullHeight"), value: job.get("queryUser"), }, acceleration: { @@ -315,7 +316,9 @@ export class JobsContent extends PureComponent { qt: { node: () => renderColumn( - intl.formatMessage({ id: getFormatMessageIdForQueryType(job) }) + intl.formatMessage({ id: getFormatMessageIdForQueryType(job) }), + false, + "fullHeight" ), value: job.get("queryType"), }, @@ -352,19 +355,26 @@ export class JobsContent extends PureComponent { }), }, cost: { - node: () => renderColumn(formattedCost.toString(), true), + node: () => + renderColumn(formattedCost.toString(), true, "fullHeight"), value: formattedCost.toString(), }, planningTime: { - node: () => renderColumn(formattedPlanningTime, true), + node: () => renderColumn(formattedPlanningTime, true, "fullHeight"), value: formattedPlanningTime, }, rowsScanned: { - node: () => renderColumn(formattedRowsScanned.toString(), true), + node: () => + renderColumn(formattedRowsScanned.toString(), true, "fullHeight"), value: formattedRowsScanned.toString(), }, rowsReturned: { - node: () => renderColumn(formattedRowsReturned.toString(), true), + node: () => + renderColumn( + formattedRowsReturned.toString(), + true, + "fullHeight" + ), value: formattedRowsReturned.toString(), }, buttons: { node: () => renderButtons(job.get("state"), jobIdForMap) }, diff --git a/dac/ui/src/pages/JobPageNew/components/JobsContent.less b/dac/ui/src/pages/JobPageNew/components/JobsContent.less index d0e27bdef7..8adc81f042 100644 --- a/dac/ui/src/pages/JobPageNew/components/JobsContent.less +++ b/dac/ui/src/pages/JobPageNew/components/JobsContent.less @@ -16,10 +16,14 @@ @import "~@app/uiTheme/less/color-schema.less"; :global { .jobsContent { + &__fullHeight { + line-height: 40px; + } &__durationSpill { display: flex; align-items: center; gap: 4px; + line-height: 40px; } &__spillIcon { height: 20px; @@ -48,7 +52,7 @@ } &__sqlTooltipText { color: @font-color1; - font-family: "Menlo"; + font-family: var(--dremio--font-family--monospace); font-size: 12px; line-height: 20px; 
} @@ -58,7 +62,7 @@ white-space: nowrap; } &__sqlWrapper { - font-family: "Menlo"; + font-family: var(--dremio--font-family--monospace); font-size: 12px; font-style: normal; font-weight: 400; @@ -84,6 +88,7 @@ &__leftAlign { display: block; text-align: right; + line-height: 40px; } &-dataset { display: flex; @@ -136,6 +141,16 @@ padding-left: 6px; overflow: hidden; } + &-icon { + cursor: pointer; + &:hover { + color: var(--dremio--color--link); + } + } + } + &-sql-content { + line-height: 40px; + width: fit-content; } } .ReactVirtualized__Table__Grid::-webkit-scrollbar { @@ -164,4 +179,58 @@ max-height: 76px; overflow-y: auto; } + + .wikiOverlayTitle { + display: flex; + justify-content: space-between; + width: 400px; + padding: 0 var(--dremio--spacing--1) 0 var(--dremio--spacing--2); + + &__info { + display: flex; + align-items: center; + max-width: calc(100% - 64px); + + &-datasetIcon { + height: 24px; + width: 24px; + margin-right: var(--dremio--spacing--1); + } + + &-name { + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + } + + &-reflection { + height: 24px; + width: 24px; + margin-left: var(--dremio--spacing--05); + } + } + + &__hidden-button { + height: 0; + width: 0; + padding: 0; + border: 0; + opacity: 0; + } + + &__actions { + display: flex; + align-items: center; + + &-button { + height: 32px; + width: 32px; + color: var(--dremio--color--neutral--600) !important; + + &:hover { + color: var(--dremio--color--link) !important; + } + } + } + } } diff --git a/dac/ui/src/pages/JobPageNew/components/SQLCell-spec.js b/dac/ui/src/pages/JobPageNew/components/SQLCell-spec.js deleted file mode 100644 index e71e0b53c7..0000000000 --- a/dac/ui/src/pages/JobPageNew/components/SQLCell-spec.js +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { mount } from "enzyme"; -import { Tooltip } from "@app/components/Tooltip"; -import SQLCell from "./SQLCell"; - -describe("SQLCell", () => { - let commonProps; - const context = { loggedInUser: {} }; - beforeEach(() => { - commonProps = { - sql: 'SELECT * FROM "nyctaxi-trips-2013.json.gz"', - }; - }); - - it("should render with props without exploding", () => { - const wrapper = mount(, { context }); - expect(wrapper).to.have.length(1); - }); - - it("should show tooltip on mouse hover", () => { - const wrapper = mount(, { context }); - wrapper.simulate("mouseover"); - const component = wrapper.find(Tooltip); - expect(component.exists()).to.eql(true); - }); -}); diff --git a/dac/ui/src/pages/JobPageNew/components/SQLCell.js b/dac/ui/src/pages/JobPageNew/components/SQLCell.js deleted file mode 100644 index 713e0c2a83..0000000000 --- a/dac/ui/src/pages/JobPageNew/components/SQLCell.js +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { useState, useRef, useEffect } from "react"; -import PropTypes from "prop-types"; -import { Tooltip } from "@app/components/Tooltip"; -import { Tooltip as DremioTooltip } from "dremio-ui-lib"; -import "./JobsContent.less"; - -const SQLCell = ({ sql, isFromExplorePage }) => { - const [tooltipOpen, setTooltipOpen] = useState(false); - const sqlRef = useRef(null); - - const TooltipInnerStyle = { - width: "auto", - maxWidth: "33.31rem", - maxHeight: "16.5rem", - background: "#F4FAFC", //DX-34369 - border: "1.5px solid #43B8C9", - padding: "12px 15px 12px 15px", - }; - - useEffect(() => { - const timer = setTimeout(() => setTooltipOpen(false), 3000); - return () => clearTimeout(timer); - }, [tooltipOpen]); - - const handleMouseEnter = () => { - setTooltipOpen(true); - }; - - const handleMouseLeave = () => { - setTooltipOpen(false); - }; - - return isFromExplorePage ? ( -
        -
        - {sql}
        } - placement="bottom-start" - type="richTooltip" - interactive - > -
        {sql}
        - -
        -
        - ) : ( -
        -
        {sql}
        - (tooltipOpen ? sqlRef.current : null)} - placement="bottom-start" - type="custom" - className="jobsContent__tooltip" - tooltipInnerStyle={TooltipInnerStyle} - tooltipArrowClass="textWithHelp__tooltipArrow --light" - > -
        {sql}
        -
        -
        - ); -}; - -SQLCell.propTypes = { - sql: PropTypes.string, - isFromExplorePage: PropTypes.bool, -}; - -SQLCell.defaultProps = { - sql: "", - isFromExplorePage: false, -}; - -export default SQLCell; diff --git a/dac/ui/src/pages/JobPageNew/components/SQLCell.tsx b/dac/ui/src/pages/JobPageNew/components/SQLCell.tsx new file mode 100644 index 0000000000..266e9220bb --- /dev/null +++ b/dac/ui/src/pages/JobPageNew/components/SQLCell.tsx @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +//@ts-ignore +import { Tooltip } from "dremio-ui-lib"; +import "./JobsContent.less"; + +const SQLCell = ({ sql }: { sql: string }) => { + return ( +
        +
        + { + e.stopPropagation(); + }} + className="sql-tooltip-content-container dremio-typography-monospace" + > + {sql} +
        + } + placement="bottom-start" + type="richTooltip" + > +
        + {sql} +
        + +
        +
        + ); +}; + +export default SQLCell; diff --git a/dac/ui/src/pages/NessieHomePage/NessieHomePage.tsx b/dac/ui/src/pages/NessieHomePage/NessieHomePage.tsx index ac63b3552d..353a89694d 100644 --- a/dac/ui/src/pages/NessieHomePage/NessieHomePage.tsx +++ b/dac/ui/src/pages/NessieHomePage/NessieHomePage.tsx @@ -36,11 +36,12 @@ const DATA_OPTIMIZATION = "data_optimization"; type NessieHomePageProps = { children: any; - source: { id: string; name: string; endpoint?: string }; + source: { id: string; name: string; endpoint?: string; endpointV1?: string }; viewState: any; isBareMinimumNessie?: boolean; baseUrl?: string; initialRef?: Branch; + statePrefix?: string; }; type ConnectedProps = { @@ -84,25 +85,26 @@ function HomePageContentUnconnected({ source: sourceInfo, baseUrl, initialRef, + statePrefix = "", }: NessieHomePageProps & ConnectedProps) { const contextValue = useMemo( - () => createNessieContext(sourceInfo, nessie, ARCTIC_STATE_PREFIX, baseUrl), - [baseUrl, nessie, sourceInfo] + () => createNessieContext(sourceInfo, nessie, statePrefix, baseUrl), + [baseUrl, nessie, sourceInfo, statePrefix] ); const initReference = useRef(initialRef); - const { stateKey, api } = contextValue; + const { stateKey, apiV2 } = contextValue; useEffect(() => { - fetchDefaultReference(stateKey, api); - }, [fetchDefaultReference, stateKey, api]); + fetchDefaultReference(stateKey, apiV2); + }, [fetchDefaultReference, stateKey, apiV2]); useEffect(() => { // prevent infinite refetching by destructuring params - fetchBranchReference(stateKey, api, { + fetchBranchReference(stateKey, apiV2, { name: initReference.current?.name, hash: initReference.current?.hash, } as Branch); - }, [fetchBranchReference, stateKey, api]); + }, [fetchBranchReference, stateKey, apiV2]); useEffect(() => { // DX-53967: fetchFeatureFlag does not exist in enterprise (backend team uses enterprise for local development) diff --git a/dac/ui/src/pages/NessieHomePage/NessieRoutes.tsx b/dac/ui/src/pages/NessieHomePage/NessieRoutes.tsx index 86ccbad51d..7c3533f35d 100644 --- a/dac/ui/src/pages/NessieHomePage/NessieRoutes.tsx +++ b/dac/ui/src/pages/NessieHomePage/NessieRoutes.tsx @@ -23,6 +23,9 @@ import NessieProjectHomePage from "./components/NessieProjectHomePage/NessieProj import NessieSourceHomePage from "./components/NessieSourceHomePage/NessieSourceHomePage"; import ArcticSourceWithNessie from "@app/exports/pages/ArcticSource/ArcticSource"; import { ArcticSourceRoutes } from "@inject/additionalRequiredRoutes"; +import ArcticCatalogCommits from "@app/exports/pages/ArcticCatalog/components/ArcticCatalogCommits/ArcticCatalogCommits"; +import ArcticCatalogTags from "@app/exports/pages/ArcticCatalog/components/ArcticCatalogTags/ArcticCatalogTags"; +import ArcticCommitDetails from "@app/exports/pages/ArcticCatalog/components/ArcticCommitDetails/ArcticCommitDetails"; import * as PATHS from "@app/exports/paths"; @@ -52,19 +55,70 @@ function nessieRoutes() { ); } +export const NessieHistorySourceRoutes = [ + , + , + , + , + , + } + />, +]; + export function nessieSourceRoutes() { return [ - , - {CommonRoutes} + + {NessieHistorySourceRoutes} + null} + /> , ]; } diff --git a/dac/ui/src/pages/NessieHomePage/components/BranchButton/BranchButton.tsx b/dac/ui/src/pages/NessieHomePage/components/BranchButton/BranchButton.tsx index 2c236b168c..316f20ba58 100644 --- a/dac/ui/src/pages/NessieHomePage/components/BranchButton/BranchButton.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/BranchButton/BranchButton.tsx @@ -13,7 +13,7 @@ * See the License for 
the specific language governing permissions and * limitations under the License. */ -import { Button } from "dremio-ui-lib/dist-esm"; +import { Button } from "dremio-ui-lib/components"; import { FormattedMessage } from "react-intl"; import "./BranchButton.less"; diff --git a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryCommits/utils.tsx b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryCommits/utils.tsx index 8ec074a63e..33344ce2ff 100644 --- a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryCommits/utils.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryCommits/utils.tsx @@ -16,12 +16,16 @@ import * as React from "react"; import { FormattedMessage } from "react-intl"; -import { Avatar } from "@mui/material"; +import { Avatar } from "dremio-ui-lib/components"; import CommitHash from "@app/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitHash/CommitHash"; -import { DefaultApi, LogEntry, LogResponse } from "@app/services/nessie/client"; +import { + DefaultApi, + LogEntryV1 as LogEntry, + LogResponseV1 as LogResponse, +} from "@app/services/nessie/client"; import { convertISOStringWithTooltip } from "../../../RepoView/components/RepoViewBody/components/RepoViewBranchList/utils"; import { Reference } from "@app/types/nessie"; - +import { nameToInitials } from "@app/exports/utilities/nameToInitials"; export const columns = [ { key: "name", @@ -42,17 +46,6 @@ export const columns = [ }, ]; -const stringAvatar = (name: string | undefined) => { - if (!name) return { children: null }; - const splitName = name.split(" "); - return { - children: - splitName.length > 1 - ? `${splitName[0][0].toUpperCase()}${splitName[1][0].toUpperCase()}` - : `${splitName[0][0].toUpperCase()}`, - }; -}; - const handleLoadMoreCommits = async ( branchName: string, commitLog: LogResponse, @@ -124,7 +117,9 @@ const createTableRow = ( entry.commitMeta && (
        - + {entry.commitMeta.author} diff --git a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryHeader/BranchHistoryHeader.less b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryHeader/BranchHistoryHeader.less index cc07a6d055..780db349c7 100644 --- a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryHeader/BranchHistoryHeader.less +++ b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryHeader/BranchHistoryHeader.less @@ -78,7 +78,7 @@ } &-more { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } } } diff --git a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryMetadata/utils.ts b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryMetadata/utils.ts index 949cdf0e0e..8921dbb17a 100644 --- a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryMetadata/utils.ts +++ b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/components/BranchHistoryMetadata/utils.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { LogEntry } from "@app/services/nessie/client"; +import { LogEntryV1 as LogEntry } from "@app/services/nessie/client"; export const countUniqueAuthors = ( logEntries: LogEntry[] | undefined @@ -22,7 +22,7 @@ export const countUniqueAuthors = ( const authorSet = new Set(); if (logEntries) { - for (let logEntry of logEntries) { + for (const logEntry of logEntries) { if (logEntry.commitMeta && logEntry.commitMeta.author) { authorSet.add(logEntry.commitMeta.author); } diff --git a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/utils.ts b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/utils.ts index 6f031323b2..70e772f19d 100644 --- a/dac/ui/src/pages/NessieHomePage/components/BranchHistory/utils.ts +++ b/dac/ui/src/pages/NessieHomePage/components/BranchHistory/utils.ts @@ -20,7 +20,7 @@ import { usePromise } from "react-smart-promise"; import { DefaultApi, FetchOption, - LogResponse, + LogResponseV1 as LogResponse, } from "@app/services/nessie/client"; import { Reference } from "@app/types/nessie"; diff --git a/dac/ui/src/pages/NessieHomePage/components/CommitDetails/CommitDetails.tsx b/dac/ui/src/pages/NessieHomePage/components/CommitDetails/CommitDetails.tsx index 92116e9378..1127b89a06 100644 --- a/dac/ui/src/pages/NessieHomePage/components/CommitDetails/CommitDetails.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/CommitDetails/CommitDetails.tsx @@ -17,7 +17,7 @@ import { useIntl } from "react-intl"; import CommitHash from "@app/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitHash/CommitHash"; import UserIcon from "@app/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/UserIcon/UserIcon"; import { formatDate } from "@app/utils/date"; -import { CommitMeta } from "@app/services/nessie/client"; +import { CommitMetaV2 as CommitMeta } from "@app/services/nessie/client"; import { Reference } from "@app/types/nessie"; import "./CommitDetails.less"; @@ -30,6 +30,8 @@ function CommitDetails({ branch: Reference; }) { const intl = useIntl(); + + const author = commitMeta.authors?.[0]; return (
        @@ -47,18 +49,18 @@ function CommitDetails({
- {commitMeta.author && ( + {author && (
        {intl.formatMessage({ id: "Common.Author" })}
        - + - {commitMeta.author} + {author}
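A note on the Nessie V1 -> V2 client migration running through the hunks above and below: the generated V2 types rename several fields, most visibly CommitMetaV2, which carries an authors array where the V1 CommitMeta exposed a single optional author string. Call sites therefore read the first entry, as the CommitDetails.tsx hunk above now does. A minimal sketch of that pattern follows; the helper name is hypothetical and not part of this patch, and only the CommitMetaV2 import that the patch itself uses is assumed:

    import { CommitMetaV2 } from "@app/services/nessie/client";

    // Hypothetical helper, for illustration only: mirrors how the migrated UI
    // code reads the primary author from V2's `authors` array (V1 had `author`).
    const getPrimaryAuthor = (meta: CommitMetaV2): string | undefined =>
      meta.authors?.[0];

The same V2 shape change shows up wherever a component previously rendered commitMeta.author directly (e.g. the RepoViewBranchList hunk at the end of this section).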
        diff --git a/dac/ui/src/pages/NessieHomePage/components/CommitDetailsPage/CommitDetailsPage.tsx b/dac/ui/src/pages/NessieHomePage/components/CommitDetailsPage/CommitDetailsPage.tsx index 5dc2b81129..4cce77ee2e 100644 --- a/dac/ui/src/pages/NessieHomePage/components/CommitDetailsPage/CommitDetailsPage.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/CommitDetailsPage/CommitDetailsPage.tsx @@ -24,7 +24,7 @@ import { useNessieContext } from "../../utils/context"; import "./CommitDetailsPage.less"; function CommitDetailsPage({ params }: { params: any }) { - const { api } = useNessieContext(); + const { apiV2 } = useNessieContext(); const [branchName, commitHash] = useMemo(() => { const name = decodeURIComponent(params?.branchName || ""); const hash = params?.commitHash || ""; @@ -37,11 +37,11 @@ function CommitDetailsPage({ params }: { params: any }) { !branchName || !commitHash ? null : () => - api.getCommitLog({ + apiV2.getCommitLogV2({ ref: branchName, filter: `commit.hash == "${commitHash}"`, }), - [branchName, commitHash, api] + [branchName, commitHash, apiV2] ) ); diff --git a/dac/ui/src/pages/NessieHomePage/components/DeleteBranchDialog/DeleteBranchDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/DeleteBranchDialog/DeleteBranchDialog.tsx index 75fd51c2bb..0bd692ae8b 100644 --- a/dac/ui/src/pages/NessieHomePage/components/DeleteBranchDialog/DeleteBranchDialog.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/DeleteBranchDialog/DeleteBranchDialog.tsx @@ -16,7 +16,11 @@ import { useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; -import { Button, ModalContainer, DialogContent } from "dremio-ui-lib/dist-esm"; +import { + Button, + ModalContainer, + DialogContent, +} from "dremio-ui-lib/components"; import { useDispatch } from "react-redux"; import { setReference } from "@app/actions/nessie/nessie"; import { Reference } from "@app/types/nessie"; @@ -47,7 +51,7 @@ function DeleteBranchDialog({ const [isSending, setIsSending] = useState(false); const [errorText, setErrorText] = useState(null); const { - api, + apiV2, stateKey, state: { reference, defaultReference }, } = useNessieContext(); @@ -56,10 +60,11 @@ function DeleteBranchDialog({ setIsSending(true); try { - await api.deleteReference({ - referenceName: referenceToDelete.name, - referenceType: ReferenceType.Branch, - expectedHash: referenceToDelete.hash, + await apiV2.deleteReferenceV2({ + ref: referenceToDelete.hash + ? `${referenceToDelete.name}@${referenceToDelete.hash}` + : referenceToDelete.name, + type: ReferenceType.Branch, }); if (allRefs && setAllRefs) { diff --git a/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.module.less b/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.module.less new file mode 100644 index 0000000000..c2f2886a5b --- /dev/null +++ b/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.module.less @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.delete-tag-dialog { + width: 600px; + + :global { + .dremio-dialog-content__main { + height: 150px; + } + } + + &-body { + font-size: 14px; + } +} diff --git a/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.tsx new file mode 100644 index 0000000000..7ece71425e --- /dev/null +++ b/dac/ui/src/pages/NessieHomePage/components/DeleteTagDialog/DeleteTagDialog.tsx @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { useDispatch } from "react-redux"; +import { FormattedMessage, useIntl } from "react-intl"; +import { + Button, + ModalContainer, + DialogContent, +} from "dremio-ui-lib/components"; +import { Reference } from "@app/types/nessie"; +import { useNessieContext } from "../../utils/context"; +import { addNotification } from "actions/notification"; +import { ReferenceType } from "@app/services/nessie/client/index"; + +import * as classes from "./DeleteTagDialog.module.less"; + +type DeleteTagDialogProps = { + open: boolean; + closeDialog: () => void; + forkFrom: Reference; + refetch?: () => void; +}; + +function DeleteTagDialog({ + open, + closeDialog, + forkFrom, + refetch, +}: DeleteTagDialogProps): JSX.Element { + const { apiV2 } = useNessieContext(); + const intl = useIntl(); + const dispatch = useDispatch(); + + const onCancel = () => { + closeDialog(); + }; + + const onDelete = async () => { + try { + await apiV2.deleteReferenceV2({ + ref: forkFrom.hash + ? `${forkFrom.name}@${forkFrom.hash}` + : forkFrom.name, + type: ReferenceType.Tag, + }); + dispatch( + addNotification( + intl.formatMessage( + { id: "ArcticCatalog.Tags.DeleteSuccess" }, + { tag: forkFrom.name } + ), + "success" + ) + ); + refetch?.(); + closeDialog(); + } catch (error: any) { + dispatch( + addNotification( + intl.formatMessage({ id: "ArcticCatalog.Tags.DeleteFailure" }), + "error" + ) + ); + closeDialog(); + } + }; + + return ( + {}} isOpen={open} close={closeDialog}> + + + + + } + > +
        + +
        +
        +
        + ); +} +export default DeleteTagDialog; diff --git a/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.less b/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.less index 5ebf2e2891..f27202f97d 100644 --- a/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.less +++ b/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.less @@ -66,7 +66,7 @@ height: 32px; min-height: 32px; &:hover { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } } } diff --git a/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.tsx index c5adf01cdd..155e3c4f3a 100644 --- a/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/MergeBranchDialog/MergeBranchDialog.tsx @@ -17,8 +17,13 @@ import { useMemo, useState } from "react"; import { useDispatch } from "react-redux"; import { FormattedMessage, useIntl } from "react-intl"; -import { Button, ModalContainer, DialogContent } from "dremio-ui-lib/dist-esm"; -import { Merge, MergeRefIntoBranchRequest } from "@app/services/nessie/client"; +import { + Button, + ModalContainer, + DialogContent, + SectionMessage, + MessageDetails, +} from "dremio-ui-lib/components"; import { Reference } from "@app/types/nessie"; import { Select } from "@mantine/core"; import { useNessieContext } from "../../utils/context"; @@ -36,6 +41,7 @@ type MergeBranchDialogProps = { }; // The reason mergeTo is still here is for the old UI, once that is deprecated the mergeTo conditionals can be removed. +type Err = Record | null; function MergeBranchDialog({ open, @@ -47,10 +53,10 @@ function MergeBranchDialog({ }: MergeBranchDialogProps): JSX.Element { const [selectedBranch, setSelectedBranch] = useState(null); const [isSending, setIsSending] = useState(false); - const [errorText, setErrorText] = useState(null); - const { api } = useNessieContext(); + const [error, setError] = useState(null); + const { apiV2 } = useNessieContext(); const dispatch = useDispatch(); - const intl = useIntl(); + const { formatMessage } = useIntl(); const onMerge = async () => { setIsSending(true); @@ -62,14 +68,13 @@ function MergeBranchDialog({ const fromRefName = mergeFrom.name; const fromHash = mergeFrom.hash as string; - await api.mergeRefIntoBranch({ - branchName, - expectedHash, + await apiV2.mergeV2({ + branch: expectedHash ? 
`${branchName}@${expectedHash}` : branchName, merge: { fromRefName, fromHash, - } as Merge, - } as MergeRefIntoBranchRequest); + }, + }); if (setSuccessMessage) { setSuccessMessage( @@ -77,10 +82,10 @@ function MergeBranchDialog({ ); } - setErrorText(null); + setError(null); dispatch( addNotification( - intl.formatMessage( + formatMessage( { id: "ArcticCatalog.Merge.Dialog.SuccessMessage" }, { branchName, fromRefName } ), @@ -90,27 +95,36 @@ function MergeBranchDialog({ closeDialog(); setIsSending(false); } catch (error: any) { - if (error.status === 409) { - setErrorText( - - ); - } else if (error.status === 400) { - setErrorText( - - ); - } else { - setErrorText( - - ); + try { + const body = await error.json(); + setError(body); + } catch (e: any) { + setError(e); + } finally { + setIsSending(false); } + } + }; - setIsSending(false); + const getErrorProps = (err: Err) => { + let id = "RepoView.Dialog.DeleteBranch.Error"; + if (err?.status === 409) { + id = "BranchHistory.Dialog.MergeBranch.Error.Conflict"; + } else if (err?.status === 400) { + id = "BranchHistory.Dialog.MergeBranch.Error.NoHashes"; + } else if (err?.errorCode === "REFERENCE_NOT_FOUND") { + id = "BranchHistory.Dialog.MergeBranch.Error.RefNotFound"; } + + return { + message: formatMessage({ id }), + details: err instanceof TypeError ? undefined : (err?.message as string), + }; }; const onClose = () => { setSelectedBranch(null); - setErrorText(null); + setError(null); closeDialog(); }; @@ -132,8 +146,17 @@ function MergeBranchDialog({ className="modal-container-overflow" > + + + ) : ( + <> + ) + } className="merge-branch-dialog" - title={intl.formatMessage( + title={formatMessage( { id: mergeTo ? "RepoView.Dialog.CreateBranch.CreateBranch" @@ -173,9 +196,6 @@ function MergeBranchDialog({ onChange={(value: string) => setSelectedBranch(value)} styles={() => ({ item: styles.options })} /> -
        - {errorText} -
        )} diff --git a/dac/ui/src/pages/NessieHomePage/components/NamespaceItem/NamespaceItem.tsx b/dac/ui/src/pages/NessieHomePage/components/NamespaceItem/NamespaceItem.tsx index b1a53c7f81..210c603169 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NamespaceItem/NamespaceItem.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/NamespaceItem/NamespaceItem.tsx @@ -16,7 +16,7 @@ import FontIcon from "@app/components/Icon/FontIcon"; import { useIntl } from "react-intl"; -import { Entry } from "@app/services/nessie/client"; +import { EntryV1 as Entry } from "@app/services/nessie/client"; import { getUrlByType, getIconByType } from "../../utils/utils"; import NessieLink from "../NessieLink/NessieLink"; diff --git a/dac/ui/src/pages/NessieHomePage/components/NamespaceSettings/NamespaceSettings.tsx b/dac/ui/src/pages/NessieHomePage/components/NamespaceSettings/NamespaceSettings.tsx index c68382fcae..14dad715b1 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NamespaceSettings/NamespaceSettings.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/NamespaceSettings/NamespaceSettings.tsx @@ -15,7 +15,8 @@ */ import { ChangeEvent } from "react"; import { withRouter, InjectedRouter, WithRouterProps } from "react-router"; -import { Entry } from "@app/services/nessie/client"; +import { EntryV1 as Entry } from "@app/services/nessie/client"; +import { Type } from "@app/types/nessie"; import * as classes from "./NamespaceSettings.module.less"; @@ -49,7 +50,7 @@ const NamespaceSettings = ({ }: { entry: Entry; } & WithRouterProps) => { - const validTypes = ["ICEBERG_TABLE"]; + const validTypes = [Type.IcebergTable]; const entryType = entry.type; if (entryType && !validTypes.includes(entryType)) return null; return ( diff --git a/dac/ui/src/pages/NessieHomePage/components/NessieSourceHomePage/NessieSourceHomePage.tsx b/dac/ui/src/pages/NessieHomePage/components/NessieSourceHomePage/NessieSourceHomePage.tsx index 95ad611de5..508d8fb147 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NessieSourceHomePage/NessieSourceHomePage.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/NessieSourceHomePage/NessieSourceHomePage.tsx @@ -21,7 +21,7 @@ import { getViewState } from "@app/selectors/resources"; import { loadSourceListData as loadSourceListDataAction } from "@app/actions/resources/sources"; import HomePage from "@app/pages/HomePage/HomePage"; import { HomePageContent } from "../../NessieHomePage"; -import { getEndpointFromSourceConfig } from "@app/utils/nessieUtils"; +import { getEndpointFromSource } from "@app/utils/nessieUtils"; import "./NessieSourceHomePage.less"; @@ -36,11 +36,11 @@ function NessieSourceHomePage({ ); if (!source) return null; - const config = source.get("config"); return { name: source.get("name"), id: source.get("id"), - endpoint: getEndpointFromSourceConfig(config.toJS()), + endpointV1: getEndpointFromSource(source.toJS(), "v1"), + endpoint: getEndpointFromSource(source.toJS()), }; }, [props.params.sourceId, props.sources]); diff --git a/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.less b/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.less index 1c96d7d6bc..57031f3864 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.less +++ b/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.less @@ -55,7 +55,7 @@ font-size: 14px; &:hover { fieldset { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } } @@ 
-88,7 +88,7 @@ } &-name { dremio-icon { - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); margin-right: 2px; } } diff --git a/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.tsx index 8f0df1a22f..ef8d437a32 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/NewBranchDialog/NewBranchDialog.tsx @@ -17,11 +17,16 @@ import { useState } from "react"; import { useDispatch } from "react-redux"; import { FormattedMessage, useIntl } from "react-intl"; -import { Button, ModalContainer, DialogContent } from "dremio-ui-lib/dist-esm"; +import { + Button, + ModalContainer, + DialogContent, +} from "dremio-ui-lib/components"; import { setReference } from "@app/actions/nessie/nessie"; import { TextField } from "@mui/material"; import { Reference } from "@app/types/nessie"; import { useNessieContext } from "../../utils/context"; +import { ReferenceType } from "@app/services/nessie/client/index"; import "./NewBranchDialog.less"; @@ -45,7 +50,7 @@ function NewBranchDialog({ fromType, }: NewBranchDialogProps): JSX.Element { const intl = useIntl(); - const { api, stateKey } = useNessieContext(); + const { apiV2, stateKey } = useNessieContext(); const [newBranchName, setNewBranchName] = useState(""); const [isSending, setIsSending] = useState(false); const [errorText, setErrorText] = useState(null); @@ -68,13 +73,11 @@ function NewBranchDialog({ const onAdd = async () => { setIsSending(true); try { - const reference = (await api.createReference({ - sourceRefName: forkFrom ? forkFrom.name : undefined, - reference: { - type: "BRANCH", - hash: forkFrom ? 
forkFrom.hash : null, - name: newBranchName, - } as Reference, + // https://github.com/projectnessie/nessie/issues/6210 + const { reference } = (await apiV2.createReferenceV2({ + name: newBranchName, + type: ReferenceType.Branch, + reference: forkFrom, })) as Reference; if (allRefs && setAllRefs) { @@ -143,7 +146,7 @@ function NewBranchDialog({ <> - {forkFrom?.hash?.substring(0, 30)} + {forkFrom?.hash?.substring(0, 8)} ) : ( diff --git a/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.less b/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.less index f1797c8f0c..e1e3b5acc5 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.less +++ b/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.less @@ -67,7 +67,7 @@ font-size: 14px; &:hover { fieldset { - border-color: var(--dremio--color--neutral--300); + border-color: var(--color--neutral--200); } } diff --git a/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.tsx index 118929178a..2cf61e01e5 100644 --- a/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/NewTagDialog/NewTagDialog.tsx @@ -17,11 +17,16 @@ import { useState } from "react"; import { useDispatch } from "react-redux"; import { FormattedMessage, useIntl } from "react-intl"; -import { Button, ModalContainer, DialogContent } from "dremio-ui-lib/dist-esm"; +import { + Button, + ModalContainer, + DialogContent, +} from "dremio-ui-lib/components"; import { TextField } from "@mui/material"; import { Reference } from "@app/types/nessie"; import { useNessieContext } from "../../utils/context"; import { addNotification } from "actions/notification"; +import { ReferenceType } from "@app/services/nessie/client/index"; import "./NewTagDialog.less"; @@ -38,7 +43,7 @@ function NewTagDialog({ forkFrom, refetch, }: NewTagDialogProps): JSX.Element { - const { api } = useNessieContext(); + const { apiV2 } = useNessieContext(); const [newTagName, setNewTagName] = useState(""); const [isSending, setIsSending] = useState(false); const [errorText, setErrorText] = useState(null); @@ -58,13 +63,10 @@ function NewTagDialog({ const onAdd = async () => { setIsSending(true); try { - await api.createReference({ - sourceRefName: forkFrom ? forkFrom.name : undefined, - reference: { - type: "TAG", - hash: forkFrom ? forkFrom.hash : null, - name: newTagName, - } as Reference, + await apiV2.createReferenceV2({ + name: newTagName, + type: ReferenceType.Tag, + reference: forkFrom, }); setErrorText(null); @@ -123,7 +125,7 @@ function NewTagDialog({ - {forkFrom.hash.substring(0, 30)} + {forkFrom.hash.substring(0, 8)}
        diff --git a/dac/ui/src/pages/NessieHomePage/components/RenameBranchDialog/RenameBranchDialog.tsx b/dac/ui/src/pages/NessieHomePage/components/RenameBranchDialog/RenameBranchDialog.tsx index d8a468faef..11c50de43c 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RenameBranchDialog/RenameBranchDialog.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/RenameBranchDialog/RenameBranchDialog.tsx @@ -18,7 +18,7 @@ import { useState } from "react"; import { connect } from "react-redux"; import { FormattedMessage } from "react-intl"; -import { Button } from "dremio-ui-lib/dist-esm"; +import { Button } from "dremio-ui-lib/components"; import { Dialog, DialogActions, @@ -89,7 +89,7 @@ function RenameBranchDialog({ await api.deleteReference({ referenceName: referenceToRename.name, referenceType: ReferenceType.Branch, - expectedHash: referenceToRename.hash, + expectedHash: referenceToRename.hash || "", }); if (reference && referenceToRename.name === reference.name) { diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.less b/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.less index f9f1e10076..f9192c3559 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.less +++ b/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.less @@ -55,7 +55,7 @@ &--icon { height: 24px; width: 24px; - color: var(--dremio--color--neutral--600); + color: var(--color--neutral--600); } } diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.tsx b/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.tsx index da84ccabe0..53dfc6cc06 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/RepoHeader/RepoHeader.tsx @@ -15,7 +15,7 @@ */ import FontIcon from "@app/components/Icon/FontIcon"; -import { Entry } from "@app/services/nessie/client"; +import { EntryV1 as Entry } from "@app/services/nessie/client"; import NessieLink from "../NessieLink/NessieLink"; import "./RepoHeader.less"; diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoView/RepoView.tsx b/dac/ui/src/pages/NessieHomePage/components/RepoView/RepoView.tsx index 1771546f16..c191282951 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoView/RepoView.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/RepoView/RepoView.tsx @@ -33,9 +33,9 @@ type RepoViewProps = { }; function RepoView(props: RepoViewProps) { const { showHeader = true } = props; - const { state, api } = useNessieContext(); + const { state, apiV2 } = useNessieContext(); const defaultReferenceLoading = isDefaultReferenceLoading(state); - const repoViewContext = useRepoViewContext(api); + const repoViewContext = useRepoViewContext(apiV2); const { setDefaultRef } = repoViewContext; useEffect(() => { diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.less b/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.less index b0c4dbfe96..efbe614828 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.less +++ b/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.less @@ -74,7 +74,7 @@ width: 1px; height: 24px; margin: 0 8px; - background-color: var(--dremio--color--neutral--150); + 
background-color: var(--color--neutral--50); } &-container { cursor: pointer; diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.tsx b/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.tsx index f8c98e17bc..c0b26f097d 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/RepoView/components/RepoViewBody/components/RepoViewBranchList/RepoViewBranchList.tsx @@ -24,12 +24,13 @@ import { MenuItem } from "@mui/material"; import CommitHash from "@app/pages/HomePage/components/BranchPicker/components/CommitBrowser/components/CommitHash/CommitHash"; //@ts-ignore import { Tooltip } from "dremio-ui-lib"; -import { Avatar } from "dremio-ui-lib/dist-esm"; +import { Avatar } from "dremio-ui-lib/components"; import PromiseViewState from "@app/components/PromiseViewState/PromiseViewState"; import { Reference } from "@app/types/nessie"; import { RepoViewContext } from "../../../../RepoView"; import { convertISOStringWithTooltip, renderIcons } from "./utils"; import { useNessieContext } from "@app/pages/NessieHomePage/utils/context"; +import { nameToInitials } from "@app/exports/utilities/nameToInitials"; import { constructArcticUrl, useArcticCatalogContext, @@ -73,10 +74,7 @@ function RepoViewBranchList({ new Array(rows.length).fill(false) ); - const { - source: { name }, - baseUrl, - } = useNessieContext(); + const { baseUrl, stateKey } = useNessieContext(); const { isCatalog } = useArcticCatalogContext() ?? {}; @@ -88,7 +86,7 @@ function RepoViewBranchList({ const cur = rows[index]; const goToDatasetOnClick = () => { - dispatchSetReference({ reference: cur }, name); + dispatchSetReference({ reference: cur }, stateKey); router.push( constructArcticUrl({ @@ -132,7 +130,7 @@ function RepoViewBranchList({ {cur.metadata && cur.metadata.commitMetaOfHEAD && - cur.metadata.commitMetaOfHEAD.author && ( + cur.metadata.commitMetaOfHEAD.authors?.[0] && (
        {cur.hash && cur.name && ( )} @@ -166,13 +164,12 @@ function RepoViewBranchList({ by - {cur.metadata.commitMetaOfHEAD.author} + {cur.metadata.commitMetaOfHEAD.authors[0] || ""} stopPropagation(e)}> diff --git a/dac/ui/src/pages/NessieHomePage/components/RepoView/utils.ts b/dac/ui/src/pages/NessieHomePage/components/RepoView/utils.ts index 7d97b3830a..a4f532f558 100644 --- a/dac/ui/src/pages/NessieHomePage/components/RepoView/utils.ts +++ b/dac/ui/src/pages/NessieHomePage/components/RepoView/utils.ts @@ -17,7 +17,7 @@ import { useCallback, useEffect, useState } from "react"; import { usePromise } from "react-smart-promise"; -import { DefaultApi, FetchOption } from "@app/services/nessie/client"; +import { FetchOption, V2BetaApi } from "@app/services/nessie/client"; import { Reference } from "@app/types/nessie"; export type RepoViewContextType = { @@ -29,9 +29,9 @@ export type RepoViewContextType = { setAllRefs: React.Dispatch>; }; -export function useRepoViewContext(api: DefaultApi): RepoViewContextType { +export function useRepoViewContext(api: V2BetaApi): RepoViewContextType { const [allRefsErr, allBranches, allRefsStatus] = usePromise( - useCallback(() => api.getAllReferences({ fetch: FetchOption.All }), [api]) + useCallback(() => api.getAllReferencesV2({ fetch: FetchOption.All }), [api]) ); const [defaultRef, setDefaultRef] = useState({} as Reference); diff --git a/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/TableDetailsPage.tsx b/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/TableDetailsPage.tsx index 8a869926fd..f65300fe31 100644 --- a/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/TableDetailsPage.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/TableDetailsPage.tsx @@ -20,6 +20,7 @@ import PageBreadcrumbHeader from "../PageBreadcrumbHeader/PageBreadcrumbHeader"; import TableHistoryContent from "./components/TableHistoryContent/TableHistoryContent"; import TableHistoryHeader from "./components/TableHistoryHeader/TableHistoryHeader"; import { rmProjectBase } from "dremio-ui-common/utilities/projectBase.js"; +import { Type } from "@app/types/nessie"; import "./TableDetailsPage.less"; @@ -40,7 +41,7 @@ function TableDetailsPage({
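The RepoViewBranchList change above reflects another v1 -> v2 schema difference: commitMetaOfHEAD.author (a single string) becomes commitMetaOfHEAD.authors (an array). A defensive accessor consistent with that usage might look like the sketch below; CommitMetaLike and primaryAuthor are hypothetical names, with the field names taken from this diff:

// Tolerates both the v1 single-author and v2 authors[] shapes.
type CommitMetaLike = {
  author?: string; // v1 shape
  authors?: string[]; // v2 shape
};

function primaryAuthor(meta?: CommitMetaLike): string | undefined {
  // Prefer the first entry of the v2 array, fall back to the v1 string.
  return meta?.authors?.[0] ?? meta?.author;
}

// Usage mirroring the component: render the "by <author>" row only when
// primaryAuthor(cur.metadata?.commitMetaOfHEAD) is truthy.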
        diff --git a/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/components/TableHistoryContent/TableHistoryContent.tsx b/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/components/TableHistoryContent/TableHistoryContent.tsx index e9a23baea2..d9cc80b471 100644 --- a/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/components/TableHistoryContent/TableHistoryContent.tsx +++ b/dac/ui/src/pages/NessieHomePage/components/TableDetailsPage/components/TableHistoryContent/TableHistoryContent.tsx @@ -14,17 +14,26 @@ * limitations under the License. */ import CommitBrowser from "@app/pages/HomePage/components/BranchPicker/components/CommitBrowser/CommitBrowser"; -import { LogEntry, LogResponse } from "@app/services/nessie/client"; +import { + LogEntryV2 as LogEntry, + LogResponseV2 as LogResponse, +} from "@app/services/nessie/client"; import { useCallback, useEffect, useRef, useState } from "react"; import { useNessieContext } from "../../../../utils/context"; import CommitDetails from "../../../CommitDetails/CommitDetails"; import "./TableHistoryContent.less"; -function TableHistoryContent({ path }: { path: string[] }) { +function TableHistoryContent({ + path, + tableName, +}: { + path: string[]; + tableName?: string; +}) { const { state: { reference }, - api, + apiV2, } = useNessieContext(); const [commit, setCommit] = useState(); const [list, setList] = useState(); @@ -48,6 +57,7 @@ function TableHistoryContent({ path }: { path: string[] }) { {!!reference && ( )} diff --git a/dac/ui/src/pages/NessieHomePage/utils/context.ts b/dac/ui/src/pages/NessieHomePage/utils/context.ts index 7eb8fb6d1f..c6f475d2d2 100644 --- a/dac/ui/src/pages/NessieHomePage/utils/context.ts +++ b/dac/ui/src/pages/NessieHomePage/utils/context.ts @@ -15,8 +15,8 @@ */ import { NessieRootState, NessieState } from "@app/types/nessie"; import { selectState } from "@app/selectors/nessie/nessie"; -import { DefaultApi } from "@app/services/nessie/client"; -import { getTreeApi } from "@app/services/nessie/impl/TreeApi"; +import { DefaultApi, V2BetaApi } from "@app/services/nessie/client"; +import { getTreeApi, getApiV2 } from "@app/services/nessie/impl/TreeApi"; import { createContext, useContext } from "react"; import * as commonPaths from "dremio-ui-common/paths/common.js"; import { getSonarContext } from "dremio-ui-common/contexts/SonarContext.js"; @@ -25,12 +25,14 @@ type SourceInfo = { name: string; id: string; endpoint?: string; + endpointV1?: string; }; type NessieContextType = { source: SourceInfo; state: NessieState; api: DefaultApi; + apiV2: V2BetaApi; baseUrl: string; stateKey: string; }; @@ -47,7 +49,7 @@ export function createNessieContext( source: SourceInfo, state: NessieRootState, prefix = "", - baseUrl = !source.endpoint + baseUrl = !source.endpoint && !source.endpointV1 ? 
"" : commonPaths.dataplaneSource.link({ sourceName: source.name, @@ -59,7 +61,8 @@ export function createNessieContext( source, stateKey, state: selectState(state, stateKey), - api: getTreeApi(source.endpoint), - baseUrl: baseUrl, //Different routes for Dataplane only and Dataplane source + api: getTreeApi(source.endpointV1), + apiV2: getApiV2(source.endpoint), + baseUrl, //Different routes for Dataplane only and Dataplane source }; } diff --git a/dac/ui/src/pages/NessieHomePage/utils/useNamespaceList.ts b/dac/ui/src/pages/NessieHomePage/utils/useNamespaceList.ts index c3859648e7..4256214c6e 100644 --- a/dac/ui/src/pages/NessieHomePage/utils/useNamespaceList.ts +++ b/dac/ui/src/pages/NessieHomePage/utils/useNamespaceList.ts @@ -19,8 +19,6 @@ import { useMemo } from "react"; import { usePromise } from "react-smart-promise"; import { DefaultApi } from "@app/services/nessie/client"; -const QUERY_POSTFIX = "(\\\\.|$)"; - const memoGetEntries = moize(getEntries, { maxSize: 1, isPromise: true, @@ -28,8 +26,9 @@ const memoGetEntries = moize(getEntries, { }); function formatQuery(path: string[] = []) { - return `entry.namespace.matches('${ - path.map((c) => decodeURIComponent(c)).join("\\\\.") + QUERY_POSTFIX + if (!path?.length) return; + return `entry.encodedKey.startsWith('${ + path.map((c) => decodeURIComponent(c)).join(".") + "." }')`; } diff --git a/dac/ui/src/pages/NessieHomePage/utils/utils.ts b/dac/ui/src/pages/NessieHomePage/utils/utils.ts index 88992c7b9e..6452eceeee 100644 --- a/dac/ui/src/pages/NessieHomePage/utils/utils.ts +++ b/dac/ui/src/pages/NessieHomePage/utils/utils.ts @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { Type } from "@app/services/nessie/client"; +import { Type } from "@app/types/nessie"; //@ts-ignore import { useProjectContext } from "@inject/utils/storageUtils/localStorageUtils"; @@ -22,7 +22,7 @@ export function getIconByType(type?: string | null, elements?: string[]) { case Type.IcebergTable: case Type.DeltaLakeTable: return { type: "PhysicalDataset", id: `Nessie.${type}` }; - case "ICEBERG_VIEW": // TODO, need to update generated types + case Type.IcebergView: return { type: "VirtualDataset", id: `Nessie.${type}` }; default: return { diff --git a/dac/ui/src/reducers/confirmation-spec.js b/dac/ui/src/reducers/confirmation-spec.js index a743547f43..3ee71bd3d9 100644 --- a/dac/ui/src/reducers/confirmation-spec.js +++ b/dac/ui/src/reducers/confirmation-spec.js @@ -62,6 +62,7 @@ describe("confirmation reducer", () => { validatePromptText: validateFn, isCentered: true, size: "small", + isAsyncAction: false, }); expect(result).to.be.eql({ isOpen: true, @@ -87,6 +88,7 @@ describe("confirmation reducer", () => { validatePromptText: validateFn, isCentered: true, size: "small", + isAsyncAction: false, }); }); }); diff --git a/dac/ui/src/reducers/confirmation.js b/dac/ui/src/reducers/confirmation.js index 2fa8f13034..40e324163d 100644 --- a/dac/ui/src/reducers/confirmation.js +++ b/dac/ui/src/reducers/confirmation.js @@ -50,6 +50,7 @@ export default function confirmation(state = initialState, action) { className, headerIcon, size, + isAsyncAction, } = action; // list all to be sure to reset everything return { @@ -77,6 +78,7 @@ export default function confirmation(state = initialState, action) { className, headerIcon, size, + isAsyncAction, }; } case ActionTypes.HIDE_CONFIRMATION_DIALOG: diff --git a/dac/ui/src/reducers/explore/recommended.js b/dac/ui/src/reducers/explore/recommended.js 
index 66067c385e..ff80aa5b5e 100644 --- a/dac/ui/src/reducers/explore/recommended.js +++ b/dac/ui/src/reducers/explore/recommended.js @@ -112,7 +112,7 @@ export default function grid(oldState, action) { { cards: [action.payload] }, actionType ); - cards = cards.cards || cards; + cards = cards ? cards.cards ?? cards : []; const examplesList = (cards[0] && cards[0].examplesList) || []; const unmatchedCount = (cards[0] && cards[0].unmatchedCount) || 0; const matchedCount = (cards[0] && cards[0].matchedCount) || 0; diff --git a/dac/ui/src/reducers/explore/view.js b/dac/ui/src/reducers/explore/view.js index 3780b2396d..69464cbf2b 100644 --- a/dac/ui/src/reducers/explore/view.js +++ b/dac/ui/src/reducers/explore/view.js @@ -26,6 +26,7 @@ import { SET_QUERY_SELECTIONS, SET_PREVIOUS_MULTI_SQL, SET_SELECTED_SQL, + SET_CUSTOM_DEFAULT_SQL, SET_IS_MULTI_QUERY_RUNNING, SET_QUERY_TAB_NUMBER, RESET_QUERY_STATE, @@ -82,6 +83,17 @@ const selectedSql = (state = null, { type, sql }) => { } }; +const customDefaultSql = (state = null, { type, sql }) => { + switch (type) { + case SET_CUSTOM_DEFAULT_SQL: + return sql === undefined ? null : sql; + case RESET_QUERY_STATE: + return null; + default: + return state; + } +}; + const updateSqlFromHistory = (state = false, { type, updateSql }) => { switch (type) { case SET_UPDATE_SQL_FROM_HISTORY: @@ -183,6 +195,7 @@ export default combineReducers({ currentSql, previousMultiSql, selectedSql, + customDefaultSql, updateSqlFromHistory, isMultiQueryRunning, sqlEditorFocusKey, diff --git a/dac/ui/src/reducers/jobs/jobs.js b/dac/ui/src/reducers/jobs/jobs.js index a258289399..8e1e28d868 100644 --- a/dac/ui/src/reducers/jobs/jobs.js +++ b/dac/ui/src/reducers/jobs/jobs.js @@ -34,6 +34,7 @@ const initialState = Immutable.fromJS({ isSupport: false, jobExecutionDetails: [], jobExecutionOperatorDetails: {}, + uniqueSavingJob: undefined, }); export default function jobs(state = initialState, action) { @@ -162,6 +163,11 @@ export default function jobs(state = initialState, action) { state.get("jobList")?.toJS() ?? 
new Immutable.List(); curJobList[action.meta.replaceIndex] = action.payload.jobs[0]; return state.set("jobList", Immutable.fromJS(curJobList)); + } else if (action.meta?.isSaveJob) { + return state.set( + "uniqueSavingJob", + Immutable.fromJS(jobsMapper.mapJobs(action.payload)[0]) + ); } else { return state.set( "jobList", @@ -188,6 +194,9 @@ export default function jobs(state = initialState, action) { case JobListActionTypes.JOBS_LIST_RESET: return state.set("jobList", action.payload); + case JobListActionTypes.SAVE_JOB_RESET: + return state.set("uniqueSavingJob", action.payload); + case JobListActionTypes.ITEMS_FOR_FILTER_JOBS_LIST_SUCCESS: return state.setIn( ["dataForFilter", action.meta.tag], diff --git a/dac/ui/src/reducers/notification.js b/dac/ui/src/reducers/notification.js index cd677876c7..38ea3eda5a 100644 --- a/dac/ui/src/reducers/notification.js +++ b/dac/ui/src/reducers/notification.js @@ -42,10 +42,13 @@ export default function notification(state = {}, action) { message: action.message, level: action.level, autoDismiss: action.autoDismiss, + uid: action.uid, + options: action.options, }; case ActionTypes.REMOVE_NOTIFICATION: return { removeMessageType: action.messageType, + uid: action?.uid || null, }; default: return state; diff --git a/dac/ui/src/reducers/resources/entityReducers/table.js b/dac/ui/src/reducers/resources/entityReducers/table.js index 23b0ffae97..4b76eb1ae3 100644 --- a/dac/ui/src/reducers/resources/entityReducers/table.js +++ b/dac/ui/src/reducers/resources/entityReducers/table.js @@ -41,7 +41,7 @@ export default function table(state, action) { } return state; case LOAD_NEXT_ROWS_SUCCESS: { - const { rows, columns } = action.payload; + const { rows = [], columns = [] } = action?.payload || {}; const { offset, datasetVersion } = action.meta; const oldRows = state.getIn(["tableData", datasetVersion, "rows"]) || Immutable.List(); diff --git a/dac/ui/src/routes.js b/dac/ui/src/routes.js index aabe9dabab..a29c9d5d65 100644 --- a/dac/ui/src/routes.js +++ b/dac/ui/src/routes.js @@ -27,7 +27,6 @@ import { } from "@app/actions/explore/dataset/data"; import localStorageUtils from "@inject/utils/storageUtils/localStorageUtils"; // import Votes from '@inject/pages/AdminPage/subpages/Votes'; // To Be Removed -import EulaPage from "@inject/pages/EulaPage/EulaPage"; import SSOLandingPage from "@inject/pages/AuthenticationPage/components/SSOLandingPage"; import { resetModuleState } from "@app/actions/modulesState"; import { exploreStateKey } from "@app/selectors/explore"; @@ -37,7 +36,7 @@ import { SSO_LANDING_PATH, } from "@app/sagas/loginLogout"; import { lazy } from "@app/components/Lazy"; -import { AdminPageRouting } from "@inject/RouteMixin.js"; +import { AdminPageRouting, EulaRoute } from "@inject/RouteMixin.js"; import SSOConsent from "@inject/pages/AuthenticationPage/components/SSOConsent"; import AuthenticationPage from "@inject/pages/AuthenticationPage/AuthenticationPage"; import additionalRoutes from "@inject/additionalRoutes"; @@ -75,6 +74,7 @@ import NessieRoutes, { nessieSourceRoutes, } from "./pages/NessieHomePage/NessieRoutes"; import SonarRouteComponent from "@inject/sonar/components/SonarRouteComponent"; +import { RouteLeaveComponent } from "./containers/RouteLeaveComponent"; const ExplorePage = lazy(() => import( @@ -142,7 +142,7 @@ const JobsRouting = () => { } }; -export default (dispatch, projectContext, isDataPlaneEnabled) => { +export default (dispatch, projectContext) => { const isDDPOnly = localStorageUtils ? 
localStorageUtils.isDataPlaneOnly(projectContext) : false; @@ -151,113 +151,117 @@ export default (dispatch, projectContext, isDataPlaneEnabled) => { return ( - {additionalRootRoutes()} - - - - - - - - - - {config.enableSignUp ? ( - - ) : ( - - )} - - - - {additionalRenderedRoutes} - {additionalRoutes} - - - - {JobsRouting()} - - {AdminPageRouting()} - - {isDDPOnly ? ( - NessieRoutes() + + {additionalRootRoutes()} + + + + + + + {EulaRoute()} + + + {config.enableSignUp ? ( + ) : ( - - - - - - - - - - - - - - - - - - - {isDataPlaneEnabled && nessieSourceRoutes()} - {isDataPlaneEnabled && arcticSourceRoutes()} - + )} + - {!isDDPOnly && ( - - {getExploreRoute( - { - component: UserIsAuthenticated(ExploreModals), - children: [ - , - , - ], - }, - dispatch + + {additionalRenderedRoutes} + {additionalRoutes} + + + + {JobsRouting()} + + {AdminPageRouting()} + + {isDDPOnly ? ( + NessieRoutes() + ) : ( + + + + + + + + + + + + + + + + + + + {nessieSourceRoutes()} + {arcticSourceRoutes()} + )} - )} + {!isDDPOnly && ( + + {getExploreRoute( + { + component: UserIsAuthenticated(ExploreModals), + children: [ + , + , + ], + }, + dispatch + )} + + )} + + {notFoundRoute} - {notFoundRoute} ); }; diff --git a/dac/ui/src/sagas/performLoadDataset-spec.js b/dac/ui/src/sagas/performLoadDataset-spec.js index 97bb6554fc..0227be46ab 100644 --- a/dac/ui/src/sagas/performLoadDataset-spec.js +++ b/dac/ui/src/sagas/performLoadDataset-spec.js @@ -20,7 +20,7 @@ import { getLocation } from "selectors/routing"; import { updateViewState } from "actions/resources"; import { handleResumeRunDataset, DataLoadError } from "sagas/runDataset"; import { loadExistingDataset } from "actions/explore/dataset/edit"; -import { getExploreJobId, getFullDataset } from "@app/selectors/explore"; +import { oldGetExploreJobId, getFullDataset } from "@app/selectors/explore"; import { newUntitled } from "actions/explore/dataset/new"; import { EXPLORE_TABLE_ID } from "reducers/explore/view"; import { focusSqlEditor } from "@app/actions/explore/view"; @@ -189,6 +189,8 @@ describe("performLoadDataset saga", () => { location.query.tipVersion, undefined, undefined, + undefined, + undefined, undefined ) ); @@ -203,6 +205,8 @@ describe("performLoadDataset saga", () => { location.query.tipVersion, undefined, undefined, + undefined, + undefined, undefined ) ); @@ -213,7 +217,13 @@ describe("performLoadDataset saga", () => { shouldWatchApiAction( location, datasetWithoutVersion, - call(newUntitled, datasetWithoutVersion, "foo.path.to.dataset", viewId) + call( + newUntitled, + datasetWithoutVersion, + "foo.path.to.dataset", + viewId, + undefined + ) ); }); }); @@ -239,7 +249,7 @@ describe("performLoadDataset saga", () => { }); next = loadTableDataGen.next(validDataset); // get job id - expect(next.value).to.be.eql(select(getExploreJobId)); + expect(next.value).to.be.eql(select(oldGetExploreJobId)); next = loadTableDataGen.next(jobId); //setExploreJobIdInProgress next = loadTableDataGen.next(); //spawn jobUpdateWatchers next = loadTableDataGen.next(); //update viewstate diff --git a/dac/ui/src/sagas/performLoadDataset.js b/dac/ui/src/sagas/performLoadDataset.js index c3d73f6e30..b1b1ba72d2 100644 --- a/dac/ui/src/sagas/performLoadDataset.js +++ b/dac/ui/src/sagas/performLoadDataset.js @@ -33,7 +33,6 @@ import { DataLoadError, explorePageChanged, jobUpdateWatchers, - loadDatasetMetadata, } from "sagas/runDataset"; import { EXPLORE_TABLE_ID } from "reducers/explore/view"; import { focusSqlEditor } from "@app/actions/explore/view"; @@ -41,8 +40,8 @@ import { 
getViewStateFromAction } from "@app/reducers/resources/view"; import { getFullDataset, getDatasetVersionFromLocation, - getExploreJobId, getTableDataRaw, + oldGetExploreJobId, } from "@app/selectors/explore"; import { getLocation } from "selectors/routing"; import { TRANSFORM_PEEK_START } from "@app/actions/explore/dataset/peek"; @@ -175,7 +174,7 @@ export function* loadTableData( const dataset = yield select(getFullDataset, datasetVersion); if (!dataset) return; // do not load a data if metadata is not loaded - const jobId = yield select(getExploreJobId); + const jobId = yield select(oldGetExploreJobId); const paginationUrl = dataset.get("paginationUrl"); if (!paginationUrl || !jobId) return; @@ -225,72 +224,6 @@ export function* loadTableData( } } -export function* listenToJobProgress( - datasetVersion, - jobId, - paginationUrl, - isRun, - datasetPath, - callback, - curIndex, - sessionId, - viewId -) { - let resetViewState = true; - let raceResult; - - // cancels any other data loads before beginning - yield call(cancelDataLoad); - - // track all preview queries - if (!isRun && datasetVersion) { - sonarEvents.jobPreview(); - } - - try { - yield put(setExploreJobIdInProgress(jobId, datasetVersion)); - yield spawn(jobUpdateWatchers, jobId); - yield put( - updateViewState(EXPLORE_TABLE_ID, { - isInProgress: true, - isFailed: false, - error: null, - }) - ); - - raceResult = yield race({ - dataLoaded: call( - loadDatasetMetadata, - datasetVersion, - jobId, - isRun, - paginationUrl, - datasetPath, - callback, - curIndex, - sessionId, - viewId - ), - isLoadCanceled: take([CANCEL_TABLE_DATA_LOAD, TRANSFORM_PEEK_START]), - locationChange: call(resetTableViewStateOnPageLeave), - }); - } catch (e) { - if (!(e instanceof DataLoadError)) { - throw e; - } - - resetViewState = false; - const viewState = yield call(getViewStateFromAction, e.response); - yield put(updateViewState(EXPLORE_TABLE_ID, viewState)); - } finally { - if (resetViewState) { - yield call(hideTableSpinner); - } - } - - return raceResult.dataLoaded ?? false; -} - const defaultViewState = { isInProgress: false, isFailed: false, @@ -369,7 +302,7 @@ export function* loadDataset( willLoadTable ) { const location = yield select(getLocation); - const { mode, tipVersion } = location.query || {}; + const { mode, tipVersion, refType, refValue } = location.query || {}; let apiAction; if (mode === "edit" || dataset.get("datasetVersion")) { //Set references after this actions is completed @@ -380,7 +313,9 @@ export function* loadDataset( tipVersion, forceDataLoad, sessionId, - willLoadTable + willLoadTable, + refType, + refValue ); } else { const loc = rmProjectBase(location.pathname); @@ -388,7 +323,13 @@ export function* loadDataset( const parentFullPath = decodeURIComponent( constructFullPath([pathnameParts[2]]) + "." + pathnameParts[3] ); - apiAction = yield call(newUntitled, dataset, parentFullPath, viewId); + apiAction = yield call( + newUntitled, + dataset, + parentFullPath, + viewId, + willLoadTable + ); } return apiAction; diff --git a/dac/ui/src/sagas/performLoadDatasetNew.ts b/dac/ui/src/sagas/performLoadDatasetNew.ts new file mode 100644 index 0000000000..07baa3f7c3 --- /dev/null +++ b/dac/ui/src/sagas/performLoadDatasetNew.ts @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { call, put, race, select, spawn, take } from "redux-saga/effects"; +import { getLocation } from "@app/selectors/routing"; +import { + cancelDataLoad, + CANCEL_TABLE_DATA_LOAD, + hideTableSpinner, + resetTableViewStateOnPageLeave, +} from "@app/sagas/performLoadDataset"; +import { DataLoadError, jobUpdateWatchers } from "@app/sagas/runDataset"; +import { loadDatasetMetadata } from "@app/sagas/runDatasetNew"; +import { newLoadExistingDataset } from "@app/actions/explore/datasetNew/edit"; +import { setExploreJobIdInProgress } from "@app/actions/explore/dataset/data"; +import { updateViewState } from "@app/actions/resources/index"; +import { TRANSFORM_PEEK_START } from "@app/actions/explore/dataset/peek"; +import { EXPLORE_TABLE_ID } from "@app/reducers/explore/view"; +import { getViewStateFromAction } from "@app/reducers/resources/view"; +// @ts-ignore +import { sonarEvents } from "dremio-ui-common/sonar/sonarEvents.js"; +import Immutable from "immutable"; + +export function* listenToJobProgress( + dataset: Immutable.Map, + datasetVersion: string, + jobId: string, + paginationUrl: string, + navigateOptions: Record, + isRun: boolean, + datasetPath: string, + callback: any, + curIndex: number, + sessionId: string, + viewId: string +): any { + let resetViewState = true; + let raceResult; + + // cancels any other data loads before beginning + yield call(cancelDataLoad); + + // track all preview queries + if (!isRun && datasetVersion) { + sonarEvents.jobPreview(); + } + + try { + yield put(setExploreJobIdInProgress(jobId, datasetVersion)); + yield spawn(jobUpdateWatchers, jobId); + yield put( + updateViewState(EXPLORE_TABLE_ID, { + isInProgress: true, + isFailed: false, + error: null, + }) + ); + + raceResult = yield race({ + dataLoaded: call( + loadDatasetMetadata, + dataset, + datasetVersion, + jobId, + paginationUrl, + navigateOptions, + isRun, + datasetPath, + callback, + curIndex, + sessionId, + viewId + ), + isLoadCanceled: take([CANCEL_TABLE_DATA_LOAD, TRANSFORM_PEEK_START]), + locationChange: call(resetTableViewStateOnPageLeave), + }); + } catch (e) { + if (!(e instanceof DataLoadError)) { + throw e; + } + + resetViewState = false; + const viewState = yield call(getViewStateFromAction, e.response); + yield put(updateViewState(EXPLORE_TABLE_ID, viewState)); + } finally { + if (resetViewState) { + yield call(hideTableSpinner); + } + } + + return raceResult.dataLoaded ?? 
false; +} + +export function* newLoadDataset( + dataset: Immutable.Map, + viewId: string, + sessionId: string +): any { + const location = yield select(getLocation); + const { tipVersion } = location.query || {}; + + return yield call( + newLoadExistingDataset, + dataset, + viewId, + tipVersion, + sessionId + ); +} diff --git a/dac/ui/src/sagas/performTransform-spec.js b/dac/ui/src/sagas/performTransform-spec.js index 2f70b229c6..2d03255831 100644 --- a/dac/ui/src/sagas/performTransform-spec.js +++ b/dac/ui/src/sagas/performTransform-spec.js @@ -144,6 +144,7 @@ describe("performTransform saga", () => { viewId: exploreViewState.get("viewId"), isRun: true, runningSql: "", + useOptimizedJobFlow: undefined, }; gen = handleRunDatasetSql({}); next = gen.next(); // yield dataset @@ -166,6 +167,7 @@ describe("performTransform saga", () => { viewId: exploreViewState.get("viewId"), forceDataLoad: true, runningSql: "", + useOptimizedJobFlow: undefined, }; gen = handleRunDatasetSql({ isPreview: true }); next = gen.next(); // yield dataset diff --git a/dac/ui/src/sagas/performTransform.js b/dac/ui/src/sagas/performTransform.js index a217ca4453..335e85bf0f 100644 --- a/dac/ui/src/sagas/performTransform.js +++ b/dac/ui/src/sagas/performTransform.js @@ -50,8 +50,8 @@ import { loadTableData, cancelDataLoad, loadDataset, - listenToJobProgress, } from "@app/sagas/performLoadDataset"; +import { listenToJobProgress } from "@app/sagas/performLoadDatasetNew"; import { transformHistoryCheck } from "sagas/transformHistoryCheck"; import { getExploreState, getExplorePageDataset } from "selectors/explore"; import { getExploreViewState } from "selectors/resources"; @@ -88,13 +88,15 @@ import { fetchFilteredJobsList, JOB_PAGE_NEW_VIEW_ID, resetFilteredJobsList, + resetUniqueSavingJob, } from "@app/actions/joblist/jobList"; import { extractSql, toQueryRange } from "@app/utils/statements/statement"; import { handlePostNewQueryJobSuccess, - postNewQueryJob, + newPerformTransformSingle, } from "./performTransformNew"; import { getFeatureFlag } from "@app/selectors/featureFlagsSelector"; +import { PHYSICAL_DATASET_TYPES } from "@app/constants/datasetTypes"; export default function* watchPerformTransform() { yield all([ @@ -132,6 +134,10 @@ export function* performTransform(payload) { isSaveViewAs, isRun, viewId, + forceDataLoad, + queryContext, + transformData, + useOptimizedJobFlow, } = payload; yield put(setIsMultiQueryRunning({ running: true })); @@ -153,6 +159,7 @@ export function* performTransform(payload) { // callback is passed in when clicking on actions if (!callback && indexToModify == undefined) { yield put(resetFilteredJobsList()); + yield put(resetUniqueSavingJob()); yield put(setQueryStatuses({ statuses: queryStatuses })); yield put(setQuerySelections({ selections })); yield put(setPreviousMultiSql({ sql: currentSql })); @@ -181,26 +188,53 @@ export function* performTransform(payload) { let willProceed = true; + const datasetType = dataset.get("datasetType"); + const isNotDataset = !dataset.get("datasetVersion") || (!dataset.get("datasetType") && !dataset.get("sql")); + const references = yield getNessieReferences(); + + const sql = !isSaveViewAs + ? 
queryStatuses[i].sqlStatement : runningSql || currentSql; + + const finalTransformData = yield call( + getTransformData, + dataset, + sql || dataset.get("sql"), + queryContext, + transformData, + references + ); + const useNewQueryFlow = yield select( getFeatureFlag, "job_status_in_sql_runner" ); - // the process for running/previewing a new query is handled differently - // submit job request -> listen to job progress -> fetch dataset data - // if job fails -> call jobs API to fetch error details - if (isNotDataset && useNewQueryFlow !== "DISABLED") { - const [response] = yield call(postNewQueryJob, { - ...payload, - sessionId, - sqlStatement: queryStatuses[i].sqlStatement, - }); + // The process for running/previewing a new query is now handled differently. + // submit job request -> listen to job progress -> fetch dataset data, + // if job fails -> call jobs API to fetch error details. + // useOptimizedJobFlow guarantees that this logic is only followed when clicking on run/preview + // OR trying to save a new or modified query that hasn't been run/previewed yet + if ( + useNewQueryFlow !== "DISABLED" && + (useOptimizedJobFlow || + (isSaveViewAs && (isNotDataset || finalTransformData))) + ) { + const [response, navigateOptions, newVersion] = yield call( + newPerformTransformSingle, + { + ...payload, + sessionId, + sqlStatement: queryStatuses[i].sqlStatement, + finalTransformData, + references, + } + ); - // TODO: this can be cleaned up by being put in a saga since we fetch queryStatuses in multiple places exploreState = yield select(getExploreState); if (!exploreState) { @@ -220,32 +254,47 @@ export function* performTransform(payload) { // handle successful job submission if (response?.payload) { + let newDataset = undefined; let datasetPath = ""; let datasetVersion = ""; let jobId = ""; let paginationUrl = ""; // destructure response and update the queryStatuses object in Redux - [datasetPath, datasetVersion, jobId, paginationUrl, sessionId] = - yield call(handlePostNewQueryJobSuccess, { - response, - queryStatuses: mostRecentStatuses, - curIndex: i, - indexToModify, - callback, - }); - - if (!isSaveViewAs) { - // add job definition to jobs table - yield put(fetchFilteredJobsList(jobId, JOB_PAGE_NEW_VIEW_ID)); - } + [ + newDataset, + datasetPath, + datasetVersion, + jobId, + paginationUrl, + sessionId, + ] = yield call(handlePostNewQueryJobSuccess, { + response, + newVersion, + queryStatuses: mostRecentStatuses, + curIndex: i, + indexToModify, + callback, + }); + + // add job definition to jobs table + yield put( + fetchFilteredJobsList( + jobId, + JOB_PAGE_NEW_VIEW_ID, + undefined, + isSaveViewAs + ) + ); // start the job listener and track job progress in Redux willProceed = yield call( listenToJobProgress, + newDataset, datasetVersion, jobId, paginationUrl, + navigateOptions, isRun, datasetPath, callback, @@ -279,9 +328,20 @@ export function* performTransform(payload) { continue; } + const isSavingPDS = + isSaveViewAs && PHYSICAL_DATASET_TYPES.has(datasetType); + + // need to call the /preview endpoint when trying to save a PDS that wasn't run/previewed first const [response, newVersion] = yield call( performTransformSingle, - { ...payload, sessionId }, + { + ...payload, + sessionId, + forceDataLoad: + isSavingPDS && preUpdatedQueryStatuses?.length === 0 + ? 
true + : forceDataLoad, + }, queryStatuses[i] ); @@ -538,7 +598,11 @@ function handlePerformTransformError(e) { ); } -export function* handleRunDatasetSql({ isPreview, selectedSql }) { +export function* handleRunDatasetSql({ + isPreview, + selectedSql, + useOptimizedJobFlow, +}) { const dataset = yield select(getExplorePageDataset); const exploreViewState = yield select(getExploreViewState); const exploreState = yield select(getExploreState); @@ -554,6 +618,7 @@ export function* handleRunDatasetSql({ isPreview, selectedSql }) { runningSql, queryContext, viewId, + useOptimizedJobFlow, }; if (isPreview) { diff --git a/dac/ui/src/sagas/performTransformNew.ts b/dac/ui/src/sagas/performTransformNew.ts index eb11e649d0..63bbbd078c 100644 --- a/dac/ui/src/sagas/performTransformNew.ts +++ b/dac/ui/src/sagas/performTransformNew.ts @@ -18,28 +18,38 @@ import { call, put, select } from "redux-saga/effects"; import { newTmpUntitledSql, newTmpUntitledSqlAndRun, -} from "@app/actions/explore/dataset/new"; -import exploreUtils from "@app/utils/explore/exploreUtils"; +} from "@app/actions/explore/datasetNew/new"; +import { + newRunDataset, + newTransformAndRunDataset, +} from "@app/actions/explore/datasetNew/run"; +import { newRunTableTransform } from "@app/actions/explore/datasetNew/transform"; +import { newLoadDataset } from "@app/sagas/performLoadDatasetNew"; import { - PostNewQueryJobProps, - GenerateRequestForNewDatasetProps, + NewGetFetchDatasetMetaActionProps, + NewPerformTransformSingleProps, HandlePostNewQueryJobSuccessProps, -} from "@app/utils/performTransform/newTransform"; -import { getNessieReferences } from "./nessie"; -import { cancelDataLoad } from "./performLoadDataset"; +} from "@app/types/performTransformNewTypes"; +import { cancelDataLoad } from "@app/sagas/performLoadDataset"; import { initializeExploreJobProgress } from "@app/actions/explore/dataset/data"; -import { fetchJobMetadata } from "./transformWatcher"; +import { submitTransformationJob } from "@app/sagas/transformWatcherNew"; import { setQueryStatuses } from "@app/actions/explore/view"; -import apiUtils from "@app/utils/apiUtils/apiUtils"; import { getExploreState } from "@app/selectors/explore"; -import { cloneDeep } from "lodash"; import { loadJobDetails } from "@app/actions/jobs/jobs"; import { JOB_DETAILS_VIEW_ID } from "@app/actions/joblist/jobList"; -import { showFailedJobDialog } from "./performTransform"; +import { showFailedJobDialog } from "@app/sagas/performTransform"; +// @ts-ignore +import { updateTransformData } from "@inject/actions/explore/dataset/updateLocation"; +import { EXPLORE_TABLE_ID } from "@app/reducers/explore/view"; +import { resetViewState } from "@app/actions/resources"; import { addNotification } from "@app/actions/notification"; +import { cloneDeep } from "lodash"; import Immutable from "immutable"; +import apiUtils from "@app/utils/apiUtils/apiUtils"; +import exploreUtils from "@app/utils/explore/exploreUtils"; -export function* postNewQueryJob({ +export function* newPerformTransformSingle({ + dataset, currentSql, queryContext, viewId, @@ -48,83 +58,144 @@ export function* postNewQueryJob({ isSaveViewAs, sessionId, sqlStatement, -}: PostNewQueryJobProps): any { + nextTable, + finalTransformData, + references, +}: NewPerformTransformSingleProps): any { try { - const { apiAction } = yield call(generateRequestForNewDataset, { - sql: !isSaveViewAs ? 
sqlStatement : runningSql || currentSql, - queryContext, - viewId, - isRun, - sessionId, - noUpdate: true, - }); + const { apiAction, navigateOptions, newVersion } = yield call( + newGetFetchDatasetMetaAction, + { + dataset, + currentSql: !isSaveViewAs ? sqlStatement : runningSql || currentSql, + queryContext, + viewId, + isRun, + sessionId, + nextTable, + finalTransformData, + references, + } + ); let response; if (apiAction) { yield call(cancelDataLoad); yield put(initializeExploreJobProgress(isRun)); - - response = yield call(fetchJobMetadata, apiAction, viewId); + response = yield call(submitTransformationJob, apiAction, viewId); } - return [response]; + return [response, navigateOptions, newVersion]; } catch (e) { return [e]; } } -export function* generateRequestForNewDataset({ - sql, +export function* newGetFetchDatasetMetaAction({ + dataset, + currentSql, queryContext, viewId, isRun, sessionId, - noUpdate, -}: GenerateRequestForNewDatasetProps): any { - const references = yield getNessieReferences(); - const newVersion = exploreUtils.getNewDatasetVersion(); + nextTable, + finalTransformData, + references, +}: NewGetFetchDatasetMetaActionProps): any { + const sql = currentSql || dataset.get("sql"); + const isNotDataset = + !dataset.get("datasetVersion") || + (!dataset.get("datasetType") && !dataset.get("sql")); + let apiAction; + let navigateOptions; + let newVersion = exploreUtils.getNewDatasetVersion(); if (isRun) { - apiAction = yield call( - newTmpUntitledSqlAndRun, - sql, - queryContext, - viewId, - references, - sessionId, - newVersion, - noUpdate - ); + if (isNotDataset) { + apiAction = yield call( + newTmpUntitledSqlAndRun, + sql, + queryContext, + viewId, + references, + sessionId, + newVersion + ); + + navigateOptions = { changePathName: true }; + } else if (finalTransformData) { + updateTransformData(finalTransformData); + yield put(resetViewState(EXPLORE_TABLE_ID)); + + apiAction = yield call( + newTransformAndRunDataset, + dataset, + finalTransformData, + viewId, + sessionId, + newVersion + ); + } else { + apiAction = yield call(newRunDataset, dataset, viewId, sessionId); + navigateOptions = { replaceNav: true, preserveTip: true }; + newVersion = dataset.get("datasetVersion"); + } } else { - apiAction = yield call( - newTmpUntitledSql, - sql, - queryContext?.toJS(), - viewId, - references, - sessionId, - newVersion, - noUpdate - ); + if (isNotDataset) { + apiAction = yield call( + newTmpUntitledSql, + sql, + queryContext?.toJS(), + viewId, + references, + sessionId, + newVersion + ); + + navigateOptions = { changePathName: true }; + } else if (finalTransformData) { + apiAction = yield call( + newRunTableTransform, + dataset, + finalTransformData, + viewId, + nextTable, + sessionId, + newVersion + ); + } else { + apiAction = yield call(newLoadDataset, dataset, viewId, sessionId); + navigateOptions = { replaceNav: true, preserveTip: true }; + newVersion = dataset.get("datasetVersion"); + } } - return { apiAction }; + return { apiAction, navigateOptions, newVersion }; } export function* handlePostNewQueryJobSuccess({ response, + newVersion, queryStatuses, curIndex, callback, }: HandlePostNewQueryJobSuccessProps) { - const { datasetPath, datasetVersion, jobId, paginationUrl, sessionId } = - apiUtils.getFromNewQueryResponse(response); + const { + dataset, + datasetPath, + datasetVersion, + jobId, + paginationUrl, + sessionId, + } = apiUtils.getFromJSONResponse(response); + + const versionToUse = datasetVersion ?? 
newVersion; const mostRecentStatuses = queryStatuses; mostRecentStatuses[curIndex].jobId = jobId; - mostRecentStatuses[curIndex].version = datasetVersion; + mostRecentStatuses[curIndex].version = versionToUse; if (mostRecentStatuses[curIndex].cancelled) { mostRecentStatuses[curIndex].cancelled = false; @@ -136,7 +207,7 @@ export function* handlePostNewQueryJobSuccess({ yield put(setQueryStatuses({ statuses: mostRecentStatuses })); } - return [datasetPath, datasetVersion, jobId, paginationUrl, sessionId]; + return [dataset, datasetPath, versionToUse, jobId, paginationUrl, sessionId]; } export function* fetchJobFailureInfo( @@ -151,15 +222,15 @@ export function* fetchJobFailureInfo( // @ts-ignore const jobDetails = yield put(loadJobDetails(jobId, JOB_DETAILS_VIEW_ID)); const jobDetailsResponse = yield jobDetails; - const error = jobDetailsResponse.payload.getIn([ + const failureInfo = jobDetailsResponse.payload.getIn([ "entities", "jobDetails", jobId, "failureInfo", - "errors", - 0, ]); + const error = failureInfo.getIn(["errors", 0]); + const cancellationInfo = jobDetailsResponse.payload.getIn([ "entities", "jobDetails", @@ -174,15 +245,16 @@ export function* fetchJobFailureInfo( if (cancellationInfo) { willProceed = false; } else { - const isParseError = - error?.get("message") === "Failure parsing the query."; + const isParseError = failureInfo.get("type") === "PARSE"; // if a job wasn't cancelled but still failed, show the dialog willProceed = yield call( showFailedJobDialog, curIndex, mostRecentStatuses[curIndex].sqlStatement, - isParseError ? undefined : error?.get("message") + isParseError + ? undefined + : error?.get("message") ?? failureInfo.get("message") ); } } else if (callback) { @@ -190,7 +262,7 @@ export function* fetchJobFailureInfo( yield put( addNotification( - apiUtils.getThrownErrorException(error ?? ""), + apiUtils.getThrownErrorException(error ?? 
failureInfo), "error", 10 ) ); @@ -207,9 +279,14 @@ if (cancellationInfo) { mostRecentStatuses[curIndex].cancelled = true; - } else { + } else if ( + failureInfo.has("errors") && + failureInfo.get("errors").size > 0 + ) { // @ts-ignore mostRecentStatuses[curIndex].error = new Immutable.Map(error); + } else { + mostRecentStatuses[curIndex].error = failureInfo; } yield put(setQueryStatuses({ statuses: mostRecentStatuses })); diff --git a/dac/ui/src/sagas/runDataset-spec.js b/dac/ui/src/sagas/runDataset-spec.js index 7c1e08130e..dab04268fd 100644 --- a/dac/ui/src/sagas/runDataset-spec.js +++ b/dac/ui/src/sagas/runDataset-spec.js @@ -178,7 +178,7 @@ describe("runDataset saga", () => { } // throw an exception - // Todo: Fix this to use .to.throw instead (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix this to use .to.throw instead (DX-30942) try { checkFinallyBlock(response); } catch (ex) { diff --git a/dac/ui/src/sagas/runDataset.js b/dac/ui/src/sagas/runDataset.js index 6a75ab413c..34fef5a131 100644 --- a/dac/ui/src/sagas/runDataset.js +++ b/dac/ui/src/sagas/runDataset.js @@ -22,8 +22,6 @@ import { EXPLORE_PAGE_EXIT, updateExploreJobProgress, updateJobRecordCount, - stopExplorePageListener, - startExplorePageListener, } from "actions/explore/dataset/data"; import { updateHistoryWithJobState } from "actions/explore/history"; @@ -46,11 +44,6 @@ import { loadJobDetails } from "@app/actions/jobs/jobs"; import { intl } from "@app/utils/intl"; import { addNotification } from "@app/actions/notification"; import { JOB_DETAILS_VIEW_ID } from "@app/actions/joblist/jobList"; -import { loadNewDataset } from "actions/explore/dataset/edit"; -import { fetchDatasetMetadata } from "./transformWatcher"; -import { navigateToNextDataset } from "@app/actions/explore/dataset/common"; -import apiUtils from "@app/utils/apiUtils/apiUtils"; -import { fetchJobFailureInfo } from "./performTransformNew"; const getJobDoneActionFilter = (jobId) => (action) => (action.type === WS_MESSAGE_JOB_PROGRESS ||
false; - const newResponse = jobDone?.newResponse; - - if (newResponse) { - yield put(stopExplorePageListener()); - yield put( - navigateToNextDataset(newResponse, { - replaceNav: true, - preserveTip: true, - newJobId: jobId, - }) - ); - yield put(startExplorePageListener(false)); - } - - if (callback && newResponse !== undefined) { - const resultDataset = apiUtils.getEntityFromResponse( - "datasetUI", - newResponse - ); - - yield call(callback, true, resultDataset); - } - - return willProceed; - } -} - export class DataLoadError { constructor(response) { this.name = "DataLoadError"; @@ -181,13 +116,6 @@ export class DataLoadError { } } -export class JobFailedError { - constructor(response) { - this.name = "JobFailedError"; - this.response = response; - } -} - //export for tests /** * Registers a listener for a job progress and triggers data load when job is completed @@ -273,108 +201,6 @@ export function* waitForRunToComplete( } } -export function* handlePendingMetadataFetch( - datasetVersion, - paginationUrl, - jobId, - datasetPath, - callback, - curIndex, - sessionId, - viewId -) { - let willProceed = true; - let newResponse; - - try { - if (!socket.isOpen) { - const raceResult = yield race({ - socketOpen: take(WS_CONNECTION_OPEN), - stop: take(LOGOUT_USER_SUCCESS), - }); - - if (raceResult.stop) { - return; - } - } - - yield call([socket, socket.startListenToJobProgress], jobId, true); - - const { jobDone } = yield race({ - jobProgress: call(watchUpdateHistoryOnJobProgress, datasetVersion, jobId), - jobDone: take(getJobDoneActionFilter(jobId)), - locationChange: call(explorePageChanged), - }); - - if (jobDone) { - // if a job fails, throw an error to avoid calling the /preview endpoint - if (jobDone.payload?.update?.state === "FAILED") { - const failureInfo = jobDone.payload.update.failureInfo; - throw new JobFailedError( - failureInfo?.errors?.[0]?.message || failureInfo?.message - ); - } - - const apiAction = yield call( - loadNewDataset, - datasetPath, - sessionId, - datasetVersion, - jobId, - paginationUrl, - viewId - ); - - newResponse = yield call(fetchDatasetMetadata, apiAction, viewId); - - if (!callback) { - const promise = yield put( - loadNextRows(datasetVersion, paginationUrl, 0) - ); - const response = yield promise; - const exploreState = yield select(getExploreState); - const queryStatuses = cloneDeep( - exploreState?.view?.queryStatuses ?? [] - ); - - if (response?.error) { - if (queryStatuses.length) { - const index = queryStatuses.findIndex( - (query) => query.jobId === jobId - ); - - if (index > -1 && !queryStatuses[index].error) { - const newStatuses = cloneDeep(queryStatuses); - newStatuses[index].error = response; - yield put(setQueryStatuses({ statuses: newStatuses })); - } - } - } - - if (!response || response.error) { - throw new DataLoadError(response); - } - - yield put( - updateHistoryWithJobState( - datasetVersion, - jobDone.payload.update.state - ) - ); - yield put(updateExploreJobProgress(jobDone.payload.update)); - yield call(genLoadJobDetails, jobId, queryStatuses); - } - } - } catch (e) { - // if a job fails, fetch the correct job failure info using the Jobs API - willProceed = yield fetchJobFailureInfo(jobId, curIndex, callback); - } finally { - yield call([socket, socket.stopListenToJobProgress], jobId); - } - - return { willProceed, newResponse }; -} - /** * Returns a redux action that treated as explore page url change. 
The action could be one of the following cases: * 1) Navigation out of explore page has happen diff --git a/dac/ui/src/sagas/runDatasetNew.ts b/dac/ui/src/sagas/runDatasetNew.ts new file mode 100644 index 0000000000..622b1c657b --- /dev/null +++ b/dac/ui/src/sagas/runDatasetNew.ts @@ -0,0 +1,237 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { call, put, race, select, take } from "redux-saga/effects"; +import { + DataLoadError, + explorePageChanged, + genLoadJobDetails, + watchUpdateHistoryOnJobProgress, +} from "@app/sagas/runDataset"; +import { fetchJobFailureInfo } from "@app/sagas/performTransformNew"; +import { fetchDatasetMetadata } from "@app/sagas/transformWatcherNew"; +import { LOGOUT_USER_SUCCESS } from "@app/actions/account"; +import { navigateToNextDataset } from "@app/actions/explore/dataset/common"; +import { + loadNextRows, + startExplorePageListener, + stopExplorePageListener, + updateExploreJobProgress, +} from "@app/actions/explore/dataset/data"; +import { loadNewDataset } from "@app/actions/explore/datasetNew/edit"; +import { setQueryStatuses } from "@app/actions/explore/view"; +import { updateHistoryWithJobState } from "@app/actions/explore/history"; +import { getExploreState, getTableDataRaw } from "@app/selectors/explore"; +import socket, { + // @ts-ignore + WS_MESSAGE_JOB_PROGRESS, + // @ts-ignore + WS_MESSAGE_QV_JOB_PROGRESS, + // @ts-ignore + WS_CONNECTION_OPEN, +} from "@inject/utils/socket"; +import { cloneDeep } from "lodash"; +import Immutable from "immutable"; +import apiUtils from "@app/utils/apiUtils/apiUtils"; + +class JobFailedError { + name: string; + response: string; + + constructor(response: string) { + this.name = "JobFailedError"; + this.response = response; + } +} + +const getJobDoneActionFilter = (jobId: string) => (action: Record) => + (action.type === WS_MESSAGE_JOB_PROGRESS || + action.type === WS_MESSAGE_QV_JOB_PROGRESS) && + action.payload.id.id === jobId && + action.payload.update.isComplete; + +export function* loadDatasetMetadata( + dataset: Immutable.Map, + datasetVersion: string, + jobId: string, + paginationUrl: string, + navigateOptions: Record, + isRun: boolean, + datasetPath: string, + callback: any, + curIndex: number, + sessionId: string, + viewId: string +): any { + const tableData = yield select(getTableDataRaw, datasetVersion); + const rows = tableData?.get("rows"); + + if (isRun || !rows) { + const { jobDone } = yield race({ + jobDone: call( + handlePendingMetadataFetch, + dataset, + datasetVersion, + jobId, + paginationUrl, + datasetPath, + callback, + curIndex, + sessionId, + viewId + ), + locationChange: call(explorePageChanged), + }); + + const willProceed = jobDone?.willProceed ?? 
false; + const newResponse = jobDone?.newResponse; + + if (newResponse) { + yield put(stopExplorePageListener()); + yield put( + // @ts-ignore + navigateToNextDataset(newResponse, { + ...navigateOptions, + newJobId: jobId, + }) + ); + yield put(startExplorePageListener(false)); + } + + if (callback && newResponse !== undefined) { + const resultDataset = apiUtils.getEntityFromResponse( + "datasetUI", + newResponse + ); + + yield call(callback, true, resultDataset); + } + + return willProceed; + } +} + +export function* handlePendingMetadataFetch( + dataset: Immutable.Map, + datasetVersion: string, + jobId: string, + paginationUrl: string, + datasetPath: string, + callback: any, + curIndex: number, + sessionId: string, + viewId: string +): any { + let willProceed = true; + let newResponse; + + try { + // @ts-ignore + if (!socket.isOpen) { + const raceResult = yield race({ + socketOpen: take(WS_CONNECTION_OPEN), + stop: take(LOGOUT_USER_SUCCESS), + }); + + if (raceResult.stop) { + return; + } + } + + // @ts-ignore + yield call([socket, socket.startListenToJobProgress], jobId, true); + + const { jobDone } = yield race({ + jobProgress: call(watchUpdateHistoryOnJobProgress, datasetVersion, jobId), + jobDone: take(getJobDoneActionFilter(jobId)), + locationChange: call(explorePageChanged), + }); + + if (jobDone) { + // if a job fails, throw an error to avoid calling the /preview endpoint + if (jobDone.payload?.update?.state === "FAILED") { + const failureInfo = jobDone.payload.update.failureInfo; + throw new JobFailedError( + failureInfo?.errors?.[0]?.message || failureInfo?.message + ); + } + + const apiAction = yield call( + loadNewDataset, + dataset, + datasetPath, + sessionId, + datasetVersion, + jobId, + paginationUrl, + viewId + ); + + if (apiAction === undefined) { + throw new JobFailedError("Failed to fetch dataset."); + } + + newResponse = yield call(fetchDatasetMetadata, apiAction, viewId); + + if (!callback) { + const promise = yield put( + // @ts-ignore + loadNextRows(datasetVersion, paginationUrl, 0) + ); + const response = yield promise; + const exploreState = yield select(getExploreState); + const queryStatuses = cloneDeep( + exploreState?.view?.queryStatuses ?? 
[] + ); + + if (response?.error) { + if (queryStatuses.length) { + const index = queryStatuses.findIndex( + (query: any) => query.jobId === jobId + ); + + if (index > -1 && !queryStatuses[index].error) { + const newStatuses = cloneDeep(queryStatuses); + newStatuses[index].error = response; + yield put(setQueryStatuses({ statuses: newStatuses })); + } + } + } + + if (!response || response.error) { + throw new DataLoadError(response); + } + + yield put( + updateHistoryWithJobState( + datasetVersion, + jobDone.payload.update.state + ) + ); + yield put(updateExploreJobProgress(jobDone.payload.update)); + yield call(genLoadJobDetails, jobId, queryStatuses); + } + } + } catch (e) { + // if a job fails, fetch the correct job failure info using the Jobs API + willProceed = yield fetchJobFailureInfo(jobId, curIndex, callback); + } finally { + // @ts-ignore + yield call([socket, socket.stopListenToJobProgress], jobId); + } + + return { willProceed, newResponse }; +} diff --git a/dac/ui/src/sagas/transformWatcher-spec.js b/dac/ui/src/sagas/transformWatcher-spec.js index 88f206ef49..f45b803930 100644 --- a/dac/ui/src/sagas/transformWatcher-spec.js +++ b/dac/ui/src/sagas/transformWatcher-spec.js @@ -96,7 +96,7 @@ describe("transformWatcher saga", () => { expect(next.value).to.be.eql(put(completeDatasetMetadataLoad())); next = gen.next(); }; - // Todo: Fix this to use .to.throw instead (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix this to use .to.throw instead (DX-30942) try { testException(); } catch (ex) { @@ -116,7 +116,7 @@ describe("transformWatcher saga", () => { }); it("should throw TransformCanceledError if cancel wins the race", () => { - // Todo: Fix this to use .to.throw instead (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix this to use .to.throw instead (DX-30942) try { gen.next({ cancel: "cancel" }); } catch (ex) { @@ -130,7 +130,7 @@ describe("transformWatcher saga", () => { it("should hide the modal and throw TransformCanceledError if resetNewQuery wins the race", () => { next = gen.next({ resetNewQuery: true }); expect(next.value).to.eql(put(hideConfirmationDialog())); - // Todo: Fix this to use .to.throw instead (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix this to use .to.throw instead (DX-30942) try { next = gen.next(); } catch (ex) { @@ -152,7 +152,7 @@ describe("transformWatcher saga", () => { it("should hide the modal and throw TransformCanceledByLocationChangeError if location change wins", () => { next = gen.next({ locationChange: "locationChange" }); expect(next.value).to.eql(put(hideConfirmationDialog())); - // Todo: Fix this to use .to.throw instead (https://dremio.atlassian.net/browse/DX-30942) + // Todo: Fix this to use .to.throw instead (DX-30942) try { next = gen.next(); } catch (ex) { diff --git a/dac/ui/src/sagas/transformWatcher.js b/dac/ui/src/sagas/transformWatcher.js index 7c493f212e..e3f40f607c 100644 --- a/dac/ui/src/sagas/transformWatcher.js +++ b/dac/ui/src/sagas/transformWatcher.js @@ -84,33 +84,6 @@ export function* transformThenNavigate(action, viewId, navigateOptions) { } } -export function* fetchJobMetadata(action, viewId) { - const response = yield call(performWatchedTransform, action, viewId); - - if (response && !response.error) { - return response; - } - - yield put(failedExploreJobProgress()); - throw new TransformFailedError(response); -} - -export function* fetchDatasetMetadata(action, viewId) { - try { - yield put(startDatasetMetadataLoad()); - const response = yield call(performWatchedTransform, action, 
viewId);
-
-    if (response && !response.error) {
-      return response;
-    }
-
-    yield put(failedExploreJobProgress());
-    throw new TransformFailedError(response);
-  } finally {
-    yield put(completeDatasetMetadataLoad());
-  }
-}
-
 //export for tests
 export function* performWatchedTransform(apiAction, viewId) {
   invariant(viewId, "viewId param is required for performWatchedTransform");
diff --git a/dac/ui/src/sagas/transformWatcherNew.ts b/dac/ui/src/sagas/transformWatcherNew.ts
new file mode 100644
index 0000000000..956743df4e
--- /dev/null
+++ b/dac/ui/src/sagas/transformWatcherNew.ts
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { call, put } from "redux-saga/effects";
+import {
+  performWatchedTransform,
+  TransformFailedError,
+} from "@app/sagas/transformWatcher";
+import { failedExploreJobProgress } from "@app/actions/explore/dataset/data";
+import {
+  completeDatasetMetadataLoad,
+  startDatasetMetadataLoad,
+} from "@app/actions/explore/view";
+
+export function* submitTransformationJob(
+  action: any,
+  viewId: string
+): Record<string, any> {
+  const response = yield call(performWatchedTransform, action, viewId);
+
+  if (response && !response.error) {
+    return response;
+  }
+
+  yield put(failedExploreJobProgress());
+  throw new TransformFailedError(response);
+}
+
+export function* fetchDatasetMetadata(
+  action: any,
+  viewId: string
+): Record<string, any> {
+  try {
+    yield put(startDatasetMetadataLoad());
+    const response = yield call(performWatchedTransform, action, viewId);
+
+    if (response && !response.error) {
+      return response;
+    }
+
+    yield put(failedExploreJobProgress());
+    throw new TransformFailedError(response);
+  } finally {
+    yield put(completeDatasetMetadataLoad());
+  }
+}
diff --git a/dac/ui/src/selectors/explore.js b/dac/ui/src/selectors/explore.js
index c3d9de4196..f415f351de 100644
--- a/dac/ui/src/selectors/explore.js
+++ b/dac/ui/src/selectors/explore.js
@@ -99,22 +99,34 @@ export function getPaginationUrl(state, datasetVersion) {
   return paginationUrl || datasetVersion;
 }
 
+export function oldGetExploreJobId(state) {
+  // this selector will have to change once we move jobId out of fullDataset and load it prior to metadata
+  const location = getLocation(state);
+  const version = getDatasetVersionFromLocation(location);
+  const fullDataset = getFullDataset(state, version);
+  return fullDataset ?
fullDataset.getIn(["jobId", "id"], "") : ""; +} + export function getExploreJobId(state) { // this selector will have to change once we move jobId out of fullDataset and load it prior to metadata const location = getLocation(state); const version = getDatasetVersionFromLocation(location); const fullDataset = getFullDataset(state, version); + const jobIdFromDataset = fullDataset?.getIn(["jobId", "id"]); // a dataset is not returned in the first response of the new_tmp_untitled_sql endpoints // so we need to get jobId from the jobList where the last job is the most recently submitted const jobListArray = getJobList(state).toArray(); - const jobIdFromList = jobListArray?.[jobListArray.length - 1]?.get("id"); + const jobIdFromList = + jobListArray?.[jobListArray.length - 1]?.get("id") ?? ""; - return fullDataset - ? fullDataset.getIn(["jobId", "id"], "") - : jobIdFromList + return jobIdFromDataset !== jobIdFromList && jobIdFromList ? jobIdFromList - : ""; + : jobIdFromDataset; +} + +export function getSavingJob(state) { + return state.jobs.jobs.get("uniqueSavingJob"); } export function getPaginationJobId(state, datasetVersion) { diff --git a/dac/ui/src/selectors/roles.ts b/dac/ui/src/selectors/roles.ts new file mode 100644 index 0000000000..ceb612873b --- /dev/null +++ b/dac/ui/src/selectors/roles.ts @@ -0,0 +1,17 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export const getFilteredRolesAndUsers = (value: any) => {}; diff --git a/dac/ui/src/selectors/supportFlags.js b/dac/ui/src/selectors/supportFlags.js index 130a84cadb..7ff3f191bb 100644 --- a/dac/ui/src/selectors/supportFlags.js +++ b/dac/ui/src/selectors/supportFlags.js @@ -16,5 +16,5 @@ export const getSupportFlags = (state) => { const supportFlags = state.supportFlags; - return supportFlags || []; + return supportFlags || {}; }; diff --git a/dac/ui/src/selectors/tree.js b/dac/ui/src/selectors/tree.js index 704ee091f2..5d56cf2bd2 100644 --- a/dac/ui/src/selectors/tree.js +++ b/dac/ui/src/selectors/tree.js @@ -42,6 +42,10 @@ const getStarredResourcesData = (state) => { return state.resources.stars.get("starResourceList") || Immutable.List(); }; +const getResourceTreeModalData = (state) => { + return state.resources.entities.get("treeModal") || Immutable.List(); +}; + export const getStarredResources = createSelector( [getStarredResourcesData], (starredResourceList) => { @@ -49,6 +53,13 @@ export const getStarredResources = createSelector( } ); +export const getResourceTreeModal = createSelector( + [getResourceTreeModalData], + (resourceModalList) => { + return Immutable.List(resourceModalList); + } +); + export const getResourceTree = createSelector([getResourceTreeData], (tree) => tree.sortBy((t) => t.get("type") !== "HOME" && t.get("name")) ); diff --git a/dac/ui/src/services/nessie/client/apis/DefaultApi.ts b/dac/ui/src/services/nessie/client/apis/V1Api.ts similarity index 86% rename from dac/ui/src/services/nessie/client/apis/DefaultApi.ts rename to dac/ui/src/services/nessie/client/apis/V1Api.ts index 83ac28f022..9eda5a3c39 100644 --- a/dac/ui/src/services/nessie/client/apis/DefaultApi.ts +++ b/dac/ui/src/services/nessie/client/apis/V1Api.ts @@ -1,10 +1,10 @@ /* tslint:disable */ /* eslint-disable */ /** - * nessie-quarkus API + * Nessie API * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) * - * The version of the OpenAPI document: 0.30.0 + * The version of the OpenAPI document: ${project.version} * * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
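 *
 * Editorial note (not part of the generated header): after this file's rename, call
 * sites construct the client via the new class name. A minimal sketch, assuming the
 * generated runtime's Configuration class and a hypothetical basePath:
 *   import { V1Api } from "@app/services/nessie/client/apis/V1Api";
 *   import { Configuration } from "@app/services/nessie/client/runtime";
 *   const api = new V1Api(new Configuration({ basePath: "/nessie-proxy/v1" }));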
@@ -15,31 +15,33 @@ import * as runtime from '../runtime'; import { - Branch, - Content, - DiffResponse, - EntriesResponse, + BranchV1, + ContentV1, + DiffResponseV1, + EntriesResponseV1, FetchOption, GetMultipleContentsRequest, - GetMultipleContentsResponse, - GetNamespacesResponse, - LogResponse, - Merge, + GetMultipleContentsResponseV1, + GetNamespacesResponseV1, + LogResponseV1, + Merge1, + MergeResponseV1, Namespace, NamespaceUpdate, - NessieConfiguration, + NamespaceV1, + NessieConfigurationV1, Operations, - RefLogResponse, Reference, ReferenceType, - ReferencesResponse, - Transplant, + ReferenceV1, + ReferencesResponseV1, + Transplant1, } from '../models'; export interface AssignReferenceRequest { referenceName: string; referenceType: ReferenceType; - expectedHash?: string; + expectedHash: string; reference?: Reference; } @@ -70,7 +72,7 @@ export interface DeleteNamespaceRequest { export interface DeleteReferenceRequest { referenceName: string; referenceType: ReferenceType; - expectedHash?: string; + expectedHash: string; } export interface GetAllReferencesRequest { @@ -97,10 +99,8 @@ export interface GetContentRequest { } export interface GetDiffRequest { - fromHashOnRef: string; - fromRef: string; - toHashOnRef: string; - toRef: string; + fromRefWithHash: string; + toRefWithHash: string; } export interface GetEntriesRequest { @@ -145,15 +145,15 @@ export interface GetReferenceByNameRequest { export interface MergeRefIntoBranchRequest { branchName: string; - expectedHash?: string; - merge?: Merge; + expectedHash: string; + merge1?: Merge1; } export interface TransplantCommitsIntoBranchRequest { branchName: string; - expectedHash?: string; + expectedHash: string; message?: string; - transplant?: Transplant; + transplant1?: Transplant1; } export interface UpdatePropertiesRequest { @@ -166,7 +166,7 @@ export interface UpdatePropertiesRequest { /** * */ -export class DefaultApi extends runtime.BaseAPI { +export class V1Api extends runtime.BaseAPI { /** * This operation takes the name of the named reference to reassign and the hash and the name of a named-reference via which the caller has access to that hash. @@ -181,6 +181,10 @@ export class DefaultApi extends runtime.BaseAPI { throw new runtime.RequiredError('referenceType','Required parameter requestParameters.referenceType was null or undefined when calling assignReference.'); } + if (requestParameters.expectedHash === null || requestParameters.expectedHash === undefined) { + throw new runtime.RequiredError('expectedHash','Required parameter requestParameters.expectedHash was null or undefined when calling assignReference.'); + } + const queryParameters: any = {}; if (requestParameters.expectedHash !== undefined) { @@ -213,7 +217,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Commit multiple operations against the given branch expecting that branch to have the given hash as its latest commit. The hash in the successful response contains the hash of the commit that contains the operations of the invocation. 
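     *
     * Editorial sketch (not generated code): a hypothetical call on the 'api' instance
     * from the construction note above; hash and operation values are placeholders and
     * the request fields are assumed from the generated models.
     *   await api.commitMultipleOperations({
     *     branchName: "main",
     *     expectedHash: "abc123",                      // expected current HEAD of 'main'
     *     operations: { commitMeta: { message: "add table" }, operations: [] },
     *   });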
     */
-    async commitMultipleOperationsRaw(requestParameters: CommitMultipleOperationsRequest, initOverrides?: RequestInit): Promise<runtime.ApiResponse<Branch>> {
+    async commitMultipleOperationsRaw(requestParameters: CommitMultipleOperationsRequest, initOverrides?: RequestInit): Promise<runtime.ApiResponse<BranchV1>> {
         if (requestParameters.branchName === null || requestParameters.branchName === undefined) {
             throw new runtime.RequiredError('branchName','Required parameter requestParameters.branchName was null or undefined when calling commitMultipleOperations.');
         }
@@ -242,7 +246,7 @@
     /**
     * Commit multiple operations against the given branch expecting that branch to have the given hash as its latest commit. The hash in the successful response contains the hash of the commit that contains the operations of the invocation.
     */
-    async commitMultipleOperations(requestParameters: CommitMultipleOperationsRequest, initOverrides?: RequestInit): Promise<Branch> {
+    async commitMultipleOperations(requestParameters: CommitMultipleOperationsRequest, initOverrides?: RequestInit): Promise<BranchV1> {
         const response = await this.commitMultipleOperationsRaw(requestParameters, initOverrides);
         return await response.value();
     }
@@ -250,7 +254,7 @@
    /**
     * Creates a Namespace
     */
-    async createNamespaceRaw(requestParameters: CreateNamespaceRequest, initOverrides?: RequestInit): Promise<runtime.ApiResponse<Namespace>> {
+    async createNamespaceRaw(requestParameters: CreateNamespaceRequest, initOverrides?: RequestInit): Promise<runtime.ApiResponse<NamespaceV1>> {
         if (requestParameters.name === null || requestParameters.name === undefined) {
             throw new runtime.RequiredError('name','Required parameter requestParameters.name was null or undefined when calling createNamespace.');
         }
@@ -283,7 +287,7 @@
    /**
     * Creates a Namespace
     */
-    async createNamespace(requestParameters: CreateNamespaceRequest, initOverrides?: RequestInit): Promise<Namespace> {
+    async createNamespace(requestParameters: CreateNamespaceRequest, initOverrides?: RequestInit): Promise<NamespaceV1> {
         const response = await this.createNamespaceRaw(requestParameters, initOverrides);
         return await response.value();
     }
@@ -292,7 +296,7 @@
    /**
     * The type of \'refObj\', which can be either a \'Branch\' or \'Tag\', determines the type of the reference to be created. \'Reference.name\' defines the the name of the reference to be created,\'Reference.hash\' is the hash of the created reference, the HEAD of the created reference.
\'sourceRefName\' is the name of the reference which contains \'Reference.hash\', and must be present if \'Reference.hash\' is present. Specifying no \'Reference.hash\' means that the new reference will be created \"at the beginning of time\". * Create a new reference */ - async createReference(requestParameters: CreateReferenceRequest, initOverrides?: RequestInit): Promise { + async createReference(requestParameters: CreateReferenceRequest, initOverrides?: RequestInit): Promise { const response = await this.createReferenceRaw(requestParameters, initOverrides); return await response.value(); } @@ -372,6 +376,10 @@ export class DefaultApi extends runtime.BaseAPI { throw new runtime.RequiredError('referenceType','Required parameter requestParameters.referenceType was null or undefined when calling deleteReference.'); } + if (requestParameters.expectedHash === null || requestParameters.expectedHash === undefined) { + throw new runtime.RequiredError('expectedHash','Required parameter requestParameters.expectedHash was null or undefined when calling deleteReference.'); + } + const queryParameters: any = {}; if (requestParameters.expectedHash !== undefined) { @@ -400,7 +408,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Get all references */ - async getAllReferencesRaw(requestParameters: GetAllReferencesRequest, initOverrides?: RequestInit): Promise> { + async getAllReferencesRaw(requestParameters: GetAllReferencesRequest, initOverrides?: RequestInit): Promise> { const queryParameters: any = {}; if (requestParameters.fetch !== undefined) { @@ -434,7 +442,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Get all references */ - async getAllReferences(requestParameters: GetAllReferencesRequest, initOverrides?: RequestInit): Promise { + async getAllReferences(requestParameters: GetAllReferencesRequest, initOverrides?: RequestInit): Promise { const response = await this.getAllReferencesRaw(requestParameters, initOverrides); return await response.value(); } @@ -443,7 +451,7 @@ export class DefaultApi extends runtime.BaseAPI { * Retrieve the commit log for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' commit-log-entries starting at the HEAD of the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all commit log entries. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. 
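     *
     * Editorial sketch (not generated code): the paging contract described above, using
     * the assumed 'api' instance; 'hasMore' and 'token' are the response fields this
     * description names.
     *   let token: string | undefined;
     *   do {
     *     const page = await api.getCommitLog({ ref: "main", maxRecords: 20, pageToken: token });
     *     token = page.hasMore ? page.token : undefined;
     *   } while (token);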
* Get commit log for a reference */ - async getCommitLogRaw(requestParameters: GetCommitLogRequest, initOverrides?: RequestInit): Promise> { + async getCommitLogRaw(requestParameters: GetCommitLogRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.ref === null || requestParameters.ref === undefined) { throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getCommitLog.'); } @@ -490,7 +498,7 @@ export class DefaultApi extends runtime.BaseAPI { * Retrieve the commit log for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' commit-log-entries starting at the HEAD of the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all commit log entries. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. * Get commit log for a reference */ - async getCommitLog(requestParameters: GetCommitLogRequest, initOverrides?: RequestInit): Promise { + async getCommitLog(requestParameters: GetCommitLogRequest, initOverrides?: RequestInit): Promise { const response = await this.getCommitLogRaw(requestParameters, initOverrides); return await response.value(); } @@ -498,7 +506,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * List all configuration settings */ - async getConfigRaw(initOverrides?: RequestInit): Promise> { + async getConfigRaw(initOverrides?: RequestInit): Promise> { const queryParameters: any = {}; const headerParameters: runtime.HTTPHeaders = {}; @@ -516,7 +524,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * List all configuration settings */ - async getConfig(initOverrides?: RequestInit): Promise { + async getConfig(initOverrides?: RequestInit): Promise { const response = await this.getConfigRaw(initOverrides); return await response.value(); } @@ -525,7 +533,7 @@ export class DefaultApi extends runtime.BaseAPI { * This operation returns the content-value for a content-key in a named-reference (a branch or tag). If the table-metadata is tracked globally (Iceberg), Nessie returns a \'Content\' object, that contains the most up-to-date part for the globally tracked part (Iceberg: table-metadata) plus the per-Nessie-reference/hash specific part (Iceberg: snapshot-id, schema-id, partition-spec-id, default-sort-order-id). * Get object content associated with a key. 
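     *
     * Editorial sketch (not generated code; the key value and request field shapes are
     * assumed from the generated models):
     *   const content = await api.getContent({ key: "my.table", ref: "main" });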
*/ - async getContentRaw(requestParameters: GetContentRequest, initOverrides?: RequestInit): Promise> { + async getContentRaw(requestParameters: GetContentRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.key === null || requestParameters.key === undefined) { throw new runtime.RequiredError('key','Required parameter requestParameters.key was null or undefined when calling getContent.'); } @@ -556,7 +564,7 @@ export class DefaultApi extends runtime.BaseAPI { * This operation returns the content-value for a content-key in a named-reference (a branch or tag). If the table-metadata is tracked globally (Iceberg), Nessie returns a \'Content\' object, that contains the most up-to-date part for the globally tracked part (Iceberg: table-metadata) plus the per-Nessie-reference/hash specific part (Iceberg: snapshot-id, schema-id, partition-spec-id, default-sort-order-id). * Get object content associated with a key. */ - async getContent(requestParameters: GetContentRequest, initOverrides?: RequestInit): Promise { + async getContent(requestParameters: GetContentRequest, initOverrides?: RequestInit): Promise { const response = await this.getContentRaw(requestParameters, initOverrides); return await response.value(); } @@ -564,7 +572,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Get default branch for commits and reads */ - async getDefaultBranchRaw(initOverrides?: RequestInit): Promise> { + async getDefaultBranchRaw(initOverrides?: RequestInit): Promise> { const queryParameters: any = {}; const headerParameters: runtime.HTTPHeaders = {}; @@ -582,30 +590,22 @@ export class DefaultApi extends runtime.BaseAPI { /** * Get default branch for commits and reads */ - async getDefaultBranch(initOverrides?: RequestInit): Promise { + async getDefaultBranch(initOverrides?: RequestInit): Promise { const response = await this.getDefaultBranchRaw(initOverrides); return await response.value(); } /** - * The URL pattern is basically \'from\' and \'to\' separated by \'...\' (three dots). \'from\' and \'to\' must start with a reference name, optionally followed by hash on that reference, the hash prefixed with the\'*\' character. Examples: diffs/main...myBranch diffs/main...myBranch*1234567890123456 diffs/main*1234567890123456...myBranch diffs/main*1234567890123456...myBranch*1234567890123456 + * The URL pattern is basically \'from\' and \'to\' separated by \'...\' (three dots). \'from\' and \'to\' must start with a reference name, optionally followed by hash on that reference, the hash prefixed with the\'*\' character. 
Examples: diffs/main...myBranch diffs/main...myBranch\\*1234567890123456 diffs/main\\*1234567890123456...myBranch diffs/main\\*1234567890123456...myBranch\\*1234567890123456 * Get a diff for two given references */ - async getDiffRaw(requestParameters: GetDiffRequest, initOverrides?: RequestInit): Promise> { - if (requestParameters.fromHashOnRef === null || requestParameters.fromHashOnRef === undefined) { - throw new runtime.RequiredError('fromHashOnRef','Required parameter requestParameters.fromHashOnRef was null or undefined when calling getDiff.'); - } - - if (requestParameters.fromRef === null || requestParameters.fromRef === undefined) { - throw new runtime.RequiredError('fromRef','Required parameter requestParameters.fromRef was null or undefined when calling getDiff.'); - } - - if (requestParameters.toHashOnRef === null || requestParameters.toHashOnRef === undefined) { - throw new runtime.RequiredError('toHashOnRef','Required parameter requestParameters.toHashOnRef was null or undefined when calling getDiff.'); + async getDiffRaw(requestParameters: GetDiffRequest, initOverrides?: RequestInit): Promise> { + if (requestParameters.fromRefWithHash === null || requestParameters.fromRefWithHash === undefined) { + throw new runtime.RequiredError('fromRefWithHash','Required parameter requestParameters.fromRefWithHash was null or undefined when calling getDiff.'); } - if (requestParameters.toRef === null || requestParameters.toRef === undefined) { - throw new runtime.RequiredError('toRef','Required parameter requestParameters.toRef was null or undefined when calling getDiff.'); + if (requestParameters.toRefWithHash === null || requestParameters.toRefWithHash === undefined) { + throw new runtime.RequiredError('toRefWithHash','Required parameter requestParameters.toRefWithHash was null or undefined when calling getDiff.'); } const queryParameters: any = {}; @@ -613,7 +613,7 @@ export class DefaultApi extends runtime.BaseAPI { const headerParameters: runtime.HTTPHeaders = {}; const response = await this.request({ - path: `/api/v1/diffs/{fromRef}{f}{fromHashOnRef}...{toRef}{t}{toHashOnRef}`.replace(`{${"fromHashOnRef"}}`, encodeURIComponent(String(requestParameters.fromHashOnRef))).replace(`{${"fromRef"}}`, encodeURIComponent(String(requestParameters.fromRef))).replace(`{${"toHashOnRef"}}`, encodeURIComponent(String(requestParameters.toHashOnRef))).replace(`{${"toRef"}}`, encodeURIComponent(String(requestParameters.toRef))), + path: `/api/v1/diffs/{fromRefWithHash}...{toRefWithHash}`.replace(`{${"fromRefWithHash"}}`, encodeURIComponent(String(requestParameters.fromRefWithHash))).replace(`{${"toRefWithHash"}}`, encodeURIComponent(String(requestParameters.toRefWithHash))), method: 'GET', headers: headerParameters, query: queryParameters, @@ -623,10 +623,10 @@ export class DefaultApi extends runtime.BaseAPI { } /** - * The URL pattern is basically \'from\' and \'to\' separated by \'...\' (three dots). \'from\' and \'to\' must start with a reference name, optionally followed by hash on that reference, the hash prefixed with the\'*\' character. Examples: diffs/main...myBranch diffs/main...myBranch*1234567890123456 diffs/main*1234567890123456...myBranch diffs/main*1234567890123456...myBranch*1234567890123456 + * The URL pattern is basically \'from\' and \'to\' separated by \'...\' (three dots). \'from\' and \'to\' must start with a reference name, optionally followed by hash on that reference, the hash prefixed with the\'*\' character. 
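+     *
+     * Editorial sketch (not generated code): one of the URL examples that follow, issued
+     * as a client call on the assumed 'api' instance:
+     *   const diff = await api.getDiff({
+     *     fromRefWithHash: "main*1234567890123456",
+     *     toRefWithHash: "myBranch",
+     *   });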
Examples: diffs/main...myBranch diffs/main...myBranch\\*1234567890123456 diffs/main\\*1234567890123456...myBranch diffs/main\\*1234567890123456...myBranch\\*1234567890123456 * Get a diff for two given references */ - async getDiff(requestParameters: GetDiffRequest, initOverrides?: RequestInit): Promise { + async getDiff(requestParameters: GetDiffRequest, initOverrides?: RequestInit): Promise { const response = await this.getDiffRaw(requestParameters, initOverrides); return await response.value(); } @@ -635,7 +635,7 @@ export class DefaultApi extends runtime.BaseAPI { * Retrieves objects for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' entries for the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all commit log entries. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. The \'namespaceDepth\' parameter returns only the ContentKey components up to the depth of \'namespaceDepth\'. For example they key \'a.b.c.d\' with a depth of 3 will return \'a.b.c\'. The operation is guaranteed to not return duplicates and therefore will never page. * Fetch all entries for a given reference */ - async getEntriesRaw(requestParameters: GetEntriesRequest, initOverrides?: RequestInit): Promise> { + async getEntriesRaw(requestParameters: GetEntriesRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.ref === null || requestParameters.ref === undefined) { throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getEntries.'); } @@ -678,7 +678,7 @@ export class DefaultApi extends runtime.BaseAPI { * Retrieves objects for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' entries for the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all commit log entries. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. The \'namespaceDepth\' parameter returns only the ContentKey components up to the depth of \'namespaceDepth\'. 
For example they key \'a.b.c.d\' with a depth of 3 will return \'a.b.c\'. The operation is guaranteed to not return duplicates and therefore will never page. * Fetch all entries for a given reference */ - async getEntries(requestParameters: GetEntriesRequest, initOverrides?: RequestInit): Promise { + async getEntries(requestParameters: GetEntriesRequest, initOverrides?: RequestInit): Promise { const response = await this.getEntriesRaw(requestParameters, initOverrides); return await response.value(); } @@ -687,7 +687,7 @@ export class DefaultApi extends runtime.BaseAPI { * Similar to \'getContent\', but takes multiple \'ContentKey\'s and returns the content-values for the one or more content-keys in a named-reference (a branch or tag). If the table-metadata is tracked globally (Iceberg), Nessie returns a \'Content\' object, that contains the most up-to-date part for the globally tracked part (Iceberg: table-metadata) plus the per-Nessie-reference/hash specific part (Iceberg: snapshot-ID,schema-ID, partition-spec-ID, default-sort-order-ID). * Get multiple objects\' content. */ - async getMultipleContentsRaw(requestParameters: GetMultipleContentsOperationRequest, initOverrides?: RequestInit): Promise> { + async getMultipleContentsRaw(requestParameters: GetMultipleContentsOperationRequest, initOverrides?: RequestInit): Promise> { const queryParameters: any = {}; if (requestParameters.hashOnRef !== undefined) { @@ -717,7 +717,7 @@ export class DefaultApi extends runtime.BaseAPI { * Similar to \'getContent\', but takes multiple \'ContentKey\'s and returns the content-values for the one or more content-keys in a named-reference (a branch or tag). If the table-metadata is tracked globally (Iceberg), Nessie returns a \'Content\' object, that contains the most up-to-date part for the globally tracked part (Iceberg: table-metadata) plus the per-Nessie-reference/hash specific part (Iceberg: snapshot-ID,schema-ID, partition-spec-ID, default-sort-order-ID). * Get multiple objects\' content. 
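     *
     * Editorial sketch (not generated code; the request shape, including 'requestedKeys'
     * and ContentKey 'elements', is assumed from the generated GetMultipleContentsRequest
     * model):
     *   const contents = await api.getMultipleContents({
     *     ref: "main",
     *     getMultipleContentsRequest: { requestedKeys: [{ elements: ["my", "table"] }] },
     *   });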
*/ - async getMultipleContents(requestParameters: GetMultipleContentsOperationRequest, initOverrides?: RequestInit): Promise { + async getMultipleContents(requestParameters: GetMultipleContentsOperationRequest, initOverrides?: RequestInit): Promise { const response = await this.getMultipleContentsRaw(requestParameters, initOverrides); return await response.value(); } @@ -725,7 +725,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Retrieves a Namespace */ - async getNamespaceRaw(requestParameters: GetNamespaceRequest, initOverrides?: RequestInit): Promise> { + async getNamespaceRaw(requestParameters: GetNamespaceRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.name === null || requestParameters.name === undefined) { throw new runtime.RequiredError('name','Required parameter requestParameters.name was null or undefined when calling getNamespace.'); } @@ -755,14 +755,14 @@ export class DefaultApi extends runtime.BaseAPI { /** * Retrieves a Namespace */ - async getNamespace(requestParameters: GetNamespaceRequest, initOverrides?: RequestInit): Promise { + async getNamespace(requestParameters: GetNamespaceRequest, initOverrides?: RequestInit): Promise { const response = await this.getNamespaceRaw(requestParameters, initOverrides); return await response.value(); } /** */ - async getNamespacesRaw(requestParameters: GetNamespacesRequest, initOverrides?: RequestInit): Promise> { + async getNamespacesRaw(requestParameters: GetNamespacesRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.ref === null || requestParameters.ref === undefined) { throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getNamespaces.'); } @@ -791,16 +791,16 @@ export class DefaultApi extends runtime.BaseAPI { /** */ - async getNamespaces(requestParameters: GetNamespacesRequest, initOverrides?: RequestInit): Promise { + async getNamespaces(requestParameters: GetNamespacesRequest, initOverrides?: RequestInit): Promise { const response = await this.getNamespacesRaw(requestParameters, initOverrides); return await response.value(); } /** - * Retrieve the reflog entries from a specified endHash or from the current HEAD if the endHash is null, potentially truncated by the backend. Retrieves up to \'maxRecords\' refLog-entries starting at the endHash or HEAD.The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all reflog entries. - * Get reflog entries + * The Nessie reflog in this form is deprecated, likely for removal. Retrieve the reflog entries from a specified endHash or from the current HEAD if the endHash is null, potentially truncated by the backend. Retrieves up to \'maxRecords\' refLog-entries starting at the endHash or HEAD.The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. 
To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all reflog entries. + * Get reflog entries (DEPRECATED) */ - async getRefLogRaw(requestParameters: GetRefLogRequest, initOverrides?: RequestInit): Promise> { + async getRefLogRaw(requestParameters: GetRefLogRequest, initOverrides?: RequestInit): Promise> { const queryParameters: any = {}; if (requestParameters.endHash !== undefined) { @@ -832,14 +832,14 @@ export class DefaultApi extends runtime.BaseAPI { query: queryParameters, }, initOverrides); - return new runtime.JSONApiResponse(response); + return new runtime.TextApiResponse(response) as any; } /** - * Retrieve the reflog entries from a specified endHash or from the current HEAD if the endHash is null, potentially truncated by the backend. Retrieves up to \'maxRecords\' refLog-entries starting at the endHash or HEAD.The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all reflog entries. - * Get reflog entries + * The Nessie reflog in this form is deprecated, likely for removal. Retrieve the reflog entries from a specified endHash or from the current HEAD if the endHash is null, potentially truncated by the backend. Retrieves up to \'maxRecords\' refLog-entries starting at the endHash or HEAD.The backend may respect the given \'max\' records hint, but return less or more entries. Backends may also cap the returned entries at a hard-coded limit, the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all reflog entries. 
+ * Get reflog entries (DEPRECATED) */ - async getRefLog(requestParameters: GetRefLogRequest, initOverrides?: RequestInit): Promise { + async getRefLog(requestParameters: GetRefLogRequest, initOverrides?: RequestInit): Promise { const response = await this.getRefLogRaw(requestParameters, initOverrides); return await response.value(); } @@ -847,7 +847,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Fetch details of a reference */ - async getReferenceByNameRaw(requestParameters: GetReferenceByNameRequest, initOverrides?: RequestInit): Promise> { + async getReferenceByNameRaw(requestParameters: GetReferenceByNameRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.ref === null || requestParameters.ref === undefined) { throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getReferenceByName.'); } @@ -873,7 +873,7 @@ export class DefaultApi extends runtime.BaseAPI { /** * Fetch details of a reference */ - async getReferenceByName(requestParameters: GetReferenceByNameRequest, initOverrides?: RequestInit): Promise { + async getReferenceByName(requestParameters: GetReferenceByNameRequest, initOverrides?: RequestInit): Promise { const response = await this.getReferenceByNameRaw(requestParameters, initOverrides); return await response.value(); } @@ -882,11 +882,15 @@ export class DefaultApi extends runtime.BaseAPI { * Merge items from an existing hash in \'mergeRef\' into the requested branch. The merge is always a rebase + fast-forward merge and is only completed if the rebase is conflict free. The set of commits added to the branch will be all of those until we arrive at a common ancestor. Depending on the underlying implementation, the number of commits allowed as part of this operation may be limited. * Merge commits from \'mergeRef\' onto \'branchName\'. */ - async mergeRefIntoBranchRaw(requestParameters: MergeRefIntoBranchRequest, initOverrides?: RequestInit): Promise> { + async mergeRefIntoBranchRaw(requestParameters: MergeRefIntoBranchRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.branchName === null || requestParameters.branchName === undefined) { throw new runtime.RequiredError('branchName','Required parameter requestParameters.branchName was null or undefined when calling mergeRefIntoBranch.'); } + if (requestParameters.expectedHash === null || requestParameters.expectedHash === undefined) { + throw new runtime.RequiredError('expectedHash','Required parameter requestParameters.expectedHash was null or undefined when calling mergeRefIntoBranch.'); + } + const queryParameters: any = {}; if (requestParameters.expectedHash !== undefined) { @@ -902,29 +906,34 @@ export class DefaultApi extends runtime.BaseAPI { method: 'POST', headers: headerParameters, query: queryParameters, - body: requestParameters.merge, + body: requestParameters.merge1, }, initOverrides); - return new runtime.VoidApiResponse(response); + return new runtime.JSONApiResponse(response); } /** * Merge items from an existing hash in \'mergeRef\' into the requested branch. The merge is always a rebase + fast-forward merge and is only completed if the rebase is conflict free. The set of commits added to the branch will be all of those until we arrive at a common ancestor. Depending on the underlying implementation, the number of commits allowed as part of this operation may be limited. * Merge commits from \'mergeRef\' onto \'branchName\'. 
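     *
     * Editorial sketch (not generated code; the Merge1 payload fields are assumed from the
     * generated model, and all hashes are placeholders):
     *   const result = await api.mergeRefIntoBranch({
     *     branchName: "main",
     *     expectedHash: "abc123",
     *     merge1: { fromRefName: "myBranch", fromHash: "def456" },
     *   });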
*/ - async mergeRefIntoBranch(requestParameters: MergeRefIntoBranchRequest, initOverrides?: RequestInit): Promise { - await this.mergeRefIntoBranchRaw(requestParameters, initOverrides); + async mergeRefIntoBranch(requestParameters: MergeRefIntoBranchRequest, initOverrides?: RequestInit): Promise { + const response = await this.mergeRefIntoBranchRaw(requestParameters, initOverrides); + return await response.value(); } /** * This is done as an atomic operation such that only the last of the sequence is ever visible to concurrent readers/writers. The sequence to transplant must be contiguous and in order. * Transplant commits from \'transplant\' onto \'branchName\' */ - async transplantCommitsIntoBranchRaw(requestParameters: TransplantCommitsIntoBranchRequest, initOverrides?: RequestInit): Promise> { + async transplantCommitsIntoBranchRaw(requestParameters: TransplantCommitsIntoBranchRequest, initOverrides?: RequestInit): Promise> { if (requestParameters.branchName === null || requestParameters.branchName === undefined) { throw new runtime.RequiredError('branchName','Required parameter requestParameters.branchName was null or undefined when calling transplantCommitsIntoBranch.'); } + if (requestParameters.expectedHash === null || requestParameters.expectedHash === undefined) { + throw new runtime.RequiredError('expectedHash','Required parameter requestParameters.expectedHash was null or undefined when calling transplantCommitsIntoBranch.'); + } + const queryParameters: any = {}; if (requestParameters.expectedHash !== undefined) { @@ -944,18 +953,19 @@ export class DefaultApi extends runtime.BaseAPI { method: 'POST', headers: headerParameters, query: queryParameters, - body: requestParameters.transplant, + body: requestParameters.transplant1, }, initOverrides); - return new runtime.VoidApiResponse(response); + return new runtime.JSONApiResponse(response); } /** * This is done as an atomic operation such that only the last of the sequence is ever visible to concurrent readers/writers. The sequence to transplant must be contiguous and in order. * Transplant commits from \'transplant\' onto \'branchName\' */ - async transplantCommitsIntoBranch(requestParameters: TransplantCommitsIntoBranchRequest, initOverrides?: RequestInit): Promise { - await this.transplantCommitsIntoBranchRaw(requestParameters, initOverrides); + async transplantCommitsIntoBranch(requestParameters: TransplantCommitsIntoBranchRequest, initOverrides?: RequestInit): Promise { + const response = await this.transplantCommitsIntoBranchRaw(requestParameters, initOverrides); + return await response.value(); } /** diff --git a/dac/ui/src/services/nessie/client/apis/V2BetaApi.ts b/dac/ui/src/services/nessie/client/apis/V2BetaApi.ts new file mode 100644 index 0000000000..818e414d82 --- /dev/null +++ b/dac/ui/src/services/nessie/client/apis/V2BetaApi.ts @@ -0,0 +1,726 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Nessie API + * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + * + * The version of the OpenAPI document: ${project.version} + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +import * as runtime from '../runtime'; +import { + CommitResponseV2, + ContentKey, + ContentResponseV2, + DiffResponseV2, + EntriesResponseV2, + FetchOption, + GetMultipleContentsRequest, + GetMultipleContentsResponseV2, + LogResponseV2, + Merge, + MergeResponseV2, + NessieConfiguration, + Operations, + Reference, + ReferenceType, + ReferencesResponseV2, + SingleReferenceResponseV2, + Transplant, +} from '../models'; + +export interface AssignReferenceV2Request { + ref: any; + reference: Reference; + type?: ReferenceType; +} + +export interface CommitV2Request { + branch: any; + operations: Operations; +} + +export interface CreateReferenceV2Request { + name: string; + type: ReferenceType; + reference: Reference; +} + +export interface DeleteReferenceV2Request { + ref: any; + type?: ReferenceType; +} + +export interface GetAllReferencesV2Request { + fetch?: FetchOption; + filter?: string; + maxRecords?: number; + pageToken?: string; +} + +export interface GetCommitLogV2Request { + ref: any; + fetch?: FetchOption; + filter?: string; + limitHash?: string; + maxRecords?: number; + pageToken?: string; +} + +export interface GetContentV2Request { + key: ContentKey; + ref: any; +} + +export interface GetDiffV2Request { + fromRef: string; + toRef: string; + maxRecords?: number; + pageToken?: string; +} + +export interface GetEntriesV2Request { + ref: any; + content?: boolean; + filter?: string; + maxRecords?: number; + pageToken?: string; +} + +export interface GetMultipleContentsV2Request { + ref: any; + getMultipleContentsRequest?: GetMultipleContentsRequest; +} + +export interface GetReferenceByNameV2Request { + ref: string; + fetch?: FetchOption; +} + +export interface GetSeveralContentsRequest { + ref: string; + key?: Array; +} + +export interface MergeV2Request { + branch: any; + merge: Merge; +} + +export interface TransplantV2Request { + branch: any; + transplant: Transplant; +} + +/** + * + */ +export class V2BetaApi extends runtime.BaseAPI { + + /** + * The \'ref\' parameter identifies the branch or tag to be reassigned. The \'ref\' parameter may contain a hash qualifier. That hash as well as the optional \'type\' parameter may be used to ensure the operation is performed on the same object that the user expects. Only branches and tags may be reassigned. The payload object identifies any reference visible to the current user whose \'hash\' will be used to define the new HEAD of the reference being reassigned. Detached hashes may be used in the payload. + * Set a named reference to a specific hash via another reference. 
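+     *
+     * Editorial sketch (not generated code): a hypothetical call, assuming an 'api'
+     * constructed as new V2BetaApi(...) analogous to the earlier V1 sketch; the
+     * 'name@hash' qualifier syntax and payload shape are assumptions.
+     *   await api.assignReferenceV2({
+     *     ref: "myBranch@11223344",
+     *     reference: { type: "BRANCH", name: "main", hash: "aabbccdd" },
+     *   });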
+ */ + async assignReferenceV2Raw(requestParameters: AssignReferenceV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling assignReferenceV2.'); + } + + if (requestParameters.reference === null || requestParameters.reference === undefined) { + throw new runtime.RequiredError('reference','Required parameter requestParameters.reference was null or undefined when calling assignReferenceV2.'); + } + + const queryParameters: any = {}; + + if (requestParameters.type !== undefined) { + queryParameters['type'] = requestParameters.type; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + headerParameters['Content-Type'] = 'application/json'; + + const response = await this.request({ + path: `/api/v2/trees/{ref}`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'PUT', + headers: headerParameters, + query: queryParameters, + body: requestParameters.reference, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * The \'ref\' parameter identifies the branch or tag to be reassigned. The \'ref\' parameter may contain a hash qualifier. That hash as well as the optional \'type\' parameter may be used to ensure the operation is performed on the same object that the user expects. Only branches and tags may be reassigned. The payload object identifies any reference visible to the current user whose \'hash\' will be used to define the new HEAD of the reference being reassigned. Detached hashes may be used in the payload. + * Set a named reference to a specific hash via another reference. + */ + async assignReferenceV2(requestParameters: AssignReferenceV2Request, initOverrides?: RequestInit): Promise { + const response = await this.assignReferenceV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the operation being committed. The hash in the successful response will be the hash of the commit that contains the requested operations, whose immediate parent commit will be the current HEAD of the specified branch. + * Commit one or more operations against the given \'branch\'. 
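+     *
+     * Editorial sketch (not generated code; the hash qualifier and Operations shape are
+     * assumptions, values are placeholders):
+     *   const commit = await api.commitV2({
+     *     branch: "main@abc123",
+     *     operations: { commitMeta: { message: "drop table" }, operations: [] },
+     *   });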
+     */
+    async commitV2Raw(requestParameters: CommitV2Request, initOverrides?: RequestInit): Promise<runtime.ApiResponse<CommitResponseV2>> {
+        if (requestParameters.branch === null || requestParameters.branch === undefined) {
+            throw new runtime.RequiredError('branch','Required parameter requestParameters.branch was null or undefined when calling commitV2.');
+        }
+
+        if (requestParameters.operations === null || requestParameters.operations === undefined) {
+            throw new runtime.RequiredError('operations','Required parameter requestParameters.operations was null or undefined when calling commitV2.');
+        }
+
+        const queryParameters: any = {};
+
+        const headerParameters: runtime.HTTPHeaders = {};
+
+        headerParameters['Content-Type'] = 'application/json';
+
+        const response = await this.request({
+            path: `/api/v2/trees/{branch}/history/commit`.replace(`{${"branch"}}`, encodeURIComponent(String(requestParameters.branch))),
+            method: 'POST',
+            headers: headerParameters,
+            query: queryParameters,
+            body: requestParameters.operations,
+        }, initOverrides);
+
+        return new runtime.JSONApiResponse(response);
+    }
+
+    /**
+     * The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the operation being committed. The hash in the successful response will be the hash of the commit that contains the requested operations, whose immediate parent commit will be the current HEAD of the specified branch.
+     * Commit one or more operations against the given \'branch\'.
+     */
+    async commitV2(requestParameters: CommitV2Request, initOverrides?: RequestInit): Promise<CommitResponseV2> {
+        const response = await this.commitV2Raw(requestParameters, initOverrides);
+        return await response.value();
+    }
+
+    /**
+     * The name and type query parameters define the kind of reference to be created. The payload object defines the new reference\'s origin in the commit history. Only branches and tags can be created by this method, but the payload object may be any valid reference, including a detached commit. If the payload reference object does not define a commit hash, the HEAD of that reference will be used.
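+     *
+     * Editorial sketch (not generated code; the enum member casing is assumed):
+     *   const created = await api.createReferenceV2({
+     *     name: "myBranch",
+     *     type: ReferenceType.Branch,
+     *     reference: { type: "BRANCH", name: "main" },   // no hash: branch from HEAD of 'main'
+     *   });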
+ * Create a new branch or tag + */ + async createReferenceV2Raw(requestParameters: CreateReferenceV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.name === null || requestParameters.name === undefined) { + throw new runtime.RequiredError('name','Required parameter requestParameters.name was null or undefined when calling createReferenceV2.'); + } + + if (requestParameters.type === null || requestParameters.type === undefined) { + throw new runtime.RequiredError('type','Required parameter requestParameters.type was null or undefined when calling createReferenceV2.'); + } + + if (requestParameters.reference === null || requestParameters.reference === undefined) { + throw new runtime.RequiredError('reference','Required parameter requestParameters.reference was null or undefined when calling createReferenceV2.'); + } + + const queryParameters: any = {}; + + if (requestParameters.name !== undefined) { + queryParameters['name'] = requestParameters.name; + } + + if (requestParameters.type !== undefined) { + queryParameters['type'] = requestParameters.type; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + headerParameters['Content-Type'] = 'application/json'; + + const response = await this.request({ + path: `/api/v2/trees`, + method: 'POST', + headers: headerParameters, + query: queryParameters, + body: requestParameters.reference, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * The name and type query parameters define the kind of reference to be created. The payload object defines the new reference\'s origin in the commit history. Only branches and tags can be created by this method, but the payload object may be any valid reference, including a detached commit. If the payload reference object does not define a commit hash, the HEAD of that reference will be used. + * Create a new branch or tag + */ + async createReferenceV2(requestParameters: CreateReferenceV2Request, initOverrides?: RequestInit): Promise { + const response = await this.createReferenceV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * The \'ref\' parameter identifies the branch or tag to be deleted. The \'ref\' parameter may contain a hash qualifier. That hash as well as the optional \'type\' parameter may be used to ensure the operation is performed on the same object that the user expects. Only branches and tags can be deleted. However, deleting the default branch may be restricted. + * Delete a reference + */ + async deleteReferenceV2Raw(requestParameters: DeleteReferenceV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling deleteReferenceV2.'); + } + + const queryParameters: any = {}; + + if (requestParameters.type !== undefined) { + queryParameters['type'] = requestParameters.type; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/trees/{ref}`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'DELETE', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * The \'ref\' parameter identifies the branch or tag to be deleted. The \'ref\' parameter may contain a hash qualifier. 
That hash as well as the optional \'type\' parameter may be used to ensure the operation is performed on the same object that the user expects. Only branches and tags can be deleted. However, deleting the default branch may be restricted. + * Delete a reference + */ + async deleteReferenceV2(requestParameters: DeleteReferenceV2Request, initOverrides?: RequestInit): Promise { + const response = await this.deleteReferenceV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * Get information about all branches and tags + */ + async getAllReferencesV2Raw(requestParameters: GetAllReferencesV2Request, initOverrides?: RequestInit): Promise> { + const queryParameters: any = {}; + + if (requestParameters.fetch !== undefined) { + queryParameters['fetch'] = requestParameters.fetch; + } + + if (requestParameters.filter !== undefined) { + queryParameters['filter'] = requestParameters.filter; + } + + if (requestParameters.maxRecords !== undefined) { + queryParameters['max-records'] = requestParameters.maxRecords; + } + + if (requestParameters.pageToken !== undefined) { + queryParameters['page-token'] = requestParameters.pageToken; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/trees`, + method: 'GET', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Get information about all branches and tags + */ + async getAllReferencesV2(requestParameters: GetAllReferencesV2Request, initOverrides?: RequestInit): Promise { + const response = await this.getAllReferencesV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * Retrieve the commit log for a reference, potentially truncated by the backend. The backend may respect the given \'max-entries\' records hint, or may return more or less entries. Backends may also cap the returned entries at a hard-coded limit To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation,treat is as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all available data in one page. Different pages may have different numbers of log records in them even if they come from another call to this method with the same parameters. Also, pages are not guaranteed to be filled to contain exactly \'maxRecords\' even if the total amount of available data allows that. Pages may contain more of less entries at server\'s discretion. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. The fetching of the log starts from the HEAD of the given ref (or a more specific commit, if provided as part of the \'ref\' path element) and proceeds until the \'root\' commit or the \'limit-hash\' commit are encountered. 
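+     *
+     * Editorial sketch (not generated code): the paging loop described above, on the
+     * assumed 'api' instance; the request fields match GetCommitLogV2Request.
+     *   let token: string | undefined;
+     *   do {
+     *     const page = await api.getCommitLogV2({ ref: "main", maxRecords: 20, pageToken: token });
+     *     token = page.hasMore ? page.token : undefined;
+     *   } while (token);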
+     * Get commit log for a particular reference
+     */
+    async getCommitLogV2Raw(requestParameters: GetCommitLogV2Request, initOverrides?: RequestInit): Promise> {
+        if (requestParameters.ref === null || requestParameters.ref === undefined) {
+            throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getCommitLogV2.');
+        }
+
+        const queryParameters: any = {};
+
+        if (requestParameters.fetch !== undefined) {
+            queryParameters['fetch'] = requestParameters.fetch;
+        }
+
+        if (requestParameters.filter !== undefined) {
+            queryParameters['filter'] = requestParameters.filter;
+        }
+
+        if (requestParameters.limitHash !== undefined) {
+            queryParameters['limit-hash'] = requestParameters.limitHash;
+        }
+
+        if (requestParameters.maxRecords !== undefined) {
+            queryParameters['max-records'] = requestParameters.maxRecords;
+        }
+
+        if (requestParameters.pageToken !== undefined) {
+            queryParameters['page-token'] = requestParameters.pageToken;
+        }
+
+        const headerParameters: runtime.HTTPHeaders = {};
+
+        const response = await this.request({
+            path: `/api/v2/trees/{ref}/history`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))),
+            method: 'GET',
+            headers: headerParameters,
+            query: queryParameters,
+        }, initOverrides);
+
+        return new runtime.JSONApiResponse(response);
+    }
+
+    /**
+     * Retrieve the commit log for a reference, potentially truncated by the backend. The backend may respect the given \'max-entries\' records hint, or may return more or fewer entries. Backends may also cap the returned entries at a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation; treat it as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all available data in one page. Different pages may have different numbers of log records in them even if they come from another call to this method with the same parameters. Also, pages are not guaranteed to be filled to contain exactly \'maxRecords\' even if the total amount of available data allows that. Pages may contain more or fewer entries at the server\'s discretion. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md. The fetching of the log starts from the HEAD of the given ref (or a more specific commit, if provided as part of the \'ref\' path element) and proceeds until the \'root\' commit or the \'limit-hash\' commit is encountered.
+     * Get commit log for a particular reference
+     */
+    async getCommitLogV2(requestParameters: GetCommitLogV2Request, initOverrides?: RequestInit): Promise {
+        const response = await this.getCommitLogV2Raw(requestParameters, initOverrides);
+        return await response.value();
+    }
+
+    /**
+     * Returns repository and server settings relevant to clients.
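+     *
+     * Illustrative sketch, not part of the generated client: it assumes an already-constructed
+     * `V2BetaApi` instance named `api` and only reads fields declared on the returned
+     * configuration model.
+     * @example
+     * // Hypothetical usage: discover the server's default branch before making other calls.
+     * const config = await api.getConfigV2();
+     * console.log(config.defaultBranch, config.maxSupportedApiVersion);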
+ */ + async getConfigV2Raw(initOverrides?: RequestInit): Promise> { + const queryParameters: any = {}; + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/config`, + method: 'GET', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Returns repository and server settings relevant to clients. + */ + async getConfigV2(initOverrides?: RequestInit): Promise { + const response = await this.getConfigV2Raw(initOverrides); + return await response.value(); + } + + /** + * This operation returns the content value for a content key at a particular point in history as defined by the \'ref\' parameter. + * Get the content object associated with a key. + */ + async getContentV2Raw(requestParameters: GetContentV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.key === null || requestParameters.key === undefined) { + throw new runtime.RequiredError('key','Required parameter requestParameters.key was null or undefined when calling getContentV2.'); + } + + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getContentV2.'); + } + + const queryParameters: any = {}; + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/trees/{ref}/contents/{key}`.replace(`{${"key"}}`, encodeURIComponent(String(requestParameters.key))).replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'GET', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * This operation returns the content value for a content key at a particular point in history as defined by the \'ref\' parameter. + * Get the content object associated with a key. 
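+     *
+     * Illustrative sketch, not part of the generated client: it assumes an already-constructed
+     * `V2BetaApi` instance named `api`; the key and reference are made-up examples.
+     * @example
+     * // Hypothetical usage: read one content object on 'main' at a specific commit hash.
+     * const response = await api.getContentV2({ key: 'ns.table_a', ref: 'main@1234567890123456' });
+     * console.log(response.content.type);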
+     */
+    async getContentV2(requestParameters: GetContentV2Request, initOverrides?: RequestInit): Promise {
+        const response = await this.getContentV2Raw(requestParameters, initOverrides);
+        return await response.value();
+    }
+
+    /**
+     * The URL pattern is basically \'from\' and \'to\' reference specs separated by \'/diff/\'. Examples:
+     * - main/diff/myBranch
+     * - main@1234567890123456/diff/myBranch
+     * - main@1234567890123456/diff/myBranch@23445678
+     * - main/diff/myBranch@23445678
+     * - my/branch@/diff/main
+     * - myBranch/diff/-
+     * Get contents that differ in the trees specified by the two given references
+     */
+    async getDiffV2Raw(requestParameters: GetDiffV2Request, initOverrides?: RequestInit): Promise> {
+        if (requestParameters.fromRef === null || requestParameters.fromRef === undefined) {
+            throw new runtime.RequiredError('fromRef','Required parameter requestParameters.fromRef was null or undefined when calling getDiffV2.');
+        }
+
+        if (requestParameters.toRef === null || requestParameters.toRef === undefined) {
+            throw new runtime.RequiredError('toRef','Required parameter requestParameters.toRef was null or undefined when calling getDiffV2.');
+        }
+
+        const queryParameters: any = {};
+
+        if (requestParameters.maxRecords !== undefined) {
+            queryParameters['max-records'] = requestParameters.maxRecords;
+        }
+
+        if (requestParameters.pageToken !== undefined) {
+            queryParameters['page-token'] = requestParameters.pageToken;
+        }
+
+        const headerParameters: runtime.HTTPHeaders = {};
+
+        const response = await this.request({
+            path: `/api/v2/trees/{from-ref}/diff/{to-ref}`.replace(`{${"from-ref"}}`, encodeURIComponent(String(requestParameters.fromRef))).replace(`{${"to-ref"}}`, encodeURIComponent(String(requestParameters.toRef))),
+            method: 'GET',
+            headers: headerParameters,
+            query: queryParameters,
+        }, initOverrides);
+
+        return new runtime.JSONApiResponse(response);
+    }
+
+    /**
+     * The URL pattern is basically \'from\' and \'to\' reference specs separated by \'/diff/\'. Examples:
+     * - main/diff/myBranch
+     * - main@1234567890123456/diff/myBranch
+     * - main@1234567890123456/diff/myBranch@23445678
+     * - main/diff/myBranch@23445678
+     * - my/branch@/diff/main
+     * - myBranch/diff/-
+     * Get contents that differ in the trees specified by the two given references
+     */
+    async getDiffV2(requestParameters: GetDiffV2Request, initOverrides?: RequestInit): Promise {
+        const response = await this.getDiffV2Raw(requestParameters, initOverrides);
+        return await response.value();
+    }
+
+    /**
+     * Retrieves objects for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' entries for the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return fewer or more entries. Backends may also cap the returned entries at a hard-coded limit; the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation; treat it as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all available data in one page. Different pages may have different numbers of log records in them even if they come from another call to this method with the same parameters.
Also, pages are not guaranteed to be filled to contain exactly \'maxRecords\' even if the total amount of available data allows that. Pages may contain more or fewer entries at the server\'s discretion. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md.
+     * Fetch all entries for a given reference
+     */
+    async getEntriesV2Raw(requestParameters: GetEntriesV2Request, initOverrides?: RequestInit): Promise> {
+        if (requestParameters.ref === null || requestParameters.ref === undefined) {
+            throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getEntriesV2.');
+        }
+
+        const queryParameters: any = {};
+
+        if (requestParameters.content !== undefined) {
+            queryParameters['content'] = requestParameters.content;
+        }
+
+        if (requestParameters.filter !== undefined) {
+            queryParameters['filter'] = requestParameters.filter;
+        }
+
+        if (requestParameters.maxRecords !== undefined) {
+            queryParameters['max-records'] = requestParameters.maxRecords;
+        }
+
+        if (requestParameters.pageToken !== undefined) {
+            queryParameters['page-token'] = requestParameters.pageToken;
+        }
+
+        const headerParameters: runtime.HTTPHeaders = {};
+
+        const response = await this.request({
+            path: `/api/v2/trees/{ref}/entries`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))),
+            method: 'GET',
+            headers: headerParameters,
+            query: queryParameters,
+        }, initOverrides);
+
+        return new runtime.JSONApiResponse(response);
+    }
+
+    /**
+     * Retrieves objects for a ref, potentially truncated by the backend. Retrieves up to \'maxRecords\' entries for the given named reference (tag or branch) or the given hash. The backend may respect the given \'max\' records hint, but return fewer or more entries. Backends may also cap the returned entries at a hard-coded limit; the default REST server implementation has such a hard-coded limit. To implement paging, check \'hasMore\' in the response and, if \'true\', pass the value returned as \'token\' in the next invocation as the \'pageToken\' parameter. The content and meaning of the returned \'token\' is \"private\" to the implementation; treat it as an opaque value. It is wrong to assume that invoking this method with a very high \'maxRecords\' value will return all available data in one page. Different pages may have different numbers of log records in them even if they come from another call to this method with the same parameters. Also, pages are not guaranteed to be filled to contain exactly \'maxRecords\' even if the total amount of available data allows that. Pages may contain more or fewer entries at the server\'s discretion. The \'filter\' parameter allows for advanced filtering capabilities using the Common Expression Language (CEL). An intro to CEL can be found at https://github.com/google/cel-spec/blob/master/doc/intro.md.
+     * Fetch all entries for a given reference
+     */
+    async getEntriesV2(requestParameters: GetEntriesV2Request, initOverrides?: RequestInit): Promise {
+        const response = await this.getEntriesV2Raw(requestParameters, initOverrides);
+        return await response.value();
+    }
+
+    /**
+     * Similar to \'GET /trees/{ref}/content/{key}\', but takes multiple \'ContentKey\'s (in the JSON payload) and returns zero or more content objects.
Note that if some keys from the request do not have an associated content object at the point in history defined by the \'ref\' parameter, the response will be successful, but no data will be returned for the missing keys. + * Get multiple content objects. + */ + async getMultipleContentsV2Raw(requestParameters: GetMultipleContentsV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getMultipleContentsV2.'); + } + + const queryParameters: any = {}; + + const headerParameters: runtime.HTTPHeaders = {}; + + headerParameters['Content-Type'] = 'application/json'; + + const response = await this.request({ + path: `/api/v2/trees/{ref}/contents`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'POST', + headers: headerParameters, + query: queryParameters, + body: requestParameters.getMultipleContentsRequest, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Similar to \'GET /trees/{ref}/content/{key}\', but takes multiple \'ContentKey\'s (in the JSON payload) and returns zero or more content objects. Note that if some keys from the request do not have an associated content object at the point in history defined by the \'ref\' parameter, the response will be successful, but no data will be returned for the missing keys. + * Get multiple content objects. + */ + async getMultipleContentsV2(requestParameters: GetMultipleContentsV2Request, initOverrides?: RequestInit): Promise { + const response = await this.getMultipleContentsV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * Fetch details of a reference + */ + async getReferenceByNameV2Raw(requestParameters: GetReferenceByNameV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getReferenceByNameV2.'); + } + + const queryParameters: any = {}; + + if (requestParameters.fetch !== undefined) { + queryParameters['fetch'] = requestParameters.fetch; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/trees/{ref}`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'GET', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Fetch details of a reference + */ + async getReferenceByNameV2(requestParameters: GetReferenceByNameV2Request, initOverrides?: RequestInit): Promise { + const response = await this.getReferenceByNameV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * Similar to \'GET /trees/{ref}/content/{key}\', but takes multiple \'key\' query parameters and returns zero or more content values in the same JSON structure as the \'POST /trees/{ref}/content\' endpoint. This is a convenience method for fetching a small number of content objects. It is mostly intended for human use. For automated use cases or when the number of keys is large the \'POST /trees/{ref}/content\' method is preferred. + * Get multiple content objects. 
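+     *
+     * Illustrative sketch, not part of the generated client: it assumes an already-constructed
+     * `V2BetaApi` instance named `api`; the keys are made-up examples.
+     * @example
+     * // Hypothetical usage: fetch two content objects in one round trip.
+     * const result = await api.getSeveralContents({ ref: 'main', key: ['ns.table_a', 'ns.table_b'] });
+     * for (const entry of result.contents) {
+     *     console.log(entry.key.elements.join('.'), entry.content.type);
+     * }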
+ */ + async getSeveralContentsRaw(requestParameters: GetSeveralContentsRequest, initOverrides?: RequestInit): Promise> { + if (requestParameters.ref === null || requestParameters.ref === undefined) { + throw new runtime.RequiredError('ref','Required parameter requestParameters.ref was null or undefined when calling getSeveralContents.'); + } + + const queryParameters: any = {}; + + if (requestParameters.key) { + queryParameters['key'] = requestParameters.key; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + const response = await this.request({ + path: `/api/v2/trees/{ref}/contents`.replace(`{${"ref"}}`, encodeURIComponent(String(requestParameters.ref))), + method: 'GET', + headers: headerParameters, + query: queryParameters, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Similar to \'GET /trees/{ref}/content/{key}\', but takes multiple \'key\' query parameters and returns zero or more content values in the same JSON structure as the \'POST /trees/{ref}/content\' endpoint. This is a convenience method for fetching a small number of content objects. It is mostly intended for human use. For automated use cases or when the number of keys is large the \'POST /trees/{ref}/content\' method is preferred. + * Get multiple content objects. + */ + async getSeveralContents(requestParameters: GetSeveralContentsRequest, initOverrides?: RequestInit): Promise { + const response = await this.getSeveralContentsRaw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * Merge commits referenced by the \'mergeRefName\' and \'fromHash\' parameters of the payload object into the requested \'branch\'. The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the commits being transplanted. The merge is committed if it is free from conflicts. The set of commits merged into the target branch will be all of those starting at \'fromHash\' on \'mergeRefName\' until we arrive at the common ancestor. Depending on the underlying implementation, the number of commits allowed as part of this operation may be limited. + * Merge commits from another reference onto \'branch\'. + */ + async mergeV2Raw(requestParameters: MergeV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.branch === null || requestParameters.branch === undefined) { + throw new runtime.RequiredError('branch','Required parameter requestParameters.branch was null or undefined when calling mergeV2.'); + } + + if (requestParameters.merge === null || requestParameters.merge === undefined) { + throw new runtime.RequiredError('merge','Required parameter requestParameters.merge was null or undefined when calling mergeV2.'); + } + + const queryParameters: any = {}; + + const headerParameters: runtime.HTTPHeaders = {}; + + headerParameters['Content-Type'] = 'application/json'; + + const response = await this.request({ + path: `/api/v2/trees/{branch}/history/merge`.replace(`{${"branch"}}`, encodeURIComponent(String(requestParameters.branch))), + method: 'POST', + headers: headerParameters, + query: queryParameters, + body: requestParameters.merge, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * Merge commits referenced by the \'mergeRefName\' and \'fromHash\' parameters of the payload object into the requested \'branch\'. The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the commits being transplanted. 
The merge is committed if it is free from conflicts. The set of commits merged into the target branch will be all of those starting at \'fromHash\' on \'mergeRefName\' until we arrive at the common ancestor. Depending on the underlying implementation, the number of commits allowed as part of this operation may be limited. + * Merge commits from another reference onto \'branch\'. + */ + async mergeV2(requestParameters: MergeV2Request, initOverrides?: RequestInit): Promise { + const response = await this.mergeV2Raw(requestParameters, initOverrides); + return await response.value(); + } + + /** + * This is done as an atomic operation such that only the last of the sequence is ever visible to concurrent readers/writers. The sequence to transplant must be contiguous and in order. The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the commits being transplanted. + * Transplant commits specified by the \'Transplant\' payload object onto the given \'branch\' + */ + async transplantV2Raw(requestParameters: TransplantV2Request, initOverrides?: RequestInit): Promise> { + if (requestParameters.branch === null || requestParameters.branch === undefined) { + throw new runtime.RequiredError('branch','Required parameter requestParameters.branch was null or undefined when calling transplantV2.'); + } + + if (requestParameters.transplant === null || requestParameters.transplant === undefined) { + throw new runtime.RequiredError('transplant','Required parameter requestParameters.transplant was null or undefined when calling transplantV2.'); + } + + const queryParameters: any = {}; + + const headerParameters: runtime.HTTPHeaders = {}; + + headerParameters['Content-Type'] = 'application/json'; + + const response = await this.request({ + path: `/api/v2/trees/{branch}/history/transplant`.replace(`{${"branch"}}`, encodeURIComponent(String(requestParameters.branch))), + method: 'POST', + headers: headerParameters, + query: queryParameters, + body: requestParameters.transplant, + }, initOverrides); + + return new runtime.JSONApiResponse(response); + } + + /** + * This is done as an atomic operation such that only the last of the sequence is ever visible to concurrent readers/writers. The sequence to transplant must be contiguous and in order. The state of contents specified by the \'branch\' reference will be used for detecting conflicts with the commits being transplanted. 
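+     *
+     * Illustrative sketch, not part of the generated client: it assumes an already-constructed
+     * `V2BetaApi` instance named `api`. The \'Transplant\' payload fields shown here
+     * (\'fromRefName\' plus an ordered list of commit hashes) are assumptions based on the
+     * description above; consult the generated \'Transplant\' model for the exact shape.
+     * @example
+     * // Hypothetical usage: transplant two contiguous commits from 'feature' onto 'main'.
+     * await api.transplantV2({
+     *     branch: 'main',
+     *     transplant: { fromRefName: 'feature', hashesToTransplant: ['<hash-1>', '<hash-2>'] },
+     * });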
+ * Transplant commits specified by the \'Transplant\' payload object onto the given \'branch\' + */ + async transplantV2(requestParameters: TransplantV2Request, initOverrides?: RequestInit): Promise { + const response = await this.transplantV2Raw(requestParameters, initOverrides); + return await response.value(); + } + +} diff --git a/dac/ui/src/services/nessie/client/apis/index.ts b/dac/ui/src/services/nessie/client/apis/index.ts index 69c44c00fa..d1c5b1cf1b 100644 --- a/dac/ui/src/services/nessie/client/apis/index.ts +++ b/dac/ui/src/services/nessie/client/apis/index.ts @@ -1,3 +1,6 @@ /* tslint:disable */ /* eslint-disable */ -export * from './DefaultApi'; +import { V1Api as DefaultApi } from "./V1Api"; +export * from "./V1Api"; +export * from "./V2BetaApi"; +export { DefaultApi }; //Export as DefaultApi instead of updating all references diff --git a/dac/ui/src/services/nessie/client/models/index.ts b/dac/ui/src/services/nessie/client/models/index.ts index 47a9a67140..cb22c0f10c 100644 --- a/dac/ui/src/services/nessie/client/models/index.ts +++ b/dac/ui/src/services/nessie/client/models/index.ts @@ -1,5 +1,31 @@ /* tslint:disable */ /* eslint-disable */ +/** + * + * @export + * @interface AddedContent + */ +export interface AddedContent { + /** + * + * @type {ContentKey} + * @memberof AddedContent + */ + key: ContentKey; +} +/** + * + * @export + * @interface AddedContentV2 + */ +export interface AddedContentV2 { + /** + * + * @type {ContentKeyV2} + * @memberof AddedContentV2 + */ + key: ContentKeyV2; +} /** * * @export @@ -25,6 +51,56 @@ export interface Branch { */ hash?: string; } +/** + * + * @export + * @interface BranchV1 + */ +export interface BranchV1 { + /** + * + * @type {string} + * @memberof BranchV1 + */ + name: string; + /** + * + * @type {ReferenceMetadataV1} + * @memberof BranchV1 + */ + metadata?: ReferenceMetadataV1; + /** + * + * @type {string} + * @memberof BranchV1 + */ + hash?: string; +} +/** + * + * @export + * @interface BranchV2 + */ +export interface BranchV2 { + /** + * + * @type {string} + * @memberof BranchV2 + */ + name: string; + /** + * + * @type {ReferenceMetadataV2} + * @memberof BranchV2 + */ + metadata?: ReferenceMetadataV2; + /** + * + * @type {string} + * @memberof BranchV2 + */ + hash?: string; +} /** * * @export @@ -49,12 +125,24 @@ export interface CommitMeta { * @memberof CommitMeta */ author?: string; + /** + * + * @type {Array} + * @memberof CommitMeta + */ + authors: Array; /** * * @type {string} * @memberof CommitMeta */ signedOffBy?: string; + /** + * + * @type {Array} + * @memberof CommitMeta + */ + allSignedOffBy: Array; /** * * @type {string} @@ -79,543 +167,1945 @@ export interface CommitMeta { * @memberof CommitMeta */ properties: { [key: string]: string }; + /** + * + * @type {{ [key: string]: Array; }} + * @memberof CommitMeta + */ + allProperties: { [key: string]: Array }; + /** + * + * @type {Array} + * @memberof CommitMeta + */ + parentCommitHashes: Array; } /** - * @type Content * * @export + * @interface CommitMetaV1 */ -export type Content = - | ({ type: "DELTA_LAKE_TABLE" } & DeltaLakeTable) - | ({ type: "ICEBERG_TABLE" } & IcebergTable) - | ({ type: "ICEBERG_VIEW" } & IcebergView) - | ({ type: "NAMESPACE" } & Namespace); +export interface CommitMetaV1 { + /** + * + * @type {string} + * @memberof CommitMetaV1 + */ + hash?: string; + /** + * + * @type {string} + * @memberof CommitMetaV1 + */ + committer?: string; + /** + * + * @type {string} + * @memberof CommitMetaV1 + */ + author?: string; + /** + * + * @type {string} + * 
@memberof CommitMetaV1 + */ + signedOffBy?: string; + /** + * + * @type {string} + * @memberof CommitMetaV1 + */ + message: string; + /** + * + * @type {Date} + * @memberof CommitMetaV1 + */ + commitTime?: Date; + /** + * + * @type {Date} + * @memberof CommitMetaV1 + */ + authorTime?: Date; + /** + * + * @type {{ [key: string]: string; }} + * @memberof CommitMetaV1 + */ + properties: { [key: string]: string }; +} /** * * @export - * @interface ContentKey + * @interface CommitMetaV2 */ -export interface ContentKey { +export interface CommitMetaV2 { + /** + * + * @type {string} + * @memberof CommitMetaV2 + */ + hash?: string; + /** + * + * @type {string} + * @memberof CommitMetaV2 + */ + committer?: string; /** * * @type {Array} - * @memberof ContentKey + * @memberof CommitMetaV2 */ - elements: Array; + authors: Array; + /** + * + * @type {Array} + * @memberof CommitMetaV2 + */ + allSignedOffBy: Array; + /** + * + * @type {string} + * @memberof CommitMetaV2 + */ + message: string; + /** + * + * @type {Date} + * @memberof CommitMetaV2 + */ + commitTime?: Date; + /** + * + * @type {Date} + * @memberof CommitMetaV2 + */ + authorTime?: Date; + /** + * + * @type {{ [key: string]: Array; }} + * @memberof CommitMetaV2 + */ + allProperties: { [key: string]: Array }; + /** + * + * @type {Array} + * @memberof CommitMetaV2 + */ + parentCommitHashes: Array; } /** * * @export - * @interface ContentWithKey + * @interface CommitResponse */ -export interface ContentWithKey { +export interface CommitResponse { /** * - * @type {ContentKey} - * @memberof ContentWithKey + * @type {Branch} + * @memberof CommitResponse */ - key: ContentKey | null; + targetBranch: Branch; /** * - * @type {Content} - * @memberof ContentWithKey + * @type {Array} + * @memberof CommitResponse */ - content: Content | null; + addedContents?: Array; } /** * * @export - * @interface Delete + * @interface CommitResponseV2 */ -export interface Delete { +export interface CommitResponseV2 { /** * - * @type {ContentKey} - * @memberof Delete + * @type {BranchV2} + * @memberof CommitResponseV2 + */ + targetBranch: BranchV2; + /** + * + * @type {Array} + * @memberof CommitResponseV2 */ - key: ContentKey | null; + addedContents?: Array; } /** + * @type Content * * @export - * @interface DeltaLakeTable */ -export interface DeltaLakeTable { +export type Content = + | ({ type: "DELTA_LAKE_TABLE" } & DeltaLakeTable) + | ({ type: "ICEBERG_TABLE" } & IcebergTable) + | ({ type: "ICEBERG_VIEW" } & IcebergView) + | ({ type: "NAMESPACE" } & Namespace); +/** + * + * @export + * @interface ContentKey + */ +export interface ContentKey { /** * - * @type {string} - * @memberof DeltaLakeTable + * @type {Array} + * @memberof ContentKey */ - id?: string; + elements: Array; +} +/** + * + * @export + * @enum {string} + */ +export enum ContentKeyConflict { + None = "NONE", + Unresolvable = "UNRESOLVABLE", +} +/** + * + * @export + * @enum {string} + */ +export enum ContentKeyConflictV1 { + None = "NONE", + Unresolvable = "UNRESOLVABLE", +} +/** + * + * @export + * @enum {string} + */ +export enum ContentKeyConflictV2 { + None = "NONE", + Unresolvable = "UNRESOLVABLE", +} +/** + * + * @export + * @interface ContentKeyDetails + */ +export interface ContentKeyDetails { /** * - * @type {Array} - * @memberof DeltaLakeTable + * @type {ContentKey} + * @memberof ContentKeyDetails */ - metadataLocationHistory: Array; + key?: ContentKey; /** * - * @type {Array} - * @memberof DeltaLakeTable + * @type {MergeBehavior} + * @memberof ContentKeyDetails */ - checkpointLocationHistory: 
Array; + mergeBehavior?: MergeBehavior; /** * - * @type {string} - * @memberof DeltaLakeTable + * @type {ContentKeyConflict} + * @memberof ContentKeyDetails */ - lastCheckpoint?: string; + conflictType?: ContentKeyConflict; } /** * * @export - * @interface Detached + * @interface ContentKeyDetailsV1 */ -export interface Detached { +export interface ContentKeyDetailsV1 { /** * - * @type {ReferenceMetadata} - * @memberof Detached + * @type {ContentKeyV1} + * @memberof ContentKeyDetailsV1 */ - metadata?: ReferenceMetadata; + key?: ContentKeyV1; /** * - * @type {string} - * @memberof Detached + * @type {MergeBehaviorV1} + * @memberof ContentKeyDetailsV1 */ - hash: string; + mergeBehavior?: MergeBehaviorV1; + /** + * + * @type {ContentKeyConflictV1} + * @memberof ContentKeyDetailsV1 + */ + conflictType?: ContentKeyConflictV1; } /** * * @export - * @interface DiffEntry + * @interface ContentKeyDetailsV2 */ -export interface DiffEntry { +export interface ContentKeyDetailsV2 { /** * - * @type {ContentKey} - * @memberof DiffEntry + * @type {ContentKeyV2} + * @memberof ContentKeyDetailsV2 */ - key?: ContentKey; + key?: ContentKeyV2; /** * - * @type {Content} - * @memberof DiffEntry + * @type {MergeBehaviorV2} + * @memberof ContentKeyDetailsV2 */ - from?: Content; + mergeBehavior?: MergeBehaviorV2; /** * - * @type {Content} - * @memberof DiffEntry + * @type {ContentKeyConflictV2} + * @memberof ContentKeyDetailsV2 */ - to?: Content; + conflictType?: ContentKeyConflictV2; } /** * * @export - * @interface DiffResponse + * @interface ContentKeyV1 */ -export interface DiffResponse { +export interface ContentKeyV1 { /** * - * @type {Array} - * @memberof DiffResponse + * @type {Array} + * @memberof ContentKeyV1 */ - diffs?: Array; + elements: Array; +} +/** + * + * @export + * @interface ContentKeyV2 + */ +export interface ContentKeyV2 { + /** + * + * @type {Array} + * @memberof ContentKeyV2 + */ + elements: Array; +} +/** + * + * @export + * @interface ContentResponseV2 + */ +export interface ContentResponseV2 { + /** + * + * @type {ContentV2} + * @memberof ContentResponseV2 + */ + content: ContentV2; + /** + * + * @type {ReferenceV2} + * @memberof ContentResponseV2 + */ + effectiveReference?: ReferenceV2; +} +/** + * @type ContentV1 + * + * @export + */ +export type ContentV1 = + | ({ type: "DELTA_LAKE_TABLE" } & DeltaLakeTableV1) + | ({ type: "ICEBERG_TABLE" } & IcebergTableV1) + | ({ type: "ICEBERG_VIEW" } & IcebergViewV1) + | ({ type: "NAMESPACE" } & NamespaceV1); +/** + * @type ContentV2 + * + * @export + */ +export type ContentV2 = + | ({ type: "DELTA_LAKE_TABLE" } & DeltaLakeTableV2) + | ({ type: "ICEBERG_TABLE" } & IcebergTableV2) + | ({ type: "ICEBERG_VIEW" } & IcebergViewV2) + | ({ type: "NAMESPACE" } & NamespaceV2); +/** + * + * @export + * @interface ContentWithKey + */ +export interface ContentWithKey { + /** + * + * @type {ContentKey} + * @memberof ContentWithKey + */ + key: ContentKey; + /** + * + * @type {Content} + * @memberof ContentWithKey + */ + content: Content; +} +/** + * + * @export + * @interface ContentWithKeyV1 + */ +export interface ContentWithKeyV1 { + /** + * + * @type {ContentKeyV1} + * @memberof ContentWithKeyV1 + */ + key: ContentKeyV1; + /** + * + * @type {ContentV1} + * @memberof ContentWithKeyV1 + */ + content: ContentV1; +} +/** + * + * @export + * @interface ContentWithKeyV2 + */ +export interface ContentWithKeyV2 { + /** + * + * @type {ContentKeyV2} + * @memberof ContentWithKeyV2 + */ + key: ContentKeyV2; + /** + * + * @type {ContentV2} + * @memberof ContentWithKeyV2 
+ */ + content: ContentV2; +} +/** + * + * @export + * @interface Delete + */ +export interface Delete { + /** + * + * @type {ContentKey} + * @memberof Delete + */ + key: ContentKey; +} +/** + * + * @export + * @interface DeleteV1 + */ +export interface DeleteV1 { + /** + * + * @type {ContentKeyV1} + * @memberof DeleteV1 + */ + key: ContentKeyV1; +} +/** + * + * @export + * @interface DeleteV2 + */ +export interface DeleteV2 { + /** + * + * @type {ContentKeyV2} + * @memberof DeleteV2 + */ + key: ContentKeyV2; +} +/** + * + * @export + * @interface DeltaLakeTable + */ +export interface DeltaLakeTable { + /** + * + * @type {string} + * @memberof DeltaLakeTable + */ + id?: string; + /** + * + * @type {Array} + * @memberof DeltaLakeTable + */ + metadataLocationHistory: Array; + /** + * + * @type {Array} + * @memberof DeltaLakeTable + */ + checkpointLocationHistory: Array; + /** + * + * @type {string} + * @memberof DeltaLakeTable + */ + lastCheckpoint?: string; +} +/** + * + * @export + * @interface DeltaLakeTableV1 + */ +export interface DeltaLakeTableV1 { + /** + * + * @type {string} + * @memberof DeltaLakeTableV1 + */ + id?: string; + /** + * + * @type {Array} + * @memberof DeltaLakeTableV1 + */ + metadataLocationHistory: Array; + /** + * + * @type {Array} + * @memberof DeltaLakeTableV1 + */ + checkpointLocationHistory: Array; + /** + * + * @type {string} + * @memberof DeltaLakeTableV1 + */ + lastCheckpoint?: string; +} +/** + * + * @export + * @interface DeltaLakeTableV2 + */ +export interface DeltaLakeTableV2 { + /** + * + * @type {string} + * @memberof DeltaLakeTableV2 + */ + id?: string; + /** + * + * @type {Array} + * @memberof DeltaLakeTableV2 + */ + metadataLocationHistory: Array; + /** + * + * @type {Array} + * @memberof DeltaLakeTableV2 + */ + checkpointLocationHistory: Array; + /** + * + * @type {string} + * @memberof DeltaLakeTableV2 + */ + lastCheckpoint?: string; +} +/** + * + * @export + * @interface Detached + */ +export interface Detached { + /** + * + * @type {string} + * @memberof Detached + */ + hash: string; + /** + * + * @type {ReferenceMetadata} + * @memberof Detached + */ + metadata?: ReferenceMetadata; +} +/** + * + * @export + * @interface DetachedV1 + */ +export interface DetachedV1 { + /** + * + * @type {string} + * @memberof DetachedV1 + */ + hash: string; + /** + * + * @type {ReferenceMetadataV1} + * @memberof DetachedV1 + */ + metadata?: ReferenceMetadataV1; +} +/** + * + * @export + * @interface DetachedV2 + */ +export interface DetachedV2 { + /** + * + * @type {string} + * @memberof DetachedV2 + */ + hash: string; + /** + * + * @type {ReferenceMetadataV2} + * @memberof DetachedV2 + */ + metadata?: ReferenceMetadataV2; +} +/** + * + * @export + * @interface DiffEntry + */ +export interface DiffEntry { + /** + * + * @type {ContentKey} + * @memberof DiffEntry + */ + key?: ContentKey; + /** + * + * @type {Content} + * @memberof DiffEntry + */ + from?: Content; + /** + * + * @type {Content} + * @memberof DiffEntry + */ + to?: Content; +} +/** + * + * @export + * @interface DiffEntryV1 + */ +export interface DiffEntryV1 { + /** + * + * @type {ContentKeyV1} + * @memberof DiffEntryV1 + */ + key?: ContentKeyV1; + /** + * + * @type {ContentV1} + * @memberof DiffEntryV1 + */ + from?: ContentV1; + /** + * + * @type {ContentV1} + * @memberof DiffEntryV1 + */ + to?: ContentV1; +} +/** + * + * @export + * @interface DiffEntryV2 + */ +export interface DiffEntryV2 { + /** + * + * @type {ContentKeyV2} + * @memberof DiffEntryV2 + */ + key?: ContentKeyV2; + /** + * + * @type 
{ContentV2} + * @memberof DiffEntryV2 + */ + from?: ContentV2; + /** + * + * @type {ContentV2} + * @memberof DiffEntryV2 + */ + to?: ContentV2; +} +/** + * + * @export + * @interface DiffResponse + */ +export interface DiffResponse { + /** + * + * @type {boolean} + * @memberof DiffResponse + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof DiffResponse + */ + token?: string; + /** + * + * @type {Array} + * @memberof DiffResponse + */ + diffs?: Array; + /** + * + * @type {Reference} + * @memberof DiffResponse + */ + effectiveFromReference?: Reference; + /** + * + * @type {Reference} + * @memberof DiffResponse + */ + effectiveToReference?: Reference; +} +/** + * + * @export + * @interface DiffResponseV1 + */ +export interface DiffResponseV1 { + /** + * + * @type {boolean} + * @memberof DiffResponseV1 + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof DiffResponseV1 + */ + token?: string; + /** + * + * @type {Array} + * @memberof DiffResponseV1 + */ + diffs?: Array; +} +/** + * + * @export + * @interface DiffResponseV2 + */ +export interface DiffResponseV2 { + /** + * + * @type {boolean} + * @memberof DiffResponseV2 + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof DiffResponseV2 + */ + token?: string; + /** + * + * @type {Array} + * @memberof DiffResponseV2 + */ + diffs?: Array; + /** + * + * @type {ReferenceV2} + * @memberof DiffResponseV2 + */ + effectiveFromReference?: ReferenceV2; + /** + * + * @type {ReferenceV2} + * @memberof DiffResponseV2 + */ + effectiveToReference?: ReferenceV2; +} +/** + * + * @export + * @interface EntriesResponseV1 + */ +export interface EntriesResponseV1 { + /** + * + * @type {boolean} + * @memberof EntriesResponseV1 + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof EntriesResponseV1 + */ + token?: string; + /** + * + * @type {Array} + * @memberof EntriesResponseV1 + */ + entries: Array; +} +/** + * + * @export + * @interface EntriesResponseV2 + */ +export interface EntriesResponseV2 { + /** + * + * @type {boolean} + * @memberof EntriesResponseV2 + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof EntriesResponseV2 + */ + token?: string; + /** + * + * @type {Array} + * @memberof EntriesResponseV2 + */ + entries: Array; + /** + * + * @type {ReferenceV2} + * @memberof EntriesResponseV2 + */ + effectiveReference?: ReferenceV2; +} +/** + * + * @export + * @interface EntryV1 + */ +export interface EntryV1 { + /** + * Declares the type of a Nessie content object, which is currently one of ICEBERG_TABLE, DELTA_LAKE_TABLE, ICEBERG_VIEW or NAMESPACE, which are the discriminator mapping values of the 'Content' type. + * @type {string} + * @memberof EntryV1 + */ + type: string; + /** + * + * @type {ContentKeyV1} + * @memberof EntryV1 + */ + name: ContentKeyV1; +} +/** + * + * @export + * @interface EntryV2 + */ +export interface EntryV2 { + /** + * Declares the type of a Nessie content object, which is currently one of ICEBERG_TABLE, DELTA_LAKE_TABLE, ICEBERG_VIEW or NAMESPACE, which are the discriminator mapping values of the 'Content' type. 
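+   *
+   * Because \'ContentV2\' is a discriminated union on its \'type\' field, callers can narrow a
+   * value with a switch; the sketch below is illustrative, not generated code.
+   * @example
+   * // Hypothetical usage: narrow a ContentV2 value via its discriminator.
+   * function describe(content: ContentV2): string {
+   *   switch (content.type) {
+   *     case 'ICEBERG_TABLE':
+   *       return `Iceberg table at ${content.metadataLocation}`;
+   *     case 'NAMESPACE':
+   *       return `Namespace ${content.elements.join('.')}`;
+   *     default:
+   *       return content.type;
+   *   }
+   * }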
+ * @type {string} + * @memberof EntryV2 + */ + type: string; + /** + * + * @type {ContentKeyV2} + * @memberof EntryV2 + */ + name: ContentKeyV2; + /** + * + * @type {string} + * @memberof EntryV2 + */ + contentId?: string; + /** + * + * @type {ContentV2} + * @memberof EntryV2 + */ + content?: ContentV2; +} +/** + * + * @export + * @enum {string} + */ +export enum FetchOption { + Minimal = "MINIMAL", + All = "ALL", +} +/** + * + * @export + * @interface GenericMetadata + */ +export interface GenericMetadata { + /** + * + * @type {string} + * @memberof GenericMetadata + */ + variant: string; + /** + * + * @type {JsonNode} + * @memberof GenericMetadata + */ + metadata?: JsonNode; +} +/** + * + * @export + * @interface GenericMetadataV1 + */ +export interface GenericMetadataV1 { + /** + * + * @type {string} + * @memberof GenericMetadataV1 + */ + variant: string; + /** + * + * @type {JsonNodeV1} + * @memberof GenericMetadataV1 + */ + metadata?: JsonNodeV1; +} +/** + * + * @export + * @interface GenericMetadataV2 + */ +export interface GenericMetadataV2 { + /** + * + * @type {string} + * @memberof GenericMetadataV2 + */ + variant: string; + /** + * + * @type {JsonNodeV2} + * @memberof GenericMetadataV2 + */ + metadata?: JsonNodeV2; +} +/** + * + * @export + * @interface GetMultipleContentsRequest + */ +export interface GetMultipleContentsRequest { + /** + * + * @type {Array} + * @memberof GetMultipleContentsRequest + */ + requestedKeys: Array; +} +/** + * + * @export + * @interface GetMultipleContentsResponse + */ +export interface GetMultipleContentsResponse { + /** + * + * @type {Array} + * @memberof GetMultipleContentsResponse + */ + contents: Array; + /** + * + * @type {Reference} + * @memberof GetMultipleContentsResponse + */ + effectiveReference?: Reference; +} +/** + * + * @export + * @interface GetMultipleContentsResponseV1 + */ +export interface GetMultipleContentsResponseV1 { + /** + * + * @type {Array} + * @memberof GetMultipleContentsResponseV1 + */ + contents: Array; +} +/** + * + * @export + * @interface GetMultipleContentsResponseV2 + */ +export interface GetMultipleContentsResponseV2 { + /** + * + * @type {Array} + * @memberof GetMultipleContentsResponseV2 + */ + contents: Array; + /** + * + * @type {ReferenceV2} + * @memberof GetMultipleContentsResponseV2 + */ + effectiveReference?: ReferenceV2; +} +/** + * + * @export + * @interface GetNamespacesResponseV1 + */ +export interface GetNamespacesResponseV1 { + /** + * + * @type {Array} + * @memberof GetNamespacesResponseV1 + */ + namespaces: Array; +} +/** + * Represents the state of an Iceberg table in Nessie. An Iceberg table is globally identified via its unique 'Content.id'. + * + * A Nessie commit-operation, performed via 'TreeApi.commitMultipleOperations',for Iceberg consists of a 'Operation.Put' with an 'IcebergTable' as in the 'content' field and the previous value of 'IcebergTable' in the 'expectedContent' field. 
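+ *
+ * Illustrative sketch, not generated code: the exact fields of the \'Put\' operation are
+ * assumptions based on the description above (check the generated \'Operation\' model), and
+ * \'metadataLocation\' is a made-up path.
+ * @example
+ * // Hypothetical 'Put' operation payload updating one Iceberg table pointer.
+ * const put = {
+ *   type: 'PUT',
+ *   key: { elements: ['ns', 'table_a'] },
+ *   content: { type: 'ICEBERG_TABLE', metadataLocation: 's3://bucket/metadata/00001.metadata.json' },
+ * };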
+ * @export + * @interface IcebergTable + */ +export interface IcebergTable { + /** + * + * @type {string} + * @memberof IcebergTable + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergTable + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergTable + */ + snapshotId?: number; + /** + * + * @type {number} + * @memberof IcebergTable + */ + schemaId?: number; + /** + * + * @type {number} + * @memberof IcebergTable + */ + specId?: number; + /** + * + * @type {number} + * @memberof IcebergTable + */ + sortOrderId?: number; + /** + * + * @type {GenericMetadata} + * @memberof IcebergTable + */ + metadata?: GenericMetadata; +} +/** + * Represents the state of an Iceberg table in Nessie. An Iceberg table is globally identified via its unique 'Content.id'. + * + * A Nessie commit-operation, performed via 'TreeApi.commitMultipleOperations',for Iceberg consists of a 'Operation.Put' with an 'IcebergTable' as in the 'content' field and the previous value of 'IcebergTable' in the 'expectedContent' field. + * @export + * @interface IcebergTableV1 + */ +export interface IcebergTableV1 { + /** + * + * @type {string} + * @memberof IcebergTableV1 + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergTableV1 + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergTableV1 + */ + snapshotId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV1 + */ + schemaId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV1 + */ + specId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV1 + */ + sortOrderId?: number; + /** + * + * @type {GenericMetadataV1} + * @memberof IcebergTableV1 + */ + metadata?: GenericMetadataV1; +} +/** + * Represents the state of an Iceberg table in Nessie. An Iceberg table is globally identified via its unique 'Content.id'. + * + * A Nessie commit-operation, performed via 'TreeApi.commitMultipleOperations',for Iceberg consists of a 'Operation.Put' with an 'IcebergTable' as in the 'content' field and the previous value of 'IcebergTable' in the 'expectedContent' field. 
+ * @export + * @interface IcebergTableV2 + */ +export interface IcebergTableV2 { + /** + * + * @type {string} + * @memberof IcebergTableV2 + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergTableV2 + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergTableV2 + */ + snapshotId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV2 + */ + schemaId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV2 + */ + specId?: number; + /** + * + * @type {number} + * @memberof IcebergTableV2 + */ + sortOrderId?: number; + /** + * + * @type {GenericMetadataV2} + * @memberof IcebergTableV2 + */ + metadata?: GenericMetadataV2; +} +/** + * + * @export + * @interface IcebergView + */ +export interface IcebergView { + /** + * + * @type {string} + * @memberof IcebergView + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergView + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergView + */ + versionId?: number; + /** + * + * @type {number} + * @memberof IcebergView + */ + schemaId?: number; + /** + * + * @type {string} + * @memberof IcebergView + */ + sqlText: string; + /** + * + * @type {string} + * @memberof IcebergView + */ + dialect?: string; + /** + * + * @type {GenericMetadata} + * @memberof IcebergView + */ + metadata?: GenericMetadata; +} +/** + * + * @export + * @interface IcebergViewV1 + */ +export interface IcebergViewV1 { + /** + * + * @type {string} + * @memberof IcebergViewV1 + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergViewV1 + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergViewV1 + */ + versionId?: number; + /** + * + * @type {number} + * @memberof IcebergViewV1 + */ + schemaId?: number; + /** + * + * @type {string} + * @memberof IcebergViewV1 + */ + sqlText: string; + /** + * + * @type {string} + * @memberof IcebergViewV1 + */ + dialect?: string; + /** + * + * @type {GenericMetadataV1} + * @memberof IcebergViewV1 + */ + metadata?: GenericMetadataV1; +} +/** + * + * @export + * @interface IcebergViewV2 + */ +export interface IcebergViewV2 { + /** + * + * @type {string} + * @memberof IcebergViewV2 + */ + id?: string; + /** + * + * @type {string} + * @memberof IcebergViewV2 + */ + metadataLocation: string; + /** + * + * @type {number} + * @memberof IcebergViewV2 + */ + versionId?: number; + /** + * + * @type {number} + * @memberof IcebergViewV2 + */ + schemaId?: number; + /** + * + * @type {string} + * @memberof IcebergViewV2 + */ + sqlText: string; + /** + * + * @type {string} + * @memberof IcebergViewV2 + */ + dialect?: string; + /** + * + * @type {GenericMetadataV2} + * @memberof IcebergViewV2 + */ + metadata?: GenericMetadataV2; +} +/** + * + * @export + * @interface LogEntry + */ +export interface LogEntry { + /** + * + * @type {CommitMeta} + * @memberof LogEntry + */ + commitMeta: CommitMeta; + /** + * + * @type {Array} + * @memberof LogEntry + */ + additionalParents?: Array; + /** + * + * @type {string} + * @memberof LogEntry + */ + parentCommitHash?: string; + /** + * + * @type {Array} + * @memberof LogEntry + */ + operations?: Array; +} +/** + * + * @export + * @interface LogEntryV1 + */ +export interface LogEntryV1 { + /** + * + * @type {CommitMetaV1} + * @memberof LogEntryV1 + */ + commitMeta: CommitMetaV1; + /** + * + * @type {Array} + * @memberof LogEntryV1 + */ + additionalParents?: Array; + /** + * + * @type {string} + * @memberof LogEntryV1 + */ + parentCommitHash?: string; + /** + * + * @type 
{Array<OperationV1>}
+   * @memberof LogEntryV1
+   */
+  operations?: Array<OperationV1>;
+}
+/**
+ *
+ * @export
+ * @interface LogEntryV2
+ */
+export interface LogEntryV2 {
+  /**
+   *
+   * @type {CommitMetaV2}
+   * @memberof LogEntryV2
+   */
+  commitMeta: CommitMetaV2;
+  /**
+   *
+   * @type {string}
+   * @memberof LogEntryV2
+   */
+  parentCommitHash?: string;
+  /**
+   *
+   * @type {Array<OperationV2>}
+   * @memberof LogEntryV2
+   */
+  operations?: Array<OperationV2>;
+}
+/**
+ *
+ * @export
+ * @interface LogResponse
+ */
+export interface LogResponse {
+  /**
+   *
+   * @type {boolean}
+   * @memberof LogResponse
+   */
+  hasMore?: boolean;
+  /**
+   *
+   * @type {string}
+   * @memberof LogResponse
+   */
+  token?: string;
+  /**
+   *
+   * @type {Array<LogEntry>}
+   * @memberof LogResponse
+   */
+  logEntries: Array<LogEntry>;
+}
+/**
+ *
+ * @export
+ * @interface LogResponseV1
+ */
+export interface LogResponseV1 {
+  /**
+   *
+   * @type {boolean}
+   * @memberof LogResponseV1
+   */
+  hasMore?: boolean;
+  /**
+   *
+   * @type {string}
+   * @memberof LogResponseV1
+   */
+  token?: string;
+  /**
+   *
+   * @type {Array<LogEntryV1>}
+   * @memberof LogResponseV1
+   */
+  logEntries: Array<LogEntryV1>;
+}
+/**
+ *
+ * @export
+ * @interface LogResponseV2
+ */
+export interface LogResponseV2 {
+  /**
+   *
+   * @type {boolean}
+   * @memberof LogResponseV2
+   */
+  hasMore?: boolean;
+  /**
+   *
+   * @type {string}
+   * @memberof LogResponseV2
+   */
+  token?: string;
+  /**
+   *
+   * @type {Array<LogEntryV2>}
+   * @memberof LogResponseV2
+   */
+  logEntries: Array<LogEntryV2>;
+}
+/**
+ *
+ * @export
+ * @interface Merge
+ */
+export interface Merge {
+  /**
+   * Optional commit message for this merge request
+   *
+   * If not set, the server will generate a commit message automatically using metadata from the
+   * merged commits.
+   * @type {string}
+   * @memberof Merge
+   */
+  message?: string;
+  /**
+   * The hash of the last commit to merge.
+   *
+   * This commit must be present in the history on 'fromRefName' before the first common parent with respect to the target branch.
+   * @type {string}
+   * @memberof Merge
+   */
+  fromHash: string;
+  /**
+   * The name of the reference that contains the 'source' commits for the requested merge or transplant operation.
+   * @type {string}
+   * @memberof Merge
+   */
+  fromRefName: string;
+  /**
+   * Specific merge behaviour requests by content key.
+   *
+   * The default is set by the `defaultKeyMergeMode` parameter.
+   * @type {Array<MergeKeyBehavior>}
+   * @memberof Merge
+   */
+  keyMergeModes?: Array<MergeKeyBehavior>;
+  /**
+   * The default merge mode. If not set, `NORMAL` is assumed.
+   *
+   * This setting applies to keys that are not explicitly mentioned in the `keyMergeModes` property.
+   * @type {MergeBehavior}
+   * @memberof Merge
+   */
+  defaultKeyMergeMode?: MergeBehavior;
+  /**
+   * When set to 'true', instructs the server to validate the request but to avoid committing any changes.
+   * @type {boolean}
+   * @memberof Merge
+   */
+  dryRun?: boolean;
+  /**
+   * Whether to provide optional response data.
+   * @type {boolean}
+   * @memberof Merge
+   */
+  fetchAdditionalInfo?: boolean;
+  /**
+   * When set to 'true', instructs the server to produce normal (non-error) responses in case a conflict is detected and report conflict details in the response payload.
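+   *
+   * Illustrative sketch, not generated code: it assumes an already-constructed `V2BetaApi`
+   * instance named `api` and that the request's \'merge\' field accepts this \'Merge\' shape;
+   * the branch names, hash and key are made-up examples.
+   * @example
+   * // Hypothetical dry-run merge of 'feature' into 'main' with one per-key override.
+   * const merge: Merge = {
+   *   fromRefName: 'feature',
+   *   fromHash: '1122334455667788',
+   *   defaultKeyMergeMode: MergeBehavior.Normal,
+   *   keyMergeModes: [{ key: { elements: ['ns', 'scratch_table'] }, mergeBehavior: MergeBehavior.Drop }],
+   *   dryRun: true,
+   *   returnConflictAsResult: true,
+   * };
+   * const result = await api.mergeV2({ branch: 'main', merge });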
+ * @type {boolean} + * @memberof Merge + */ + returnConflictAsResult?: boolean; } /** * * @export - * @interface EntriesResponse + * @interface Merge1 */ -export interface EntriesResponse { +export interface Merge1 { /** * - * @type {boolean} - * @memberof EntriesResponse + * @type {string} + * @memberof Merge1 */ - hasMore?: boolean; + fromRefName: string; /** * * @type {string} - * @memberof EntriesResponse + * @memberof Merge1 */ - token?: string; + fromHash: string; /** * - * @type {Array} - * @memberof EntriesResponse + * @type {Array} + * @memberof Merge1 */ - entries: Array; -} -/** - * - * @export - * @interface Entry - */ -export interface Entry { + keyMergeModes?: Array; /** * - * @type {Type} - * @memberof Entry + * @type {MergeBehavior} + * @memberof Merge1 */ - type: Type | null; + defaultKeyMergeMode?: MergeBehavior; /** * - * @type {ContentKey} - * @memberof Entry + * @type {boolean} + * @memberof Merge1 + */ + dryRun?: boolean; + /** + * + * @type {boolean} + * @memberof Merge1 + */ + fetchAdditionalInfo?: boolean; + /** + * + * @type {boolean} + * @memberof Merge1 */ - name: ContentKey | null; + returnConflictAsResult?: boolean; } /** * * @export * @enum {string} */ -export enum FetchOption { - Minimal = "MINIMAL", - All = "ALL", +export enum MergeBehavior { + Normal = "NORMAL", + Force = "FORCE", + Drop = "DROP", } /** * * @export - * @interface GenericMetadata + * @enum {string} */ -export interface GenericMetadata { - /** - * - * @type {string} - * @memberof GenericMetadata - */ - variant: string; - /** - * - * @type {JsonNode & object} - * @memberof GenericMetadata - */ - metadata?: object | null; +export enum MergeBehaviorV1 { + Normal = "NORMAL", + Force = "FORCE", + Drop = "DROP", } /** * * @export - * @interface GetMultipleContentsRequest + * @enum {string} */ -export interface GetMultipleContentsRequest { - /** - * - * @type {Array} - * @memberof GetMultipleContentsRequest - */ - requestedKeys: Array; +export enum MergeBehaviorV2 { + Normal = "NORMAL", + Force = "FORCE", + Drop = "DROP", } /** * * @export - * @interface GetMultipleContentsResponse + * @interface MergeKeyBehavior */ -export interface GetMultipleContentsResponse { +export interface MergeKeyBehavior { /** * - * @type {Array} - * @memberof GetMultipleContentsResponse + * @type {ContentKey} + * @memberof MergeKeyBehavior */ - contents: Array; -} -/** - * - * @export - * @interface GetNamespacesResponse - */ -export interface GetNamespacesResponse { + key?: ContentKey; /** * - * @type {Array} - * @memberof GetNamespacesResponse + * @type {MergeBehavior} + * @memberof MergeKeyBehavior */ - namespaces: Array; + mergeBehavior?: MergeBehavior; } /** - * Represents the state of an Iceberg table in Nessie. An Iceberg table is globally identified via its unique 'Content.id'. * - * A Nessie commit-operation, performed via 'TreeApi.commitMultipleOperations',for Iceberg consists of a 'Operation.Put' with an 'IcebergTable' as in the 'content' field and the previous value of 'IcebergTable' in the 'expectedContent' field. 
* @export - * @interface IcebergTable + * @interface MergeResponse */ -export interface IcebergTable { +export interface MergeResponse { /** * * @type {string} - * @memberof IcebergTable + * @memberof MergeResponse */ - id?: string; + resultantTargetHash?: string; /** * * @type {string} - * @memberof IcebergTable - */ - metadataLocation: string; - /** - * - * @type {number} - * @memberof IcebergTable + * @memberof MergeResponse */ - snapshotId?: number; + commonAncestor?: string; /** * - * @type {number} - * @memberof IcebergTable + * @type {string} + * @memberof MergeResponse */ - schemaId?: number; + targetBranch?: string; /** * - * @type {number} - * @memberof IcebergTable + * @type {string} + * @memberof MergeResponse */ - specId?: number; + effectiveTargetHash?: string; /** * - * @type {number} - * @memberof IcebergTable + * @type {string} + * @memberof MergeResponse */ - sortOrderId?: number; + expectedHash?: string; /** * - * @type {GenericMetadata} - * @memberof IcebergTable + * @type {Array} + * @memberof MergeResponse */ - metadata?: GenericMetadata; + details?: Array; } /** * * @export - * @interface IcebergView + * @interface MergeResponseV1 */ -export interface IcebergView { +export interface MergeResponseV1 { /** * * @type {string} - * @memberof IcebergView + * @memberof MergeResponseV1 */ - id?: string; + resultantTargetHash?: string; /** * * @type {string} - * @memberof IcebergView + * @memberof MergeResponseV1 */ - metadataLocation: string; - /** - * - * @type {number} - * @memberof IcebergView - */ - versionId?: number; + commonAncestor?: string; /** * - * @type {number} - * @memberof IcebergView + * @type {string} + * @memberof MergeResponseV1 */ - schemaId?: number; + targetBranch?: string; /** * * @type {string} - * @memberof IcebergView + * @memberof MergeResponseV1 */ - sqlText: string; + effectiveTargetHash?: string; /** * * @type {string} - * @memberof IcebergView + * @memberof MergeResponseV1 */ - dialect?: string; + expectedHash?: string; /** * - * @type {GenericMetadata} - * @memberof IcebergView + * @type {Array} + * @memberof MergeResponseV1 */ - metadata?: GenericMetadata; + details?: Array; } /** * * @export - * @interface LogEntry + * @interface MergeResponseV2 */ -export interface LogEntry { +export interface MergeResponseV2 { /** * - * @type {CommitMeta} - * @memberof LogEntry + * @type {string} + * @memberof MergeResponseV2 */ - commitMeta: CommitMeta | null; + resultantTargetHash?: string; /** * * @type {string} - * @memberof LogEntry + * @memberof MergeResponseV2 */ - parentCommitHash?: string; + commonAncestor?: string; /** * - * @type {Array} - * @memberof LogEntry + * @type {string} + * @memberof MergeResponseV2 */ - operations?: Array; -} -/** - * - * @export - * @interface LogResponse - */ -export interface LogResponse { + targetBranch?: string; /** * - * @type {boolean} - * @memberof LogResponse + * @type {string} + * @memberof MergeResponseV2 */ - hasMore?: boolean; + effectiveTargetHash?: string; /** * * @type {string} - * @memberof LogResponse + * @memberof MergeResponseV2 */ - token?: string; + expectedHash?: string; /** * - * @type {Array} - * @memberof LogResponse + * @type {Array} + * @memberof MergeResponseV2 */ - logEntries: Array; + details?: Array; } /** * * @export - * @interface Merge + * @interface Namespace */ -export interface Merge { - /** - * - * @type {string} - * @memberof Merge - */ - fromRefName: string; +export interface Namespace { /** * * @type {string} - * @memberof Merge + * @memberof Namespace */ - fromHash: string; + 
id?: string; /** * - * @type {Array} - * @memberof Merge + * @type {Array} + * @memberof Namespace */ - keyMergeModes?: Array; + elements: Array; /** * - * @type {MergeBehavior} - * @memberof Merge + * @type {{ [key: string]: string; }} + * @memberof Namespace */ - defaultKeyMergeMode?: MergeBehavior; + properties: { [key: string]: string }; } /** * * @export - * @enum {string} + * @interface NamespaceUpdate */ -export enum MergeBehavior { - Normal = "NORMAL", - Force = "FORCE", - Drop = "DROP", +export interface NamespaceUpdate { + /** + * + * @type {{ [key: string]: string; }} + * @memberof NamespaceUpdate + */ + propertyUpdates?: { [key: string]: string }; + /** + * + * @type {Set} + * @memberof NamespaceUpdate + */ + propertyRemovals?: Set; } /** * * @export - * @interface MergeKeyBehavior + * @interface NamespaceV1 */ -export interface MergeKeyBehavior { +export interface NamespaceV1 { /** * - * @type {ContentKey} - * @memberof MergeKeyBehavior + * @type {string} + * @memberof NamespaceV1 */ - key?: ContentKey; + id?: string; /** * - * @type {MergeBehavior} - * @memberof MergeKeyBehavior + * @type {Array} + * @memberof NamespaceV1 */ - mergeBehavior?: MergeBehavior; + elements: Array; + /** + * + * @type {{ [key: string]: string; }} + * @memberof NamespaceV1 + */ + properties: { [key: string]: string }; } /** * * @export - * @interface Namespace + * @interface NamespaceV2 */ -export interface Namespace { +export interface NamespaceV2 { /** * * @type {string} - * @memberof Namespace + * @memberof NamespaceV2 */ id?: string; /** * * @type {Array} - * @memberof Namespace + * @memberof NamespaceV2 */ elements: Array; /** * * @type {{ [key: string]: string; }} - * @memberof Namespace + * @memberof NamespaceV2 */ properties: { [key: string]: string }; } /** * * @export - * @interface NamespaceUpdate + * @interface NessieConfiguration */ -export interface NamespaceUpdate { +export interface NessieConfiguration { /** * - * @type {{ [key: string]: string; }} - * @memberof NamespaceUpdate + * @type {string} + * @memberof NessieConfiguration */ - propertyUpdates?: { [key: string]: string }; + defaultBranch?: string; /** * - * @type {Set} - * @memberof NamespaceUpdate + * @type {number} + * @memberof NessieConfiguration */ - propertyRemovals?: Set; + maxSupportedApiVersion?: number; } /** * * @export - * @interface NessieConfiguration + * @interface NessieConfigurationV1 */ -export interface NessieConfiguration { +export interface NessieConfigurationV1 { /** * * @type {string} - * @memberof NessieConfiguration + * @memberof NessieConfigurationV1 */ defaultBranch?: string; /** * * @type {number} - * @memberof NessieConfiguration + * @memberof NessieConfigurationV1 */ maxSupportedApiVersion?: number; } /** - * @type Operation + * @type Operation + * + * @export + */ +export type Operation = + | ({ type: "DELETE" } & Delete) + | ({ type: "PUT" } & Put) + | ({ type: "UNCHANGED" } & Unchanged); +/** + * @type OperationV1 + * + * @export + */ +export type OperationV1 = + | ({ type: "DELETE" } & DeleteV1) + | ({ type: "PUT" } & PutV1) + | ({ type: "UNCHANGED" } & UnchangedV1); +/** + * @type OperationV2 * * @export */ -export type Operation = - | ({ type: "DELETE" } & Delete) - | ({ type: "PUT" } & Put) - | ({ type: "UNCHANGED" } & Unchanged); +export type OperationV2 = + | ({ type: "DELETE" } & DeleteV2) + | ({ type: "PUT" } & PutV2) + | ({ type: "UNCHANGED" } & UnchangedV2); /** * * @export @@ -627,7 +2117,7 @@ export interface Operations { * @type {CommitMeta} * @memberof Operations */ - commitMeta: 
CommitMeta | null; + commitMeta: CommitMeta; /** * * @type {Array} @@ -646,13 +2136,13 @@ export interface Put { * @type {ContentKey} * @memberof Put */ - key: ContentKey | null; + key: ContentKey; /** * * @type {Content} * @memberof Put */ - content: Content | null; + content: Content; /** * * @type {Content} @@ -661,94 +2151,158 @@ export interface Put { expectedContent?: Content; } /** - * + * Add or replace (put) a 'Content' object for a 'ContentKey'. If the actual table type tracks the 'global state' of individual tables (Iceberg as of today), every 'Put'-operation must contain a non-null value for 'expectedContent'. * @export - * @interface RefLogResponse + * @interface PutV1 */ -export interface RefLogResponse { +export interface PutV1 { /** * - * @type {boolean} - * @memberof RefLogResponse + * @type {ContentKeyV1} + * @memberof PutV1 */ - hasMore?: boolean; + key: ContentKeyV1; /** * - * @type {string} - * @memberof RefLogResponse + * @type {ContentV1} + * @memberof PutV1 */ - token?: string; + content: ContentV1; /** * - * @type {Array} - * @memberof RefLogResponse + * @type {ContentV1} + * @memberof PutV1 */ - logEntries: Array; + expectedContent?: ContentV1; } /** - * + * Add or replace (put) a 'Content' object for a 'ContentKey'. If the actual table type tracks the 'global state' of individual tables (Iceberg as of today), every 'Put'-operation must contain a non-null value for 'expectedContent'. * @export - * @interface RefLogResponseEntry + * @interface PutV2 */ -export interface RefLogResponseEntry { +export interface PutV2 { /** * - * @type {string} - * @memberof RefLogResponseEntry + * @type {ContentKeyV2} + * @memberof PutV2 */ - refLogId: string; + key: ContentKeyV2; /** * - * @type {string} - * @memberof RefLogResponseEntry + * @type {ContentV2} + * @memberof PutV2 */ - refName: string; + content: ContentV2; /** * - * @type {string} - * @memberof RefLogResponseEntry + * @type {ContentV2} + * @memberof PutV2 */ - refType: string; + expectedContent?: ContentV2; +} +/** + * @type Reference + * + * @export + */ +export type Reference = + | ({ type: "BRANCH" } & Branch) + | ({ type: "DETACHED" } & Detached) + | ({ type: "TAG" } & Tag); +/** + * Only returned by the server when explicitly requested by the client and contains the following information: + * + * - numCommitsAhead (number of commits ahead of the default branch) + * + * - numCommitsBehind (number of commits behind the default branch) + * + * - commitMetaOfHEAD (the commit metadata of the HEAD commit) + * + * - commonAncestorHash (the hash of the common ancestor in relation to the default branch). + * + * - numTotalCommits (the total number of commits in this reference). 
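
The 'Put' doc comment above carries the one behavioural rule in these generated models: for content types that track global state (Iceberg, as of today), a Put must ship a non-null 'expectedContent'. Below is a minimal commit-payload sketch against these models; the "./models" import path, the ContentKey shape ({ elements: string[] }), and the Content literals are assumptions for illustration, not taken from this diff:

import type { Operations } from "./models"; // hypothetical import path

const commit: Operations = {
  commitMeta: { message: "promote table metadata" }, // other CommitMeta fields omitted
  operations: [
    {
      type: "PUT",
      key: { elements: ["analytics", "events"] }, // assumed ContentKey shape
      content: {
        type: "ICEBERG_TABLE",
        metadataLocation: "s3://warehouse/events/metadata.json", // illustrative
      },
      // Iceberg tracks global state, so expectedContent must be non-null
      // per the Put doc comment above; this literal is illustrative too.
      expectedContent: {
        type: "ICEBERG_TABLE",
        metadataLocation: "s3://warehouse/events/metadata-prev.json",
      },
    },
  ],
};
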
+ * @export + * @interface ReferenceMetadata + */ +export interface ReferenceMetadata { /** * - * @type {string} - * @memberof RefLogResponseEntry + * @type {number} + * @memberof ReferenceMetadata */ - commitHash: string; + numCommitsAhead?: number; /** * - * @type {string} - * @memberof RefLogResponseEntry + * @type {number} + * @memberof ReferenceMetadata */ - parentRefLogId: string; + numCommitsBehind?: number; /** * - * @type {number} - * @memberof RefLogResponseEntry + * @type {CommitMeta} + * @memberof ReferenceMetadata */ - operationTime: number; + commitMetaOfHEAD?: CommitMeta; /** * * @type {string} - * @memberof RefLogResponseEntry + * @memberof ReferenceMetadata */ - operation: string; + commonAncestorHash?: string; /** * - * @type {Array} - * @memberof RefLogResponseEntry + * @type {number} + * @memberof ReferenceMetadata */ - sourceHashes: Array; + numTotalCommits?: number; } /** - * @type Reference + * Only returned by the server when explicitly requested by the client and contains the following information: + * + * - numCommitsAhead (number of commits ahead of the default branch) + * + * - numCommitsBehind (number of commits behind the default branch) + * + * - commitMetaOfHEAD (the commit metadata of the HEAD commit) + * + * - commonAncestorHash (the hash of the common ancestor in relation to the default branch). * + * - numTotalCommits (the total number of commits in this reference). * @export + * @interface ReferenceMetadataV1 */ -export type Reference = - | ({ type: "BRANCH" } & Branch) - | ({ type: "DETACHED" } & Detached) - | ({ type: "TAG" } & Tag); +export interface ReferenceMetadataV1 { + /** + * + * @type {number} + * @memberof ReferenceMetadataV1 + */ + numCommitsAhead?: number; + /** + * + * @type {number} + * @memberof ReferenceMetadataV1 + */ + numCommitsBehind?: number; + /** + * + * @type {CommitMetaV1} + * @memberof ReferenceMetadataV1 + */ + commitMetaOfHEAD?: CommitMetaV1; + /** + * + * @type {string} + * @memberof ReferenceMetadataV1 + */ + commonAncestorHash?: string; + /** + * + * @type {number} + * @memberof ReferenceMetadataV1 + */ + numTotalCommits?: number; +} /** * Only returned by the server when explicitly requested by the client and contains the following information: * @@ -762,37 +2316,37 @@ export type Reference = * * - numTotalCommits (the total number of commits in this reference). 
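
Every field on ReferenceMetadata is optional because the server only fills it in when the client explicitly asks for it, so consumers have to guard each read. A short consumption sketch using only the ReferenceMetadata interface above; the describeDivergence helper and the idea of an already-fetched branch are illustrative, not part of this change:

// `branch` is assumed to have been fetched with additional info requested;
// every ReferenceMetadata field is optional, hence the fallbacks.
function describeDivergence(branch: { metadata?: ReferenceMetadata }): string {
  const meta = branch.metadata;
  if (!meta) return "no metadata requested";
  const ahead = meta.numCommitsAhead ?? 0;
  const behind = meta.numCommitsBehind ?? 0;
  const ancestor = meta.commonAncestorHash ?? "unknown";
  return `${ahead} ahead / ${behind} behind (common ancestor ${ancestor})`;
}
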
* @export - * @interface ReferenceMetadata + * @interface ReferenceMetadataV2 */ -export interface ReferenceMetadata { +export interface ReferenceMetadataV2 { /** * * @type {number} - * @memberof ReferenceMetadata + * @memberof ReferenceMetadataV2 */ numCommitsAhead?: number; /** * * @type {number} - * @memberof ReferenceMetadata + * @memberof ReferenceMetadataV2 */ numCommitsBehind?: number; /** * - * @type {CommitMeta} - * @memberof ReferenceMetadata + * @type {CommitMetaV2} + * @memberof ReferenceMetadataV2 */ - commitMetaOfHEAD?: CommitMeta; + commitMetaOfHEAD?: CommitMetaV2; /** * * @type {string} - * @memberof ReferenceMetadata + * @memberof ReferenceMetadataV2 */ commonAncestorHash?: string; /** * * @type {number} - * @memberof ReferenceMetadata + * @memberof ReferenceMetadataV2 */ numTotalCommits?: number; } @@ -805,30 +2359,99 @@ export enum ReferenceType { Branch = "branch", Tag = "tag", } +/** + * @type ReferenceV1 + * + * @export + */ +export type ReferenceV1 = + | ({ type: "BRANCH" } & BranchV1) + | ({ type: "DETACHED" } & DetachedV1) + | ({ type: "TAG" } & TagV1); +/** + * @type ReferenceV2 + * + * @export + */ +export type ReferenceV2 = + | ({ type: "BRANCH" } & BranchV2) + | ({ type: "DETACHED" } & DetachedV2) + | ({ type: "TAG" } & TagV2); +/** + * + * @export + * @interface ReferencesResponseV1 + */ +export interface ReferencesResponseV1 { + /** + * + * @type {boolean} + * @memberof ReferencesResponseV1 + */ + hasMore?: boolean; + /** + * + * @type {string} + * @memberof ReferencesResponseV1 + */ + token?: string; + /** + * + * @type {Array} + * @memberof ReferencesResponseV1 + */ + references: Array; +} /** * * @export - * @interface ReferencesResponse + * @interface ReferencesResponseV2 */ -export interface ReferencesResponse { +export interface ReferencesResponseV2 { /** * * @type {boolean} - * @memberof ReferencesResponse + * @memberof ReferencesResponseV2 */ hasMore?: boolean; /** * * @type {string} - * @memberof ReferencesResponse + * @memberof ReferencesResponseV2 */ token?: string; /** * - * @type {Array} - * @memberof ReferencesResponse + * @type {Array} + * @memberof ReferencesResponseV2 + */ + references: Array; +} +/** + * + * @export + * @interface SingleReferenceResponse + */ +export interface SingleReferenceResponse { + /** + * + * @type {Reference} + * @memberof SingleReferenceResponse + */ + reference: Reference; +} +/** + * + * @export + * @interface SingleReferenceResponseV2 + */ +export interface SingleReferenceResponseV2 { + /** + * + * @type {ReferenceV2} + * @memberof SingleReferenceResponseV2 */ - references: Array; + reference: ReferenceV2; } /** * @@ -855,6 +2478,56 @@ export interface Tag { */ hash?: string; } +/** + * + * @export + * @interface TagV1 + */ +export interface TagV1 { + /** + * + * @type {string} + * @memberof TagV1 + */ + name: string; + /** + * + * @type {ReferenceMetadataV1} + * @memberof TagV1 + */ + metadata?: ReferenceMetadataV1; + /** + * + * @type {string} + * @memberof TagV1 + */ + hash?: string; +} +/** + * + * @export + * @interface TagV2 + */ +export interface TagV2 { + /** + * + * @type {string} + * @memberof TagV2 + */ + name: string; + /** + * + * @type {ReferenceMetadataV2} + * @memberof TagV2 + */ + metadata?: ReferenceMetadataV2; + /** + * + * @type {string} + * @memberof TagV2 + */ + hash?: string; +} /** * * @export @@ -862,41 +2535,107 @@ export interface Tag { */ export interface Transplant { /** - * + * Commit message for this transplant request. 
* @type {string}
    * @memberof Transplant
    */
-  fromRefName: string;
+  message?: string;
   /**
-   *
+   * Lists the hashes of commits that should be transplanted into the target branch.
    * @type {Set}
    * @memberof Transplant
    */
   hashesToTransplant: Set;
   /**
+   * The name of the reference that contains the 'source' commits for the requested merge or transplant operation.
+   * @type {string}
+   * @memberof Transplant
+   */
+  fromRefName: string;
+  /**
+   * Specific merge behaviour requests by content key.
    *
+   * The default is set by the `defaultKeyMergeMode` parameter.
    * @type {Array}
    * @memberof Transplant
    */
   keyMergeModes?: Array;
   /**
+   * The default merge mode. If not set, `NORMAL` is assumed.
    *
+   * This setting applies to keys that are not explicitly mentioned in the `keyMergeModes` property.
    * @type {MergeBehavior}
    * @memberof Transplant
    */
   defaultKeyMergeMode?: MergeBehavior;
+  /**
+   * When set to 'true' instructs the server to validate the request but to avoid committing any changes.
+   * @type {boolean}
+   * @memberof Transplant
+   */
+  dryRun?: boolean;
+  /**
+   * Whether to provide optional response data.
+   * @type {boolean}
+   * @memberof Transplant
+   */
+  fetchAdditionalInfo?: boolean;
+  /**
+   * When set to 'true' instructs the server to produce normal (non-error) responses in case a conflict is detected and report conflict details in the response payload.
+   * @type {boolean}
+   * @memberof Transplant
+   */
+  returnConflictAsResult?: boolean;
 }
 /**
  *
  * @export
- * @enum {string}
+ * @interface Transplant1
  */
-export enum Type {
-  Unknown = "UNKNOWN",
-  IcebergTable = "ICEBERG_TABLE",
-  DeltaLakeTable = "DELTA_LAKE_TABLE",
-  IcebergView = "ICEBERG_VIEW",
-  Namespace = "NAMESPACE",
+export interface Transplant1 {
+  /**
+   *
+   * @type {string}
+   * @memberof Transplant1
+   */
+  fromRefName: string;
+  /**
+   *
+   * @type {Set}
+   * @memberof Transplant1
+   */
+  hashesToTransplant: Set;
+  /**
+   *
+   * @type {Array}
+   * @memberof Transplant1
+   */
+  keyMergeModes?: Array;
+  /**
+   *
+   * @type {MergeBehavior}
+   * @memberof Transplant1
+   */
+  defaultKeyMergeMode?: MergeBehavior;
+  /**
+   *
+   * @type {boolean}
+   * @memberof Transplant1
+   */
+  dryRun?: boolean;
+  /**
+   *
+   * @type {boolean}
+   * @memberof Transplant1
+   */
+  fetchAdditionalInfo?: boolean;
+  /**
+   *
+   * @type {boolean}
+   * @memberof Transplant1
+   */
+  returnConflictAsResult?: boolean;
 }
 /**
  *
  * @export
@@ -909,5 +2648,31 @@ export interface Unchanged {
    * @type {ContentKey}
    * @memberof Unchanged
    */
-  key: ContentKey | null;
+  key: ContentKey;
+}
+/**
+ *
+ * @export
+ * @interface UnchangedV1
+ */
+export interface UnchangedV1 {
+  /**
+   *
+   * @type {ContentKeyV1}
+   * @memberof UnchangedV1
+   */
+  key: ContentKeyV1;
+}
+/**
+ *
+ * @export
+ * @interface UnchangedV2
+ */
+export interface UnchangedV2 {
+  /**
+   *
+   * @type {ContentKeyV2}
+   * @memberof UnchangedV2
+   */
+  key: ContentKeyV2;
 }
diff --git a/dac/ui/src/services/nessie/client/runtime.ts b/dac/ui/src/services/nessie/client/runtime.ts
index 25a22d122f..2c1d1209fd 100644
--- a/dac/ui/src/services/nessie/client/runtime.ts
+++ b/dac/ui/src/services/nessie/client/runtime.ts
@@ -1,243 +1,299 @@
 /* tslint:disable */
 /* eslint-disable */
 /**
- * nessie-quarkus API
+ * Nessie API
  * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
  *
- * The version of the OpenAPI document: 0.30.0
- *
+ * The version of the OpenAPI document: 0.53.1
+ *
  *
  * NOTE: This class is auto
generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ - export const BASE_PATH = "http://localhost".replace(/\/+$/, ""); -const isBlob = (value: any) => typeof Blob !== 'undefined' && value instanceof Blob; +const isBlob = (value: any) => + typeof Blob !== "undefined" && value instanceof Blob; /** * This is the base class for all generated API classes. */ export class BaseAPI { - - private middleware: Middleware[]; - - constructor(protected configuration = new Configuration()) { - this.middleware = configuration.middleware; - } - - withMiddleware(this: T, ...middlewares: Middleware[]) { - const next = this.clone(); - next.middleware = next.middleware.concat(...middlewares); - return next; + private middleware: Middleware[]; + + constructor(protected configuration = new Configuration()) { + this.middleware = configuration.middleware; + } + + withMiddleware(this: T, ...middlewares: Middleware[]) { + const next = this.clone(); + next.middleware = next.middleware.concat(...middlewares); + return next; + } + + withPreMiddleware( + this: T, + ...preMiddlewares: Array + ) { + const middlewares = preMiddlewares.map((pre) => ({ pre })); + return this.withMiddleware(...middlewares); + } + + withPostMiddleware( + this: T, + ...postMiddlewares: Array + ) { + const middlewares = postMiddlewares.map((post) => ({ post })); + return this.withMiddleware(...middlewares); + } + + protected async request( + context: RequestOpts, + initOverrides?: RequestInit + ): Promise { + const { url, init } = this.createFetchParams(context, initOverrides); + const response = await this.fetchApi(url, init); + if (response.status >= 200 && response.status < 300) { + return response; } - - withPreMiddleware(this: T, ...preMiddlewares: Array) { - const middlewares = preMiddlewares.map((pre) => ({ pre })); - return this.withMiddleware(...middlewares); - } - - withPostMiddleware(this: T, ...postMiddlewares: Array) { - const middlewares = postMiddlewares.map((post) => ({ post })); - return this.withMiddleware(...middlewares); - } - - protected async request(context: RequestOpts, initOverrides?: RequestInit): Promise { - const { url, init } = this.createFetchParams(context, initOverrides); - const response = await this.fetchApi(url, init); - if (response.status >= 200 && response.status < 300) { - return response; - } - throw response; + throw response; + } + + private createFetchParams(context: RequestOpts, initOverrides?: RequestInit) { + let url = this.configuration.basePath + context.path; + if ( + context.query !== undefined && + Object.keys(context.query).length !== 0 + ) { + // only add the querystring to the URL if there are query parameters. + // this is done to avoid urls ending with a "?" character which buggy webservers + // do not handle correctly sometimes. + url += "?" + this.configuration.queryParamsStringify(context.query); } - - private createFetchParams(context: RequestOpts, initOverrides?: RequestInit) { - let url = this.configuration.basePath + context.path; - if (context.query !== undefined && Object.keys(context.query).length !== 0) { - // only add the querystring to the URL if there are query parameters. - // this is done to avoid urls ending with a "?" character which buggy webservers - // do not handle correctly sometimes. - url += '?' 
+ this.configuration.queryParamsStringify(context.query); - } - const body = ((typeof FormData !== "undefined" && context.body instanceof FormData) || context.body instanceof URLSearchParams || isBlob(context.body)) + const body = + (typeof FormData !== "undefined" && context.body instanceof FormData) || + context.body instanceof URLSearchParams || + isBlob(context.body) ? context.body : JSON.stringify(context.body); - const headers = Object.assign({}, this.configuration.headers, context.headers); - const init = { - method: context.method, - headers: headers, - body, - credentials: this.configuration.credentials, - ...initOverrides - }; - return { url, init }; - } - - private fetchApi = async (url: string, init: RequestInit) => { - let fetchParams = { url, init }; - for (const middleware of this.middleware) { - if (middleware.pre) { - fetchParams = await middleware.pre({ - fetch: this.fetchApi, - ...fetchParams, - }) || fetchParams; - } - } - let response = await (this.configuration.fetchApi || fetch)(fetchParams.url, fetchParams.init); - for (const middleware of this.middleware) { - if (middleware.post) { - response = await middleware.post({ - fetch: this.fetchApi, - url: fetchParams.url, - init: fetchParams.init, - response: response.clone(), - }) || response; - } - } - return response; + const headers = Object.assign( + {}, + this.configuration.headers, + context.headers + ); + const init = { + method: context.method, + headers: headers, + body, + credentials: this.configuration.credentials, + ...initOverrides, + }; + return { url, init }; + } + + private fetchApi = async (url: string, init: RequestInit) => { + let fetchParams = { url, init }; + for (const middleware of this.middleware) { + if (middleware.pre) { + fetchParams = + (await middleware.pre({ + fetch: this.fetchApi, + ...fetchParams, + })) || fetchParams; + } } - - /** - * Create a shallow clone of `this` by constructing a new instance - * and then shallow cloning data members. - */ - private clone(this: T): T { - const constructor = this.constructor as any; - const next = new constructor(this.configuration); - next.middleware = this.middleware.slice(); - return next; + let response = await (this.configuration.fetchApi || fetch)( + fetchParams.url, + fetchParams.init + ); + for (const middleware of this.middleware) { + if (middleware.post) { + response = + (await middleware.post({ + fetch: this.fetchApi, + url: fetchParams.url, + init: fetchParams.init, + response: response.clone(), + })) || response; + } } -}; + return response; + }; + + /** + * Create a shallow clone of `this` by constructing a new instance + * and then shallow cloning data members. 
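
The pre/post hooks threaded through fetchApi above are the extension point that the Dremio middleware added later in this diff plugs into: a pre hook may return replacement fetch params, a post hook a replacement Response. A minimal pre-middleware sketch using only the Middleware, RequestContext and FetchParams types from this file; the prefix strings are illustrative and not the exact rewrite this change performs:

// Illustrative pre-hook: reroute generated-client URLs before fetch runs.
const rewriteBasePath: Middleware = {
  async pre(context: RequestContext): Promise<FetchParams> {
    return {
      url: context.url.replace("/nessie/", "/nessieV1/"), // assumed prefixes
      init: context.init,
    };
  },
};
// Usage sketch: new DefaultApi(new Configuration()).withMiddleware(rewriteBasePath)
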
+ */ + private clone(this: T): T { + const constructor = this.constructor as any; + const next = new constructor(this.configuration); + next.middleware = this.middleware.slice(); + return next; + } +} export class RequiredError extends Error { - name: "RequiredError" = "RequiredError"; - constructor(public field: string, msg?: string) { - super(msg); - } + name: "RequiredError" = "RequiredError"; + constructor(public field: string, msg?: string) { + super(msg); + } } export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ",", + ssv: " ", + tsv: "\t", + pipes: "|", }; -export type FetchAPI = WindowOrWorkerGlobalScope['fetch']; +export type FetchAPI = WindowOrWorkerGlobalScope["fetch"]; export interface ConfigurationParameters { - basePath?: string; // override base path - fetchApi?: FetchAPI; // override for fetch implementation - middleware?: Middleware[]; // middleware to apply before/after fetch requests - queryParamsStringify?: (params: HTTPQuery) => string; // stringify function for query strings - username?: string; // parameter for basic security - password?: string; // parameter for basic security - apiKey?: string | ((name: string) => string); // parameter for apiKey security - accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string | Promise); // parameter for oauth2 security - headers?: HTTPHeaders; //header params we want to use on every request - credentials?: RequestCredentials; //value for the credentials param we want to use on each request + basePath?: string; // override base path + fetchApi?: FetchAPI; // override for fetch implementation + middleware?: Middleware[]; // middleware to apply before/after fetch requests + queryParamsStringify?: (params: HTTPQuery) => string; // stringify function for query strings + username?: string; // parameter for basic security + password?: string; // parameter for basic security + apiKey?: string | ((name: string) => string); // parameter for apiKey security + accessToken?: + | string + | Promise + | ((name?: string, scopes?: string[]) => string | Promise); // parameter for oauth2 security + headers?: HTTPHeaders; //header params we want to use on every request + credentials?: RequestCredentials; //value for the credentials param we want to use on each request } export class Configuration { - constructor(private configuration: ConfigurationParameters = {}) {} - - get basePath(): string { - return this.configuration.basePath != null ? this.configuration.basePath : BASE_PATH; + constructor(private configuration: ConfigurationParameters = {}) {} + + get basePath(): string { + return this.configuration.basePath != null + ? this.configuration.basePath + : BASE_PATH; + } + + get fetchApi(): FetchAPI | undefined { + return this.configuration.fetchApi; + } + + get middleware(): Middleware[] { + return this.configuration.middleware || []; + } + + get queryParamsStringify(): (params: HTTPQuery) => string { + return this.configuration.queryParamsStringify || querystring; + } + + get username(): string | undefined { + return this.configuration.username; + } + + get password(): string | undefined { + return this.configuration.password; + } + + get apiKey(): ((name: string) => string) | undefined { + const apiKey = this.configuration.apiKey; + if (apiKey) { + return typeof apiKey === "function" ? 
apiKey : () => apiKey; } - - get fetchApi(): FetchAPI | undefined { - return this.configuration.fetchApi; + return undefined; + } + + get accessToken(): + | ((name?: string, scopes?: string[]) => string | Promise) + | undefined { + const accessToken = this.configuration.accessToken; + if (accessToken) { + return typeof accessToken === "function" + ? accessToken + : async () => accessToken; } + return undefined; + } - get middleware(): Middleware[] { - return this.configuration.middleware || []; - } - - get queryParamsStringify(): (params: HTTPQuery) => string { - return this.configuration.queryParamsStringify || querystring; - } - - get username(): string | undefined { - return this.configuration.username; - } - - get password(): string | undefined { - return this.configuration.password; - } - - get apiKey(): ((name: string) => string) | undefined { - const apiKey = this.configuration.apiKey; - if (apiKey) { - return typeof apiKey === 'function' ? apiKey : () => apiKey; - } - return undefined; - } - - get accessToken(): ((name?: string, scopes?: string[]) => string | Promise) | undefined { - const accessToken = this.configuration.accessToken; - if (accessToken) { - return typeof accessToken === 'function' ? accessToken : async () => accessToken; - } - return undefined; - } - - get headers(): HTTPHeaders | undefined { - return this.configuration.headers; - } + get headers(): HTTPHeaders | undefined { + return this.configuration.headers; + } - get credentials(): RequestCredentials | undefined { - return this.configuration.credentials; - } + get credentials(): RequestCredentials | undefined { + return this.configuration.credentials; + } } export type Json = any; -export type HTTPMethod = 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'OPTIONS' | 'HEAD'; +export type HTTPMethod = + | "GET" + | "POST" + | "PUT" + | "PATCH" + | "DELETE" + | "OPTIONS" + | "HEAD"; export type HTTPHeaders = { [key: string]: string }; -export type HTTPQuery = { [key: string]: string | number | null | boolean | Array | HTTPQuery }; +export type HTTPQuery = { + [key: string]: + | string + | number + | null + | boolean + | Array + | HTTPQuery; +}; export type HTTPBody = Json | FormData | URLSearchParams; -export type ModelPropertyNaming = 'camelCase' | 'snake_case' | 'PascalCase' | 'original'; +export type ModelPropertyNaming = + | "camelCase" + | "snake_case" + | "PascalCase" + | "original"; export interface FetchParams { - url: string; - init: RequestInit; + url: string; + init: RequestInit; } export interface RequestOpts { - path: string; - method: HTTPMethod; - headers: HTTPHeaders; - query?: HTTPQuery; - body?: HTTPBody; + path: string; + method: HTTPMethod; + headers: HTTPHeaders; + query?: HTTPQuery; + body?: HTTPBody; } export function exists(json: any, key: string) { - const value = json[key]; - return value !== null && value !== undefined; + const value = json[key]; + return value !== null && value !== undefined; } -export function querystring(params: HTTPQuery, prefix: string = ''): string { - return Object.keys(params) - .map((key) => { - const fullKey = prefix + (prefix.length ? 
`[${key}]` : key); - const value = params[key]; - if (value instanceof Array) { - const multiValue = value.map(singleValue => encodeURIComponent(String(singleValue))) - .join(`&${encodeURIComponent(fullKey)}=`); - return `${encodeURIComponent(fullKey)}=${multiValue}`; - } - if (value instanceof Date) { - return `${encodeURIComponent(fullKey)}=${encodeURIComponent(value.toISOString())}`; - } - if (value instanceof Object) { - return querystring(value as HTTPQuery, fullKey); - } - return `${encodeURIComponent(fullKey)}=${encodeURIComponent(String(value))}`; - }) - .filter(part => part.length > 0) - .join('&'); +export function querystring(params: HTTPQuery, prefix: string = ""): string { + return Object.keys(params) + .map((key) => { + const fullKey = prefix + (prefix.length ? `[${key}]` : key); + const value = params[key]; + if (value instanceof Array) { + const multiValue = value + .map((singleValue) => encodeURIComponent(String(singleValue))) + .join(`&${encodeURIComponent(fullKey)}=`); + return `${encodeURIComponent(fullKey)}=${multiValue}`; + } + if (value instanceof Date) { + return `${encodeURIComponent(fullKey)}=${encodeURIComponent( + value.toISOString() + )}`; + } + if (value instanceof Object) { + return querystring(value as HTTPQuery, fullKey); + } + return `${encodeURIComponent(fullKey)}=${encodeURIComponent( + String(value) + )}`; + }) + .filter((part) => part.length > 0) + .join("&"); } export function mapValues(data: any, fn: (item: any) => any) { @@ -248,73 +304,76 @@ export function mapValues(data: any, fn: (item: any) => any) { } export function canConsumeForm(consumes: Consume[]): boolean { - for (const consume of consumes) { - if ('multipart/form-data' === consume.contentType) { - return true; - } + for (const consume of consumes) { + if ("multipart/form-data" === consume.contentType) { + return true; } - return false; + } + return false; } export interface Consume { - contentType: string + contentType: string; } export interface RequestContext { - fetch: FetchAPI; - url: string; - init: RequestInit; + fetch: FetchAPI; + url: string; + init: RequestInit; } export interface ResponseContext { - fetch: FetchAPI; - url: string; - init: RequestInit; - response: Response; + fetch: FetchAPI; + url: string; + init: RequestInit; + response: Response; } export interface Middleware { - pre?(context: RequestContext): Promise; - post?(context: ResponseContext): Promise; + pre?(context: RequestContext): Promise; + post?(context: ResponseContext): Promise; } export interface ApiResponse { - raw: Response; - value(): Promise; + raw: Response; + value(): Promise; } export interface ResponseTransformer { - (json: any): T; + (json: any): T; } export class JSONApiResponse { - constructor(public raw: Response, private transformer: ResponseTransformer = (jsonValue: any) => jsonValue) {} - - async value(): Promise { - return this.transformer(await this.raw.json()); - } + constructor( + public raw: Response, + private transformer: ResponseTransformer = (jsonValue: any) => jsonValue + ) {} + + async value(): Promise { + return this.transformer(await this.raw.json()); + } } export class VoidApiResponse { - constructor(public raw: Response) {} + constructor(public raw: Response) {} - async value(): Promise { - return undefined; - } + async value(): Promise { + return undefined; + } } export class BlobApiResponse { - constructor(public raw: Response) {} + constructor(public raw: Response) {} - async value(): Promise { - return await this.raw.blob(); - }; + async value(): Promise { + return await 
this.raw.blob(); + } } export class TextApiResponse { - constructor(public raw: Response) {} + constructor(public raw: Response) {} - async value(): Promise { - return await this.raw.text(); - }; + async value(): Promise { + return await this.raw.text(); + } } diff --git a/dac/ui/src/services/nessie/impl/SwaggerConfig.ts b/dac/ui/src/services/nessie/impl/SwaggerConfig.ts index e5d4b808e7..60a213d5c7 100644 --- a/dac/ui/src/services/nessie/impl/SwaggerConfig.ts +++ b/dac/ui/src/services/nessie/impl/SwaggerConfig.ts @@ -13,15 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import getMiddleWare from "@inject/services/nessie/impl/middleware"; +import getMiddleWare from "./middleware"; import { Configuration } from "../client"; const SwaggerConfig = new Configuration({ - basePath: "/nessie", + basePath: "/nessieV1", middleware: getMiddleWare() || undefined, }); export function createSwaggerConfig(endpoint?: string) { + return new Configuration({ + basePath: "/nessieV1", + //@ts-ignore + middleware: getMiddleWare(endpoint) || undefined, + }); +} + +export function createSwaggerV2Config(endpoint?: string) { return new Configuration({ basePath: "/nessie", //@ts-ignore diff --git a/dac/ui/src/services/nessie/impl/TreeApi.ts b/dac/ui/src/services/nessie/impl/TreeApi.ts index 8daff42649..f89757133a 100644 --- a/dac/ui/src/services/nessie/impl/TreeApi.ts +++ b/dac/ui/src/services/nessie/impl/TreeApi.ts @@ -15,17 +15,11 @@ */ import moize from "moize"; -import { - DefaultApi, - GetAllReferencesRequest, - GetReferenceByNameRequest, - GetCommitLogRequest, - GetEntriesRequest, - CreateReferenceRequest, - DeleteReferenceRequest, - MergeRefIntoBranchRequest, -} from "../client"; -import SwaggerConfig, { createSwaggerConfig } from "./SwaggerConfig"; +import { DefaultApi, GetEntriesRequest, V2BetaApi } from "../client"; +import SwaggerConfig, { + createSwaggerConfig, + createSwaggerV2Config, +} from "./SwaggerConfig"; //Use default Atlantis project API const TreeApi = new DefaultApi(SwaggerConfig); @@ -33,43 +27,26 @@ const TreeApi = new DefaultApi(SwaggerConfig); //Get and cache (moize) endpoint-specific API (empty endpoint = default Atlantis API) export const getTreeApi = moize( function (endpoint?: string) { - return new DefaultApi(createSwaggerConfig(endpoint)); + return new DefaultApi( + createSwaggerConfig(endpoint?.replace("/nessie/", "/nessieV1/")) + ); }, { maxSize: 10, } ); -export function getDefaultBranch() { - return TreeApi.getDefaultBranch(); -} - -export function getAllReferences(requestParameters: GetAllReferencesRequest) { - return TreeApi.getAllReferences(requestParameters); -} - -export function getReferenceByName(req: GetReferenceByNameRequest) { - return TreeApi.getReferenceByName(req); -} - -export function getCommitLog(req: GetCommitLogRequest) { - return TreeApi.getCommitLog(req); -} +export const getApiV2 = moize( + function (endpoint?: string) { + return new V2BetaApi(createSwaggerV2Config(endpoint)); + }, + { + maxSize: 50, + } +); export function getEntries(req: GetEntriesRequest) { return TreeApi.getEntries(req); } -export function createReference(req: CreateReferenceRequest) { - return TreeApi.createReference(req); -} - -export function deleteReference(req: DeleteReferenceRequest) { - return TreeApi.deleteReference(req); -} - -export function mergeReference(req: MergeRefIntoBranchRequest) { - return TreeApi.mergeRefIntoBranch(req); -} - export default TreeApi; diff --git 
a/dac/ui/src/services/nessie/impl/middleware.js b/dac/ui/src/services/nessie/impl/middleware.js new file mode 100644 index 0000000000..c5a91d2e9a --- /dev/null +++ b/dac/ui/src/services/nessie/impl/middleware.js @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import apiUtils from "@app/utils/apiUtils/apiUtils"; +import { handleUnauthorized } from "@app/store/authMiddleware"; +import { store } from "@app/store/store"; + +const NESSIE_V1_PREFIX = "/nessieV1/api/v1/"; +const NESSIE_V2_PREFIX = "/nessie/api/v2/"; + +function getUrl(initialUrl, urlPath) { + let url = initialUrl; + if (!url.endsWith("/")) url += "/"; + url += urlPath; + return url; +} + +const getMiddleWare = (externalUrl = "") => { + return [ + { + pre(context) { + const [, urlPath] = + context.url.indexOf(NESSIE_V2_PREFIX) !== -1 + ? context.url.split(NESSIE_V2_PREFIX) + : context.url.split(NESSIE_V1_PREFIX); + const url = getUrl( + externalUrl || + apiUtils.getAPIVersion("NESSIE", { nessieVersion: "v1" }), + urlPath + ); + + return Promise.resolve({ + url, + init: { + ...context.init, + headers: { + ...context.init.headers, + //Always send credentials, even to external URLs + ...apiUtils.prepareHeaders(), + }, + }, + }); + }, + //Only handle unauthorized for internal DDP only nessie + ...(!externalUrl && { + post({ response }) { + const resultAction = handleUnauthorized(response, store.dispatch); + if (resultAction) { + window.location.replace("/"); //Clear history since project context is cleared on login + } + return Promise.resolve(response); + }, + }), + }, + ]; +}; + +export default getMiddleWare; diff --git a/dac/ui/src/setupMsw.ts b/dac/ui/src/setupMsw.ts index 10f759f740..599cdd81c9 100644 --- a/dac/ui/src/setupMsw.ts +++ b/dac/ui/src/setupMsw.ts @@ -15,16 +15,18 @@ */ import { setupWorker } from "msw"; - -import * as ArcticCatalogs from "./exports/endpoints/ArcticCatalogs/__mocks__"; +import additionalMockHandlers from "@inject/additionalMockHandlers"; +// import * as ArcticCatalogs from "./exports/endpoints/ArcticCatalogs/__mocks__"; +// import { getSupportFlagHandler } from "./exports/endpoints/SupportFlags/__mocks__/getSupportFlag.handler"; import { getFeatureFlagEnabledHandler } from "./exports/endpoints/Features/__mocks__/getFeatureFlagEnabled.handler"; -import { getSupportFlagHandler } from "./exports/endpoints/SupportFlags/__mocks__/getSupportFlag.handler"; - +import * as reflections from "dremio-ui-common/mocks/reflections/index.ts"; export const browserMocks = async () => { return setupWorker( - ...Object.values(ArcticCatalogs), - getFeatureFlagEnabledHandler, - getSupportFlagHandler + // ...Object.values(ArcticCatalogs), + // getSupportFlagHandler, + ...Object.values(reflections), + ...additionalMockHandlers, + getFeatureFlagEnabledHandler ).start({ onUnhandledRequest: "bypass", }); diff --git a/dac/ui/src/types/Sources/SourceFormTypes.ts b/dac/ui/src/types/Sources/SourceFormTypes.ts index 376474a082..d641938048 100644 --- 
a/dac/ui/src/types/Sources/SourceFormTypes.ts +++ b/dac/ui/src/types/Sources/SourceFormTypes.ts @@ -25,6 +25,7 @@ type ElementConfigJSON = { placeholder?: string; label?: string; foundInFunctionalConfig?: boolean; + prefix?: string; value?: any; secure?: boolean; tooltip?: string; diff --git a/dac/ui/src/types/nessie.ts b/dac/ui/src/types/nessie.ts index 0768d55b3e..071d17cc6c 100644 --- a/dac/ui/src/types/nessie.ts +++ b/dac/ui/src/types/nessie.ts @@ -32,3 +32,11 @@ export type NessieState = { loading: { [key: string]: boolean }; errors: { [key: string]: any }; }; + +export enum Type { + Unknown = "UNKNOWN", + IcebergTable = "ICEBERG_TABLE", + DeltaLakeTable = "DELTA_LAKE_TABLE", + IcebergView = "ICEBERG_VIEW", + Namespace = "NAMESPACE", +} diff --git a/dac/ui/src/utils/performTransform/newTransform.ts b/dac/ui/src/types/performTransformNewTypes.ts similarity index 71% rename from dac/ui/src/utils/performTransform/newTransform.ts rename to dac/ui/src/types/performTransformNewTypes.ts index e8d02ffc29..d944fe9521 100644 --- a/dac/ui/src/utils/performTransform/newTransform.ts +++ b/dac/ui/src/types/performTransformNewTypes.ts @@ -16,7 +16,8 @@ import Immutable from "immutable"; -export type PostNewQueryJobProps = { +export type NewPerformTransformSingleProps = { + dataset: Immutable.Map; currentSql: string; queryContext: Immutable.List; viewId: string; @@ -25,19 +26,26 @@ export type PostNewQueryJobProps = { isSaveViewAs?: boolean; sessionId: string; sqlStatement: string; + nextTable: Immutable.Map; + finalTransformData: Record; + references: any; }; -export type GenerateRequestForNewDatasetProps = { - sql: string; +export type NewGetFetchDatasetMetaActionProps = { + dataset: Immutable.Map; + currentSql: string; queryContext: Immutable.List; viewId: string; isRun: boolean; sessionId: string; - noUpdate: boolean; + nextTable: Immutable.Map; + finalTransformData: Record; + references: any; }; export type HandlePostNewQueryJobSuccessProps = { response: any; + newVersion: string; queryStatuses: any[]; curIndex: number; callback: any; diff --git a/dac/ui/src/types/sqlFunctions.ts b/dac/ui/src/types/sqlFunctions.ts index 924fb1d65a..5c297acedb 100644 --- a/dac/ui/src/types/sqlFunctions.ts +++ b/dac/ui/src/types/sqlFunctions.ts @@ -130,7 +130,7 @@ export enum ModelFunctionFunctionCategoriesEnum { CONTEXT = "CONTEXT", CONVERSION = "CONVERSION", DATETIME = "DATETIME", - DATETYPE = "DATETYPE", + DATATYPE = "DATATYPE", DIRECTORY = "DIRECTORY", GEOSPATIAL = "GEOSPATIAL", MATH = "MATH", diff --git a/dac/ui/src/uiTheme/css/leantable.css b/dac/ui/src/uiTheme/css/leantable.css index ccdc9522b4..534eb5179f 100644 --- a/dac/ui/src/uiTheme/css/leantable.css +++ b/dac/ui/src/uiTheme/css/leantable.css @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -@import "../../../node_modules/leantable/dist-css/fixed-header.css"; -@import "../../../node_modules/leantable/dist-css/theme-hooks.css"; +@import "../../../node_modules/leantable2/dist-css/fixed-header.css"; +@import "../../../node_modules/leantable2/dist-css/theme-hooks.css"; :root { --leantable--column--gap: 0; @@ -61,7 +61,7 @@ display: inline-block; width: 1px; height: 23px; - background: var(--dremio--color--neutral--150); + background: var(--color--neutral--50); position: absolute; left: 0; top: 9px; @@ -74,6 +74,8 @@ flex-grow: 1; margin-inline: -10px; padding-inline: 10px; + color: var(--dremio--color--link); + font-weight: 500; } .clickable-cell:hover { diff --git a/dac/ui/src/uiTheme/css/react-datepicker.css b/dac/ui/src/uiTheme/css/react-datepicker.css index 27bceda571..fac84a21e7 100644 --- a/dac/ui/src/uiTheme/css/react-datepicker.css +++ b/dac/ui/src/uiTheme/css/react-datepicker.css @@ -102,7 +102,7 @@ .react-datepicker__header { text-align: center; - /* background-color: var(--dremio--color--primary--100); + /* background-color: var(--color--brand--25); border-bottom: 1px solid #aeaeae; */ border-top-left-radius: 0.3rem; padding: 8px 0; @@ -288,12 +288,12 @@ .react-datepicker__time-container { float: right; - border-left: 1px solid var(--dremio--color--neutral--150); + border-left: 1px solid var(--color--neutral--50); width: 85px; } .react-datepicker__time-container--with-today-button { display: inline; - border: 1px solid var(--dremio--color--neutral--150); + border: 1px solid var(--color--neutral--50); border-radius: 0.3rem; position: absolute; right: -72px; @@ -341,14 +341,14 @@ ul.react-datepicker__time-list li.react-datepicker__time-list-item:hover { cursor: pointer; - /* background-color: var(--dremio--color--primary--100); */ + /* background-color: var(--color--brand--25); */ } .react-datepicker__time-container .react-datepicker__time .react-datepicker__time-box ul.react-datepicker__time-list li.react-datepicker__time-list-item--selected { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); /* color: white; */ font-weight: bold; } @@ -357,7 +357,7 @@ .react-datepicker__time-box ul.react-datepicker__time-list li.react-datepicker__time-list-item--selected:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } .react-datepicker__time-container .react-datepicker__time @@ -388,7 +388,7 @@ } .react-datepicker__week-number.react-datepicker__week-number--clickable:hover { border-radius: 0.3rem; - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } .react-datepicker__day-names, @@ -418,7 +418,7 @@ .react-datepicker__quarter--in-selecting-range, .react-datepicker__quarter--in-range { border-radius: 0.3rem; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); /* color: #fff; */ } .react-datepicker__month--selected:hover, @@ -427,7 +427,7 @@ .react-datepicker__quarter--selected:hover, .react-datepicker__quarter--in-selecting-range:hover, .react-datepicker__quarter--in-range:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } .react-datepicker__month--disabled, .react-datepicker__quarter--disabled { @@ -451,7 +451,7 @@ .react-datepicker__quarter-text:hover, .react-datepicker__year-text:hover { border-radius: 0.3rem; - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } 
.react-datepicker__day--today, .react-datepicker__month-text--today, @@ -498,7 +498,7 @@ .react-datepicker__year-text--in-selecting-range, .react-datepicker__year-text--in-range { border-radius: 0.3rem; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); /* color: #fff; */ } .react-datepicker__day--selected:hover, @@ -513,21 +513,21 @@ .react-datepicker__year-text--selected:hover, .react-datepicker__year-text--in-selecting-range:hover, .react-datepicker__year-text--in-range:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } .react-datepicker__day--keyboard-selected, .react-datepicker__month-text--keyboard-selected, .react-datepicker__quarter-text--keyboard-selected, .react-datepicker__year-text--keyboard-selected { border-radius: 0.3rem; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); /* color: #fff; */ } .react-datepicker__day--keyboard-selected:hover, .react-datepicker__month-text--keyboard-selected:hover, .react-datepicker__quarter-text--keyboard-selected:hover, .react-datepicker__year-text--keyboard-selected:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } .react-datepicker__day--in-selecting-range:not(.react-datepicker__day--in-range, .react-datepicker__month-text--in-range, .react-datepicker__quarter-text--in-range, .react-datepicker__year-text--in-range), .react-datepicker__month-text--in-selecting-range:not(.react-datepicker__day--in-range, .react-datepicker__month-text--in-range, .react-datepicker__quarter-text--in-range, .react-datepicker__year-text--in-range), @@ -543,7 +543,7 @@ .react-datepicker__quarter-text--in-range:not(.react-datepicker__day--in-selecting-range, .react-datepicker__month-text--in-selecting-range, .react-datepicker__quarter-text--in-selecting-range, .react-datepicker__year-text--in-selecting-range), .react-datepicker__month--selecting-range .react-datepicker__year-text--in-range:not(.react-datepicker__day--in-selecting-range, .react-datepicker__month-text--in-selecting-range, .react-datepicker__quarter-text--in-selecting-range, .react-datepicker__year-text--in-selecting-range) { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); color: #000; } .react-datepicker__day--disabled, @@ -568,11 +568,11 @@ .react-datepicker__quarter-text.react-datepicker__month--in-range:hover, .react-datepicker__quarter-text.react-datepicker__quarter--selected:hover, .react-datepicker__quarter-text.react-datepicker__quarter--in-range:hover { - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); } .react-datepicker__month-text:hover, .react-datepicker__quarter-text:hover { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); } .react-datepicker__input-container { @@ -618,7 +618,7 @@ .react-datepicker__year-dropdown, .react-datepicker__month-dropdown, .react-datepicker__month-year-dropdown { - background-color: var(--dremio--color--primary--100); + background-color: var(--color--brand--25); position: absolute; width: 50%; left: 25%; @@ -708,7 +708,7 @@ } .react-datepicker__close-icon::after { cursor: pointer; - background-color: var(--dremio--color--primary--150); + background-color: var(--color--brand--50); color: #fff; border-radius: 50%; height: 16px; @@ -723,7 +723,7 @@ } .react-datepicker__today-button { - 
background: var(--dremio--color--primary--100); + background: var(--color--brand--25); border-top: 1px solid #aeaeae; cursor: pointer; text-align: center; diff --git a/dac/ui/src/uiTheme/css/typography.css b/dac/ui/src/uiTheme/css/typography.css index 0a6cc87443..a59224a4e7 100644 --- a/dac/ui/src/uiTheme/css/typography.css +++ b/dac/ui/src/uiTheme/css/typography.css @@ -75,7 +75,7 @@ h5, } .whiteText { - color: var(--dremio--color--neutral--000); + color: white; } .blueText { diff --git a/dac/ui/src/uiTheme/less/DragComponents/ColumnMenuItem.less b/dac/ui/src/uiTheme/less/DragComponents/ColumnMenuItem.less index 03f7ec57a9..8a2586ac13 100644 --- a/dac/ui/src/uiTheme/less/DragComponents/ColumnMenuItem.less +++ b/dac/ui/src/uiTheme/less/DragComponents/ColumnMenuItem.less @@ -98,8 +98,8 @@ } .iconInDropdown { - width: 28px; - height: 28px; + width: 24px; + height: 24px; margin-left: 5px; color: var(--dremio--color--icon--main); } diff --git a/dac/ui/src/uiTheme/less/forms.less b/dac/ui/src/uiTheme/less/forms.less index c1855398f5..7547ec9244 100644 --- a/dac/ui/src/uiTheme/less/forms.less +++ b/dac/ui/src/uiTheme/less/forms.less @@ -49,7 +49,7 @@ .section-label { composes: common-title; - font-size: 18px; + font-size: 16px; font-weight: 600; line-height: 19px; color: #333; diff --git a/dac/ui/src/uiTheme/less/mixins.less b/dac/ui/src/uiTheme/less/mixins.less index 38821ee3bd..96b5819951 100644 --- a/dac/ui/src/uiTheme/less/mixins.less +++ b/dac/ui/src/uiTheme/less/mixins.less @@ -55,13 +55,13 @@ color: @font-color-disabled; } .fixedWidthSmall() { - font-family: Menlo; + font-family: var(--dremio--font-family--monospace); font-size: 11px; color: @font-color1; } .fixedWidthDefault() { - font-family: Menlo; + font-family: var(--dremio--font-family--monospace); font-size: 12px; color: @font-color1; } diff --git a/dac/ui/src/uiTheme/less/typography.less b/dac/ui/src/uiTheme/less/typography.less index 02e163effd..79816ff8ae 100644 --- a/dac/ui/src/uiTheme/less/typography.less +++ b/dac/ui/src/uiTheme/less/typography.less @@ -63,7 +63,7 @@ } .fixed-width-default { - font-family: Menlo; + font-family: var(--dremio--font-family--monospace); font-size: 12px; color: @font-color1; } @@ -73,7 +73,7 @@ } .fixed-width-bold { - font-family: Menlo; + font-family: var(--dremio--font-family--monospace); font-weight: 700; font-size: 11px; color: @font-color1; diff --git a/dac/ui/src/uiTheme/radium/forms.js b/dac/ui/src/uiTheme/radium/forms.js index 2cc2f09831..8581d29b08 100644 --- a/dac/ui/src/uiTheme/radium/forms.js +++ b/dac/ui/src/uiTheme/radium/forms.js @@ -47,7 +47,7 @@ export const section = { export const sectionTitle = { marginTop: 24, marginBottom: 24, - fontSize: 18, + fontSize: 16, fontWeight: 600, }; @@ -81,7 +81,7 @@ export const textInputError = { export const textInputDisabled = { ...typography.formDescription, background: "#EEEFF1", - border: "1px solid var(--dremio--color--neutral--200)", + border: "1px solid var(--color--neutral--100)", color: "#B0B7BF", }; @@ -95,7 +95,7 @@ export const textArea = { height: "56px", width: "100%", display: "block", - border: "1px solid var(--dremio--color--neutral--200)", + border: "1px solid var(--color--neutral--100)", }; export const description = { diff --git a/dac/ui/src/uiTheme/radium/replacingRadiumPseudoClasses.module.less b/dac/ui/src/uiTheme/radium/replacingRadiumPseudoClasses.module.less index 47f5b125e6..f7741df86c 100644 --- a/dac/ui/src/uiTheme/radium/replacingRadiumPseudoClasses.module.less +++ 
b/dac/ui/src/uiTheme/radium/replacingRadiumPseudoClasses.module.less @@ -25,21 +25,24 @@ .primaryButtonPsuedoClasses { &:hover { - border-color: var(--dremio--color--primary--600) !important; - background-color: var(--dremio--color--primary--600) !important; + border-color: var(--color--brand--400) !important; + background-color: var(--color--brand--400) !important; + outline: none !important; } } .secondaryButtonPsuedoClasses { &:hover { - background-color: var(--dremio--color--neutral--100) !important; + background-color: var(--color--neutral--25) !important; outline: none !important; } } .dangerButtonPsuedoClasses { &:hover { - background-color: var(--dremio--color--status--delete--background) !important; + background-color: var( + --dremio--color--status--delete--background + ) !important; outline: none !important; } } @@ -47,23 +50,23 @@ .textAreaPsuedoClasses { resize: none; &:hover { - border-color: var(--dremio--color--neutral--300) !important; + border-color: var(--color--neutral--200) !important; } &:focus { outline: none; - border-color: var(--dremio--color--primary--500) !important; + border-color: var(--color--brand--300) !important; } &:disabled { - background: var(--dremio--color--neutral--150) !important; + background: var(--color--neutral--50) !important; color: var(--dremio--color--text--disabled) !important; } } .contextCardPsuedoClasses { &:hover { - background-color: var(--dremio--color--neutral--000) !important; + background-color: white !important; } } diff --git a/dac/ui/src/uiTheme/radium/typography.js b/dac/ui/src/uiTheme/radium/typography.js index 55d848d045..6276692c12 100644 --- a/dac/ui/src/uiTheme/radium/typography.js +++ b/dac/ui/src/uiTheme/radium/typography.js @@ -14,7 +14,6 @@ * limitations under the License. */ -const menlo = "Menlo, monospace"; const inter = "var(--dremio--font-family)"; export const bodySmall = { @@ -60,21 +59,21 @@ export const formPlaceholder = { }; export const fixedWidthDefault = { - fontFamily: menlo, + fontFamily: "var(--dremio--font-family--monospace)", fontWeight: 400, fontSize: 12, color: "var(--dremio--color--text--main)", }; export const fixedWidthSmall = { - fontFamily: menlo, + fontFamily: "var(--dremio--font-family--monospace)", fontWeight: 400, fontSize: 11, color: "var(--dremio--color--text--main)", }; export const fixedWidthBold = { - fontFamily: menlo, + fontFamily: "var(--dremio--font-family--monospace)", fontWeight: 700, fontSize: 11, color: "var(--dremio--color--text--main)", @@ -139,42 +138,42 @@ export const h2White = { fontFamily: inter, fontWeight: 300, fontSize: 18, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const h3White = { fontFamily: inter, fontWeight: 300, fontSize: 16, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const h4White = { fontFamily: inter, fontWeight: 500, fontSize: 13, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const h5White = { fontFamily: inter, fontWeight: 500, fontSize: 12, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const bodyWhite = { fontFamily: inter, fontWeight: 400, fontSize: 12, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const bodySmallWhite = { fontFamily: inter, fontWeight: 400, fontSize: 11, - color: "var(--dremio--color--neutral--000)", + color: "white", }; export const metadataWhite = { diff --git a/dac/ui/src/utils/FileUtils.js b/dac/ui/src/utils/FileUtils.js index bf6781ce76..2863547017 100644 --- 
a/dac/ui/src/utils/FileUtils.js +++ b/dac/ui/src/utils/FileUtils.js @@ -108,7 +108,10 @@ export default class FileUtils { static getDatasetPathForClientTools(dataset) { if (dataset) { const path = - dataset.get("displayFullPath") ?? dataset.get("fullPathList") ?? ""; + dataset.get("displayFullPath") ?? + dataset.get("fullPathList") ?? + dataset.get("fullPath") ?? + ""; return path.join("/"); } else { return ""; diff --git a/dac/ui/src/utils/WikiDrawerUtils.tsx b/dac/ui/src/utils/WikiDrawerUtils.tsx new file mode 100644 index 0000000000..d3180adc28 --- /dev/null +++ b/dac/ui/src/utils/WikiDrawerUtils.tsx @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Immutable from "immutable"; +// @ts-ignore +import { IconButton } from "dremio-ui-lib/components"; +// @ts-ignore +import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; +import { getIconPath } from "@app/utils/getIconPath"; +import { getIconType } from "@app/components/DatasetSummary/datasetSummaryUtils"; +import { intl } from "@app/utils/intl"; + +const openDatasetInNewTab = (datasetDetails: Immutable.Map) => { + const selfLink = datasetDetails.getIn(["links", "query"]); + const editLink = datasetDetails.getIn(["links", "edit"]); + const canAlter = datasetDetails.getIn(["permissions", "canAlter"]); + const toLink = canAlter && editLink ? editLink : selfLink; + const urldetails = new URL(window.location.origin + toLink); + const pathname = urldetails.pathname + "/wiki" + urldetails.search; + window.open(wrapBackendLink(pathname), "_blank"); +}; + +export const getCommonWikiDrawerTitle = ( + datasetDetails: Immutable.Map | undefined, + fullPath: Immutable.List | undefined, + closeWikiDrawer: (e: any) => void +) => { + const { formatMessage } = intl; + const iconName = getIconType(datasetDetails?.get("datasetType")); + + return ( +
+    <div className="wiki-drawer-title">
+      {iconName && (
+        <dremio-icon
+          name={`entities/${iconName}`}
+          class="wiki-drawer-title__icon"
+        />
+      )}
+      <div className="wiki-drawer-title__name">{fullPath?.last()}</div>
+      {datasetDetails?.get("hasReflection") && (
+        <img src={getIconPath("interface/reflections")} alt="reflection" />
+      )}
+      <IconButton
+        tooltip={formatMessage({ id: "Wiki.OpenInNewTab" })}
+        onClick={() => datasetDetails && openDatasetInNewTab(datasetDetails)}
+      >
+        <dremio-icon name="interface/external-link" />
+      </IconButton>
+      <IconButton
+        tooltip={formatMessage({ id: "Common.Close" })}
+        onClick={closeWikiDrawer}
+      >
+        <dremio-icon name="interface/close-big" />
+      </IconButton>
+      {/* used to prevent auto-focus on <IconButton> when opening the panel */}
+      <input type="text" className="hidden-input" />
+    </div>
        + ); +}; diff --git a/dac/ui/src/utils/analyzeToolsUtils.ts b/dac/ui/src/utils/analyzeToolsUtils.ts new file mode 100644 index 0000000000..00ee430ffd --- /dev/null +++ b/dac/ui/src/utils/analyzeToolsUtils.ts @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// @ts-ignore +import { HANDLE_THROUGH_API } from "@inject/pages/HomePage/components/HeaderButtonConstants"; +import config from "dyn-load/utils/config"; + +export const fetchStatusOfAnalyzeTools = () => { + const analyzeButtonsConfig = { + "client.tools.tableau": false, + "client.tools.powerbi": false, + }; + + if (HANDLE_THROUGH_API) { + const supportFlags = localStorage.getItem("supportFlags") + ? JSON.parse(localStorage.getItem("supportFlags") as string) + : null; + + analyzeButtonsConfig["client.tools.tableau"] = + supportFlags?.["client.tools.tableau"] ?? false; + + analyzeButtonsConfig["client.tools.powerbi"] = + supportFlags?.["client.tools.powerbi"] ?? false; + } else { + const { tableau = { enabled: false }, powerbi = { enabled: false } } = + config?.analyzeTools ?? {}; + + analyzeButtonsConfig["client.tools.tableau"] = tableau.enabled; + analyzeButtonsConfig["client.tools.powerbi"] = powerbi.enabled; + } + + return analyzeButtonsConfig; +}; diff --git a/dac/ui/src/utils/apiUtils/apiUtils.js b/dac/ui/src/utils/apiUtils/apiUtils.js index a8b9b9b507..cdb396b916 100644 --- a/dac/ui/src/utils/apiUtils/apiUtils.js +++ b/dac/ui/src/utils/apiUtils/apiUtils.js @@ -21,6 +21,7 @@ import APICall from "@app/core/APICall"; import ApiCallMixin from "@inject/utils/apiUtils/ApiUtilsMixin"; import { DEFAULT_ERR_MSG } from "@inject/constants/errors"; +import { appFetchWithoutErrorHandling } from "dremio-ui-common/utilities/appFetch.js"; /** * Error names from api middleware. @@ -86,10 +87,11 @@ class ApiUtils { ]; } - getFromNewQueryResponse(response) { + getFromJSONResponse(response) { const payload = response.payload; return { + dataset: payload.dataset, datasetPath: payload.datasetPath, datasetVersion: payload.datasetVersion, jobId: payload.jobId?.id, @@ -185,18 +187,16 @@ class ApiUtils { ...fetchOptions } = options; - return fetch(url, { ...fetchOptions, headers }) - .then((response) => { - return response.ok ? response : Promise.reject(response); - }) - .catch((e) => { - if (!Object.prototype.isPrototypeOf.call(Response.prototype, e)) { - //Catch error that occurs when server doesnt respond (e.g. 
loss of internet connection) - return this.handleError(e); - } else { - throw e; - } - }); + return appFetchWithoutErrorHandling(url, { + ...fetchOptions, + headers, + }).catch((e) => { + if (!Object.prototype.isPrototypeOf.call(Response.prototype, e)) { + return this.handleError(e); + } else { + throw e; + } + }); }; fetchJson(endpoint, jsonHandler, errorHandler, options = {}, version = 3) { @@ -259,7 +259,7 @@ class ApiUtils { message = secondError; } else if (thirdError) { message = thirdError; - } else if (typeof error === "string" && !error) { + } else if (typeof error === "string" && error) { message = error; } else { message = "Error"; diff --git a/dac/ui/src/utils/apiUtils/newTmpUntitledUtils.ts b/dac/ui/src/utils/apiUtils/responseUtils.ts similarity index 90% rename from dac/ui/src/utils/apiUtils/newTmpUntitledUtils.ts rename to dac/ui/src/utils/apiUtils/responseUtils.ts index 4456ce43ce..e9e93295a1 100644 --- a/dac/ui/src/utils/apiUtils/newTmpUntitledUtils.ts +++ b/dac/ui/src/utils/apiUtils/responseUtils.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -function readNewTmpUntitledResponse(type: any, meta: any) { +function readResponseAsJSON(type: any, meta: any) { return { type, meta, @@ -35,4 +35,4 @@ function readNewTmpUntitledResponse(type: any, meta: any) { }; } -export default readNewTmpUntitledResponse; +export default readResponseAsJSON; diff --git a/dac/ui/src/utils/arsUtils.ts b/dac/ui/src/utils/arsUtils.ts new file mode 100644 index 0000000000..6800ae955b --- /dev/null +++ b/dac/ui/src/utils/arsUtils.ts @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { CATALOG_ARS_ENABLED } from "@app/exports/flags/CATALOG_ARS_ENABLED"; +import { useFeatureFlag } from "@app/exports/providers/useFeatureFlag"; +import { isDefaultBranch } from "dremio-ui-common/utilities/versionContext.js"; + +export function useIsBIToolsEnabled(versionContext: any) { + const [arsEnabled, loading] = useFeatureFlag(CATALOG_ARS_ENABLED); + // Disable Analyze With when catalog_ars_enabled is on and user is not on default branch + return !loading && (!arsEnabled || isDefaultBranch(versionContext)); +} diff --git a/dac/ui/src/utils/config.js b/dac/ui/src/utils/config.js index 1ababf229a..4ac6bcee55 100644 --- a/dac/ui/src/utils/config.js +++ b/dac/ui/src/utils/config.js @@ -36,6 +36,7 @@ export default { outsideCommunicationDisabled: false, lowerProvisioningSettingsEnabled: false, allowFileUploads: true, + allowDownload: true, allowSpaceManagement: false, allowSourceManagement: false, subhourAccelerationPoliciesEnabled: false, @@ -61,7 +62,9 @@ export default { ...((window && window.dremioConfig) || {}), displayTutorialsLink: false, showNewJobsPage: true, + useNewDatasetNavigation: true, showOldReflectionsListing: false, showMetadataValidityCheckbox: false, + arcticReflectionsEnabled: false, ...((window && window.dremioConfig) || {}), }; diff --git a/dac/ui/src/utils/datasetNavigationUtils.ts b/dac/ui/src/utils/datasetNavigationUtils.ts new file mode 100644 index 0000000000..9d2f8f6c9d --- /dev/null +++ b/dac/ui/src/utils/datasetNavigationUtils.ts @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { HANDLE_THROUGH_API } from "@inject/pages/HomePage/components/HeaderButtonConstants"; +import { NEW_DATASET_NAVIGATION } from "@app/exports/endpoints/SupportFlags/supportFlagConstants"; +import config from "dyn-load/utils/config"; + +export const shouldUseNewDatasetNavigation = () => { + if (HANDLE_THROUGH_API) { + const supportFlags = localStorage.getItem("supportFlags") + ? 
JSON.parse(localStorage.getItem("supportFlags") as string) + : null; + + return supportFlags?.[NEW_DATASET_NAVIGATION]; + } else { + return config?.useNewDatasetNavigation; + } +}; diff --git a/dac/ui/src/utils/datasetUtils-spec.js b/dac/ui/src/utils/datasetUtils-spec.js index 25433e58f1..409046ff6e 100644 --- a/dac/ui/src/utils/datasetUtils-spec.js +++ b/dac/ui/src/utils/datasetUtils-spec.js @@ -35,6 +35,7 @@ describe("datasetUtils", () => { canMove: false, canDelete: false, canSetAccelerationUpdates: true, + isPhysical: true, }); }); @@ -48,6 +49,7 @@ describe("datasetUtils", () => { canMove: false, canDelete: true, canSetAccelerationUpdates: false, + isPhysical: true, }); }); @@ -59,6 +61,7 @@ describe("datasetUtils", () => { canMove: false, canDelete: false, canSetAccelerationUpdates: true, + isPhysical: true, }); }); @@ -70,6 +73,7 @@ describe("datasetUtils", () => { canMove: false, canDelete: false, canSetAccelerationUpdates: true, + isPhysical: true, }); }); @@ -81,6 +85,7 @@ describe("datasetUtils", () => { canMove: true, canDelete: true, canSetAccelerationUpdates: false, + isPhysical: false, }); }); }); diff --git a/dac/ui/src/utils/datasetUtils.js b/dac/ui/src/utils/datasetUtils.js index d497432857..d5a19c8858 100644 --- a/dac/ui/src/utils/datasetUtils.js +++ b/dac/ui/src/utils/datasetUtils.js @@ -38,8 +38,8 @@ export function abilities( const canDelete = !isPhysical || isHomeFile; // the "Acceleration Updates" tab should only be visible for physical datasets/folder - // https://dremio.atlassian.net/browse/DX-5019 - // https://dremio.atlassian.net/browse/DX-5689 + // DX-5019 + // DX-5689 // (doesn't make sense for home files though) const canSetAccelerationUpdates = isPhysical && !isHomeFile; @@ -50,6 +50,7 @@ export function abilities( canMove, canDelete, canSetAccelerationUpdates, + isPhysical, }; } diff --git a/dac/ui/src/utils/decorators/resourceDecorators.js b/dac/ui/src/utils/decorators/resourceDecorators.js index 923ad104e2..41ff52e00e 100644 --- a/dac/ui/src/utils/decorators/resourceDecorators.js +++ b/dac/ui/src/utils/decorators/resourceDecorators.js @@ -82,7 +82,7 @@ export function decorateFolder(folder) { export function decorateFile(file) { // todo: remove hacks: making files "quack" like other things - // pending new API in https://dremio.atlassian.net/browse/DX-4760 + // pending new API in DX-4760 const uiProperties = Immutable.fromJS({ fileType: "file", entityType: "file", diff --git a/dac/ui/src/utils/explore/exploreUtils.js b/dac/ui/src/utils/explore/exploreUtils.js index 18039c6811..580e82504a 100644 --- a/dac/ui/src/utils/explore/exploreUtils.js +++ b/dac/ui/src/utils/explore/exploreUtils.js @@ -474,12 +474,14 @@ class ExploreUtils { getAPICallForUntitledDatasetConfig = ( dottedFullPath, newVersion, - doNotWaitJobCompletion + doNotWaitJobCompletion, + willLoadTable ) => { const apiCall = new APIV2Call().paths("/datasets/new_untitled").params({ parentDataset: dottedFullPath, newVersion, limit: doNotWaitJobCompletion ? 
0 : ROWS_LIMIT, + triggerJob: willLoadTable, }); return apiCall; @@ -488,16 +490,28 @@ class ExploreUtils { getHrefForDatasetConfig = (resourcePath) => `${resourcePath}?view=explore&limit=50`; - getDatasetMetadataLink = (datasetPath, sessionId, version) => { + getDatasetMetadataLink = (dataset, datasetPath, sessionId, version) => { + // there should always be a datasetPath or dataset self link, but if there isn't + // then there's no need make a call to 'undefined/preview', this is then handled as an error + if (!datasetPath && !dataset?.apiLinks?.self) { + return undefined; + } + const apiCall = new APIV2Call() - .paths(`dataset/${datasetPath.join(".")}/version/${version}/preview`) + .paths( + datasetPath + ? `dataset/${datasetPath.join(".")}/version/${version}/preview` + : `${dataset.apiLinks.self}/preview` + ) .params({ view: "explore", limit: 0, triggerJob: false, }); - // TODO: check if getRefQueryParamsFromDataset is needed here + if (dataset) { + apiCall.params(getRefQueryParamsFromDataset(dataset.fullPath)); + } if (sessionId) { apiCall.params({ sessionId }); @@ -506,7 +520,25 @@ class ExploreUtils { return apiCall.toPath(); }; - getPreviewLink = (dataset, tipVersion, sessionId, willLoadTable) => { + getDatasetMetadata = (datasetPath, version) => { + const apiCall = new APIV2Call() + .paths(`dataset/${datasetPath}/version/${version}/preview`) + .params({ + view: "explore", + limit: 0, + triggerJob: false, + }); + return apiCall.toPath(); + }; + + getPreviewLink = ( + dataset, + tipVersion, + sessionId, + willLoadTable = true, + refType, + refValue + ) => { const apiCall = new APIV2Call() .paths(`${dataset.getIn(["apiLinks", "self"])}/preview`) .params({ @@ -514,6 +546,10 @@ class ExploreUtils { limit: 0, }); + if (refType && refValue) { + apiCall.params({ refType, refValue }); + } + if (tipVersion) { apiCall.params({ tipVersion }); } else { @@ -581,6 +617,14 @@ class ExploreUtils { return `${dataset.getIn(["apiLinks", "self"])}/${end}`; } + getNewPreviewTransformationLink(dataset, newVersion) { + const end = `transform_and_preview?newVersion=${encodeURIComponent( + newVersion + )}&limit=0`; + + return `${dataset.getIn(["apiLinks", "self"])}/${end}`; + } + getTransformPeekHref(dataset) { const newVersion = this.getNewDatasetVersion(); const end = `transformPeek?newVersion=${encodeURIComponent( @@ -610,15 +654,9 @@ class ExploreUtils { } getTmpUntitledSqlHref({ newVersion, sessionId }) { - if (sessionId) { - return `/datasets/new_tmp_untitled_sql?newVersion=${encodeURIComponent( - newVersion - )}&sessionId=${sessionId}&limit=0`; - } else { - return `/datasets/new_tmp_untitled_sql?newVersion=${encodeURIComponent( - newVersion - )}&limit=0`; - } + return `/datasets/new_tmp_untitled_sql?newVersion=${encodeURIComponent( + newVersion + )}${sessionId ? `&sessionId=${sessionId}` : ""}&limit=0`; } getUntitledSqlAndRunHref({ newVersion, sessionId }) { @@ -635,15 +673,9 @@ class ExploreUtils { } getTmpUntitledSqlAndRunHref({ newVersion, sessionId }) { - if (sessionId) { - return `/datasets/new_tmp_untitled_sql_and_run?newVersion=${encodeURIComponent( - newVersion - )}&sessionId=${sessionId}`; - } else { - return `/datasets/new_tmp_untitled_sql_and_run?newVersion=${encodeURIComponent( - newVersion - )}`; - } + return `/datasets/new_tmp_untitled_sql_and_run?newVersion=${encodeURIComponent( + newVersion + )}${sessionId ? 
`&sessionId=${sessionId}` : ""}`; } getMappedDataForTransform(item, detailsType) { diff --git a/dac/ui/src/utils/iconUtils.js b/dac/ui/src/utils/iconUtils.js index 7123e5736c..aee651c24e 100644 --- a/dac/ui/src/utils/iconUtils.js +++ b/dac/ui/src/utils/iconUtils.js @@ -13,7 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import { DATASET_TYPES_TO_ICON_TYPES } from "@app/constants/datasetTypes"; +import { + DATASET_TYPES_TO_ICEBERG_TYPES, + DATASET_TYPES_TO_ICON_TYPES, + PHYSICAL_DATASET, + VIRTUAL_DATASET, +} from "@app/constants/datasetTypes"; import { NESSIE, ARCTIC } from "@app/constants/sourceTypes"; import { formatMessage } from "./locale"; @@ -24,7 +29,7 @@ const FILE_TYPES_TO_ICON_TYPES = { physicalDatasets: "PhysicalDataset", }; -const ICEBERG_ICON_TYPES = { +const FILE_TYPES_TO_ICEBERG_ICON_TYPES = { table: "IcebergTable", dataset: "IcebergView", physicalDatasets: "IcebergTable", @@ -50,7 +55,7 @@ export function getIconDataTypeFromEntity(entity) { export function getIcebergIconTypeFromEntity(entity) { const fileType = entity.get("fileType"); if (["table", "dataset", "physicalDatasets"].includes(fileType)) { - return ICEBERG_ICON_TYPES[fileType]; + return FILE_TYPES_TO_ICEBERG_ICON_TYPES[fileType]; } else return getIconDataTypeFromEntity(entity); } @@ -58,6 +63,12 @@ export function getIconDataTypeFromDatasetType(datasetType) { return DATASET_TYPES_TO_ICON_TYPES[datasetType]; } +export function getIcebergIconDataTypeFromDatasetType(datasetType) { + if ([VIRTUAL_DATASET, PHYSICAL_DATASET].includes(datasetType)) { + return DATASET_TYPES_TO_ICEBERG_TYPES[datasetType]; + } else return getIconDataTypeFromDatasetType(datasetType); +} + const STATUSES_ICON_POSTFIX = { good: "", bad: "-Bad", diff --git a/dac/ui/src/utils/jobsUtils.js b/dac/ui/src/utils/jobsUtils.js index b52da4b152..7e1fae1931 100644 --- a/dac/ui/src/utils/jobsUtils.js +++ b/dac/ui/src/utils/jobsUtils.js @@ -351,7 +351,7 @@ export class JobsUtils { url = jobPaths.reflection.link({ projectId, reflectionId: id }); } else { url = `${jobPaths.jobs.link({ projectId })}?filters=${encodeURIComponent( - JSON.stringify({ contains: [id] }) + JSON.stringify({ sql: ["*" + id + "*"], qt: ["ACCELERATION"] }) )}`; } return createFullUrl ? window.location.origin + url : url; diff --git a/dac/ui/src/utils/mainInfoUtils/newMainInfoNameUtil.ts b/dac/ui/src/utils/mainInfoUtils/newMainInfoNameUtil.ts new file mode 100644 index 0000000000..3de4cef8b9 --- /dev/null +++ b/dac/ui/src/utils/mainInfoUtils/newMainInfoNameUtil.ts @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import Immutable from "immutable"; +import { addProjectBase as wrapBackendLink } from "dremio-ui-common/utilities/projectBase.js"; +import { constructFullPath } from "@app/utils/pathUtils"; +import * as sqlPaths from "dremio-ui-common/paths/sqlEditor.js"; + +export function newGetHref( + entity: Immutable.Map, + context: Record +) { + const fileType = entity.get("fileType"); + + if (entity.get("fileType") === "file") { + if (entity.get("queryable")) { + const resourceId = entity.getIn(["fullPathList", 0]); + const newFullPath = constructFullPath(entity.get("fullPathList")); + + return { + href: { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + resourceId + )}"&queryPath=${newFullPath}`, + }, + }; + } + + return { + ...context.location, + state: { + modal: "DatasetSettingsModal", + tab: "format", + entityType: entity.get("entityType"), + entityId: entity.get("id"), + fullPath: entity.get("filePath"), + query: { then: "query" }, + isHomePage: true, + }, + }; + } + + if (fileType === "folder") { + if (entity.get("queryable")) { + const resourceId = entity.getIn(["fullPathList", 0]); + const newFullPath = constructFullPath(entity.get("fullPathList")); + + return { + href: { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + resourceId + )}"&queryPath=${newFullPath}`, + }, + }; + } + return wrapBackendLink(entity.getIn(["links", "self"])); + } + + const resourceId = entity.getIn(["fullPathList", 0]); + const newFullPath = constructFullPath(entity.get("fullPathList")); + + return { + href: { + pathname: sqlPaths.sqlEditor.link(), + search: `?context="${encodeURIComponent( + resourceId + )}"&queryPath=${newFullPath}`, + }, + }; +} diff --git a/dac/ui/src/utils/nessieUtils-spec.ts b/dac/ui/src/utils/nessieUtils-spec.ts index abc80ddcb3..d3cfb47812 100644 --- a/dac/ui/src/utils/nessieUtils-spec.ts +++ b/dac/ui/src/utils/nessieUtils-spec.ts @@ -17,7 +17,6 @@ import { COMMIT_TYPE } from "@app/constants/nessie"; import { expect } from "chai"; import { getNessieReferencePayload, - getProjectIdFromUrl, getReferenceListForTransform, getRefQueryParams, getTypeAndValue, @@ -156,27 +155,4 @@ describe("nessieUtils", () => { ]); }); }); - - describe("getProjectIdFromUrl", () => { - it("empty state", () => { - expect(getProjectIdFromUrl(undefined)).to.equal(""); - expect(getProjectIdFromUrl(null)).to.equal(""); - expect(getProjectIdFromUrl("")).to.equal(""); - }); - - it("non-string values", () => { - expect(getProjectIdFromUrl(123)).to.equal(""); - }); - - it("correctly formed url state", () => { - expect( - getProjectIdFromUrl( - "http://app.test1.dremio.site/v1/projects/test-guid" - ) - ).to.equal("test-guid"); - expect( - getProjectIdFromUrl("http://localhost:3005/v1/projects/test-guid") - ).to.equal("test-guid"); - }); - }); }); diff --git a/dac/ui/src/utils/nessieUtils.ts b/dac/ui/src/utils/nessieUtils.ts index dab97d6c9d..750cd93d05 100644 --- a/dac/ui/src/utils/nessieUtils.ts +++ b/dac/ui/src/utils/nessieUtils.ts @@ -21,9 +21,11 @@ import { store } from "@app/store/store"; import apiUtils from "@app/utils/apiUtils/apiUtils"; import moize from "moize"; import { isVersionedSource } from "./sourceUtils"; +import { NESSIE } from "@app/constants/sourceTypes"; +import { NESSIE_PROXY_URL_V2 } from "@app/constants/Api"; export function getShortHash(hash?: string) { - return hash && hash.length > 6 ? hash.substring(0, 6) : hash; + return hash && hash.length > 6 ? 
hash.substring(0, 8) : hash; } export function getIconByType(refType: string, hash?: string | null) { @@ -140,19 +142,17 @@ export function getProjectUrl(id?: string) { })}`; } -export function getArcticProjectUrl(id?: string) { +export function getArcticProjectUrl( + id: string | undefined, + nessieVersion?: string +) { //@ts-ignore return `${window.location.protocol}${apiUtils.getAPIVersion("ARCTIC", { projectId: id, + nessieVersion, })}`; } -export function getProjectIdFromUrl(url?: any) { - if (!url || typeof url !== "string") return ""; - const value = url.substring(url.lastIndexOf("/") + 1, url.length) || ""; - return value.replace("/", ""); -} - export function isBranchSelected(state?: NessieState) { if (!state) return false; @@ -177,21 +177,43 @@ export function parseNamespaceUrl(url: string, path: string) { return url.replace(`/${path}/`, "").split("/"); } +type SourceType = { + name: string; + type: string; + config?: CatalogSourceConfig | NessieSourceConfig; +}; type CatalogSourceConfig = { arcticCatalogId: string }; type NessieSourceConfig = { nessieEndpoint: string }; -function isArcticCatalogConfig( - config: CatalogSourceConfig | NessieSourceConfig +export function isArcticCatalogConfig( + config?: CatalogSourceConfig | NessieSourceConfig ): config is CatalogSourceConfig { - return (config as CatalogSourceConfig).arcticCatalogId != null; + return (config as CatalogSourceConfig)?.arcticCatalogId != null; } -export function getEndpointFromSourceConfig( - config?: CatalogSourceConfig | NessieSourceConfig +function getEndpointFromSourceConfig( + config: CatalogSourceConfig | NessieSourceConfig | undefined, + nessieVersion: string ) { if (!config) return ""; if (isArcticCatalogConfig(config)) { - return getArcticProjectUrl(config.arcticCatalogId); + return getArcticProjectUrl(config.arcticCatalogId, nessieVersion); } else { return config.nessieEndpoint; } } + +function getNessieSourceUrl(sourceName: string) { + return `${NESSIE_PROXY_URL_V2}/${`source/${sourceName}`}`; +} + +export function getEndpointFromSource( + source: SourceType | undefined, + nessieVersion = "v2" +) { + if (!source) return ""; + if (source.type === NESSIE) { + return getNessieSourceUrl(source.name); + } + + return getEndpointFromSourceConfig(source.config, nessieVersion); +} diff --git a/dac/ui/src/utils/pathUtils.js b/dac/ui/src/utils/pathUtils.js index f624bf5145..cac1b1fadf 100644 --- a/dac/ui/src/utils/pathUtils.js +++ b/dac/ui/src/utils/pathUtils.js @@ -153,7 +153,9 @@ export const RESERVED_WORDS = new Set( // Portions: CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: http://codemirror.net/LICENSE " " + - "BOOL BOOLEAN BIT BLOB DECIMAL DOUBLE FLOAT LONG LONGBLOB LONGTEXT MEDIUM MEDIUMBLOB MEDIUMINT MEDIUMTEXT TIME TIMESTAMP TINYBLOB TINYINT TINYTEXT TEXT CLOB BIGINT INT INT2 INT8 INTEGER FLOAT DOUBLE CHAR VARCHAR DATE DATETIME YEAR UNSIGNED SIGNED NUMERIC REAL" + "BOOL BOOLEAN BIT BLOB DECIMAL DOUBLE FLOAT LONG LONGBLOB LONGTEXT MEDIUM MEDIUMBLOB MEDIUMINT MEDIUMTEXT TIME TIMESTAMP TINYBLOB TINYINT TINYTEXT TEXT CLOB BIGINT INT INT2 INT8 INTEGER FLOAT DOUBLE CHAR VARCHAR DATE DATETIME YEAR UNSIGNED SIGNED NUMERIC REAL" + + " " + + "REPLACE" ) // todo: reconcile with constants/DataTypes.js, factor into independant list .split(" ") diff --git a/dac/ui/src/utils/projects.ts b/dac/ui/src/utils/projects.ts index be69fe69cd..bc4fe9bfb1 100644 --- a/dac/ui/src/utils/projects.ts +++ b/dac/ui/src/utils/projects.ts @@ -17,7 +17,7 @@ import localStorageUtils from 
"@inject/utils/storageUtils/localStorageUtils"; import { store } from "@app/store/store"; // @ts-ignore import { resetPrivilegesState } from "@inject/actions/privileges"; -import { isDcsEdition } from "dyn-load/utils/versionUtils"; +import { isNotSoftware } from "dyn-load/utils/versionUtils"; export const handleSonarProjectChange = ( project: any, @@ -25,7 +25,7 @@ export const handleSonarProjectChange = ( ) => { (localStorageUtils as any)?.setProjectContext?.(project); (localStorageUtils as any)?.clearCurrentEngine?.(); - if (isDcsEdition()) { + if (isNotSoftware()) { // reset redux state related to projects store.dispatch(resetPrivilegesState(["engineMapping"])); } diff --git a/dac/ui/src/utils/resourcePathUtils/dataset.js b/dac/ui/src/utils/resourcePathUtils/dataset.js index c064aa575d..fc8a17301c 100644 --- a/dac/ui/src/utils/resourcePathUtils/dataset.js +++ b/dac/ui/src/utils/resourcePathUtils/dataset.js @@ -37,7 +37,7 @@ class DatasetResourcePathUtils { } /* - * Creating the dataset URL: https://dremio.atlassian.net/browse/DX-56494 + * Creating the dataset URL: DX-56494 * * Decoding the part separately before concatenating into a URL. Putting * the roote space before the rest of the namespace, so that diff --git a/dac/ui/src/utils/sourceUtils.ts b/dac/ui/src/utils/sourceUtils.ts index 73c57d0677..017bb2667d 100644 --- a/dac/ui/src/utils/sourceUtils.ts +++ b/dac/ui/src/utils/sourceUtils.ts @@ -20,6 +20,10 @@ export function sourceTypesIncludeS3(sourceTypes: { sourceType: string }[]) { return sourceTypes && !!sourceTypes.find((type) => type.sourceType === "S3"); } +export function sourceTypesIncludeSampleSource(sourceTypes: { sourceType: string }[]) { + return sourceTypes && !!sourceTypes.find((type) => type.sourceType === "SAMPLE_SOURCE"); +} + export function isVersionedSource(type: string) { switch (type) { case NESSIE: diff --git a/dac/ui/src/utils/sql-autocomplete-spec.ts b/dac/ui/src/utils/sql-autocomplete-spec.ts index 25276805b4..bf7cb0a95d 100644 --- a/dac/ui/src/utils/sql-autocomplete-spec.ts +++ b/dac/ui/src/utils/sql-autocomplete-spec.ts @@ -14,29 +14,9 @@ * limitations under the License. 
*/ -import { - constructTransformValues, - getAutoCompleteInsertText, -} from "./sql-autocomplete"; +import { constructTransformValues } from "./sql-autocomplete"; import { expect } from "chai"; -const subEntityExamples = [ - { - contentWithPairedQuotes: ['select * from "samples or"'], - contentWithSoloQuote: ['select * from "samples or'], - position: { lineNumber: 1, column: 26 }, - entity: '"samples original"', - activeWord: "or", - }, - { - contentWithPairedQuotes: ['select * from "samples.or"'], - contentWithSoloQuote: ['select * from "samples.or'], - position: { lineNumber: 1, column: 26 }, - entity: '"samples.original"', - activeWord: "or", - }, -]; - describe("sql-autocomplete", () => { const transformCases = { insidePairedDoubleQuote: false, @@ -86,70 +66,4 @@ describe("sql-autocomplete", () => { }); }); }); - - describe("getAutoCompleteInsertText", () => { - it("should return the original insertText (without double quotes) if no transform is required", () => { - expect( - getAutoCompleteInsertText("samples", transformCases, "samples") - ).to.equal("samples"); - }); - - it("should return the original insertText (with double quotes) if no transform is required", () => { - expect( - getAutoCompleteInsertText('"samples"', transformCases, '"samples"') - ).to.equal('"samples"'); - }); - - it("should return insertText with only the right double quote", () => { - expect( - getAutoCompleteInsertText( - '"samples"', - { ...transformCases, insideSoloDoubleQuote: true }, - '"samples"' - ) - ).to.equal('samples"'); - }); - - it("should return insertText with neither of the double quotes", () => { - expect( - getAutoCompleteInsertText( - '"samples"', - { ...transformCases, insidePairedDoubleQuote: true }, - '"samples"' - ) - ).to.equal("samples"); - }); - - subEntityExamples.forEach((example: any) => { - it(`using ${example.entity}, it should return insertText with only the remaining subentities and no double quotes`, () => { - const subTransformCases = constructTransformValues( - example.contentWithPairedQuotes, - example.position, - example.activeWord - ); - expect( - getAutoCompleteInsertText( - example.entity, - subTransformCases, - example.activeWord - ) - ).to.equal("original"); - }); - - it(`using ${example.entity}, it should return insertText with only the remaining subentities and with the right double quote`, () => { - const subTransformCases = constructTransformValues( - example.contentWithSoloQuote, - example.position, - example.activeWord - ); - expect( - getAutoCompleteInsertText( - example.entity, - subTransformCases, - example.activeWord - ) - ).to.equal('original"'); - }); - }); - }); }); diff --git a/dac/ui/src/utils/sqlFunctionUtils.ts b/dac/ui/src/utils/sqlFunctionUtils.ts index 342d1587c5..ed3c8d2b12 100644 --- a/dac/ui/src/utils/sqlFunctionUtils.ts +++ b/dac/ui/src/utils/sqlFunctionUtils.ts @@ -27,7 +27,7 @@ export const FunctionCategoryLabels = { [FunctionCategories.CONTEXT]: "Context", [FunctionCategories.CONVERSION]: "Conversion", [FunctionCategories.DATETIME]: "Date/Time", - [FunctionCategories.DATETYPE]: "Data type", + [FunctionCategories.DATATYPE]: "Data type", [FunctionCategories.DIRECTORY]: "Directory", [FunctionCategories.GEOSPATIAL]: "Geospatial", [FunctionCategories.MATH]: "Math", diff --git a/dac/ui/src/utils/storageUtils/localStorageUtils.js b/dac/ui/src/utils/storageUtils/localStorageUtils.js index f3fc1da8fd..95cbdfd39e 100644 --- a/dac/ui/src/utils/storageUtils/localStorageUtils.js +++ b/dac/ui/src/utils/storageUtils/localStorageUtils.js @@ -153,6 +153,11 
@@ export class LocalStorageUtils { } getDefaultSqlState() { + const sqlState = localStorage.getItem("sqlState"); + if (sqlState == null) { + this.setDefaultSqlState(true); + return true; + } return localStorage.getItem("sqlState") === "true"; } diff --git a/dac/ui/src/utils/timeUtils-spec.js b/dac/ui/src/utils/timeUtils-spec.js index 4961b8b7dc..a0e2f7a025 100644 --- a/dac/ui/src/utils/timeUtils-spec.js +++ b/dac/ui/src/utils/timeUtils-spec.js @@ -169,41 +169,4 @@ describe("Tests for time utils", () => { expect(TimeUtils.formatTimeDiff(61 * 60000)).to.equal("1:01:00"); }); }); - - describe("isMoreThanYearsFromNow", () => { - const secondsInYear = 365 * 24 * 60 * 60; - it("should compare valid datetime", () => { - const now = new Date(); - expect(TimeUtils.isMoreThanYearsFromNow(now, 1)).to.be.false; - expect( - TimeUtils.isMoreThanYearsFromNow(moment().add(secondsInYear, "s"), 1) - ).to.be.false; - expect( - TimeUtils.isMoreThanYearsFromNow( - moment().add(secondsInYear * 2, "s"), - 2 - ) - ).to.be.false; - expect( - TimeUtils.isMoreThanYearsFromNow( - moment().add(secondsInYear + 1000, "s"), - 1 - ) - ).to.be.true; - expect( - TimeUtils.isMoreThanYearsFromNow( - moment().add(secondsInYear * 2 + 180 * 24 * 60 * 60, "s"), - 2 - ) - ).to.be.true; - }); - it("should compare valid timestamp", () => { - //04/04/2020 - expect(TimeUtils.isMoreThanYearsFromNow(1586043405000, 1)).to.be.false; - //04/04/2022 - expect(TimeUtils.isMoreThanYearsFromNow(1649115862000, 2)).to.be.false; - //04/04/2030 - expect(TimeUtils.isMoreThanYearsFromNow(1901576205000, 1)).to.be.true; - }); - }); }); diff --git a/dac/ui/src/utils/validation-spec.js b/dac/ui/src/utils/validation-spec.js index a62f92c8fd..8a370c2f35 100644 --- a/dac/ui/src/utils/validation-spec.js +++ b/dac/ui/src/utils/validation-spec.js @@ -105,19 +105,20 @@ describe("validation", () => { expect(isEmail("email")({ email: "dremio@" })).to.eql({ email: "Not a valid email address.", }); - expect(isEmail("email")({ email: "dremio@gmail" })).to.eql({ - email: "Not a valid email address.", - }); expect(isEmail("email")({ email: "dremio@gmail." 
})).to.eql({ email: "Not a valid email address.", }); - expect(isEmail("email")({ email: "dremio@gmail.c" })).to.eql({ - email: "Not a valid email address.", - }); }); it("should success for correct email", () => { expect(isEmail("email")({ email: "dremio@gmail.com" })).to.eql(undefined); + expect( + isEmail("email")({ + email: "dremio@sdlkfjlskdjflsdf.sdflkjsdlkfjlskdjf", + }) + ).to.eql(undefined); + expect(isEmail("email")({ email: "dremio@gmail.c" })).to.eql(undefined); + expect(isEmail("email")({ email: "dremio@gmail" })).to.eql(undefined); }); }); diff --git a/dac/ui/src/utils/validation.js b/dac/ui/src/utils/validation.js index 2eb2d03ae9..82ffa2eb4a 100644 --- a/dac/ui/src/utils/validation.js +++ b/dac/ui/src/utils/validation.js @@ -19,6 +19,9 @@ import { capitalize } from "lodash"; import Immutable from "immutable"; // todo: loc +// https://html.spec.whatwg.org/multipage/input.html#email-state-(type=email) +const emailAddressExpr = + /^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; export function isEmptyValue(value) { const isInValidString = @@ -100,7 +103,7 @@ export function confirmPassword(password, confirm) { export function isEmail(key) { return function (values) { const email = values[key]; - if (!/^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$/i.test(email)) { + if (!emailAddressExpr.test(email)) { return set({}, key, "Not a valid email address."); } }; diff --git a/dac/ui/src/utils/versionUtils.js b/dac/ui/src/utils/versionUtils.js index 817f4e339a..7f11631e03 100644 --- a/dac/ui/src/utils/versionUtils.js +++ b/dac/ui/src/utils/versionUtils.js @@ -27,6 +27,9 @@ export function getAboutMode() { return "full"; } -export function isDcsEdition() { +export function isNotSoftware() { return false; } + +export const getDocsLink = () => + "https://docs.dremio.com/software/sql-reference/sql-functions/functions"; diff --git a/dac/ui/tsconfig.json b/dac/ui/tsconfig.json index f1a2d9a22e..f6c3c3c0fe 100644 --- a/dac/ui/tsconfig.json +++ b/dac/ui/tsconfig.json @@ -14,5 +14,11 @@ "../scripts/stubModule.js" ] } + }, + "ts-node": { + "compilerOptions": { + "esModuleInterop": true, + "module": "CommonJS" + } } } diff --git a/dac/ui/webpack.config.js b/dac/ui/webpack.config.ts similarity index 80% rename from dac/ui/webpack.config.js rename to dac/ui/webpack.config.ts index 70cabb7cd5..0d6bc7f3bf 100644 --- a/dac/ui/webpack.config.js +++ b/dac/ui/webpack.config.ts @@ -13,26 +13,41 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -const path = require("path"); -const webpack = require("webpack"); -const HtmlWebpackPlugin = require("html-webpack-plugin"); -const CopyWebpackPlugin = require("copy-webpack-plugin"); -const HtmlWebpackTagsPlugin = require("html-webpack-tags-plugin"); -const SentryCliPlugin = require("@sentry/webpack-plugin"); -const MiniCssExtractPlugin = require("mini-css-extract-plugin"); -const CssMinimizerPlugin = require("css-minimizer-webpack-plugin"); -const TerserPlugin = require("terser-webpack-plugin"); -const { getVersion, getEdition } = require("./scripts/versionUtils"); -const dynLoader = require("./dynLoader"); -const { - injectionPath, - InjectionResolver, -} = require("./scripts/injectionResolver"); -const BundleAnalyzerPlugin = - require("webpack-bundle-analyzer").BundleAnalyzerPlugin; +//@ts-nocheck +import path from "path"; +import webpack from "webpack"; +import HtmlWebpackPlugin from "html-webpack-plugin"; +import CopyWebpackPlugin from "copy-webpack-plugin"; +import HtmlWebpackTagsPlugin from "html-webpack-tags-plugin"; +import SentryCliPlugin from "@sentry/webpack-plugin"; +import MiniCssExtractPlugin from "mini-css-extract-plugin"; +import CssMinimizerPlugin from "css-minimizer-webpack-plugin"; +import TerserPlugin from "terser-webpack-plugin"; +import { getVersion, getEdition } from "./scripts/versionUtils"; +import dynLoader from "./dynLoader"; +import { injectionPath, InjectionResolver } from "./scripts/injectionResolver"; +import { BundleAnalyzerPlugin } from "webpack-bundle-analyzer"; +import * as env from "env-var"; dynLoader.applyNodeModulesResolver(); +const DEV_DCS_TARGET = env + .get("DEV_DCS_TARGET") + .default("https://app.dev.dremio.site") + .asString(); +const DEV_NESSIE_TARGET = env + .get("DEV_NESSIE_TARGET") + .default(DEV_DCS_TARGET.replace("app.", "nessie.")) + .asString(); +const DEV_NESSIE_TARGET_PREFIX = env + .get("DEV_NESSIE_TARGET_PREFIX") + .default("true") + .asBool(); +const DEV_APP_TARGET = env + .get("DEV_APP_TARGET") + .default("http://automaster:9047") + .asString(); + const enableBundleAnalyzer = process.env.ENABLE_BUNDLE_ANALYZER === "true"; const isProductionBuild = process.env.NODE_ENV === "production"; const isBeta = process.env.DREMIO_BETA === "true"; @@ -168,12 +183,53 @@ const outFilenameJsChunkTemplate = "static/js/[name].[contenthash:8].chunk.js"; const outFilenameCssTemplate = "static/css/[name].[contenthash:8].css"; const config = { - // abort process on errors - bail: true, mode: isProductionBuild ? "production" : "development", entry: { app: [path.resolve(__dirname, "src/index.js")], }, + devServer: { + compress: true, + historyApiFallback: true, + hot: true, + port: 3005, + proxy: { + headers: { Connection: "keep-alive" }, + "/api": { + target: DEV_APP_TARGET, + changeOrigin: true, + ws: true, + }, + "/nessie-proxy/v2": { + target: DEV_APP_TARGET, + changeOrigin: true, + }, + "/nessieV1": { + target: DEV_NESSIE_TARGET, + changeOrigin: true, + pathRewrite: { "^/nessieV1/": DEV_NESSIE_TARGET_PREFIX ? "/v1/" : "/" }, + }, + "/nessie": { + target: DEV_NESSIE_TARGET, + changeOrigin: true, + pathRewrite: { "^/nessie/": DEV_NESSIE_TARGET_PREFIX ? 
"/v2/" : "/" }, + }, + "/support": { + target: DEV_DCS_TARGET.replace("app.", "support."), + changeOrigin: true, + }, + "/ui": { + target: DEV_DCS_TARGET, + changeOrigin: true, + }, + "/v0": { + target: DEV_DCS_TARGET.replace("app.", "api."), + changeOrigin: true, + }, + }, + static: { + directory: "./public", + }, + }, output: { publicPath: "/", path: outputPath, @@ -347,9 +403,13 @@ const config = { // Todo: Below lines are to fix the issue with 2 instances of react because of lib. Find a better fix for this. https://github.com/facebook/react/issues/13991 react: path.resolve(__dirname, "node_modules/react"), "@mui": path.resolve(__dirname, "node_modules/@mui"), + "leantable/react": path.resolve( + __dirname, + "node_modules/leantable/dist-cjs/react" + ), }, plugins: [new InjectionResolver()], }, }; -module.exports = config; +export default config; diff --git a/distribution/docker/Dockerfile b/distribution/docker/Dockerfile index ef9a25aa7b..9170e82d5a 100644 --- a/distribution/docker/Dockerfile +++ b/distribution/docker/Dockerfile @@ -43,6 +43,7 @@ RUN \ EXPOSE 9047/tcp EXPOSE 31010/tcp +EXPOSE 32010/tcp EXPOSE 45678/tcp USER dremio diff --git a/distribution/docker/README.md b/distribution/docker/README.md index ef0f7f9fbb..26d2c25711 100644 --- a/distribution/docker/README.md +++ b/distribution/docker/README.md @@ -14,7 +14,7 @@ Note: This should work for both Community and Enterprise editions of Dremio. ## Single Node Deployment ```bash -docker run -p 9047:9047 -p 31010:31010 -p 45678:45678 dremio/dremio-oss +docker run -p 9047:9047 -p 31010:31010 -p 32010:32010 -p 45678:45678 dremio/dremio-oss ``` This includes a single node deployment that starts up a single daemon that includes: * Embedded Zookeeper diff --git a/distribution/jdbc-driver/pom.xml b/distribution/jdbc-driver/pom.xml index 34d05d7123..c856c423b7 100644 --- a/distribution/jdbc-driver/pom.xml +++ b/distribution/jdbc-driver/pom.xml @@ -21,15 +21,15 @@ com.dremio.distribution dremio-distribution-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-jdbc-driver Distribution - All-in-one JDBC JAR - - true + + 40000000 @@ -379,6 +379,20 @@ org.apache.calcite.avatica avatica-core + + org.apache.zookeeper + zookeeper + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + + @@ -485,6 +499,10 @@ ${junit:junit:jar} ${org.hamcrest:hamcrest-core:jar} + ${org.junit.jupiter:junit-jupiter-api:jar} + ${org.junit.jupiter:junit-jupiter-params:jar} + ${org.junit.jupiter:junit-jupiter-engine:jar} + ${org.junit.vintage:junit-vintage-engine:jar} ${org.assertj:assertj-core:jar} ${org.slf4j:slf4j-api:jar} ${ch.qos.logback:logback-classic:jar} @@ -802,7 +820,7 @@ This is likely due to you adding new dependencies to a sabot-kernel and not updating the excludes in this module. This is important as it minimizes the size of the dependency of Dremio application users. 
- 40000000 + ${dremio.distribution.jar.maxSize} 15000000 ${project.build.directory}/dremio-jdbc-driver-${project.version}.jar @@ -814,29 +832,20 @@ - - jdk9-or-higher + mapr - [9,) + + mapr + - - - - - maven-failsafe-plugin - - - 1.8 - - - - - + + 41000000 + diff --git a/distribution/jdbc-driver/src/test/java/com/dremio/jdbc/ITTestShadedJar.java b/distribution/jdbc-driver/src/test/java/com/dremio/jdbc/ITTestShadedJar.java index 19194a8162..5301aa9151 100644 --- a/distribution/jdbc-driver/src/test/java/com/dremio/jdbc/ITTestShadedJar.java +++ b/distribution/jdbc-driver/src/test/java/com/dremio/jdbc/ITTestShadedJar.java @@ -29,8 +29,8 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.util.List; import java.util.Properties; -import java.util.Vector; import java.util.concurrent.TimeUnit; import org.junit.AfterClass; @@ -40,11 +40,13 @@ import org.junit.rules.TestRule; import org.junit.rules.Timeout; +import com.dremio.common.SuppressForbidden; + public class ITTestShadedJar { private static final org.slf4j.Logger LOGGER = org.slf4j.LoggerFactory.getLogger(ITTestShadedJar.class); private static NodeClassLoader nodeLoader; - private static URLClassLoader rootClassLoader; + private static ClassLoader rootClassLoader; private static volatile String jdbcURL = null; private static URL getJdbcUrl() throws MalformedURLException { @@ -145,7 +147,7 @@ private static void printQuery(Connection c, String query) throws SQLException { @BeforeClass public static void setupDefaultTestCluster() throws Exception { nodeLoader = new NodeClassLoader(); - rootClassLoader = (URLClassLoader) Thread.currentThread().getContextClassLoader(); + rootClassLoader = Thread.currentThread().getContextClassLoader(); try { runWithLoader("NodeStartThread", nodeLoader); } catch (Exception e) { @@ -159,23 +161,12 @@ public static void closeClient() throws Exception { runWithLoader("NodeStopThread", nodeLoader); } - private static int getClassesLoadedCount(ClassLoader classLoader) { - try { - Field f = ClassLoader.class.getDeclaredField("classes"); - f.setAccessible(true); - Vector> classes = (Vector>) f.get(classLoader); - return classes.size(); - } catch (Exception e) { - System.out.println("Failure while loading class count."); - return -1; - } - } - + @SuppressForbidden // needed for: setAccessible(true) private static void printClassesLoaded(String prefix, ClassLoader classLoader) { try { Field f = ClassLoader.class.getDeclaredField("classes"); f.setAccessible(true); - Vector> classes = (Vector>) f.get(classLoader); + List> classes = (List>) f.get(classLoader); for (Class c : classes) { System.out.println(prefix + ": " + c.getName()); } diff --git a/distribution/pom.xml b/distribution/pom.xml index f6d7a3e51c..2c702f2446 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.distribution diff --git a/distribution/resources/pom.xml b/distribution/resources/pom.xml index b4cfaed789..f4fa7cc84a 100644 --- a/distribution/resources/pom.xml +++ b/distribution/resources/pom.xml @@ -22,7 +22,7 @@ com.dremio.distribution dremio-distribution-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-distribution-resources diff --git a/distribution/resources/src/main/resources-default/licenses/LICENSES_FOR_DEPENDENCIES.md b/distribution/resources/src/main/resources-default/licenses/LICENSES_FOR_DEPENDENCIES.md index 
af8177cf3a..9a9fb6638d 100644 --- a/distribution/resources/src/main/resources-default/licenses/LICENSES_FOR_DEPENDENCIES.md +++ b/distribution/resources/src/main/resources-default/licenses/LICENSES_FOR_DEPENDENCIES.md @@ -2,7 +2,7 @@ |Name |Version |License | |------------------------------------------------------------|--------------------|--------------------| -s|@ampproject/remapping |2.2.0 |Apache-2.0 | +|@ampproject/remapping |2.2.0 |Apache-2.0 | |@babel/code-frame |7.16.7 |MIT | |@babel/compat-data |7.18.5 |MIT | |@babel/core |7.18.5 |MIT | @@ -42,7 +42,9 @@ s|@ampproject/remapping |2.2.0 |@floating-ui/core |0.7.3 |MIT | |@floating-ui/dom |0.5.4 |MIT | |@floating-ui/react-dom |0.7.2 |MIT | +|@floating-ui/react-dom |1.3.0 |MIT | |@floating-ui/react-dom-interactions |0.6.6 |MIT | +|@floating-ui/react-dom-interactions |0.9.3 |MIT | |@formatjs/ecma402-abstract |1.9.8 |MIT | |@formatjs/fast-memoize |1.2.0 |ISC | |@formatjs/icu-messageformat-parser |2.0.11 |MIT | @@ -61,10 +63,13 @@ s|@ampproject/remapping |2.2.0 |@jridgewell/trace-mapping |0.3.13 |MIT | |@jridgewell/trace-mapping |0.3.15 |MIT | |@mantine/core |5.0.2 |MIT | +|@mantine/dates |5.9.3 |MIT | |@mantine/hooks |5.0.2 |MIT | |@mantine/styles |5.0.2 |MIT | |@mantine/utils |5.0.2 |MIT | +|@mantine/utils |5.9.3 |MIT | |@mui/base |5.0.0-alpha.91 |MIT | +|@mui/material |5.9.0 |MIT | |@mui/material |5.9.2 |MIT | |@mui/private-theming |5.9.1 |MIT | |@mui/styled-engine |5.8.7 |MIT | @@ -85,176 +90,177 @@ s|@ampproject/remapping |2.2.0 |@radix-ui/react-use-callback-ref |1.0.0 |MIT | |@radix-ui/react-use-layout-effect |1.0.0 |MIT | |@remix-run/router |1.0.3 |MIT | -|@sentry/browser |5.13.2 |BSD-3-Clause | -|@sentry/core |5.13.2 |BSD-3-Clause | -|@sentry/hub |5.13.2 |BSD-3-Clause | -|@sentry/minimal |5.13.2 |BSD-3-Clause | -|@sentry/types |5.13.2 |BSD-3-Clause | -|@sentry/utils |5.13.2 |BSD-3-Clause | +|@sentry/browser |7.43.0 |MIT | +|@sentry/core |7.43.0 |MIT | +|@sentry/replay |7.43.0 |MIT | +|@sentry/types |7.43.0 |MIT | +|@sentry/utils |7.43.0 |MIT | |@types/hast |2.3.4 |MIT | |@types/hoist-non-react-statics |3.3.1 |MIT | |@types/lodash |4.14.168 |MIT | +|@types/lodash |4.14.191 |MIT | |@types/node |16.7.1 |MIT | |@types/parse-json |4.0.0 |MIT | |@types/prop-types |15.7.3 |MIT | |@types/prop-types |15.7.5 |MIT | -|@types/react |16.14.23 |MIT | |@types/react |17.0.44 |MIT | |@types/react |18.0.14 |MIT | +|@types/react |18.0.24 |MIT | +|@types/react |18.0.25 |MIT | |@types/react-dom |18.0.5 |MIT | |@types/react-is |17.0.3 |MIT | |@types/react-transition-group |4.4.5 |MIT | |@types/scheduler |0.16.1 |MIT | |@types/unist |2.0.6 |MIT | |@types/use-sync-external-store |0.0.3 |MIT | +|accessors-smart |2.4.9 |Apache 2.0 | |Aggregate Designer Algorithm |6 |Apache 2.0 | +|agrona |1.18.0 |Apache 2.0 | +|aircompressor |0.21 |Apache 2.0 | |almost-equal |1.1.0 |MIT | |Amazon Redshift JDBC Driver |2.1.0.8 |Apache 2.0 | -|Animal Sniffer Annotations |1.18 |MIT | +|Animal Sniffer Annotations |1.21 |MIT | |ansi-styles |3.2.1 |MIT | |Antlr 3.4 Runtime |3.4 |BSD 3-clause | |AntLR Parser Generator |2.7.7 |Public Domain | |ANTLR StringTemplate |3.2.1 |BSD 3-clause | +|antlr4-c3 |2.2.3 |MIT | +|antlr4ts |0.5.0-alpha.4 |BSD-3-Clause | |aopalliance version 1.0 repackaged as a module |2.6.1 |CDDL 1.1 | -|Apache Avro |1.9.2 |Apache 2.0 | -|Apache Calcite Avatica |1.18.0 |Apache 2.0 | -|Apache Calcite Avatica Metrics |1.18.0 |Apache 2.0 | +|Apache Avro |1.10.2 |Apache 2.0 | +|Apache Calcite Avatica |1.23.0 |Apache 2.0 | +|Apache Calcite Avatica Metrics |1.23.0 
|Apache 2.0 | |Apache Commons BeanUtils |1.9.4 |Apache 2.0 | |Apache Commons Collections |4.4 |Apache 2.0 | -|Apache Commons Compress |1.2 |Apache 2.0 | +|Apache Commons Compress |1.22 |Apache 2.0 | |Apache Commons DBCP |2.2.0 |Apache 2.0 | -|Apache Commons IO |2.4 |Apache 2.0 | +|Apache Commons IO |2.11.0 |Apache 2.0 | |Apache Commons IO |2.5 |Apache 2.0 | |Apache Commons Lang |3.12.0 |Apache 2.0 | |Apache Commons Math |3.6.1 |Apache 2.0 | |Apache Commons Pool |2.5.0 |Apache 2.0 | -|Apache Commons Text |1.4 |Apache 2.0 | +|Apache Commons Text |1.10.0 |Apache 2.0 | |Apache Drill | |Apache 2.0 | -|Apache Hadoop Amazon Web Services support |2.8.5 |Apache 2.0 | -|Apache Hadoop Amazon Web Services support |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | -|Apache Hadoop Annotations |2.8.5 |Apache 2.0 | -|Apache Hadoop Annotations |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop Amazon Web Services support |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | +|Apache Hadoop Annotations |2.10.2 |Apache 2.0 | +|Apache Hadoop Annotations |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop Auth |2.8.5 |Apache 2.0 | -|Apache Hadoop Auth |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop Auth |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop Azure Data Lake support |2.8.5 |Apache 2.0 | -|Apache Hadoop Azure Data Lake support |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | -|Apache Hadoop Azure support |2.8.5-dremio-r2-202106241733540604-acdda22|Apache 2.0 | +|Apache Hadoop Azure Data Lake support |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | +|Apache Hadoop Azure support |2.8.5-dremio-r2-202305092225000902-c107c46|Apache 2.0 | |Apache Hadoop Client |2.8.5 |Apache 2.0 | -|Apache Hadoop Client |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop Client |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop Common |2.8.5 |Apache 2.0 | -|Apache Hadoop Common |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | -|Apache Hadoop HDFS |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop Common |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | +|Apache Hadoop HDFS |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop HDFS Client |2.8.5 |Apache 2.0 | -|Apache Hadoop HDFS Client |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop HDFS Client |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop MapReduce App |2.8.5 |Apache 2.0 | |Apache Hadoop MapReduce Common |2.8.5 |Apache 2.0 | -|Apache Hadoop MapReduce Common |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop MapReduce Common |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop MapReduce Core |2.8.5 |Apache 2.0 | -|Apache Hadoop MapReduce Core |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop MapReduce Core |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop MapReduce JobClient |2.8.5 |Apache 2.0 | -|Apache Hadoop MapReduce JobClient |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop MapReduce JobClient |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop MapReduce Shuffle |2.8.5 |Apache 2.0 | |Apache Hadoop YARN API |2.8.5 |Apache 2.0 | -|Apache Hadoop YARN API |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop YARN API |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop YARN Common |2.8.5 |Apache 2.0 | -|Apache Hadoop YARN 
Common |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Apache Hadoop YARN Common |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | |Apache Hadoop YARN Server Common |2.8.5 |Apache 2.0 | |Apache HTTP Transport V2 For The Google HTTP Client Library For Java|1.39.0 |Apache 2.0 | |Apache HttpClient Mime |4.5.13 |Apache 2.0 | -|Apache HttpCore |4.4.9 |Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | +|Apache HttpCore |4.4.16 |Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | |Apache Kafka |0.8.2.2 |Apache 2.0 | |Apache Kafka Clients |0.8.2.2 |Apache 2.0 | -|Apache Log4j API |2.13.3 |Apache 2.0 | -|Apache Log4j to SLF4J Adapter |2.13.3 |Apache 2.0 | -|Apache Parquet Arrow |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Column |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Common |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Encodings |1.12.0-202012300655090309-fc8298d|Apache 2.0 | +|Apache Log4j API |2.19.0 |Apache 2.0 | +|Apache Log4j to SLF4J Adapter |2.19.0 |Apache 2.0 | +|Apache Parquet Arrow |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Column |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Common |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Encodings |1.12.0-202302141732150599-28d943b|Apache 2.0 | |Apache Parquet Format (Incubating) |2.7.0-201901172054060715-5352a59|Apache 2.0 | -|Apache Parquet Format Structures |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Generator |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Hadoop |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Jackson |1.12.0-202012300655090309-fc8298d|Apache 2.0 | +|Apache Parquet Format Structures |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Generator |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Hadoop |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Jackson |1.12.0-202302141732150599-28d943b|Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | |Apache Solr library |8.11.2 |Apache 2.0 | +|Apache Thrift |0.13.0 |Apache 2.0 | |Apache Twill Apache Hadoop YARN library |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill API |0.14.0-202111020547020344-41637331|Apache 2.0 | -|Apache Twill common library |0.14.0 |Apache 2.0 | -|Apache Twill core library |0.14.0 |Apache 2.0 | +|Apache Twill common library 
|0.14.0-202111020547020344-41637331|Apache 2.0 | +|Apache Twill core library |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill discovery service API |0.14.0-202111020547020344-41637331|Apache 2.0 | -|Apache Twill discovery service implementations |0.14.0 |Apache 2.0 | +|Apache Twill discovery service implementations |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill extensions |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill ZooKeeper client library |0.14.0-202111020547020344-41637331|Apache 2.0 | +|Apache Yetus Audience Annotations |0.13.0 |Apache 2.0 | |Apache Yetus Audience Annotations |0.5.0 |Apache 2.0 | -|Apache Yetus Audience Annotations |0.7.0 |Apache 2.0 | |API Common |1.8.1 |BSD 3-clause | -|API Common |1.9.3 |BSD 3-clause | |aria-hidden |1.1.3 |ISC | -|Arrow Flight Core |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Flight GRPC |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Format |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Gandiva |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Memory Core |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Memory Netty |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Vectors |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | +|Arrow Flight Core |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Flight GRPC |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Format |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Gandiva |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Memory Core |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Memory Netty |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Vectors |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|arrow-jdbc |9.0.0 |Apache 2.0 | |asap |2.0.6 |MIT | |ASCII List |0.0.3 |Apache 2.0 | |Ascii Table |0.2.5 |Apache 2.0 | -|ASM Core |7 |BSD 3-clause | -|Aspect-Oriented Programming Alliance |1 |Public Domain | +|ASM Core |9.2 |BSD 3-clause | +|Aspect-Oriented Programming Alliance |1.0 |Public Domain | |assert |2.0.0 |MIT | +|Asynchronous Http Client |2.7.0 |Apache 2.0 | |Asynchronous Http Client Netty Utils |2.7.0 |Apache 2.0 | |attr-accept |1.1.0 |MIT | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|AutoValue Annotations |1.7.2 |Apache 2.0 | |AutoValue Annotations |1.7.4 |Apache 2.0 | |available-typed-arrays |1.0.5 |MIT | |AWS Event Stream |1.0.1 |Apache 2.0 | -|AWS Glue Data Catalog Client for Apache Hive Metastore |1.10.0-202006290831310925-84f4082|Apache 2.0 | -|AWS Java SDK :: Annotations |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Arns |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Auth |2.16.104 |Apache 2.0 | -|AWS Java SDK :: AWS Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: AWS Json Protocol |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: AWS Query Protocol |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: AWS Xml Protocol |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: Protocol Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Client Interface |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Clients :: Apache |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Clients :: Netty Non Blocking I/O |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Metrics SPI |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Profiles |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Regions 
|2.16.104 |Apache 2.0 | -|AWS Java SDK :: SDK Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Services :: Amazon S3 |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Services :: AWS Secrets Manager |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Services :: AWS STS |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Utilities |2.16.104 |Apache 2.0 | -|AWS Java SDK For Amazon DynamoDB |1.11.761 |Apache 2.0 | -|AWS Java SDK For Amazon DynamoDB |1.12.75 |Apache 2.0 | -|AWS Java SDK For Amazon Redshift |1.11.761 |Apache 2.0 | -|AWS Java SDK For Amazon Redshift |1.12.75 |Apache 2.0 | -|AWS Java SDK For AWS Lake Formation |1.12.75 |Apache 2.0 | -|AWS Java SDK for AWS STS |1.11.761 |Apache 2.0 | -|AWS Java SDK for AWS STS |1.12.75 |Apache 2.0 | -|AWS SDK for AWS KMS |1.11.761 |Apache 2.0 | -|AWS SDK for AWS KMS |1.12.75 |Apache 2.0 | -|AWS SDK for AWS S3 |1.11.761 |Apache 2.0 | -|AWS SDK for AWS S3 |1.12.75 |Apache 2.0 | -|AWS SDK for Java - Core |1.11.761 |Apache 2.0 | -|AWS SDK for Java - Core |1.12.75 |Apache 2.0 | -|Azure Data Lake Store - Java client SDK |2.3.3-202107080930050113-179a837|MIT License | +|AWS Glue Data Catalog Client for Apache Hive Metastore |1.10.0-202302080426290085-edca6ae|Apache 2.0 | +|AWS Java SDK :: Annotations |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Arns |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Auth |2.17.295 |Apache 2.0 | +|AWS Java SDK :: AWS Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: AWS Json Protocol |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: AWS Query Protocol |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: AWS Xml Protocol |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: Protocol Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Client Interface |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Clients :: Apache |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Clients :: Netty Non Blocking I/O |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Lakeformation |1.12.400 |Apache 2.0 | +|AWS Java SDK :: Metrics SPI |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Profiles |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Regions |2.17.295 |Apache 2.0 | +|AWS Java SDK :: SDK Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Services :: Amazon S3 |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Services :: AWS Secrets Manager |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Services :: AWS STS |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Utilities |2.17.295 |Apache 2.0 | +|AWS Java SDK For Amazon DynamoDB |1.12.400 |Apache 2.0 | +|AWS Java SDK For Amazon Redshift |1.12 |Apache 2.0 | +|AWS Java SDK For Amazon Redshift |1.12.400 |Apache 2.0 | +|AWS Java SDK for AWS STS |1.12.400 |Apache 2.0 | +|AWS SDK for AWS KMS |1.12.400 |Apache 2.0 | +|AWS SDK for AWS S3 |1.12.400 |Apache 2.0 | +|AWS SDK for Java - Core |1.12.400 |Apache 2.0 | +|Azure Data Lake Store - Java client SDK |2.3.10-202208021035330109-f5bda9e|MIT License | |babel-plugin-macros |2.8.0 |MIT | |babel-plugin-transform-runtime |6.23.0 |MIT | |babel-runtime |6.26.0 |MIT | @@ -263,23 +269,30 @@ s|@ampproject/remapping |2.2.0 |Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs|1.64 |MIT | |Bouncy Castle Provider |1.64 |MIT | |browserslist |4.21.0 |MIT | +|bson |4.3.4 |Apache 2.0, Creative Commons| |Byte Buddy |1.10.19 |Apache 2.0 | |c3 |0.4.18 |MIT | -|Caffeine cache |2.7.0 |Apache 2.0 | -|Calcite Core |1.16.0-202110140531410732-6a46ce2e|Apache 2.0 | -|Calcite Linq4j |1.16.0-202110140531410732-6a46ce2e|Apache 2.0 | +|Caffeine cache |2.9.3 |Apache 2.0 | +|Calcite Core 
|1.17.0-202305081555330806-ba52e3e7|Apache 2.0 | +|Calcite Linq4j |1.17.0-202305081555330806-ba52e3e7|Apache 2.0 | |call-bind |1.0.2 |MIT | |callsites |3.1.0 |MIT | |CDI APIs |2.0.2 |Apache 2.0 | +|cdi-api |2.0 |Apache 2.0 | +|cel-core |0.3.12 |Apache 2.0 | +|cel-generated-antlr |0.3.12 |Apache 2.0 | +|cel-generated-pb |0.3.12 |Apache 2.0 | +|cel-jackson |0.3.12 |Apache 2.0 | +|cel-tools |0.3.12 |Apache 2.0 | |chalk |2.4.2 |MIT | |change-emitter |0.1.6 |MIT | |character-entities |1.2.4 |MIT | |character-entities-legacy |1.1.4 |MIT | |character-reference-invalid |1.1.4 |MIT | |Checker Qual |2.5.3 |MIT | -|Checker Qual |2.8.1 |MIT | +|Checker Qual |3.12.0 |MIT | |classcat |4.1.0 |MIT | -|ClassMate |1.3.4 |Apache 2.0 | +|ClassMate |1.5.1 |Apache 2.0 | |classnames |2.3.1 |MIT | |Cloud Storage JSON API V1 Rev171 1.25.0 |v1-rev20190624-1.30.1|Apache 2.0 | |clsx |1.0.4 |MIT | @@ -295,23 +308,26 @@ s|@ampproject/remapping |2.2.0 |comma-separated-tokens |1.0.8 |MIT | |common-tags |1.4.0 |MIT | |Commons CLI |1.2 |Apache 2.0 | -|Commons Codec |1.4 |Apache 2.0 | +|Commons Codec |1.15 |Apache 2.0 | |Commons Collections |3.2.2 |Apache 2.0 | -|Commons Compiler |2.7.6 |BSD 3-clause | +|Commons Compiler |3.1.6 |BSD 3-clause | |Commons Configuration |1.6 |Apache 2.0 | |Commons Configuration |2.1.1 |Apache 2.0 | +|Commons Configuration |2.1.1 |Apache 2.0 | |Commons Daemon |1.0.13 |Apache 2.0 | |Commons Lang |2.4 |Apache 2.0 | |Commons Lang |2.6 |Apache 2.0 | |Commons Math |2.2 |Apache 2.0 | |Commons Math |3.1.1 |Apache 2.0 | |Commons Net |3.1 |Apache 2.0 | -|Commons Net |3.6 |Apache 2.0 | +|Commons Net |3.9.0 |Apache 2.0 | |Commons Pool |1.6 |Apache 2.0 | -|config |1.4.1 |Apache 2.0 | +|config |1.4.2 |Apache 2.0 | |Conscrypt OpenJDK Uber |2.2.1 |Apache 2.0 | +|content-type |2.1 |Apache 2.0 | |convert-source-map |1.8.0 |MIT | |copy-to-clipboard |3.0.8 |MIT | +|copy-to-clipboard |3.3.1 |MIT | |core-js |1.2.7 |MIT | |core-js |2.6.12 |MIT | |core-js |3.22.3 |MIT | @@ -348,6 +364,8 @@ s|@ampproject/remapping |2.2.0 |deep-equal |1.0.1 |MIT | |deepmerge |2.2.1 |MIT | |define-properties |1.1.4 |MIT | +|define-route |0.3.1 |Apache-2.0 | +|dialog-polyfill |0.5.6 |BSD-3-Clause | |Digester |1.8 |Apache 2.0 | |Disruptor Framework |3.4.2 |Apache 2.0 | |dnd-core |7.0.2 |MIT | @@ -355,22 +373,23 @@ s|@ampproject/remapping |2.2.0 |dom-helpers |2.4.0 |MIT | |dom-helpers |3.4.0 |MIT | |dom-helpers |5.2.1 |MIT | -|Dremio Cache Manager |18.0.0 |Dremio Free Software License 1.0| -|Dremio Fast Threads |18.0.0 |Dremio Free Software License 1.0| -|Dremio Joust Library |18.0.0 |Dremio Free Software License 1.0| -|Dremio Parquet Accelerator |18.0.0 |Dremio Free Software License 1.0| -|Dremio Pushdown Pack |18.0.0 |Dremio Free Software License 1.0| -|Dremio Smart Substitutions |18.0.0 |Dremio Free Software License 1.0| +|downshift |7.2.0 |MIT | +|Dremio Cache Manager |24.0.0 |Dremio Free Software License 1.0| +|Dremio Fast Threads |24.0.0 |Dremio Free Software License 1.0| +|Dremio Joust Library |24.0.0 |Dremio Free Software License 1.0| +|Dremio Parquet Accelerator |24.0.0 |Dremio Free Software License 1.0| +|Dremio Pushdown Pack |24.0.0 |Dremio Free Software License 1.0| +|Dremio Smart Substitutions |24.0.0 |Dremio Free Software License 1.0| |easy-peasy |4.0.1 |MIT | |echarts |5.3.1 |Apache-2.0 | |EclipseLink |2.5.2 |Eclipse Public License 1.0 & Eclipse Distribution License v. 
1.0| |Elasticsearch SecureSM |1.1 |Apache 2.0 | -|Elasticsearch: Core |5.5.3 |Apache 2.0 | +|Elasticsearch: Core |6.8.23 |Apache 2.0 | |electron-to-chromium |1.4.170 |ISC | |element-closest |2.0.2 |CC0-1.0 | |emojis-list |3.0.0 |MIT | |encoding |0.1.12 |MIT | -|Error Prone Annotations |2.10.0 |Apache 2.0 | +|Error Prone Annotations |2.18.0 |Apache 2.0 | |error-ex |1.3.1 |MIT | |es-abstract |1.20.1 |MIT | |es-to-primitive |1.2.1 |MIT | @@ -389,17 +408,18 @@ s|@ampproject/remapping |2.2.0 |fbjs |0.8.16 |MIT | |file-saver |1.3.3 |MIT | |find-root |1.1.0 |MIT | -|FindBugs-Annotations |3.0.1 |LGPL v2 | -|FindBugs-jsr305 |3.0.1 |Apache 2.0 | +|FindBugs-Annotations |3.0.1u2 |LGPL v2 | +|FindBugs-jsr305 |3.0.2 |Apache 2.0 | |fixed-data-table-2 |1.1.2 |BSD-3-Clause | -|FlatBuffers Java API |1.9.0 |Apache 2.0 | +|FlatBuffers Java API |1.12.0 |Apache 2.0 | +|flight-sql |9.0.0-20221123064031-c39b8a6253|Apache 2.0 | |Flogger |0.5.1 |Apache 2.0 | |Flogger System Backend |0.5.1 |Apache 2.0 | |foodmart-data-json |0.4 |Apache 2.0 | |for-each |0.3.3 |MIT | |format |0.2.2 |MIT | |formik |2.2.6 |Apache-2.0 | -|FreeMarker |2.3.29 |Apache 2.0 | +|FreeMarker |2.3.31 |Apache 2.0 | |function-bind |1.1.1 |MIT | |function.prototype.name |1.1.5 |MIT | |functions-have-names |1.2.3 |MIT | @@ -407,13 +427,14 @@ s|@ampproject/remapping |2.2.0 |GAX (Google Api EXtensions) For Java |1.48.0 |BSD | |GAX (Google Api EXtensions) For Java |1.60.0 |BSD | |GAX (Google Api EXtensions) For Java |1.62.0 |BSD | -|GCS Connector Hadoop3 |hadoop2-2.2.2-dremio-202108161113150127-7bb5f20-shaded|Apache 2.0 | -|GCS Connector Hadoop3 |hadoop3-2.2.2-dremio-202108161113150127-7bb5f20-shaded|Apache 2.0 | -|Gcsio |2.2.2-dremio-202108161113150127-7bb5f20|Apache 2.0 | +|GCS Connector Hadoop3 |hadoop2-2.2.2-dremio-202302142306550801-5be8d7e-shaded|Apache 2.0 | +|GCS Connector Hadoop3 |hadoop3-2.2.2-dremio-202302142306550801-5be8d7e-shaded|Apache 2.0 | +|Gcsio |2.2.2-dremio-202302142306550801-5be8d7e|Apache 2.0 | |gensync |1.0.0-beta.2 |MIT | |get-intrinsic |1.1.1 |MIT | |get-symbol-description |1.0.0 |MIT | |globals |11.12.0 |MIT | +|glue |2.17.295 |Apache 2.0 | |Google Android Annotations Library |4.1.1.4 |Apache 2.0 | |Google APIs Client Library For Java |1.31.3 |Apache 2.0 | |Google App Engine Extensions to The Google HTTP Client Library For Java|1.31.0 |Apache 2.0 | @@ -428,38 +449,41 @@ s|@ampproject/remapping |2.2.0 |Google Cloud Monitoring |1.82.0 |Apache 2.0 | |Google Cloud Storage |1.88.0 |Apache 2.0 | |Google Cloud Trace |0.100.0-beta |Apache 2.0 | -|Google Guice Core Library |4.2.2 |Apache 2.0 | -|Google Guice Extensions Servlet |4.2.2 |Apache 2.0 | +|Google Guice Core Library |5.1.0 |Apache 2.0 | +|Google Guice Core Library |5.1.0 |Apache 2.0 | |Google HTTP Client Library for Java |1.35.0 |Apache 2.0 | |Google HTTP Client Library for Java |1.39.0 |Apache 2.0 | |Google Logger |0.5.1 |Apache 2.0 | -|Google OAuth Client Library For Java |1.31.2 |Apache 2.0 | +|Google OAuth Client Library For Java |1.34.1 |Apache 2.0 | |Google Testing and Mocking Framework |1.10.x |BSD 3-clause | |graphlib |2.1.8 |MIT | |GRPC ALTs |1.32.2 |Apache 2.0 | -|GRPC API |1.32.2 |Apache 2.0 | +|GRPC API |1.54.1 |Apache 2.0 | |GRPC Auth |1.32.2 |Apache 2.0 | -|GRPC Context |1.32.2 |Apache 2.0 | -|GRPC Core |1.32.2 |Apache 2.0 | +|GRPC Context |1.54.1 |Apache 2.0 | +|GRPC Core |1.54.1 |Apache 2.0 | |GRPC Google Cloud Monitoring V3 |1.64.0 |Apache 2.0 | |GRPC GRPCLB |1.32.2 |Apache 2.0 | -|GRPC Netty |1.32.2 |Apache 2.0 | +|GRPC Netty |1.54.1 |Apache 2.0 | |GRPC 
OpenTracing |0.2.0 |BSD 3-clause | -|GRPC Protobuf |1.32.2 |Apache 2.0 | -|GRPC Protobuf Lite |1.32.2 |Apache 2.0 | -|GRPC Stub |1.32.2 |Apache 2.0 | +|GRPC Protobuf |1.54.1 |Apache 2.0 | +|GRPC Protobuf Lite |1.54.1 |Apache 2.0 | +|GRPC Stub |1.54.1 |Apache 2.0 | +|Gson |2.10.1 |Apache 2.0 | |Gson |2.2.4 |Apache 2.0 | -|Gson |2.9.0 |Apache 2.0 | |GSON Extensions to The Google HTTP Client Library For Java |1.39.0 |Apache 2.0 | |Guava InternalFutureFailureAccess and InternalFutures |1.0.1 |Apache 2.0 | |Guava ListenableFuture Only |9999.0-empty-to-avoid-conflict-with-guava|Apache 2.0 | |Guava: Google Core Libraries for Java |13.0.1 |Apache 2.0 | |Guava: Google Core Libraries for Java |20 |Apache 2.0 | -|Guava: Google Core Libraries for Java |28.1-jre |Apache 2.0 | +|Guava: Google Core Libraries for Java |31.1-jre |Apache 2.0 | |gud |1.0.0 |MIT | |Hadoop Winutils |3.2.0 |Apache 2.0 | |Hadoop YARN Client |2.8.5 |Apache 2.0 | -|Hadoop YARN Client |3.2.1-dremio-202107061151520241-a2c072f|Apache 2.0 | +|Hadoop YARN Client |3.3.2-dremio-202305122016210436-942ab1c|Apache 2.0 | +|hadoop-shaded-guava |1.1.1 |Apache 2.0 | +|hadoop-shaded-protobuf_3_7-1.1.1 |1.1.1 |Apache 2.0 | +|hamcrest |2.1 |BSD-3-Clause | |has |1.0.3 |MIT | |has-bigints |1.0.2 |MIT | |has-flag |3.0.0 |MIT | @@ -470,7 +494,7 @@ s|@ampproject/remapping |2.2.0 |hastscript |6.0.0 |MIT | |HdrHistogram |2.1.8 |CC0 1.0 Universal | |HdrHistogram |2.1.9 |CC0 1.0 Universal | -|Hibernate Validator Engine |6.1.5.Final |Apache 2.0 | +|Hibernate Validator Engine |6.2.0.Final |Apache 2.0 | |highlight.js |10.7.3 |BSD-3-Clause | |history |3.3.0 |MIT | |HK2 API module |2.6.1 |CDDL 1.1 | @@ -484,16 +508,26 @@ s|@ampproject/remapping |2.2.0 |HTrace Core4 |4.0.1-incubating |Apache 2.0 | |HTrace Core4 |4.1.0-incubating |Apache 2.0 | |HTrace Core4 |4.2.0-incubating |Apache 2.0 | -|HttpClient |4.5.13 |Apache 2.0 | +|HttpClient |4.5.14 |Apache 2.0 | +|httpclient5 |5.1.3 |Apache 2.0 | +|httpclient5 |5.2.1 |Apache 2.0 | +|httpcore5 |5.1.3 |Apache 2.0 | +|httpcore5-h2 |5.1.3 |Apache 2.0 | +|httpcore5-h2 |5.2 |Apache 2.0 | |humanable |0.0.2 |MIT | |IAM Service Account Credentials API V1 Rev67 1.25.0 |v1-rev20201022-1.31.0|Apache 2.0 | +|IBM Data Server Driver For JDBC and SQLJ |11.5.8.0 |IPLA | +|iceberg-aws |1.2.0 |Apache 2.0 | +|iceberg-nessie |1.2.1 |Apache 2.0 | +|iceberg-views |0.58.0 |Apache 2.0 | |iconv-lite |0.4.24 |MIT | |immer |7.0.9 |MIT | |immutable |3.8.2 |MIT | |import-fresh |3.1.0 |MIT | |inherits |2.0.4 |ISC | -|IntelliJ IDEA Annotations |12 |Apache 2.0 | +|IntelliJ IDEA Annotations |12.0 |Apache 2.0 | |internal-slot |1.0.3 |MIT | +|intl-messageformat |10.2.1 |BSD-3-Clause | |intl-messageformat |9.9.1 |BSD-3-Clause | |invariant |2.2.4 |MIT | |is-alphabetical |1.0.4 |MIT | @@ -527,118 +561,124 @@ s|@ampproject/remapping |2.2.0 |Jackson 2 extensions to the Google HTTP Client Library for Java.|1.35.0 |Apache 2.0 | |Jackson 2 extensions to the Google HTTP Client Library for Java.|1.38.0 |Apache 2.0 | |Jackson Datatype Protobuf |0.9.12 |Apache 2.0 | -|Jackson datatype: Guava |2.11.4 |Apache 2.0 | -|Jackson Datatype: JSR310 |2.11.4 |Apache 2.0 | -|Jackson module: Afterburner |2.11.4 |Apache 2.0 | -|Jackson-annotations |2.11.4 |Apache 2.0 | -|Jackson-core |2.11.4 |Apache 2.0 | -|jackson-databind |2.11.4 |Apache 2.0 | -|Jackson-dataformat-CBOR |2.11.4 |Apache 2.0 | -|Jackson-dataformat-Smile |2.11.4 |Apache 2.0 | -|Jackson-dataformat-XML |2.11.4 |Apache 2.0 | -|Jackson-dataformat-YAML |2.11.4 |Apache 2.0 | -|Jackson-JAXRS-base |2.11.4 |Apache 2.0 | 
-|Jackson-JAXRS-JSON |2.11.4 |Apache 2.0 | -|Jackson-module-JAXB-annotations |2.11.4 |Apache 2.0 | -|jackson-module-jsonSchema |2.11.4 |Apache 2.0 | +|Jackson datatype: Guava |2.14.2 |Apache 2.0 | +|Jackson Datatype: JSR310 |2.14.2 |Apache 2.0 | +|Jackson module: Afterburner |2.14.2 |Apache 2.0 | +|Jackson-annotations |2.14.2 |Apache 2.0 | +|Jackson-core |2.14.2 |Apache 2.0 | +|jackson-databind |2.14.2 |Apache 2.0 | +|Jackson-dataformat-CBOR |2.14.2 |Apache 2.0 | +|jackson-dataformat-protobuf |2.14.2 |Apache 2.0 | +|Jackson-dataformat-Smile |2.14.2 |Apache 2.0 | +|Jackson-dataformat-XML |2.14.2 |Apache 2.0 | +|Jackson-dataformat-YAML |2.14.2 |Apache 2.0 | +|jackson-datatype-jdk8 |2.14.2 |Apache 2.0 | +|Jackson-JAXRS-base |2.14.2 |Apache 2.0 | +|Jackson-JAXRS-JSON |2.14.2 |Apache 2.0 | +|Jackson-module-JAXB-annotations |2.14.2 |Apache 2.0 | +|jackson-module-jsonSchema |2.14.2 |Apache 2.0 | |Jaeger Client |1.5.0 |Apache 2.0 | |Jaeger Core |1.5.0 |Apache 2.0 | |Jaeger Thrift |1.5.0 |Apache 2.0 | |Jaeger TracerResolver |1.5.0 |Apache 2.0 | -|Jakarta Activation API JAR |1.2.1 |EDL 1.0 | +|Jakarta Activation API JAR |1.2.2 |EDL 1.0 | |Jakarta Annotations API |1.3.5 |Eclipse Public License 2.0| |Jakarta Bean Validation API |2.0.2 |Apache 2.0 | -|Jakarta Dependency Injection |1 |Apache 2.0 | +|Jakarta Dependency Injection |1.0 |Apache 2.0 | |Jakarta Enterprise Beans API |3.2.6 |EPL 2.0 & GPL2 w/ CPE| |Jakarta Expression Language API |3.0.3 |EPL 2.0 & GPL2 w/ CPE| |Jakarta Inject |2.6.1 |Eclipse Public License 2.0| |Jakarta Interceptors |1.2.5 |EPL 2.0 & GPL2 w/ CPE| |Jakarta RESTful Web Services API |2.1.6 |Eclipse Public License 2.0| |Jakarta Transaction API |1.3.2 |EPL 2.0 & GPL2 w/ CPE| -|Jakarta XML Bind API |2.3.2 |EDL 1.0 | -|Janino |2.7.6 |BSD 3-clause | +|Jakarta XML Bind API |2.3.3 |EDL 1.0 | +|Janino |3.1.6 |BSD 3-clause | |Java Concurrency in Practice book annotations |1.0-1 |CC 2.5 | |Java implementation of the SemVer Specification |0.9.0 |MIT License | -|Java Native Access |4.5.0 |Apache 2.0 | -|Java Native Access |4.5.0 |Apache 2.0 | +|Java Native Access |5.12.1 |Apache 2.0 | +|Java Native Access |5.12.1 |Apache 2.0 | |Java Servlet API |3.1.0 |GPL v2 w/ CPE | |JavaBeans Activation Framework |1.2.0 |CDDL 1.1 | -|JavaBeans Activation Framework (JAF) |1.1 |CDDL 1.0 | |JavaMail API |1.6.1 |CDDL 1.1 | -|Javassist |3.19.0-GA |Apache 2.0 | +|Javassist |3.28.0-GA |Apache 2.0 | |Javax Persistence |2.1.0 |Eclipse Public License 1.0 & Eclipse Distribution License v. 
1.0| -|Javax WS RS API |2.0.1 |GPL v2 w/ CPE | +|Javax WS RS API |2.1.1 |GPL v2 w/ CPE | +|javax.activation-api |1.2.0 |COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1| |javax.annotation API |1.3.2 |GPL v2 w/ CPE | -|javax.inject |1 |Apache 2.0 | +|javax.inject |1.0 |Apache 2.0 | +|javax.interceptor-api |1.2 |Eclipse Public License - v 2.0, The GNU General Public License (GPL) Version 2, June 1991| |JAX RS Provider For JSON Content Type |1.9.13 |Apache 2.0 | |JAXB API bundle for GlassFish V3 |2.2.11 |GPL v2 w/ CPE | -|JAXB API bundle for GlassFish V3 |2.2.2 |GPL v2 w/ CPE | +|JAXB API bundle for GlassFish V3 |2.3.1 |GPL v2 w/ CPE | |JBoss Jakarta JAXRS Api_spec |2.0.1.Final |EPL 2.0 & GPL2 w/ CPE| -|JBoss Logging 3 |3.3.2.Final |Apache 2.0 | +|JBoss Logging 3 |3.4.1.Final |Apache 2.0 | |JCL 1.1.1 implemented over SLF4J |1.7.36 |MIT | -|JCommander |1.81 |Apache 2.0 | +|JCommander |1.82 |Apache 2.0 | |JDK Tools |1.8 | | |Jersey Bundle |1.19.3 |CDDL 1.1 | |Jersey Client |1.19 |CDDL 1.1 | |Jersey Core |1.19 |CDDL 1.1 | -|Jersey Ext Entity Filtering |2.3 |CDDL 1.1 | +|Jersey Ext Entity Filtering |2.39.1 |CDDL 1.1 | |Jersey Guice |1.19 |CDDL 1.1 | |Jersey Guice |1.9 |CDDL 1.1 | -|Jersey Inject HK2 |2.3 |Eclipse Public License 2.0| +|Jersey Inject HK2 |2.39.1 |Eclipse Public License 2.0| |Jersey JSON |1.19 |CDDL 1.1 | -|Jersey Media JSON Jackson |2.3 |CDDL 1.1 | +|Jersey Media JSON Jackson |2.39.1 |CDDL 1.1 | |Jersey Server |1.19 |CDDL 1.1 | |Jersey Servlet |1.19 |CDDL 1.1 | -|jersey-container-jetty-http |2.3 |CDDL 1.1 | -|jersey-container-jetty-servlet |2.3 |CDDL 1.1 | -|jersey-container-servlet |2.3 |CDDL 1.1 | -|jersey-container-servlet-core |2.3 |CDDL 1.1 | -|jersey-core-client |2.3 |CDDL 1.1 | -|jersey-core-common |2.3 |CDDL 1.1 | -|jersey-core-server |2.3 |CDDL 1.1 | -|jersey-ext-mvc |2.3 |CDDL 1.1 | -|jersey-ext-mvc-freemarker |2.3 |CDDL 1.1 | -|jersey-media-jaxb |2.3 |CDDL 1.1 | -|jersey-media-multipart |2.3 |CDDL 1.1 | -|JetBrains Java Annotations |13 |Apache 2.0 | -|Jettison |1.1 |Apache 2.0 | -|Jetty :: Asynchronous HTTP Client |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Continuation |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Http Utility |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: IO Utility |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Security |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Server Core |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Servlet Handling |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utilities |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utilities :: Ajax(JSON) |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utility Servlets and Filters |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Webapp Application Support |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: API |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Client |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Common |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Server |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Servlet Interface |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: XML utilities |9.4.43.v20210629 |Apache 2.0 | +|jersey-container-jetty-http |2.39.1 |CDDL 1.1 | +|jersey-container-jetty-servlet |2.39.1 |CDDL 1.1 | +|jersey-container-servlet |2.39.1 |CDDL 1.1 | +|jersey-container-servlet-core |2.39.1 |CDDL 1.1 | +|jersey-core-client |2.39.1 |CDDL 1.1 | +|jersey-core-common |2.39.1 |CDDL 1.1 | +|jersey-core-server |2.39.1 |CDDL 1.1 | +|jersey-ext-mvc |2.39.1 |CDDL 1.1 | +|jersey-ext-mvc-freemarker |2.39.1 |CDDL 1.1 | +|jersey-media-jaxb |2.35 |CDDL 1.1 | 
+|jersey-media-multipart |2.39.1 |CDDL 1.1 | +|JetBrains Java Annotations |13.0 |Apache 2.0 | +|Jettison |1.5.4 |Apache 2.0 | +|Jetty :: Asynchronous HTTP Client |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Continuation |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Http Utility |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: IO Utility |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Security |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Server Core |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Servlet Handling |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utilities |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utilities :: Ajax(JSON) |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utility Servlets and Filters |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Webapp Application Support |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: API |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Client |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Common |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Server |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Servlet Interface |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: XML utilities |9.4.51.v20230217 |Apache 2.0 | |Jetty Server |6.1.26 |Apache 2.0 | |Jetty SSLEngine |6.1.26 |Apache 2.0 | |Jetty Utilities |6.1.26 |Apache 2.0 | |JLine |0.9.94 |BSD 3-clause | +|jline |2.14.3 |BSD-3-Clause | +|jline |3.9.0 |BSD-3-Clause | |JMES Path Query library |1.11.761 |Apache 2.0 | -|JMES Path Query library |1.12.75 |Apache 2.0 | -|Joda-Time |2.9 |Apache 2.0 | +|JMES Path Query library |1.12.400 |Apache 2.0 | +|Joda-Time |2.12.1 |Apache 2.0 | |JOpt Simple |3.2 |MIT | |JOpt Simple |5.0.2 |MIT | |jquery |3.5.1 |MIT | |js-tokens |4.0.0 |MIT | -|JSch |0.1.54 |BSD 3-clause | +|JSch |0.1.55 |BSD 3-clause | |jsesc |2.5.2 |MIT | -|JSON Small and Fast Parser |2.4.8 |Apache 2.0 | +|JSON Small and Fast Parser |2.4.10 |Apache 2.0 | |json-parse-even-better-errors |2.3.1 |MIT | |json-ptr |3.1.1 |MIT | +|json-utils |2.17.295 |Apache 2.0 | |json5 |2.2.1 |MIT | |jsplumb |2.1.4 |MIT | |JSR311 API |1.1.1 |CDDL 1.0 | -|JUL to SLF4J bridge |1.7.28 |MIT | +|JUL to SLF4J bridge |1.7.36 |MIT | |JVM Integration For Metrics |4.1.19 |Apache 2.0 | |jwt-decode |3.1.2 |MIT | |Kerb Simple Kdc |1.0.1 |Apache 2.0 | @@ -657,16 +697,21 @@ s|@ampproject/remapping |2.2.0 |Kerby XDR Project |1.0.1 |Apache 2.0 | |Koloboke Collections API |1.0.0 |Apache 2.0 | |Koloboke Collections Implementation Commons |1.0.0 |Apache 2.0 | -|Kotlin Common Standard Library |1.4.10 |Apache 2.0 | -|Kotlin Standard Library |1.4.10 |Apache 2.0 | +|Kotlin Common Standard Library |1.5.31 |Apache 2.0 | +|Kotlin Common Standard Library |1.5.31 |Apache 2.0 | +|Kotlin Common Standard Library |1.5.31 |Apache 2.0 | +|Kotlin Standard Library |1.6.20 |Apache 2.0 | |Kryo |4.0.1 |BSD 3-clause | +|LatencyUtils |2.0.3 |Public Domain, BSD 2-Clause License| |leantable |0.2.6 |Apache-2.0 | +|leantable |0.4.11 |Apache-2.0 | +|leantable |0.4.12 |Apache-2.0 | |leveldbjni-all |1.8 |BSD 3-clause | -|lilith-data-converter |8.2.0 |Apache 2.0 | -|lilith-data-eventsource |8.2.0 |Apache 2.0 | -|lilith-data-logging |8.2.0 |Apache 2.0 | -|lilith-data-logging-protobuf |8.2.0 |Apache 2.0 | -|lilith-sender |8.2.0 |Apache 2.0 | +|lilith-data-converter |8.3.0 |Apache 2.0 | +|lilith-data-eventsource |8.3.0 |Apache 2.0 | +|lilith-data-logging |8.3.0 |Apache 2.0 | +|lilith-data-logging-protobuf |8.3.0 |Apache 2.0 | +|lilith-sender |8.3.0 |Apache 2.0 | |lines-and-columns |1.2.4 |MIT | |linkifyjs |2.1.9 |MIT | |lodash |4.17.21 |MIT | @@ -679,36 +724,36 @@ 
s|@ampproject/remapping |2.2.0 |lodash.isequal |4.5.0 |MIT | |lodash.isplainobject |4.0.6 |MIT | |lodash.omit |4.5.0 |MIT | -|Log4j Implemented Over SLF4J |1.7.28 |MIT | -|Logback Access Module |1.2.3 |EPL 1.0 | -|Logback Classic Module |1.2.3 |EPL 1.0 | +|Log4j Implemented Over SLF4J |1.7.36 |MIT | +|Logback Access Module |1.2.11 |EPL 1.0 | +|Logback Classic Module |1.2.11 |EPL 1.0 | |Logback Core Module |1.2.11 |EPL 1.0 | -|logback-classic |8.2.0 |Apache 2.0 | -|logback-converter-classic |8.2.0 |Apache 2.0 | -|logback-multiplex-appender-classic |8.2.0 |Apache 2.0 | -|logback-multiplex-appender-core |8.2.0 |Apache 2.0 | -|Logstash Logback Encoder |6.2 |Apache 2.0 | +|logback-classic |8.3.0 |Apache 2.0 | +|logback-converter-classic |8.3.0 |Apache 2.0 | +|logback-multiplex-appender-classic |8.3.0 |Apache 2.0 | +|logback-multiplex-appender-core |8.3.0 |Apache 2.0 | +|Logstash Logback Encoder |7.2 |Apache 2.0 | |loose-envify |1.4.0 |MIT | |lottie-web |5.8.1 |MIT | |lowlight |1.20.0 |MIT | -|Lucene Common Analyzers |6.6.0 |Apache 2.0 | -|Lucene Core |6.6.0 |Apache 2.0 | -|Lucene Grouping |6.6.0 |Apache 2.0 | -|Lucene Highlighter |6.6.0 |Apache 2.0 | -|Lucene Join |6.6.0 |Apache 2.0 | -|Lucene Memory |6.6.0 |Apache 2.0 | -|Lucene Memory |6.6.0 |Apache 2.0 | -|Lucene Miscellaneous |6.6.0 |Apache 2.0 | -|Lucene Queries |6.6.0 |Apache 2.0 | -|Lucene QueryParsers |6.6.0 |Apache 2.0 | -|Lucene Sandbox |6.6.0 |Apache 2.0 | -|Lucene Spatial |6.6.0 |Apache 2.0 | -|Lucene Spatial 3D |6.6.0 |Apache 2.0 | -|Lucene Spatial Extras |6.6.0 |Apache 2.0 | -|Lucene Suggest |6.6.0 |Apache 2.0 | +|Lucene Common Analyzers |7.7.3 |Apache 2.0 | +|Lucene Core |7.7.3 |Apache 2.0 | +|Lucene Grouping |7.7.3 |Apache 2.0 | +|Lucene Highlighter |7.7.3 |Apache 2.0 | +|Lucene Join |7.7.3 |Apache 2.0 | +|Lucene Memory |7.7.3 |Apache 2.0 | +|Lucene Memory |7.7.3 |Apache 2.0 | +|Lucene Miscellaneous |7.7.3 |Apache 2.0 | +|Lucene Queries |7.7.3 |Apache 2.0 | +|Lucene QueryParsers |7.7.3 |Apache 2.0 | +|Lucene Sandbox |7.7.3 |Apache 2.0 | +|Lucene Spatial |7.7.3 |Apache 2.0 | +|Lucene Spatial 3D |7.7.3 |Apache 2.0 | +|Lucene Spatial Extras |7.7.3 |Apache 2.0 | +|Lucene Suggest |7.7.3 |Apache 2.0 | |LZ4 and XxHash |1.7.1 |Apache 2.0 | |map-or-similar |1.5.0 |MIT | -|MariaDB |2.3.0 |LGPL v2.1 | +|MariaDB |3.0.8 |LGPL v2.1 | |marked |0.7.0 |MIT | |material-icons |0.1.0 |MIT | |material-ui-popup-state |3.1.1 |MIT | @@ -719,57 +764,85 @@ s|@ampproject/remapping |2.2.0 |Metrics Integration For Jetty 9.3 and Higher |4.1.19 |Apache 2.0 | |Metrics Integration with JMX |4.1.19 |Apache 2.0 | |micro-memoize |4.0.9 |MIT | -|Microsoft Azure Active Directory Authentication Library (ADAL) for Java|1.6.4 |MIT License | +|micrometer-commons |1.10.6 |Apache 2.0 | +|micrometer-core |1.10.6 |Apache 2.0 | +|micrometer-observation |1.10.6 |Apache 2.0 | +|microprofile-openapi-api |3.1 |Apache 2.0 | +|Microsoft Azure Active Directory Authentication Library (ADAL) for Java|1.6.7 |MIT License | |Microsoft Azure Common Module For Storage |12.14.1 |MIT License | -|Microsoft Azure Java Core Library |1.1.0 |MIT License | -|Microsoft Azure Netty HTTP Client Library |1.1.0 |MIT License | +|Microsoft Azure Java Core Library |1.22.0 |MIT License | +|Microsoft Azure Netty HTTP Client Library |1.11.2 |MIT License | |Microsoft Azure SDK for Key Vault Core |1.0.0 |MIT License | |Microsoft Azure Storage Client SDK |8.3.0 |MIT | |Microsoft JDBC Driver For SQL Server |7.0.0.jre8 |MIT License | -|MIME streaming extension |1.9.11 |GPL v2 w/ CPE | +|MIME streaming extension 
|1.9.15 |GPL v2 w/ CPE | |MinLog |1.3.0 |BSD 3-clause | |ModelMapper |2.3.0 |Apache 2.0 | |ModelMapper Protobuf Extension |2.3.0 |Apache 2.0 | |moize |6.1.0 |MIT | +|moize |6.1.3 |MIT | |monaco-editor |0.10.0 |MIT | |MongoDB Java Driver |3.12.0 |Apache 2.0 | +|mongodb-driver-core |4.3.4 |Apache 2.0 | +|mongodb-driver-legacy |4.3.4 |Apache 2.0 | +|mongodb-driver-sync |4.3.4 |Apache 2.0 | |mousetrap |1.6.1 |Apache-2.0 | |ms |2.1.2 |MIT | +|msw |1.2.0 |MIT | |mumath |3.3.4 |Unlicense | |nanoclone |0.2.1 |MIT | |nanoid |3.3.4 |MIT | |Native Library Loader |2.3.4 |Simplified BSD License| -|Nessie API |0.4.0 |Apache 2.0 | -|Nessie Server Store |0.4.0 |Apache 2.0 | -|Nessie Services |0.4.0 |Apache 2.0 | +|Nessie API |0.58.0 |Apache 2.0 | +|Nessie Server Store |0.58.0 |Apache 2.0 | +|Nessie Services |0.58.0 |Apache 2.0 | |Nessie Versioned Memory Store |0.4.0 |Apache 2.0 | -|Nessie Versioned Store SPI |0.4.0 |Apache 2.0 | +|Nessie Versioned Store SPI |0.58.0 |Apache 2.0 | +|nessie-client |0.58.0 |Apache 2.0 | +|nessie-protobuf-relocated |0.58.0 |Apache 2.0 | +|nessie-rest-services |0.58.0 |Apache 2.0 | +|nessie-server-store-proto |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-adapter |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-in-memory |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-non-transactional |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-serialize |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-serialize-proto |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-store |0.58.0 |Apache 2.0 | |Netty Reactive Streams HTTP Support |2.0.5 |Apache 2.0 | |Netty Reactive Streams Implementation |2.0.0 |Apache 2.0 | |Netty Reactive Streams Implementation |2.0.5 |Apache 2.0 | -|Netty/Buffer |4.1.68.Final |Apache 2.0 | -|Netty/Codec |4.1.68.Final |Apache 2.0 | -|Netty/Codec/DNS |4.1.48.Final |Apache 2.0 | -|Netty/Codec/HTTP |4.1.68.Final |Apache 2.0 | -|Netty/Codec/HTTP2 |4.1.48.Final |Apache 2.0 | -|Netty/Codec/Socks |4.1.48.Final |Apache 2.0 | -|Netty/Common |4.1.68.Final |Apache 2.0 | -|Netty/Handler |4.1.68.Final |Apache 2.0 | -|Netty/Handler/Proxy |4.1.48.Final |Apache 2.0 | -|Netty/Resolver |4.1.68.Final |Apache 2.0 | -|Netty/Resolver/DNS |4.1.48.Final |Apache 2.0 | -|Netty/TomcatNative [BoringSSL - Static] |2.0.28.Final |Apache 2.0 | -|Netty/Transport |4.1.68.Final |Apache 2.0 | -|Netty/Transport/Native/Epoll |4.1.48.Final-linux-x86_64|Apache 2.0 | -|Netty/Transport/Native/Unix/Common |4.1.48.Final |Apache 2.0 | +|netty-tcnative-classes |2.0.56.Final |Apache 2.0 | +|netty-transport-classes-epoll |4.1.89.Final |Apache 2.0 | +|netty-transport-classes-kqueue |4.1.89.Final |Apache 2.0 | +|netty-transport-native-kqueue |4.1.89.Final |Apache 2.0 | +|Netty/Buffer |4.1.89.Final |Apache 2.0 | +|Netty/Codec |4.1.89.Final |Apache 2.0 | +|Netty/Codec/DNS |4.1.89.Final |Apache 2.0 | +|Netty/Codec/HTTP |4.1.89.Final |Apache 2.0 | +|Netty/Codec/HTTP2 |4.1.89.Final |Apache 2.0 | +|Netty/Codec/Socks |4.1.89.Final |Apache 2.0 | +|Netty/Common |4.1.89.Final |Apache 2.0 | +|Netty/Handler |4.1.89.Final |Apache 2.0 | +|Netty/Handler/Proxy |4.1.89.Final |Apache 2.0 | +|Netty/Resolver |4.1.89.Final |Apache 2.0 | +|Netty/Resolver/DNS |4.1.89.Final |Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final |Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-linux-aarch_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-linux-x86_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-osx-aarch_64.jar|Apache 2.0 | +|Netty/TomcatNative 
[BoringSSL - Static] |2.0.56.Final-osx-x86_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-windows-x86_64.jar|Apache 2.0 | +|Netty/Transport |4.1.89.Final |Apache 2.0 | +|Netty/Transport/Native/Epoll |4.1.89.Final |Apache 2.0 | +|Netty/Transport/Native/Unix/Common |4.1.89.Final |Apache 2.0 | |Nimbus JOSE+JWT |8.8 |Apache 2.0 | -|Nimbus LangTag |1.5 |Apache 2.0 | +|Nimbus LangTag |1.4.4 |Apache 2.0 | |node-fetch |1.7.3 |MIT | |node-releases |2.0.5 |MIT | |Noggit |0.6 |Apache 2.0 | |normalizr |2.3.1 |MIT | -|OAuth 2.0 SDK with OpenID Connect Extensions |6.5 |Apache 2.0 | -|OAuth2 Client |2.3 |CDDL 1.1 | +|OAuth 2.0 SDK with OpenID Connect Extensions |9.3 |Apache 2.0 | +|OAuth2 Client |2.39.1 |CDDL 1.1 | |object-assign |4.1.1 |MIT | |object-inspect |1.12.0 |MIT | |object-is |1.1.5 |MIT | @@ -778,39 +851,47 @@ s|@ampproject/remapping |2.2.0 |Objenesis |2.4 |Apache 2.0 | |Ojdbc8 |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| |OkHttp |2.7.5 |Apache 2.0 | +|OkHttp |4.10.0 |Apache 2.0 | |OkHttp |4.9.0 |Apache 2.0 | |Okio |1.6.0 |Apache 2.0 | +|Okio-jvm |3.0.0 |Apache 2.0 | |Old JAXB Runtime |2.2.3-1 |CDDL 1.1 | |Ons |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| -|OpenCensus API |0.24.0 |Apache 2.0 | -|OpenCensus DropWizard Util for Java |0.24.0 |Apache 2.0 | +|OpenCensus API |0.31.1 |Apache 2.0 | +|OpenCensus DropWizard Util for Java |0.31.1 |Apache 2.0 | |OpenCensus Exemplar Util |0.24.0 |Apache 2.0 | |OpenCensus HTTP Util |0.24.0 |Apache 2.0 | |OpenCensus HTTP Util |0.28.0 |Apache 2.0 | -|OpenCensus implementation |0.24.0 |Apache 2.0 | -|OpenCensus Java implementation |0.24.0 |Apache 2.0 | +|OpenCensus implementation |0.31.1 |Apache 2.0 | +|OpenCensus Java implementation |0.31.1 |Apache 2.0 | |OpenCensus Java Metrics Exporter Util |0.24.0 |Apache 2.0 | |OpenCensus Resources Util |0.24.0 |Apache 2.0 | |OpenCensus Stackdriver Stats Exporter |0.24.0 |Apache 2.0 | |OpenCensus Stackdriver Trace Exporter |0.24.0 |Apache 2.0 | |OpenHFT/Java-Thread-Affinity/affinity |3.1.7 |Apache 2.0 | -|OpenSSL Bindings for Java |1.0.9.Final |Apache 2.0 | +|OpenSearch SQL JDBC Driver |1.1.0.1 |Apache 2.0 | +|OpenSSL Bindings for Java |1.1.3.Final |Apache 2.0 | |OpenSSL toolkit |1.1.1d |OpenSSL and SSLeay license| -|OpenTelemetry - Jaeger Remote sampler |1.0.1 |Apache 2.0 | -|OpenTelemetry API |1.0.1 |Apache 2.0 | -|OpenTelemetry Context (Incubator) |1.0.1 |Apache 2.0 | -|OpenTelemetry Metrics API |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry OpenTracing Bridge |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Proto |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Protocol Exporter |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol Exporters |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol JSON Logging Exporters |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol Trace Exporter |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK Common |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK For Tracing |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK Metrics |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Semantic Conventions |1.0.1-alpha |Apache 2.0 | +|OpenTelemetry - Jaeger Remote sampler |1.25.0 |Apache 2.0 | +|OpenTelemetry API |1.25.0 |Apache 2.0 | +|OpenTelemetry Context (Incubator) |1.25.0 |Apache 2.0 | +|OpenTelemetry Metrics API Events |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Metrics API Logs |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry OpenTracing Bridge |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Proto |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Protocol Exporter |1.25.0 |Apache 2.0 | 
+|OpenTelemetry Protocol Exporters |1.25.0 |Apache 2.0 | +|OpenTelemetry Protocol JSON Logging Exporters |1.25.0 |Apache 2.0 | +|OpenTelemetry Protocol Trace Exporter |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK Common |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK For Tracing |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK Metrics |1.25.0 |Apache 2.0 | +|OpenTelemetry Semantic Conventions |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Semantic Conventions |1.25.0-alpha |Apache 2.0 | +|opentelemetry-exporter-common |1.25.0 |Apache 2.0 | +|opentelemetry-extension-trace-propagators |1.25.0 |Apache 2.0 | +|opentelemetry-instrumentation-annotations |1.25.0 |Apache 2.0 | |OpenTracing Java API |0.33.0 |Apache 2.0 | |OpenTracing NoOp |0.33.0 |Apache 2.0 | |OpenTracing utilities |0.33.0 |Apache 2.0 | @@ -821,26 +902,32 @@ s|@ampproject/remapping |2.2.0 |OSGi resource locator bundle - used by various API providers that rely on META-INF/services mechanism to locate providers.|1.0.3 |CDDL 1.1 | |ParaNamer Core |2.5.6 |BSD 3-clause | |parent-module |1.0.1 |MIT | +|parquet-avro |1.12.3 |Apache 2.0 | |parse-entities |2.0.0 |MIT | |parse-json |5.2.0 |MIT | |path-browserify |1.0.1 |MIT | |path-parse |1.0.7 |MIT | |path-type |4.0.0 |MIT | -|Perfmark:perfmark API |0.19.0 |Apache 2.0 | +|Perfmark:perfmark API |0.25.0 |Apache 2.0 | |performance-now |0.2.0 |MIT | |performance-now |2.1.0 |MIT | |picocolors |1.0.0 |ISC | -|Plugin Framework for Java |3.0.1 |Apache 2.0 | +|Plugin Framework for Java |3.6.0 |Apache 2.0 | |popper.js |1.14.7 |MIT | -|PostgreSQL JDBC Driver |42.3.4 |BSD 2-clause | +|PostgreSQL JDBC Driver |42.4.1 |BSD 2-clause | |prismjs |1.27.0 |MIT | |prismjs |1.28.0 |MIT | -|Prometheus Java Simpleclient |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Common |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Dropwizard |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Hotspot |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Servlet |0.7.0 |Apache 2.0 | +|Prometheus Java Simpleclient |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Common |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Dropwizard |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Hotspot |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | |promise |7.3.1 |MIT | +|prop-types |15.5.10 |BSD-3-Clause | |prop-types |15.5.8 |BSD-3-Clause | |prop-types |15.7.2 |MIT | |prop-types |15.8.1 |MIT | @@ -849,11 +936,12 @@ s|@ampproject/remapping |2.2.0 |property-information |5.6.0 |MIT | |Proto Google Cloud Trace V1 |0.65.0 |Apache 2.0 | |Proto Google Cloud Trace V2 |0.65.0 |Apache 2.0 | -|Proto Google Common Protos |1.17.0 |Apache 2.0 | +|Proto Google Common Protos |2.9.0 |Apache 2.0 | |Proto Google IAM V1 |1.0.5 |Apache 2.0 | |Protobuf Jackson |1.2.0 |MIT License | -|Protocol Buffer [Util] |3.9.1 |BSD 3-clause | -|Protocol Buffer Java API |3.9.1 |BSD 3-clause | +|Protocol Buffer [Util] |3.21.9 |BSD 3-clause | +|Protocol Buffer Java API |3.21.9 |BSD 3-clause | +|protoparser |4.0.3 |Apache 2.0 | |protostuff :: api |1.4.4 |Apache 2.0 | |protostuff :: collectionschema |1.4.4 |Apache 2.0 | |protostuff :: core |1.4.4 |Apache 2.0 | @@ -886,6 +974,7 @@ s|@ampproject/remapping |2.2.0 |react-gtm-module |2.0.11 |MIT | |react-hook-form |7.34.0 
|MIT | |react-immutable-proptypes |2.1.0 |MIT | +|react-intl |2.9.0 |BSD-3-Clause | |react-intl |5.20.10 |BSD-3-Clause | |react-is |16.13.1 |MIT | |react-is |18.2.0 |MIT | @@ -914,6 +1003,7 @@ s|@ampproject/remapping |2.2.0 |react-syntax-highlighter |15.5.0 |MIT | |react-textarea-autosize |8.3.4 |MIT | |react-transition-group |4.4.2 |BSD-3-Clause | +|react-transition-group |4.4.5 |BSD-3-Clause | |react-virtualized |9.22.3 |MIT | |react-virtualized-tree |3.4.1 |MIT | |Reactive Streams |1.0.2 |CC0 | @@ -927,20 +1017,22 @@ s|@ampproject/remapping |2.2.0 |redux-saga |0.15.6 |MIT | |redux-thunk |2.3.0 |MIT | |ReflectASM |1.11.3 |BSD 3-clause | -|Reflections |0.9.10 |WTFPL | +|Reflections |0.10.2 |WTFPL | |refractor |3.6.0 |MIT | |regenerator-runtime |0.11.1 |MIT | |regenerator-runtime |0.13.9 |MIT | |regexp.prototype.flags |1.4.3 |MIT | -|Rendezvous Hash |1 |BSD 3-clause | +|Rendezvous Hash |1.0 |BSD 3-clause | |requires-port |1.0.0 |MIT | |reselect |2.5.4 |MIT | |reselect |3.0.1 |MIT | |reselect |4.0.0 |MIT | |resolve |1.20.0 |MIT | |resolve-from |4.0.0 |MIT | +|RoaringBitmap |0.9.22 |Apache 2.0 | |RocksDB JNI |5.14.2 |Apache 2.0 | |RSQL-parser |2.1.0 |MIT | +|rxjs |7.5.5 |Apache-2.0 | |S2 Geometry Library |0.9.0 |Apache 2.0 | |safe-buffer |5.1.1 |MIT | |safe-buffer |5.2.1 |MIT | @@ -952,38 +1044,42 @@ s|@ampproject/remapping |2.2.0 |setimmediate |1.0.5 |MIT | |shallow-equal |1.2.1 |MIT | |shallowequal |1.1.0 |MIT | +|shims |0.9.22 |Apache 2.0 | |side-channel |1.0.4 |MIT | |Simplefan |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| |simplemde |1.11.2 |MIT | |Sketches Core |0.9.0 |Apache 2.0 | |SLF4J API Module |1.7.36 |MIT | |Slugify - Core |2.1.7 |Apache 2.0 | -|smart-resource |0.3.6 |Apache-2.0 | +|smart-icon |1.4.3 |Apache-2.0 | +|smart-resource |0.3.8 |Apache-2.0 | +|smart-resource |1.0.0 |Apache-2.0 | |SnakeYAML |1.15 |Apache 2.0 | -|SnakeYAML |1.26 |Apache 2.0 | +|SnakeYAML |1.33 |Apache 2.0 | |Snappy for Java |1.1.4 |Apache 2.0 | -|Snowflake JDBC |3.13.24 |Apache 2.0 | +|Snowflake JDBC |3.13.30 |Apache 2.0 | |software.amazon.ion:ion-java |1.0.2 |Apache 2.0 | -|solid-js |1.4.8 |MIT | +|solid-js |1.6.2 |MIT | |source-map |0.5.7 |BSD-3-Clause | |space-separated-tokens |1.1.5 |MIT | |SparseBitSet |1.2 |Apache 2.0 | |SpotBugs Annotations |3.1.9 |GNU LESSER GENERAL PUBLIC LICENSE, Version 2.1| -|Stax2 API |3.1.4 |BSD License | +|Stax2 API |4.2 |BSD License | |Streaming API for XML |1.0-2 |GPL v2 w/ CPE | |strict-uri-encode |1.1.0 |MIT | |string.prototype.trimend |1.0.5 |MIT | |string.prototype.trimstart |1.0.5 |MIT | |stylis |4.0.13 |MIT | -|Sulky ULID |8.2.0 |Apache 2.0 | -|sulky-codec |8.2.0 |Apache 2.0 | -|sulky-formatting |8.2.0 |Apache 2.0 | -|sulky-io |8.2.0 |Apache 2.0 | +|Sulky ULID |8.3.0 |Apache 2.0 | +|sulky-codec |8.3.0 |Apache 2.0 | +|sulky-formatting |8.3.0 |Apache 2.0 | +|sulky-io |8.3.0 |Apache 2.0 | |supports-color |5.5.0 |MIT | |symbol-observable |1.2.0 |MIT | |symbol-observable |2.0.3 |MIT | |T-Digest |3.2 |Apache 2.0 | |The Netty Project |3.10.6.Final-nohttp |Apache 2.0 | +|third-party-jackson-core |2.17.295 |Apache 2.0 | |ThreeTen backport |1.3.3 |BSD 3-clause | |ThreeTen backport |1.4.1 |BSD 3-clause | |tiny-warning |1.0.3 |MIT | @@ -1019,9 +1115,11 @@ s|@ampproject/remapping |2.2.0 |use-memo-one |1.1.2 |MIT | |use-sync-external-store |1.2.0 |MIT | |util |0.12.4 |MIT | -|Utilities |2.2.2-dremio-202108161113150127-7bb5f20|Apache 2.0 | -|Utilities Hadoop Hadoop3 |hadoop2-2.2.2-dremio-202108161113150127-7bb5f20|Apache 2.0 | +|Utilities 
|2.2.2-dremio-202302142306550801-5be8d7e|Apache 2.0 | +|Utilities Hadoop Hadoop3 |hadoop2-2.2.2-dremio-202302142306550801-5be8d7e|Apache 2.0 | |uuid |2.0.3 |MIT | +|uuid |8.3.2 |MIT | +|value-annotations |2.9.3 |Apache 2.0 | |warning |3.0.0 |BSD-3-Clause | |warning |4.0.3 |MIT | |whatwg-fetch |2.0.3 |MIT | @@ -1029,11 +1127,11 @@ s|@ampproject/remapping |2.2.0 |which-typed-array |1.1.8 |MIT | |WildFly OpenSSL Java |1.0.9.Final |Apache 2.0 | |WildFly OpenSSL Linux X86 64 |1.0.9.Final |Apache 2.0 | -|Woodstox |5.2.1 |Apache 2.0 | +|Woodstox |5.4.0 |Apache 2.0 | |ws |1.1.5 |MIT | |Xalan Java |2.7.2 |Apache 2.0 | |Xalan Java Serializer |2.7.2 |Apache 2.0 | -|Xerces2-j |2.12.0 |Apache 2.0 | +|Xerces2-j |2.12.2 |Apache 2.0 | |XML Commons External Components XML APIs |1.4.01 |Apache 2.0 | |Xml Compatibility Extensions For Jackson |1.9.13 |Apache 2.0 | |XmlBeans |3.1.0 |Apache 2.0 | @@ -1044,8 +1142,9 @@ s|@ampproject/remapping |2.2.0 |ZkClient |0.1 |Apache 2.0 | |zod |3.17.10 |MIT | |zookeeper |3.4.14 |Apache 2.0 | -|zookeeper |3.6.2 |Apache 2.0 | +|zookeeper-jute |3.6.2 |Apache 2.0 | |zrender |5.3.1 |BSD-3-Clause | +|zstd-jni |1.5.0-1 |https://github.com/luben/zstd-jni/blob/master/LICENSE| # License Texts ## Apache 2.0 @@ -5204,4 +5303,4 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -``` +``` \ No newline at end of file diff --git a/distribution/resources/src/main/resources-default/licenses/NOTICE b/distribution/resources/src/main/resources-default/licenses/NOTICE index 142dc510d7..af794e79d9 100644 --- a/distribution/resources/src/main/resources-default/licenses/NOTICE +++ b/distribution/resources/src/main/resources-default/licenses/NOTICE @@ -7,99 +7,119 @@ The Apache Software Foundation (http://www.apache.org/). This product incorporates and/or depends on software that falls under the following notices and/or trademarks: -# List of contributors Red Hat Inc. Akira Kawauchi Davide D'Alto Dhanji R. Prasanna Emmanuel Bernard Gavin King Gerhard Petracek Guillaume Smet Gunnar Morling Hardy Ferentschik Hendrik Ebbers Kevin Pollet Sebastian Thomschke + ========================================================================= == NOTICE file corresponding to section 4(d) of the Apache License, == == Version 2.0, in this case for MicroProfile OpenAPI == ========================================================================= The majority of this software were originally based on the following: * Swagger Core https://github.com/swagger-api/swagger-core under Apache License, v2.0 -

        Copyright © 2022 Sami Samhuri, http://samhuri.net <[email protected]>

+ Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/).
--------------------------------------------------------------------------------
This product includes parquet-tools, initially developed at ARRIS, Inc. with the following copyright notice: Copyright 2013 ARRIS, Inc. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
--------------------------------------------------------------------------------
This product includes parquet-protobuf, initially developed by Lukas Nalezenc with the following copyright notice: Copyright 2013 Lukas Nalezenec. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
--------------------------------------------------------------------------------
This product includes code from Apache Avro, which includes the following in its NOTICE file: Apache Avro Copyright 2010-2015 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/).
--------------------------------------------------------------------------------
This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License.
--------------------------------------------------------------------------------
This project includes code from Netflix, Inc. with the following copyright notice: | Copyright 2016 Netflix, Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License.
-ASM: a very small and fast Java bytecode manipulation framework Copyright (c) 2000-2011 INRIA, France Telecom All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# List of contributors Red Hat Inc. Akira Kawauchi Davide D'Alto Dhanji R. Prasanna Emmanuel Bernard Gavin King Gerhard Petracek Guillaume Smet Gunnar Morling Hardy Ferentschik Hendrik Ebbers Kevin Pollet Sebastian Thomschke
-AWS EventStream for Java Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+* This code was Written by Gil Tene of Azul Systems, and released to the * public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-AWS Glue Catalog Client For Apache Hive Metastore Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// Copyright (c) 2013 The Chromium Authors. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-AWS SDK for Java 2.0 Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
+

        Copyright © 2023 Sami Samhuri, http://samhuri.net <[email protected]>

-AWS SDK for Java Backend,,,Both,OpenSearch SQL JDBC Driver,org.opensearch,opensearch-sql-jdbc,1.1.0.1,https://github.com/opensearch-project/sql,Apache 2.0,http://www.apache.org/licenses/LICENSE-2.0.txt,Copyright OpenSearch Contributors.
+ASM: a very small and fast Java bytecode manipulation framework Copyright (c) 2000-2011 INRIA, France Telecom All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
+AWS EventStream for Java Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-Adam Stawicki Alaa Nassef Andrey Derevyanko Andrey Rodionov Benson Margulies Brent Douglas Carlos Vara Dag Hovland Davide Marchignoli Carlo de Wolf Chris Beckey Christian Ivan Denis Tiago Doug Lea Emmanuel Bernard Efthymis Sarbanis Federico Federico Mancini Gavin King George Gastaldi Gerhard Petracek Guillaume Husta Guillaume Smet Gunnar Morling Hardy Ferentschik Henno Vermeulen Jan-Willem Willebrands Jason T. Greene Julien May Julien Furgerot Juraci Krohling Justin Nauman Kathryn Killebrew Kevin Pollet Khalid Alqinyah Lee KyoungIl Leonardo Loch Zanivan Lucas Pouzac Lukas Niemeier Mark Hobson Marko Bekhta Mert Çalışkan Paolo Perrotta Pete Muir Sanne Grinovero Sebastian Bayerl Shane Bryzak Shelly McGowan Steve Ebersole Strong Liu Victor Rezende dos Santos Willi Schönborn Yoann Rodière
+AWS Glue Catalog Client For Apache Hive Metastore Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-Aggregate Designer Copyright 2006 - 2013 Pentaho Corporation. All rights reserved. Copyright 2000-2005, 2014-2016 Julian Hyde
+AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
-Amazon Ion Java Copyright 2007-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
-Apache Arrow Copyright 2016 The Apache Software Foundation
+AWS SDK for Java 2.0 Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
-Apache Arrow Copyright 2016-2019 The Apache Software Foundation
+AWS SDK for Java 2.0 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. - Apache Commons Lang - https://github.com/apache/commons-lang - Netty Reactive Streams - https://github.com/playframework/netty-reactive-streams - Jackson-core - https://github.com/FasterXML/jackson-core - Jackson-dataformat-cbor - https://github.com/FasterXML/jackson-dataformats-binary The licenses for these third party components are included in LICENSE.txt - For Apache Commons Lang see also this required NOTICE: Apache Commons Lang Copyright 2001-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/).
-Apache Avro Copyright 2010-2019 The Apache Software Foundation
+Adam Stawicki Alaa Nassef Andrey Derevyanko Andrey Rodionov Benson Margulies Brent Douglas Carlos Vara Dag Hovland Davide Marchignoli Carlo de Wolf Chris Beckey Christian Ivan Denis Tiago Doug Lea Emmanuel Bernard Efthymis Sarbanis Federico Federico Mancini Gavin King George Gastaldi Gerhard Petracek Guillaume Husta Guillaume Smet Gunnar Morling Hardy Ferentschik Henno Vermeulen Jan-Willem Willebrands Jason T. Greene Julien May Julien Furgerot Juraci Krohling Justin Nauman Kathryn Killebrew Kevin Pollet Khalid Alqinyah Lee KyoungIl Leonardo Loch Zanivan Lucas Pouzac Lukas Niemeier Mark Hobson Marko Bekhta Mert Çalışkan Paolo Perrotta Pete Muir Sanne Grinovero Sebastian Bayerl Shane Bryzak Shelly McGowan Steve Ebersole Strong Liu Victor Rezende dos Santos Willi Schönborn Yoann Rodière
-Apache Calcite -- Avatica Copyright 2012-2021 The Apache Software Foundation
+Aggregate Designer Copyright 2006 - 2013 Pentaho Corporation. All rights reserved. Copyright 2000-2005, 2014-2016 Julian Hyde
-Apache Calcite Copyright 2012-2017 The Apache Software Foundation
+Amazon Ion Java Copyright 2007-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-Apache Commons BeanUtils Copyright 2000-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/).
+Apache Arrow Copyright 2016 The Apache Software Foundation
-Apache Commons Collections Copyright 2001-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/).
+Apache Arrow Copyright 2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software from the SFrame project (BSD, 3-clause). * Copyright (C) 2015 Dato, Inc. * Copyright (c) 2009 Carnegie Mellon University. This product includes software from the Feather project (Apache 2.0) https://github.com/wesm/feather This product includes software from the DyND project (BSD 2-clause) https://github.com/libdynd This product includes software from the LLVM project * distributed under the University of Illinois Open Source This product includes software from the google-lint project * Copyright (c) 2009 Google Inc. All rights reserved. This product includes software from the mman-win32 project * Copyright https://code.google.com/p/mman-win32/ * Licensed under the MIT License; This product includes software from the LevelDB project * Copyright (c) 2011 The LevelDB Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * Moved from Kudu http://github.com/cloudera/kudu This product includes software from the CMake project * Copyright 2001-2009 Kitware, Inc. * Copyright 2012-2014 Continuum Analytics, Inc. * All rights reserved. This product includes software from https://github.com/matthew-brett/multibuild (BSD 2-clause) * Copyright (c) 2013-2016, Matt Terry and Matthew Brett; all rights reserved. This product includes software from the Ibis project (Apache 2.0) * Copyright (c) 2015 Cloudera, Inc. * https://github.com/cloudera/ibis The web site includes files generated by Jekyll.
-------------------------------------------------------------------------------- This product includes code from Apache Kudu, which includes the following in its NOTICE file: Apache Kudu Copyright 2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Portions of this software were developed at Cloudera, Inc (http://www.cloudera.com/). -Apache Commons Compress Copyright 2002-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Arrow Copyright 2016-2019 The Apache Software Foundation -Apache Commons Configuration Copyright 2001-2019 The Apache Software Foundation +Apache Avro Copyright 2010-2019 The Apache Software Foundation -Apache Commons IO Copyright 2002-2012 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). +Apache Calcite Copyright 2012-2017 The Apache Software Foundation -Apache Commons IO Copyright 2002-2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). +Apache Calcite -- Avatica Copyright 2012-2021 The Apache Software Foundation -Apache Commons Lang Copyright 2001-2011 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). +Apache Commons BeanUtils Copyright 2000-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache Commons Lang Copyright 2001-2017 The Apache Software Foundation +Apache Commons Collections Copyright 2001-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache Commons Math Copyright 2001-2016 The Apache Software Foundation +Apache Commons Compress Copyright 2002-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/). -Apache Commons Math Copyright 2001-2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed for Orekit by CS Systèmes d'Information (http://www.c-s.fr/) Copyright 2010-2012 CS Systèmes d'Information +Apache Commons Configuration Copyright 2001-2019 The Apache Software Foundation -Apache Commons Pool Copyright 2001-2016 The Apache Software Foundation +Apache Commons IO Copyright 2002-2012 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). -Apache Commons Text Copyright 2014-2019 The Apache Software Foundation +Apache Commons IO Copyright 2002-2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache Curator Copyright 2013-2014 The Apache Software Foundation +Apache Commons Lang Copyright 2001-2011 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). -Apache DataSketches Java Copyright 2021 The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). +Apache Commons Lang Copyright 2001-2017 The Apache Software Foundation -Apache DataSketches Memory Copyright 2021 - The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). +Apache Commons Math Copyright 2001-2016 The Apache Software Foundation + +Apache Commons Math Copyright 2001-2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed for Orekit by CS Systèmes d'Information (http://www.c-s.fr/) Copyright 2010-2012 CS Systèmes d'Information + +Apache Commons Pool Copyright 2001-2016 The Apache Software Foundation + +Apache Commons Text Copyright 2014-2019 The Apache Software Foundation + +Apache Curator Copyright 2013-2014 The Apache Software Foundation + +Apache DataSketches Java Copyright 2021 The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). + +Apache DataSketches Memory Copyright 2021 - The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). Apache Drill Copyright 2013-2014 The Apache Software Foundation -Apache HTrace Copyright 2017 The Apache Software Foundation +Apache HTrace Copyright 2017 The Apache Software Foundation + +Apache HTrace Copyright 2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). In addition, this product includes software dependencies. See the accompanying LICENSE.txt for a listing of dependencies that are NOT Apache licensed (with pointers to their licensing) + +Apache Hadoop Third-party Libs Copyright 2020 and onwards The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (http://www.apache.org/). + +Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation -Apache HTrace Copyright 2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). In addition, this product includes software dependencies. See the accompanying LICENSE.txt for a listing of dependencies that are NOT Apache licensed (with pointers to their licensing) +Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation -Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation +Apache HttpComponents Core Copyright 2005-2017 The Apache Software Foundation -Apache HttpComponents Core Copyright 2005-2017 The Apache Software Foundation +Apache Iceberg Copyright 2017-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
-------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. -Apache Iceberg (incubating) Copyright 2017-2018 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. +Apache Iceberg (incubating) Copyright 2017-2018 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. -Apache Kafka Copyright 2012 The Apache Software Foundation. +Apache Kafka Copyright 2012 The Apache Software Foundation. -Apache Kerby Copyright 2015-2019 The Apache Software Foundation +Apache Kerby Copyright 2015-2019 The Apache Software Foundation -Apache Log4j Copyright 1999-2017 Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). ResolverUtil.java Copyright 2005-2006 Tim Fennell Dumbster SMTP test server Copyright 2004 Jason Paul Kitchen TypeUtil.java Copyright 2002-2012 Ramnivas Laddad, Juergen Hoeller, Chris Beams picocli (http://picocli.info) Copyright 2017 Remko Popma +Apache Log4j Copyright 1999-2017 Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
ResolverUtil.java Copyright 2005-2006 Tim Fennell Dumbster SMTP test server Copyright 2004 Jason Paul Kitchen TypeUtil.java Copyright 2002-2012 Ramnivas Laddad, Juergen Hoeller, Chris Beams picocli (http://picocli.info) Copyright 2017 Remko Popma -Apache Lucene Copyright 2001-2017 The Apache Software Foundation +Apache Lucene Copyright 2001-2017 The Apache Software Foundation -Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation +Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation -Apache Ranger Copyright 2014-2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed by Spring Security Project (http://www.springframework.org/security) +Apache Ranger Copyright 2014-2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed by Spring Security Project (http://www.springframework.org/security) -Apache Twill Copyright 2013-2017 The Apache Software Foundation +Apache Thrift Copyright 2006-2010 The Apache Software Foundation. -Apache Yetus Copyright 2008-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). --- Additional licenses for the Apache Yetus Source/Website: --- See LICENSE for terms. +Apache Twill Copyright 2013-2017 The Apache Software Foundation -Apache ZooKeeper Copyright 2009-2014 The Apache Software Foundation +Apache Yetus Copyright 2008-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). --- Additional licenses for the Apache Yetus Source/Website: --- See LICENSE for terms. -Azure Data Lake Store Java SDK Copyright (c) Microsoft Corporation All rights reserved. The MIT License (MIT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +Apache ZooKeeper Copyright 2009-2014 The Apache Software Foundation + +Azure Data Lake Store Java SDK Copyright (c) Microsoft Corporation All rights reserved. 
The MIT License (MIT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Copyright (C) 2004 Sam Hocevar @@ -125,19 +145,19 @@ Copyright (C) 2018 The Guava Authors Copyright (C) 2020 Dremio -Copyright (c) 1997, 2018 Oracle and/or its affiliates and others. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 +Copyright (c) 1997, 2018 Oracle and/or its affiliates and others. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 -Copyright (c) 1997, PostgreSQL Global Development Group +Copyright (c) 1997, PostgreSQL Global Development Group -Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Copyright (c) 2000-2011 INRIA, France Telecom -Copyright (c) 2001-2016, Arno Unkrig Copyright (c) 2015-2016 TIBCO Software Inc. +Copyright (c) 2001-2016, Arno Unkrig Copyright (c) 2015-2016 TIBCO Software Inc. Copyright (c) 2002-2015 Atsuhiko Yamanaka, JCraft,Inc. All rights reserved. -Copyright (c) 2002-2017 EPFL Copyright (c) 2011-2017 Lightbend, Inc. +Copyright (c) 2002-2017 EPFL Copyright (c) 2011-2017 Lightbend, Inc. Copyright (c) 2003-2013, Objenesis Team and all contributors @@ -147,21 +167,21 @@ Copyright (c) 2004-2016 Paul R. Holser, Jr. Copyright (c) 2004-2017 QOS.ch -Copyright (c) 2005, Graph Builder All rights reserved. +Copyright (c) 2005, Graph Builder All rights reserved. Copyright (c) 2006 Google, Inc. All rights reserved. -Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Copyright (c) 2006, Ivan Sagalaev. All rights reserved. -Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos -Copyright (c) 2008, 2013 Sun Microsystems, Oracle Corporation. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 which accompanies this distribution. The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html and the Eclipse Distribution License is available at http://www.eclipse.org/org/documents/edl-v10.php. Contributors: Linda DeMichiel -Java Persistence 2.1 Specification available from http://jcp.org/en/jsr/detail?id=338 Oracle Committers - EclipseLink specific implementations and OSGi support Oracle Committers - Misc Bugfixes Java(TM) Persistence API, Version 2.1 +Copyright (c) 2008, 2013 Sun Microsystems, Oracle Corporation. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 which accompanies this distribution. The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html and the Eclipse Distribution License is available at http://www.eclipse.org/org/documents/edl-v10.php. 
Contributors: Linda DeMichiel -Java Persistence 2.1 Specification available from http://jcp.org/en/jsr/detail?id=338 Oracle Committers - EclipseLink specific implementations and OSGi support Oracle Committers - Misc Bugfixes Java(TM) Persistence API, Version 2.1 -Copyright (c) 2008, Nathan Sweet All rights reserved. +Copyright (c) 2008, Nathan Sweet All rights reserved. Copyright (c) 2009-2011, Mozilla Foundation and contributors All rights reserved. @@ -171,13 +191,13 @@ Copyright (c) 2010 - 2014 jsPlumb, http://jsplumbtoolkit.com/ Copyright (c) 2010 - 2015, Board of Regents of the University of Wisconsin-Madison and Glencoe Software, Inc. -Copyright (c) 2010 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder. +Copyright (c) 2010 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. 
Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder. Copyright (c) 2010-2012 Robert Kieffer MIT License - http://opensource.org/licenses/mit-license.php Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved. -Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at http://glassfish.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder. +Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at http://glassfish.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. 
GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder. Copyright (c) 2010-2015, Michael Bostock All rights reserved. @@ -187,15 +207,17 @@ Copyright (c) 2010-2016, Michael Bostock All rights reserved. Copyright (c) 2010-2018, Michael Bostock All rights reserved. +Copyright (c) 2010-2020 Robert Kieffer and other contributors + Copyright (c) 2011 Alexander Shtuchkin Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com> -Copyright (c) 2011 FuseSource Corp. All rights reserved. +Copyright (c) 2011 FuseSource Corp. All rights reserved. Copyright (c) 2011 Google Inc. -Copyright (c) 2011 Oracle and/or its affiliates. All rights reserved. +Copyright (c) 2011 Oracle and/or its affiliates. All rights reserved. Copyright (c) 2011, Christopher Finke All rights reserved. @@ -209,7 +231,7 @@ Copyright (c) 2011-2018, Christopher Jeffrey (https://github.com/chjj/) Copyright (c) 2011-2021 Phillip Clark -Copyright (c) 2011-present, Facebook, Inc. Copyright (c) 2011 The LevelDB Authors. All rights reserved. +Copyright (c) 2011-present, Facebook, Inc. Copyright (c) 2011 The LevelDB Authors. All rights reserved. Copyright (c) 2012 Barnesandnoble.com, llc, Donavon West, and Domenic Denicola @@ -225,15 +247,15 @@ Copyright (c) 2012 Nicholas Fisher Copyright (c) 2012 Raynos. -Copyright (c) 2012, 2018 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 +Copyright (c) 2012, 2018 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 
2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 -Copyright (c) 2012, 2019 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 +Copyright (c) 2012, 2019 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 Copyright (c) 2012-2014 Andris Reinman Copyright (c) 2012-2014 Chris Pettitt -Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. +Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. Copyright (c) 2012-2018 Aseem Kishore, and [others]. @@ -285,7 +307,7 @@ Copyright (c) 2014 Petka Antonov 2015 Sindre Sorhus Copyright (c) 2014, 2015, 2016, 2017, 2018 Simon Lydell -Copyright (c) 2014, Chris Lohfink All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the {organization} nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2014, Chris Lohfink All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the {organization} nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Copyright (c) 2014-2016 GitHub, Inc. @@ -379,6 +401,8 @@ Copyright (c) 2015-2017 Rubén Norte Copyright (c) 2015-2018 Andrew Clark +Copyright (c) 2015-2018 Google, Inc., Netflix, Inc., Microsoft Corp. and contributors + Copyright (c) 2015-2018 Reselect Contributors Copyright (c) 2015-present Dan Abramov @@ -415,11 +439,13 @@ Copyright (c) 2016 Your Name. Copyright (c) 2016 Zeit, Inc. +Copyright (c) 2016, 2017, Mike Lischke + Copyright (c) 2016, Claudéric Demers Copyright (c) 2016, Schrodinger, Inc. All rights reserved. -Copyright (c) 2016, gRPC EcosystemAll rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of grpc-opentracing nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2016, gRPC EcosystemAll rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of grpc-opentracing nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (c) 2016-2019 Ryan Carniato
@@ -451,7 +477,9 @@ Copyright (c) 2017 Titus Wormer
 
 Copyright (c) 2017 Tony Quetano
 
-Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2017 sudodoki
+
+Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (c) 2017, Baidu Inc. All rights reserved.
@@ -471,7 +499,7 @@ Copyright (c) 2018 Jordan Harband
 
 Copyright (c) 2018 Nikita Skovoroda
 
-Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (c) 2018 React Popper authors
@@ -485,6 +513,8 @@ Copyright (c) 2018, React Community Forked from React (https://github.com/facebo
 
 Copyright (c) 2018-present, iamkun
 
+Copyright (c) 2018–present Artem Zakharchenko
+
 Copyright (c) 2019 Ademola Adegbuyi
 
 Copyright (c) 2019 Alexander Reardon
@@ -499,9 +529,9 @@ Copyright (c) 2019 FormatJS
 
 Copyright (c) 2019 Jordan Harband
 
-Copyright (c) 2019, James Clarke
+Copyright (c) 2019 Sentry (https://sentry.io) and individual contributors. All rights reserved.
 
-Copyright (c) 2019, Sentry All rights reserved.
+Copyright (c) 2019, James Clarke
 
 Copyright (c) 2019-present Beier(Bill) Luo
@@ -529,6 +559,8 @@ Copyright (c) 2021, Oath Inc.
 
 Copyright (c) 2022 Inspect JS
 
+Copyright (c) 2022 Sentry (https://sentry.io) and individual contributors. All rights reserved.
+
 Copyright (c) 2022 WorkOS
 
 Copyright (c) Microsoft Corporation.
@@ -537,15 +569,13 @@ Copyright (c) React Training 2015-2019
 
 Copyright (c) Remix Software 2020-2022
 
 Copyright 2000-2016 JetBrains s.r.o.
 
-Copyright 2003-2005, Ernst de Haan All rights reserved.
-
-Copyright 2006 SUN MICROSYSTEMS, INC
+Copyright 2003-2005, Ernst de Haan All rights reserved.
 
 Copyright 2008 Google Inc.
 
-Copyright 2008 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license.
+Copyright 2008 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license.
 
-Copyright 2008, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2008, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright 2008-2011 Google Inc.
@@ -585,25 +615,29 @@ Copyright 2014 Google Inc.
 
 Copyright 2014 The gRPC Authors
 
-Copyright 2014, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2014 Yahoo Inc. All rights reserved.
+
+Copyright 2014, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright 2014-2016 AsyncHttpClient Project
 
 Copyright 2014-2018 Chronicle Software Ltd
 
-Copyright 2015, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2014-2023 Real Logic Limited. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
+Copyright 2015, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright 2015-2019 The Jaeger Project Authors
 
 Copyright 2015-2020 Ping Identity Corporation
 
-Copyright 2016 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of `Netty`, a configurable network stack in Java, which can be obtained at: * LICENSE: * licenses/LICENSE.netty.txt (Apache License 2.0) * HOMEPAGE: * http://netty.io/ This product contains a modified portion of `Apache Harmony`, modular Java runtime, which can be obtained at: * LICENSE: * licenses/LICENSE.harmony.txt (Apache License 2.0) * HOMEPAGE: * https://harmony.apache.org/
+Copyright 2016 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of `Netty`, a configurable network stack in Java, which can be obtained at: * LICENSE: * licenses/LICENSE.netty.txt (Apache License 2.0) * HOMEPAGE: * http://netty.io/ This product contains a modified portion of `Apache Harmony`, modular Java runtime, which can be obtained at: * LICENSE: * licenses/LICENSE.harmony.txt (Apache License 2.0) * HOMEPAGE: * https://harmony.apache.org/
 
 Copyright 2016 Vladimir Bukhtoyarov.
 
-Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 Copyright 2016-2019 The OpenTracing Authors
@@ -621,7 +655,7 @@ Copyright 2018 Kilian Valkhof
 
 Copyright 2018 Logan Smyth
 
-Copyright 2019 Carl Mastrangelo Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of 'Catapult', an open source Trace Event viewer for Chome, Linux, and Android applications, which can be obtained at: * LICENSE: * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) * HOMEPAGE: * https://github.com/catapult-project/catapult
+Copyright 2019 Carl Mastrangelo Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of 'Catapult', an open source Trace Event viewer for Chome, Linux, and Android applications, which can be obtained at: * LICENSE: * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) * HOMEPAGE: * https://github.com/catapult-project/catapult
 
 Copyright 2019 Eric Mantooth
@@ -657,7 +691,9 @@ Copyright Joyent, Inc. and other Node contributors. All rights reserved. Permiss
 
 Copyright OpenJS Foundation and other contributors
 
-Copyright The OpenTelemetry Authors SPDX-License-Identifier: Apache-2.0
+Copyright OpenSearch Contributors Copyright 2010-2014 Amazon.com,Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies,Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
+
+Copyright The OpenTelemetry Authors SPDX-License-Identifier: Apache-2.0
 
 Copyright jQuery Foundation and other contributors
@@ -683,64 +719,82 @@ Copyright ©2009-2011 FasterXML, LLC. All rights reserved unless otherwise indic
 
 Copyright ©2010-2019 Oracle Corporation. All Rights Reserved.
 
-Copyright(c) 2018 Microsoft Corporation All rights reserved.
+Copyright(c) 2018 Microsoft Corporation All rights reserved.
+
+Dremio Copyright 2015-2017 Dremio Corporation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Iceberg, developed at Netflix, Inc. with the following copyright notice: | Copyright 2017 Netflix, Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License.
 
-Elasticsearch Copyright 2009-2017 Elasticsearch
+Elasticsearch Copyright 2009-2017 Elasticsearch
 
 FreeMarker is Free software, licensed under the Apache License, Version 2.0.
 
-JBoss, Home of Professional Open Source Copyright 2010, Red Hat, Inc., and individual contributors
+JBoss, Home of Professional Open Source Copyright 2010, Red Hat, Inc., and individual contributors
 
-Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
+Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
 
-Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
+Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
 
-LICENSE ISSUES ============== The OpenSSL toolkit stays under a double license, i.e. both the conditions of the OpenSSL License and the original SSLeay license apply to the toolkit. See below for the actual license texts. OpenSSL License --------------- /* ==================================================================== * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. All advertising materials mentioning features or use of this * software must display the following acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit. (http://www.openssl.org/) * * 4. The names OpenSSL Toolkit and OpenSSL Project must not be used to * endorse or promote products derived from this software without * prior written permission. For written permission, please contact * openssl-core@openssl.org. * * 5. Products derived from this software may not be called OpenSSL * nor may OpenSSL appear in their names without prior written * permission of the OpenSSL Project. * * 6. Redistributions of any form whatsoever must retain the following * acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit (http://www.openssl.org/) * * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * ==================================================================== * * This product includes cryptographic software written by Eric Young * (eay@cryptsoft.com). This product includes software written by Tim * Hudson (tjh@cryptsoft.com). * */ Original SSLeay License ----------------------- /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com) * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * This product includes software written by Tim Hudson (tjh@cryptsoft.com) * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] */
+LICENSE INFORMATION The Programs listed below are licensed under the following License Information terms and conditions in addition to the Program license terms previously agreed to by Client and IBM. If Client does not have previously agreed to license terms in effect for the Program, the International Program License Agreement (i125-3301-15) applies. Program Name (Program Number): IBM Data Server Driver for JDBC and SQLJ v4.32 (11.5.8) (Tool) The following standard terms apply to Licensee's use of the Program. Modifiable Third Party Code To the extent, if any, in the NOTICES file IBM identifies third party code as Modifiable Third Party Code, IBM authorizes Licensee to 1) modify the Modifiable Third Party Code and 2) reverse engineer the Program modules that directly interface with the Modifiable Third Party Code provided that it is only for the purpose of debugging Licensee's modifications to such third party code. IBM's service and support obligations, if any, apply only to the unmodified Program. Redistributables The Program includes components that are Redistributable and they are listed below. Redistributables may be distributed, in object-code form, only as part of Licensee's value-added application that was developed using the Program (Licensee's Application) and only to support use of Licensee's Application. If the Redistributables include a Java Runtime Environment, Licensee must also include other non-Java Redistributables with Licensee's Application. Licensee may not remove any copyright or notice files contained in the Redistributables or use IBM's, it's suppliers' or distributors' names or trademarks in connection with the marketing of Licensee's Application without IBM's or that supplier's or distributor's prior written consent. Licensee's license agreement with the end user must be at least as protective of IBM as the terms of this Agreement. IBM, its suppliers and distributors provide the Redistributables and related documentation without obligation of support and AS IS, WITH NO WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING THE WARRANTY OF TITLE, NON-INFRINGEMENT OR NON-INTERFERENCE AND THE IMPLIED WARRANTIES AND CONDITIONS OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. The following are Redistributables: IBM Data Server Driver for JDBC and SQLJ V4.32 Redistributables: The following list includes files that are provided to Licensee pursuant to the Redistributables section of the IBM International Program License Agreements License Information that applies to this Program: db2jcc4.jar sqlj4.zip L/N: L-KHAI-CASRX7 D/N: L-KHAI-CASRX7 P/N: L-KHAI-CASRX7
+
+LICENSE ISSUES ============== The OpenSSL toolkit stays under a double license, i.e. both the conditions of the OpenSSL License and the original SSLeay license apply to the toolkit. See below for the actual license texts. OpenSSL License --------------- /* ==================================================================== * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. All advertising materials mentioning features or use of this * software must display the following acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit. (http://www.openssl.org/) * * 4. The names OpenSSL Toolkit and OpenSSL Project must not be used to * endorse or promote products derived from this software without * prior written permission. For written permission, please contact * openssl-core@openssl.org. * * 5. Products derived from this software may not be called OpenSSL * nor may OpenSSL appear in their names without prior written * permission of the OpenSSL Project. * * 6. Redistributions of any form whatsoever must retain the following * acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit (http://www.openssl.org/) * * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * ==================================================================== * * This product includes cryptographic software written by Eric Young * (eay@cryptsoft.com). This product includes software written by Tim * Hudson (tjh@cryptsoft.com). * */ Original SSLeay License ----------------------- /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com) * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * This product includes software written by Tim Hudson (tjh@cryptsoft.com) * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] */
 
 Logback: the reliable, generic, fast and flexible logging framework. Copyright (C) 1999-2017, QOS.ch. All rights reserved.
 
-MIT License Copyright (c) Microsoft Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+MIT License Copyright (c) Microsoft Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Micrometer Copyright (c) 2017-Present VMware, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------------- This product contains a modified portion of 'io.netty.util.internal.logging', in the Netty/Common library distributed by The Netty Project: * Copyright 2013 The Netty Project * License: Apache License v2.0 * Homepage: https://netty.io This product contains a modified portion of 'StringUtils.isBlank()', in the Commons Lang library distributed by The Apache Software Foundation: * Copyright 2001-2019 The Apache Software Foundation * License: Apache License v2.0 * Homepage: https://commons.apache.org/proper/commons-lang/ This product contains a modified portion of 'JsonUtf8Writer', in the Moshi library distributed by Square, Inc: * Copyright 2010 Google Inc. * License: Apache License v2.0 * Homepage: https://github.com/square/moshi This product contains a modified portion of the 'org.springframework.lang' package in the Spring Framework library, distributed by VMware, Inc: * Copyright 2002-2019 the original author or authors. * License: Apache License v2.0 * Homepage: https://spring.io/projects/spring-framework
 
-Nimbus JOSE + JWT Copyright 2012 - 2020, Connect2id Ltd.
+Nimbus JOSE + JWT Copyright 2012 - 2020, Connect2id Ltd.
 
-Nimbus Language Tags Copyright 2012-2016, Connect2id Ltd.
+Nimbus Language Tags Copyright 2012-2016, Connect2id Ltd.
 
-Nimbus OAuth 2.0 SDK with OpenID Connect extensions Copyright (c) Connect2id Ltd., 2012 - 2019
+Nimbus OAuth 2.0 SDK with OpenID Connect extensions Copyright (c) Connect2id Ltd., 2012 - 2019
 
-Prometheus instrumentation library for JVM applications Copyright 2012-2015 The Prometheus Authors This product includes software developed at Boxever Ltd. (http://www.boxever.com/). This product includes software developed at SoundCloud Ltd. (http://soundcloud.com/). This product includes software developed as part of the Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/).
+Notices for Eclipse Project for Interceptors This content is produced and maintained by the Eclipse Project for Interceptors project. Project home: https://projects.eclipse.org/projects/ee4j.interceptors Trademarks Eclipse Project for Interceptors is a trademark of the Eclipse Foundation. Copyright All content is the property of the respective authors or their employers. For more information regarding authorship of content, please consult the listed source code repository logs. Declared Project Licenses This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0 which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 Source Code The project maintains the following source code repositories: https://github.com/eclipse-ee4j/interceptor-api Third-party Content Cryptography Content may contain encryption software. The country in which you are currently may have restrictions on the import, possession, and use, and/or re-export to another country, of encryption software. BEFORE using any encryption software, please check the country's laws, regulations and policies concerning the import, possession, or use, and re-export of encryption software, to see if this is permitted.
 
-The Checker Framework Copyright 2004-present by the Checker Framework developers Most of the Checker Framework is licensed under the GNU General Public License, version 2 (GPL2), with the classpath exception. The text of this license appears below. This is the same license used for OpenJDK. A few parts of the Checker Framework have more permissive licenses. * The annotations are licensed under the MIT License. (The text of this license appears below.) More specifically, all the parts of the Checker Framework that you might want to include with your own program use the MIT License. This is the checker-qual.jar file and all the files that appear in it: every file in a qual/ directory, plus utility files such as NullnessUtil.java, RegexUtil.java, SignednessUtil.java, etc. In addition, the cleanroom implementations of third-party annotations, which the Checker Framework recognizes as aliases for its own annotations, are licensed under the MIT License. Some external libraries that are included with the Checker Framework have different licenses. * javaparser is dual licensed under the LGPL or the Apache license -- you may use it under whichever one you want. (The javaparser source code contains a file with the text of the GPL, but it is not clear why, since javaparser does not use the GPL.) See file stubparser/LICENSE and the source code of all its files. * JUnit is licensed under the Common Public License v1.0 (see http://www.junit.org/license), with parts (Hamcrest) licensed under the BSD License (see http://hamcrest.org/JavaHamcrest/). * Libraries in plume-lib (https://github.com/plume-lib/) are licensed under the MIT License. The Checker Framework includes annotations for the JDK in directory checker/jdk/, and for some other libraries. Each annotated library uses the same license as the unannotated version of the library.
+Prometheus instrumentation library for JVM applications Copyright 2012-2015 The Prometheus Authors This product includes software developed at Boxever Ltd. (http://www.boxever.com/). This product includes software developed at SoundCloud Ltd. (http://soundcloud.com/). This product includes software developed as part of the Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/).
 
-The MIT License Copyright (c) 2009 codehaus.org. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+The Checker Framework Copyright 2004-present by the Checker Framework developers Most of the Checker Framework is licensed under the GNU General Public License, version 2 (GPL2), with the classpath exception. The text of this license appears below. This is the same license used for OpenJDK. A few parts of the Checker Framework have more permissive licenses. * The annotations are licensed under the MIT License. (The text of this license appears below.) More specifically, all the parts of the Checker Framework that you might want to include with your own program use the MIT License. This is the checker-qual.jar file and all the files that appear in it: every file in a qual/ directory, plus utility files such as NullnessUtil.java, RegexUtil.java, SignednessUtil.java, etc. In addition, the cleanroom implementations of third-party annotations, which the Checker Framework recognizes as aliases for its own annotations, are licensed under the MIT License. Some external libraries that are included with the Checker Framework have different licenses. * javaparser is dual licensed under the LGPL or the Apache license -- you may use it under whichever one you want. (The javaparser source code contains a file with the text of the GPL, but it is not clear why, since javaparser does not use the GPL.) See file stubparser/LICENSE and the source code of all its files. * JUnit is licensed under the Common Public License v1.0 (see http://www.junit.org/license), with parts (Hamcrest) licensed under the BSD License (see http://hamcrest.org/JavaHamcrest/). * Libraries in plume-lib (https://github.com/plume-lib/) are licensed under the MIT License. The Checker Framework includes annotations for the JDK in directory checker/jdk/, and for some other libraries. Each annotated library uses the same license as the unannotated version of the library.
+
+The MIT License Copyright (c) 2009 codehaus.org. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+The MIT License (MIT) Copyright (c) 2015 Microsoft Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 
 The MIT License (MIT) Copyright (c) 2012-2014 Raynos.
 
 The MIT License (MIT) Copyright (c) 2015 Dmitry Ivanov
 
-The MIT License (MIT) Copyright (c) 2015 Microsoft Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
 The MIT License (MIT) Copyright (c) 2015-2020 Adphorus Copyright (c) 2020- Hypeserver
 
 The MIT License (MIT) Copyright (c) 2017 Kent C. Dodds
 
-The Xerces-J 2.12.0 release is available in source code and precompiled binary (JAR files) form. Both Xerces-J packages are made available under the Apache Software License.
+The MIT License (MIT) Copyright (c) 2017 PayPal
+
+The MongoDB Java Driver uses third-party libraries or other resources that may be distributed under licenses different than the MongoDB Java Driver software. In the event that we accidentally failed to list a required notice, please bring it to our attention through any of the ways detailed here: https://jira.mongodb.org/browse/JAVA The attached notices are provided for information only. For any licenses that require disclosure of source, sources are available at https://github.com/mongodb/mongo-java-driver. 1) The following files: Immutable.java, NotThreadSafe.java, ThreadSafe.java Copyright (c) 2005 Brian Goetz and Tim Peierls Released under the Creative Commons Attribution License (http://creativecommons.org/licenses/by/2.5) Official home: http://www.jcip.net Any republication or derived work distributed in source code form must include this copyright and license notice. 2) The following files: Assertions.java, AbstractCopyOnWriteMap.java, CopyOnWriteMap.java Copyright (c) 2008-2014 Atlassian Pty Ltd Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 3) The following files: Beta.java, UnsignedLongs.java, UnsignedLongsTest.java Copyright 2010 The Guava Authors Copyright 2011 The Guava Authors Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 4) The following files: ReadTimeoutHandler.java Copyright 2008-present MongoDB, Inc. Copyright 2012 The Netty Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 5) The following files: InstantCodec.java, Jsr310CodecProvider.java, LocalDateCodec.java, LocalDateTimeCodec.java, LocalTimeCodec.java Copyright 2008-present MongoDB, Inc. Copyright 2018 Cezary Bartosiak Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 6) The following files: SaslPrep.java Copyright 2008-present MongoDB, Inc. Copyright 2017 Tom Bentley Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 7) The following files (originally from https://github.com/marianobarrios/tls-channel): AsynchronousTlsChannel.java AsynchronousTlsChannelGroup.java BufferAllocator.java BufferHolder.java ByteBufferSet.java ByteBufferUtil.java ClientTlsChannel.java DirectBufferAllocator.java DirectBufferDeallocator.java ExtendedAsynchronousByteChannel.java HeapBufferAllocator.java NeedsReadException.java NeedsTaskException.java NeedsWriteException.java ServerTlsChannel.java SniSslContextFactory.java TlsChannel.java TlsChannelBuilder.java TlsChannelCallbackException.java TlsChannelFlowControlException.java TlsChannelImpl.java TlsExplorer.java TrackingAllocator.java Util.java WouldBlockException.java Copyright (c) [2015-2020] all contributors MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
 The code for the t-digest was originally authored by Ted Dunning A number of small but very helpful changes have been contributed by Adrien Grand (https://github.com/jpountz)
+The Xerces-J 2.12.0 release is available in source code and precompiled binary (JAR files) form. Both Xerces-J packages are made available under the Apache Software License.
 
 The code in this repository code was Written by Gil Tene, Michael Barker, and Matt Warren, and released to the public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene Copyright (c) 2014 Michael Barker Copyright (c) 2014 Matt Warren All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +The code for the t-digest was originally authored by Ted Dunning. A number of small but very helpful changes have been contributed by Adrien Grand (https://github.com/jpountz) -The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. +The code in this repository was written by Gil Tene, Michael Barker, and Matt Warren, and released to the public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/. For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene Copyright (c) 2014 Michael Barker Copyright (c) 2014 Matt Warren All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-This product includes software developed by Google Snappy: http://code.google.com/p/snappy/ (New BSD License) This product includes software developed by Apache PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/ (Apache 2.0 license) This library containd statically linked libstdc++. This inclusion is allowed by GCC RUntime Library Exception http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html == Contributors == * Tatu Saloranta * Providing benchmark suite * Alec Wysoker * Performance and memory usage improvement +The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. + +This code is licensed under Apache License, Version 2.0 (AL2.0). + +This product includes software developed by Google Snappy: http://code.google.com/p/snappy/ (New BSD License) This product includes software developed by Apache PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/ (Apache 2.0 license) This library contains statically linked libstdc++. This inclusion is allowed by GCC Runtime Library Exception http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html == Contributors == * Tatu Saloranta * Providing benchmark suite * Alec Wysoker * Performance and memory usage improvement This product includes software developed by Joda.org (http://www.joda.org/). This product includes software developed by The Apache Software Foundation (http://www.apache.org/). +This product includes software developed by The Apache Software Foundation (http://www.apache.org/). + Toposort - Topological sorting for node.js Copyright (c) 2012 by Marcel Klehr MIT LICENSE Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -foodmart-data-json Foodmart data set in JSON format Copyright (C) 2013-2013 Pentaho Copyright (C) 2015-2015 Julian Hyde All Rights Reserved. Based upon the Pentaho mondrian-data-foodmart-json artifact developed as part of the Pentaho Mondrian OLAP engine (http://mondrian.pentaho.com). +[The BSD license] Copyright (c) 2016 The ANTLR Project All rights reserved. + +foodmart-data-json Foodmart data set in JSON format Copyright (C) 2013-2013 Pentaho Copyright (C) 2015-2015 Julian Hyde All Rights Reserved. Based upon the Pentaho mondrian-data-foodmart-json artifact developed as part of the Pentaho Mondrian OLAP engine (http://mondrian.pentaho.com). -protostuff Copyright 2009 David Yu dyuproject@gmail.com +protostuff Copyright 2009 David Yu dyuproject@gmail.com -sulky-modules - several general-purpose modules. Copyright (C) 2007-2018 Joern Huxhorn \ No newline at end of file +sulky-modules - several general-purpose modules.
Copyright (C) 2007-2018 Joern Huxhorn \ No newline at end of file diff --git a/distribution/resources/src/main/resources-mapr/licenses/LICENSES_FOR_DEPENDENCIES.md b/distribution/resources/src/main/resources-mapr/licenses/LICENSES_FOR_DEPENDENCIES.md index 60d9881f71..4f968b8ccc 100644 --- a/distribution/resources/src/main/resources-mapr/licenses/LICENSES_FOR_DEPENDENCIES.md +++ b/distribution/resources/src/main/resources-mapr/licenses/LICENSES_FOR_DEPENDENCIES.md @@ -42,7 +42,9 @@ |@floating-ui/core |0.7.3 |MIT | |@floating-ui/dom |0.5.4 |MIT | |@floating-ui/react-dom |0.7.2 |MIT | +|@floating-ui/react-dom |1.3.0 |MIT | |@floating-ui/react-dom-interactions |0.6.6 |MIT | +|@floating-ui/react-dom-interactions |0.9.3 |MIT | |@formatjs/ecma402-abstract |1.9.8 |MIT | |@formatjs/fast-memoize |1.2.0 |ISC | |@formatjs/icu-messageformat-parser |2.0.11 |MIT | @@ -61,10 +63,13 @@ |@jridgewell/trace-mapping |0.3.13 |MIT | |@jridgewell/trace-mapping |0.3.15 |MIT | |@mantine/core |5.0.2 |MIT | +|@mantine/dates |5.9.3 |MIT | |@mantine/hooks |5.0.2 |MIT | |@mantine/styles |5.0.2 |MIT | |@mantine/utils |5.0.2 |MIT | +|@mantine/utils |5.9.3 |MIT | |@mui/base |5.0.0-alpha.91 |MIT | +|@mui/material |5.9.0 |MIT | |@mui/material |5.9.2 |MIT | |@mui/private-theming |5.9.1 |MIT | |@mui/styled-engine |5.8.7 |MIT | @@ -85,45 +90,51 @@ |@radix-ui/react-use-callback-ref |1.0.0 |MIT | |@radix-ui/react-use-layout-effect |1.0.0 |MIT | |@remix-run/router |1.0.3 |MIT | -|@sentry/browser |5.13.2 |BSD-3-Clause | -|@sentry/core |5.13.2 |BSD-3-Clause | -|@sentry/hub |5.13.2 |BSD-3-Clause | -|@sentry/minimal |5.13.2 |BSD-3-Clause | -|@sentry/types |5.13.2 |BSD-3-Clause | -|@sentry/utils |5.13.2 |BSD-3-Clause | +|@sentry/browser |7.43.0 |MIT | +|@sentry/core |7.43.0 |MIT | +|@sentry/replay |7.43.0 |MIT | +|@sentry/types |7.43.0 |MIT | +|@sentry/utils |7.43.0 |MIT | |@types/hast |2.3.4 |MIT | |@types/hoist-non-react-statics |3.3.1 |MIT | |@types/lodash |4.14.168 |MIT | +|@types/lodash |4.14.191 |MIT | |@types/node |16.7.1 |MIT | |@types/parse-json |4.0.0 |MIT | |@types/prop-types |15.7.3 |MIT | |@types/prop-types |15.7.5 |MIT | -|@types/react |16.14.23 |MIT | |@types/react |17.0.44 |MIT | |@types/react |18.0.14 |MIT | +|@types/react |18.0.24 |MIT | +|@types/react |18.0.25 |MIT | |@types/react-dom |18.0.5 |MIT | |@types/react-is |17.0.3 |MIT | |@types/react-transition-group |4.4.5 |MIT | |@types/scheduler |0.16.1 |MIT | |@types/unist |2.0.6 |MIT | |@types/use-sync-external-store |0.0.3 |MIT | +|accessors-smart |2.4.9 |Apache 2.0 | |Aggregate Designer Algorithm |6 |Apache 2.0 | +|agrona |1.18.0 |Apache 2.0 | +|aircompressor |0.21 |Apache 2.0 | |almost-equal |1.1.0 |MIT | |Amazon Redshift JDBC Driver |2.1.0.8 |Apache 2.0 | -|Animal Sniffer Annotations |1.18 |MIT | +|Animal Sniffer Annotations |1.21 |MIT | |ansi-styles |3.2.1 |MIT | |Antlr 3.4 Runtime |3.4 |BSD 3-clause | |AntLR Parser Generator |2.7.7 |Public Domain | |ANTLR StringTemplate |3.2.1 |BSD 3-clause | +|antlr4-c3 |2.2.3 |MIT | +|antlr4ts |0.5.0-alpha.4 |BSD-3-Clause | |aopalliance version 1.0 repackaged as a module |2.6.1 |CDDL 1.1 | -|Apache Avro |1.9.2 |Apache 2.0 | -|Apache Calcite Avatica |1.18.0 |Apache 2.0 | -|Apache Calcite Avatica Metrics |1.18.0 |Apache 2.0 | +|Apache Avro |1.10.2 |Apache 2.0 | +|Apache Calcite Avatica |1.23.0 |Apache 2.0 | +|Apache Calcite Avatica Metrics |1.23.0 |Apache 2.0 | |Apache Commons BeanUtils |1.9.4 |Apache 2.0 | |Apache Commons Collections |4.4 |Apache 2.0 | -|Apache Commons Compress |1.2 |Apache 2.0 | +|Apache Commons Compress 
|1.22 |Apache 2.0 | |Apache Commons DBCP |2.2.0 |Apache 2.0 | -|Apache Commons IO |2.4 |Apache 2.0 | +|Apache Commons IO |2.11.0 |Apache 2.0 | |Apache Commons Lang |3.12.0 |Apache 2.0 | |Apache Commons Pool |2.5.0 |Apache 2.0 | |Apache Directory API ASN.1 API |1.0.0-M20 |Apache 2.0 | @@ -143,82 +154,82 @@ |Apache Hadoop YARN Common |2.7.0-mapr-1803-dremio-20190717|Apache 2.0 | |Apache Hadoop YARN Server Common |2.7.0-mapr-1803 |Apache 2.0 | |Apache HttpClient Mime |4.5.13 |Apache 2.0 | -|Apache HttpCore |4.4.9 |Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | -|Apache Iceberg (incubating) |0.12-aba898b-20210716185948-115a822|Apache 2.0 | +|Apache HttpCore |4.4.16 |Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | +|Apache Iceberg (incubating) |1.2.0-e340ad5-20230511162417-de84403|Apache 2.0 | |Apache Kafka |0.8.2.2 |Apache 2.0 | |Apache Kafka Clients |0.8.2.2 |Apache 2.0 | -|Apache Log4j API |2.13.3 |Apache 2.0 | -|Apache Log4j to SLF4J Adapter |2.13.3 |Apache 2.0 | -|Apache Parquet Arrow |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Column |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Common |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Encodings |1.12.0-202012300655090309-fc8298d|Apache 2.0 | +|Apache Log4j API |2.19.0 |Apache 2.0 | +|Apache Log4j to SLF4J Adapter |2.19.0 |Apache 2.0 | +|Apache Parquet Arrow |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Column |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Common |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Encodings |1.12.0-202302141732150599-28d943b|Apache 2.0 | |Apache Parquet Format (Incubating) |2.7.0-201901172054060715-5352a59|Apache 2.0 | -|Apache Parquet Format Structures |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Generator |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Hadoop |1.12.0-202012300655090309-fc8298d|Apache 2.0 | -|Apache Parquet Jackson |1.12.0-202012300655090309-fc8298d|Apache 2.0 | +|Apache Parquet Format Structures |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Generator |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Hadoop |1.12.0-202302141732150599-28d943b|Apache 2.0 | +|Apache Parquet Jackson |1.12.0-202302141732150599-28d943b|Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | |Apache POI |4.1.2 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | +|Apache Ranger |1.1.0 |Apache 2.0 | |Apache Solr library |8.11.2 |Apache 2.0 | +|Apache Thrift |0.13.0 |Apache 2.0 | |Apache Twill Apache Hadoop YARN library |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill API 
|0.14.0-202111020547020344-41637331|Apache 2.0 | -|Apache Twill common library |0.14.0 |Apache 2.0 | -|Apache Twill core library |0.14.0 |Apache 2.0 | +|Apache Twill common library |0.14.0-202111020547020344-41637331|Apache 2.0 | +|Apache Twill core library |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill discovery service API |0.14.0-202111020547020344-41637331|Apache 2.0 | -|Apache Twill discovery service implementations |0.14.0 |Apache 2.0 | +|Apache Twill discovery service implementations |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill extensions |0.14.0-202111020547020344-41637331|Apache 2.0 | |Apache Twill ZooKeeper client library |0.14.0-202111020547020344-41637331|Apache 2.0 | -|Apache Yetus Audience Annotations |0.7.0 |Apache 2.0 | +|Apache Yetus Audience Annotations |0.13.0 |Apache 2.0 | |ApacheDS I18n |2.0.0-M15 |Apache 2.0 | |ApacheDS Protocol Kerberos Codec |2.0.0-M15 |Apache 2.0 | -|API Common |1.9.3 |BSD 3-clause | |aria-hidden |1.1.3 |ISC | -|Arrow Flight Core |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Flight GRPC |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Format |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Gandiva |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Memory Core |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Memory Netty |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | -|Arrow Vectors |4.0.0-20210722102535-bda216e83f-dremio|Apache 2.0 | +|Arrow Flight Core |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Flight GRPC |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Format |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Gandiva |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Memory Core |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Memory Netty |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|Arrow Vectors |9.0.0-20221123064031-c39b8a6253-dremio|Apache 2.0 | +|arrow-jdbc |9.0.0 |Apache 2.0 | |asap |2.0.6 |MIT | |ASCII List |0.0.3 |Apache 2.0 | |Ascii Table |0.2.5 |Apache 2.0 | -|ASM Core |7 |BSD 3-clause | -|Aspect-Oriented Programming Alliance |1 |Public Domain | +|ASM Core |9.2 |BSD 3-clause | +|Aspect-Oriented Programming Alliance |1.0 |Public Domain | |assert |2.0.0 |MIT | |attr-accept |1.1.0 |MIT | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|Audit Component |1.1.0 |Apache 2.0 | -|AutoValue Annotations |1.7.2 |Apache 2.0 | |available-typed-arrays |1.0.5 |MIT | |AWS Event Stream |1.0.1 |Apache 2.0 | -|AWS Java SDK :: Annotations |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Auth |2.16.104 |Apache 2.0 | -|AWS Java SDK :: AWS Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: AWS Json Protocol |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Core :: Protocols :: Protocol Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Client Interface |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Clients :: Apache |2.16.104 |Apache 2.0 | -|AWS Java SDK :: HTTP Clients :: Netty Non Blocking I/O |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Metrics SPI |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Profiles |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Regions |2.16.104 |Apache 2.0 | -|AWS Java SDK :: SDK Core |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Services :: AWS Secrets Manager |2.16.104 |Apache 2.0 | -|AWS Java SDK :: Utilities |2.16.104 |Apache 2.0 | -|AWS Java SDK For Amazon Redshift |1.11.761 |Apache 2.0 | -|AWS Java SDK 
For Amazon Redshift |1.12.75 |Apache 2.0 | -|AWS SDK for Java - Core |1.11.761 |Apache 2.0 | -|AWS SDK for Java - Core |1.12.75 |Apache 2.0 | +|AWS Java SDK :: Annotations |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Auth |2.17.295 |Apache 2.0 | +|AWS Java SDK :: AWS Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: AWS Json Protocol |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Core :: Protocols :: Protocol Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Client Interface |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Clients :: Apache |2.17.295 |Apache 2.0 | +|AWS Java SDK :: HTTP Clients :: Netty Non Blocking I/O |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Lakeformation |1.12.400 |Apache 2.0 | +|AWS Java SDK :: Metrics SPI |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Profiles |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Regions |2.17.295 |Apache 2.0 | +|AWS Java SDK :: SDK Core |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Services :: AWS Secrets Manager |2.17.295 |Apache 2.0 | +|AWS Java SDK :: Utilities |2.17.295 |Apache 2.0 | +|AWS Java SDK For Amazon Redshift |1.12 |Apache 2.0 | +|AWS Java SDK For Amazon Redshift |1.12.400 |Apache 2.0 | +|AWS SDK for Java - Core |1.12.400 |Apache 2.0 | |babel-plugin-macros |2.8.0 |MIT | |babel-plugin-transform-runtime |6.23.0 |MIT | |babel-runtime |6.26.0 |MIT | @@ -227,22 +238,29 @@ |Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs|1.64 |MIT | |Bouncy Castle Provider |1.64 |MIT | |browserslist |4.21.0 |MIT | +|bson |4.3.4 |Apache 2.0, Creative Commons| |Byte Buddy |1.10.19 |Apache 2.0 | |c3 |0.4.18 |MIT | -|Caffeine cache |2.7.0 |Apache 2.0 | -|Calcite Core |1.16.0-202110140531410732-6a46ce2e|Apache 2.0 | -|Calcite Linq4j |1.16.0-202110140531410732-6a46ce2e|Apache 2.0 | +|Caffeine cache |2.9.3 |Apache 2.0 | +|Calcite Core |1.17.0-202305081555330806-ba52e3e7|Apache 2.0 | +|Calcite Linq4j |1.17.0-202305081555330806-ba52e3e7|Apache 2.0 | |call-bind |1.0.2 |MIT | |callsites |3.1.0 |MIT | |CDI APIs |2.0.2 |Apache 2.0 | +|cdi-api |2.0 |Apache 2.0 | +|cel-core |0.3.12 |Apache 2.0 | +|cel-generated-antlr |0.3.12 |Apache 2.0 | +|cel-generated-pb |0.3.12 |Apache 2.0 | +|cel-jackson |0.3.12 |Apache 2.0 | +|cel-tools |0.3.12 |Apache 2.0 | |chalk |2.4.2 |MIT | |change-emitter |0.1.6 |MIT | |character-entities |1.2.4 |MIT | |character-entities-legacy |1.1.4 |MIT | |character-reference-invalid |1.1.4 |MIT | -|Checker Qual |2.8.1 |MIT | +|Checker Qual |3.12.0 |MIT | |classcat |4.1.0 |MIT | -|ClassMate |1.3.4 |Apache 2.0 | +|ClassMate |1.5.1 |Apache 2.0 | |classnames |2.3.1 |MIT | |clsx |1.0.4 |MIT | |clsx |1.1.1 |MIT | @@ -257,20 +275,24 @@ |comma-separated-tokens |1.0.8 |MIT | |common-tags |1.4.0 |MIT | |Commons CLI |1.2 |Apache 2.0 | -|Commons Codec |1.4 |Apache 2.0 | +|Commons Codec |1.15 |Apache 2.0 | |Commons Collections |3.2.2 |Apache 2.0 | -|Commons Compiler |2.7.6 |BSD 3-clause | +|Commons Compiler |3.1.6 |BSD 3-clause | |Commons Configuration |1.6 |Apache 2.0 | +|Commons Configuration |2.1.1 |Apache 2.0 | |Commons Daemon |1.0.13 |Apache 2.0 | |Commons Lang |2.6 |Apache 2.0 | |Commons Math |2.2 |Apache 2.0 | |Commons Math |3.1.1 |Apache 2.0 | |Commons Net |3.1 |Apache 2.0 | +|Commons Net |3.9.0 |Apache 2.0 | |Commons Pool |1.6 |Apache 2.0 | -|config |1.4.1 |Apache 2.0 | +|config |1.4.2 |Apache 2.0 | |Conscrypt OpenJDK Uber |2.2.1 |Apache 2.0 | +|content-type |2.1 |Apache 2.0 | |convert-source-map |1.8.0 |MIT | |copy-to-clipboard |3.0.8 |MIT | +|copy-to-clipboard |3.3.1 |MIT | |core-js |1.2.7 |MIT | |core-js |2.6.12 |MIT | |core-js 
|3.22.3 |MIT | @@ -307,28 +329,31 @@ |deep-equal |1.0.1 |MIT | |deepmerge |2.2.1 |MIT | |define-properties |1.1.4 |MIT | +|define-route |0.3.1 |Apache-2.0 | +|dialog-polyfill |0.5.6 |BSD-3-Clause | |Digester |1.8 |Apache 2.0 | |Disruptor Framework |3.4.2 |Apache 2.0 | |dnd-core |7.0.2 |MIT | |dom-helpers |2.4.0 |MIT | |dom-helpers |3.4.0 |MIT | |dom-helpers |5.2.1 |MIT | -|Dremio Cache Manager |18.0.0 |Dremio Free Software License 1.0| -|Dremio Fast Threads |18.0.0 |Dremio Free Software License 1.0| -|Dremio Joust Library |18.0.0 |Dremio Free Software License 1.0| -|Dremio Parquet Accelerator |18.0.0 |Dremio Free Software License 1.0| -|Dremio Pushdown Pack |18.0.0 |Dremio Free Software License 1.0| -|Dremio Smart Substitutions |18.0.0 |Dremio Free Software License 1.0| +|downshift |7.2.0 |MIT | +|Dremio Cache Manager |24.0.0 |Dremio Free Software License 1.0| +|Dremio Fast Threads |24.0.0 |Dremio Free Software License 1.0| +|Dremio Joust Library |24.0.0 |Dremio Free Software License 1.0| +|Dremio Parquet Accelerator |24.0.0 |Dremio Free Software License 1.0| +|Dremio Pushdown Pack |24.0.0 |Dremio Free Software License 1.0| +|Dremio Smart Substitutions |24.0.0 |Dremio Free Software License 1.0| |easy-peasy |4.0.1 |MIT | |echarts |5.3.1 |Apache-2.0 | |EclipseLink |2.5.2 |Eclipse Public License 1.0 & Eclipse Distribution License v. 1.0| |Elasticsearch SecureSM |1.1 |Apache 2.0 | -|Elasticsearch: Core |5.5.3 |Apache 2.0 | +|Elasticsearch: Core |6.8.23 |Apache 2.0 | |electron-to-chromium |1.4.170 |ISC | |element-closest |2.0.2 |CC0-1.0 | |emojis-list |3.0.0 |MIT | |encoding |0.1.12 |MIT | -|Error Prone Annotations |2.10.0 |Apache 2.0 | +|Error Prone Annotations |2.18.0 |Apache 2.0 | |error-ex |1.3.1 |MIT | |es-abstract |1.20.1 |MIT | |es-to-primitive |1.2.1 |MIT | @@ -347,15 +372,16 @@ |fbjs |0.8.16 |MIT | |file-saver |1.3.3 |MIT | |find-root |1.1.0 |MIT | -|FindBugs-Annotations |3.0.1 |LGPL v2 | -|FindBugs-jsr305 |3.0.1 |Apache 2.0 | +|FindBugs-Annotations |3.0.1u2 |LGPL v2 | +|FindBugs-jsr305 |3.0.2 |Apache 2.0 | |fixed-data-table-2 |1.1.2 |BSD-3-Clause | -|FlatBuffers Java API |1.9.0 |Apache 2.0 | +|FlatBuffers Java API |1.12.0 |Apache 2.0 | +|flight-sql |9.0.0-20221123064031-c39b8a6253|Apache 2.0 | |foodmart-data-json |0.4 |Apache 2.0 | |for-each |0.3.3 |MIT | |format |0.2.2 |MIT | |formik |2.2.6 |Apache-2.0 | -|FreeMarker |2.3.29 |Apache 2.0 | +|FreeMarker |2.3.31 |Apache 2.0 | |function-bind |1.1.1 |MIT | |function.prototype.name |1.1.5 |MIT | |functions-have-names |1.2.3 |MIT | @@ -365,6 +391,7 @@ |get-intrinsic |1.1.1 |MIT | |get-symbol-description |1.0.0 |MIT | |globals |11.12.0 |MIT | +|glue |2.17.295 |Apache 2.0 | |Google Android Annotations Library |4.1.1.4 |Apache 2.0 | |Google Auth Library for Java - Credentials |0.16.2 |BSD 3-clause | |Google Auth Library for Java - OAuth2 HTTP |0.21.0 |BSD 3-clause | @@ -372,33 +399,36 @@ |Google Cloud Core GRPC |1.82.0 |Apache 2.0 | |Google Cloud Monitoring |1.82.0 |Apache 2.0 | |Google Cloud Trace |0.100.0-beta |Apache 2.0 | -|Google Guice Core Library |4.2.2 |Apache 2.0 | -|Google Guice Extensions Servlet |4.2.2 |Apache 2.0 | +|Google Guice Core Library |5.1.0 |Apache 2.0 | +|Google Guice Core Library |5.1.0 |Apache 2.0 | |Google HTTP Client Library for Java |1.35.0 |Apache 2.0 | |Google Testing and Mocking Framework |1.10.x |BSD 3-clause | |graphlib |2.1.8 |MIT | |GRPC ALTs |1.32.2 |Apache 2.0 | -|GRPC API |1.32.2 |Apache 2.0 | +|GRPC API |1.54.1 |Apache 2.0 | |GRPC Auth |1.32.2 |Apache 2.0 | -|GRPC Context |1.32.2 |Apache 2.0 | -|GRPC Core 
|1.32.2 |Apache 2.0 | +|GRPC Context |1.54.1 |Apache 2.0 | +|GRPC Core |1.54.1 |Apache 2.0 | |GRPC Google Cloud Monitoring V3 |1.64.0 |Apache 2.0 | |GRPC GRPCLB |1.32.2 |Apache 2.0 | -|GRPC Netty |1.32.2 |Apache 2.0 | +|GRPC Netty |1.54.1 |Apache 2.0 | |GRPC OpenTracing |0.2.0 |BSD 3-clause | -|GRPC Protobuf |1.32.2 |Apache 2.0 | -|GRPC Protobuf Lite |1.32.2 |Apache 2.0 | -|GRPC Stub |1.32.2 |Apache 2.0 | +|GRPC Protobuf |1.54.1 |Apache 2.0 | +|GRPC Protobuf Lite |1.54.1 |Apache 2.0 | +|GRPC Stub |1.54.1 |Apache 2.0 | +|Gson |2.10.1 |Apache 2.0 | |Gson |2.2.4 |Apache 2.0 | -|Gson |2.9.0 |Apache 2.0 | |Guava InternalFutureFailureAccess and InternalFutures |1.0.1 |Apache 2.0 | |Guava ListenableFuture Only |9999.0-empty-to-avoid-conflict-with-guava|Apache 2.0 | |Guava: Google Core Libraries for Java |13.0.1 |Apache 2.0 | |Guava: Google Core Libraries for Java |20 |Apache 2.0 | -|Guava: Google Core Libraries for Java |28.1-jre |Apache 2.0 | +|Guava: Google Core Libraries for Java |31.1-jre |Apache 2.0 | |gud |1.0.0 |MIT | |Hadoop Winutils |3.2.0 |Apache 2.0 | |Hadoop YARN Client |2.7.0-mapr-1803 |Apache 2.0 | +|hadoop-shaded-guava |1.1.1 |Apache 2.0 | +|hadoop-shaded-protobuf_3_7-1.1.1 |1.1.1 |Apache 2.0 | +|hamcrest |2.1 |BSD-3-Clause | |has |1.0.3 |MIT | |has-bigints |1.0.2 |MIT | |has-flag |3.0.0 |MIT | @@ -409,7 +439,7 @@ |hastscript |6.0.0 |MIT | |HdrHistogram |2.1.8 |CC0 1.0 Universal | |HdrHistogram |2.1.9 |CC0 1.0 Universal | -|Hibernate Validator Engine |6.1.5.Final |Apache 2.0 | +|Hibernate Validator Engine |6.2.0.Final |Apache 2.0 | |highlight.js |10.7.3 |BSD-3-Clause | |history |3.3.0 |MIT | |HK2 API module |2.6.1 |CDDL 1.1 | @@ -422,15 +452,25 @@ |hsluv |0.0.3 |MIT | |HTrace Core |3.1.0-incubating |Apache 2.0 | |HttpClient |3.1 |Apache 2.0 | -|HttpClient |4.5.13 |Apache 2.0 | +|HttpClient |4.5.14 |Apache 2.0 | +|httpclient5 |5.1.3 |Apache 2.0 | +|httpclient5 |5.2.1 |Apache 2.0 | +|httpcore5 |5.1.3 |Apache 2.0 | +|httpcore5-h2 |5.1.3 |Apache 2.0 | +|httpcore5-h2 |5.2 |Apache 2.0 | |humanable |0.0.2 |MIT | +|IBM Data Server Driver For JDBC and SQLJ |11.5.8.0 |IPLA | +|iceberg-aws |1.2.0 |Apache 2.0 | +|iceberg-nessie |1.2.1 |Apache 2.0 | +|iceberg-views |0.58.0 |Apache 2.0 | |iconv-lite |0.4.24 |MIT | |immer |7.0.9 |MIT | |immutable |3.8.2 |MIT | |import-fresh |3.1.0 |MIT | |inherits |2.0.4 |ISC | -|IntelliJ IDEA Annotations |12 |Apache 2.0 | +|IntelliJ IDEA Annotations |12.0 |Apache 2.0 | |internal-slot |1.0.3 |MIT | +|intl-messageformat |10.2.1 |BSD-3-Clause | |intl-messageformat |9.9.1 |BSD-3-Clause | |invariant |2.2.4 |MIT | |is-alphabetical |1.0.4 |MIT | @@ -462,121 +502,130 @@ |Jackson |1.9.13 |Apache 2.0 | |Jackson 2 extensions to the Google HTTP Client Library for Java.|1.35.0 |Apache 2.0 | |Jackson Datatype Protobuf |0.9.12 |Apache 2.0 | -|Jackson datatype: Guava |2.11.4 |Apache 2.0 | -|Jackson module: Afterburner |2.11.4 |Apache 2.0 | -|Jackson-annotations |2.11.4 |Apache 2.0 | -|Jackson-core |2.11.4 |Apache 2.0 | -|jackson-databind |2.11.4 |Apache 2.0 | -|Jackson-dataformat-CBOR |2.11.4 |Apache 2.0 | -|Jackson-dataformat-Smile |2.11.4 |Apache 2.0 | -|Jackson-dataformat-YAML |2.11.4 |Apache 2.0 | -|Jackson-JAXRS-base |2.11.4 |Apache 2.0 | -|Jackson-JAXRS-JSON |2.11.4 |Apache 2.0 | -|Jackson-module-JAXB-annotations |2.11.4 |Apache 2.0 | -|jackson-module-jsonSchema |2.11.4 |Apache 2.0 | +|Jackson datatype: Guava |2.14.2 |Apache 2.0 | +|Jackson module: Afterburner |2.14.2 |Apache 2.0 | +|Jackson-annotations |2.14.2 |Apache 2.0 | +|Jackson-core |2.14.2 |Apache 2.0 | 
+|jackson-databind |2.14.2 |Apache 2.0 | +|Jackson-dataformat-CBOR |2.14.2 |Apache 2.0 | +|jackson-dataformat-protobuf |2.14.2 |Apache 2.0 | +|Jackson-dataformat-Smile |2.14.2 |Apache 2.0 | +|Jackson-dataformat-YAML |2.14.2 |Apache 2.0 | +|jackson-datatype-jdk8 |2.14.2 |Apache 2.0 | +|Jackson-JAXRS-base |2.14.2 |Apache 2.0 | +|Jackson-JAXRS-JSON |2.14.2 |Apache 2.0 | +|Jackson-module-JAXB-annotations |2.14.2 |Apache 2.0 | +|jackson-module-jsonSchema |2.14.2 |Apache 2.0 | |Jaeger Client |1.5.0 |Apache 2.0 | |Jaeger Core |1.5.0 |Apache 2.0 | |Jaeger Thrift |1.5.0 |Apache 2.0 | |Jaeger TracerResolver |1.5.0 |Apache 2.0 | -|Jakarta Activation API JAR |1.2.1 |EDL 1.0 | +|Jakarta Activation API JAR |1.2.2 |EDL 1.0 | |Jakarta Annotations API |1.3.5 |Eclipse Public License 2.0| |Jakarta Bean Validation API |2.0.2 |Apache 2.0 | -|Jakarta Dependency Injection |1 |Apache 2.0 | +|Jakarta Dependency Injection |1.0 |Apache 2.0 | |Jakarta Enterprise Beans API |3.2.6 |EPL 2.0 & GPL2 w/ CPE| |Jakarta Expression Language API |3.0.3 |EPL 2.0 & GPL2 w/ CPE| |Jakarta Inject |2.6.1 |Eclipse Public License 2.0| |Jakarta Interceptors |1.2.5 |EPL 2.0 & GPL2 w/ CPE| |Jakarta RESTful Web Services API |2.1.6 |Eclipse Public License 2.0| |Jakarta Transaction API |1.3.2 |EPL 2.0 & GPL2 w/ CPE| -|Jakarta XML Bind API |2.3.2 |EDL 1.0 | -|Janino |2.7.6 |BSD 3-clause | +|Jakarta XML Bind API |2.3.3 |EDL 1.0 | +|Janino |3.1.6 |BSD 3-clause | |Java Concurrency in Practice book annotations |1.0-1 |CC 2.5 | |Java implementation of the SemVer Specification |0.9.0 |MIT License | -|Java Native Access |4.5.0 |Apache 2.0 | -|Java Native Access |4.5.0 |Apache 2.0 | +|Java Native Access |5.12.1 |Apache 2.0 | +|Java Native Access |5.12.1 |Apache 2.0 | |Java Servlet API |3.1.0 |GPL v2 w/ CPE | -|JavaBeans Activation Framework (JAF) |1.1 |CDDL 1.0 | |JavaMail API |1.6.1 |CDDL 1.1 | -|Javassist |3.19.0-GA |Apache 2.0 | +|Javassist |3.28.0-GA |Apache 2.0 | |Javax Persistence |2.1.0 |Eclipse Public License 1.0 & Eclipse Distribution License v. 
1.0| -|Javax WS RS API |2.0.1 |GPL v2 w/ CPE | +|Javax WS RS API |2.1.1 |GPL v2 w/ CPE | +|javax.activation-api |1.2.0 |COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1| |javax.annotation API |1.3.2 |GPL v2 w/ CPE | -|javax.inject |1 |Apache 2.0 | +|javax.inject |1.0 |Apache 2.0 | +|javax.interceptor-api |1.2 |Eclipse Public License - v 2.0, The GNU General Public License (GPL) Version 2, June 1991| |JAX RS Provider For JSON Content Type |1.9.13 |Apache 2.0 | -|JAXB API bundle for GlassFish V3 |2.2.2 |GPL v2 w/ CPE | +|JAXB API bundle for GlassFish V3 |2.2.11 |GPL v2 w/ CPE | |JBoss Jakarta JAXRS Api_spec |2.0.1.Final |EPL 2.0 & GPL2 w/ CPE| -|JBoss Logging 3 |3.3.2.Final |Apache 2.0 | +|JBoss Logging 3 |3.4.1.Final |Apache 2.0 | |JCL 1.1.1 implemented over SLF4J |1.7.36 |MIT | -|JCommander |1.81 |Apache 2.0 | +|JCommander |1.82 |Apache 2.0 | |JDK Tools |1.8 | | |Jersey Bundle |1.19.3 |CDDL 1.1 | |Jersey Client |1.9 |CDDL 1.1 | -|Jersey Ext Entity Filtering |2.3 |CDDL 1.1 | +|Jersey Ext Entity Filtering |2.39.1 |CDDL 1.1 | |Jersey Guice |1.9 |CDDL 1.1 | -|Jersey Inject HK2 |2.3 |Eclipse Public License 2.0| -|Jersey Media JSON Jackson |2.3 |CDDL 1.1 | -|jersey-container-jetty-http |2.3 |CDDL 1.1 | -|jersey-container-jetty-servlet |2.3 |CDDL 1.1 | -|jersey-container-servlet |2.3 |CDDL 1.1 | -|jersey-container-servlet-core |2.3 |CDDL 1.1 | -|jersey-core-client |2.3 |CDDL 1.1 | -|jersey-core-common |2.3 |CDDL 1.1 | -|jersey-core-server |2.3 |CDDL 1.1 | -|jersey-ext-mvc |2.3 |CDDL 1.1 | -|jersey-ext-mvc-freemarker |2.3 |CDDL 1.1 | -|jersey-media-jaxb |2.3 |CDDL 1.1 | -|jersey-media-multipart |2.3 |CDDL 1.1 | -|JetBrains Java Annotations |13 |Apache 2.0 | -|Jetty :: Asynchronous HTTP Client |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Continuation |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Http Utility |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: IO Utility |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Security |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Server Core |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Servlet Handling |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utilities |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utilities :: Ajax(JSON) |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Utility Servlets and Filters |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Webapp Application Support |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: API |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Client |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Common |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Server |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: Websocket :: Servlet Interface |9.4.43.v20210629 |Apache 2.0 | -|Jetty :: XML utilities |9.4.43.v20210629 |Apache 2.0 | +|Jersey Inject HK2 |2.39.1 |Eclipse Public License 2.0| +|Jersey Media JSON Jackson |2.39.1 |CDDL 1.1 | +|jersey-container-jetty-http |2.39.1 |CDDL 1.1 | +|jersey-container-jetty-servlet |2.39.1 |CDDL 1.1 | +|jersey-container-servlet |2.39.1 |CDDL 1.1 | +|jersey-container-servlet-core |2.39.1 |CDDL 1.1 | +|jersey-core-client |2.39.1 |CDDL 1.1 | +|jersey-core-common |2.39.1 |CDDL 1.1 | +|jersey-core-server |2.39.1 |CDDL 1.1 | +|jersey-ext-mvc |2.39.1 |CDDL 1.1 | +|jersey-ext-mvc-freemarker |2.39.1 |CDDL 1.1 | +|jersey-media-jaxb |2.35 |CDDL 1.1 | +|jersey-media-multipart |2.39.1 |CDDL 1.1 | +|JetBrains Java Annotations |13.0 |Apache 2.0 | +|Jetty :: Asynchronous HTTP Client |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Continuation |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Http Utility |9.4.51.v20230217 
|Apache 2.0 | +|Jetty :: IO Utility |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Security |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Server Core |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Servlet Handling |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utilities |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utilities :: Ajax(JSON) |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Utility Servlets and Filters |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Webapp Application Support |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: API |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Client |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Common |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Server |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: Websocket :: Servlet Interface |9.4.51.v20230217 |Apache 2.0 | +|Jetty :: XML utilities |9.4.51.v20230217 |Apache 2.0 | |Jetty Server |6.1.26 |Apache 2.0 | |Jetty Utilities |6.1.26 |Apache 2.0 | |JLine |0.9.94 |BSD 3-clause | +|jline |2.14.3 |BSD-3-Clause | +|jline |3.9.0 |BSD-3-Clause | |JMES Path Query library |1.11.761 |Apache 2.0 | -|JMES Path Query library |1.12.75 |Apache 2.0 | -|Joda-Time |2.9 |Apache 2.0 | +|JMES Path Query library |1.12.400 |Apache 2.0 | +|Joda-Time |2.12.1 |Apache 2.0 | |JOpt Simple |3.2 |MIT | |JOpt Simple |5.0.2 |MIT | |jquery |3.5.1 |MIT | |js-tokens |4.0.0 |MIT | -|JSch |0.1.54 |BSD 3-clause | +|JSch |0.1.55 |BSD 3-clause | |jsesc |2.5.2 |MIT | |JSON In Java |20080701 |provided without support or warranty| -|JSON Small and Fast Parser |2.4.8 |Apache 2.0 | +|JSON Small and Fast Parser |2.4.10 |Apache 2.0 | |json-parse-even-better-errors |2.3.1 |MIT | |json-ptr |3.1.1 |MIT | +|json-utils |2.17.295 |Apache 2.0 | |json5 |2.2.1 |MIT | |jsplumb |2.1.4 |MIT | -|JUL to SLF4J bridge |1.7.28 |MIT | +|JUL to SLF4J bridge |1.7.36 |MIT | |JVM Integration For Metrics |4.1.19 |Apache 2.0 | |jwt-decode |3.1.2 |MIT | |Koloboke Collections API |1.0.0 |Apache 2.0 | |Koloboke Collections Implementation Commons |1.0.0 |Apache 2.0 | -|Kotlin Common Standard Library |1.4.0 |Apache 2.0 | -|Kotlin Standard Library |1.4.10 |Apache 2.0 | +|Kotlin Common Standard Library |1.6.20 |Apache 2.0 | +|Kotlin Standard Library |1.6.20 |Apache 2.0 | |Kryo |4.0.1 |BSD 3-clause | +|LatencyUtils |2.0.3 |Public Domain, BSD 2-Clause License| |leantable |0.2.6 |Apache-2.0 | +|leantable |0.4.11 |Apache-2.0 | +|leantable |0.4.12 |Apache-2.0 | |leveldbjni-all |1.8 |BSD 3-clause | -|lilith-data-converter |8.2.0 |Apache 2.0 | -|lilith-data-eventsource |8.2.0 |Apache 2.0 | -|lilith-data-logging |8.2.0 |Apache 2.0 | -|lilith-data-logging-protobuf |8.2.0 |Apache 2.0 | -|lilith-sender |8.2.0 |Apache 2.0 | +|lilith-data-converter |8.3.0 |Apache 2.0 | +|lilith-data-eventsource |8.3.0 |Apache 2.0 | +|lilith-data-logging |8.3.0 |Apache 2.0 | +|lilith-data-logging-protobuf |8.3.0 |Apache 2.0 | +|lilith-sender |8.3.0 |Apache 2.0 | |lines-and-columns |1.2.4 |MIT | |linkifyjs |2.1.9 |MIT | |lodash |4.17.21 |MIT | @@ -589,36 +638,36 @@ |lodash.isequal |4.5.0 |MIT | |lodash.isplainobject |4.0.6 |MIT | |lodash.omit |4.5.0 |MIT | -|Log4j Implemented Over SLF4J |1.7.28 |MIT | -|Logback Access Module |1.2.3 |EPL 1.0 | -|Logback Classic Module |1.2.3 |EPL 1.0 | +|Log4j Implemented Over SLF4J |1.7.36 |MIT | +|Logback Access Module |1.2.11 |EPL 1.0 | +|Logback Classic Module |1.2.11 |EPL 1.0 | |Logback Core Module |1.2.11 |EPL 1.0 | -|logback-classic |8.2.0 |Apache 2.0 | -|logback-converter-classic |8.2.0 |Apache 2.0 | -|logback-multiplex-appender-classic |8.2.0 |Apache 2.0 | 
-|logback-multiplex-appender-core |8.2.0 |Apache 2.0 | -|Logstash Logback Encoder |6.2 |Apache 2.0 | +|logback-classic |8.3.0 |Apache 2.0 | +|logback-converter-classic |8.3.0 |Apache 2.0 | +|logback-multiplex-appender-classic |8.3.0 |Apache 2.0 | +|logback-multiplex-appender-core |8.3.0 |Apache 2.0 | +|Logstash Logback Encoder |7.2 |Apache 2.0 | |loose-envify |1.4.0 |MIT | |lottie-web |5.8.1 |MIT | |lowlight |1.20.0 |MIT | -|Lucene Common Analyzers |6.6.0 |Apache 2.0 | -|Lucene Core |6.6.0 |Apache 2.0 | -|Lucene Grouping |6.6.0 |Apache 2.0 | -|Lucene Highlighter |6.6.0 |Apache 2.0 | -|Lucene Join |6.6.0 |Apache 2.0 | -|Lucene Memory |6.6.0 |Apache 2.0 | -|Lucene Memory |6.6.0 |Apache 2.0 | -|Lucene Miscellaneous |6.6.0 |Apache 2.0 | -|Lucene Queries |6.6.0 |Apache 2.0 | -|Lucene QueryParsers |6.6.0 |Apache 2.0 | -|Lucene Sandbox |6.6.0 |Apache 2.0 | -|Lucene Spatial |6.6.0 |Apache 2.0 | -|Lucene Spatial 3D |6.6.0 |Apache 2.0 | -|Lucene Spatial Extras |6.6.0 |Apache 2.0 | -|Lucene Suggest |6.6.0 |Apache 2.0 | +|Lucene Common Analyzers |7.7.3 |Apache 2.0 | +|Lucene Core |7.7.3 |Apache 2.0 | +|Lucene Grouping |7.7.3 |Apache 2.0 | +|Lucene Highlighter |7.7.3 |Apache 2.0 | +|Lucene Join |7.7.3 |Apache 2.0 | +|Lucene Memory |7.7.3 |Apache 2.0 | +|Lucene Memory |7.7.3 |Apache 2.0 | +|Lucene Miscellaneous |7.7.3 |Apache 2.0 | +|Lucene Queries |7.7.3 |Apache 2.0 | +|Lucene QueryParsers |7.7.3 |Apache 2.0 | +|Lucene Sandbox |7.7.3 |Apache 2.0 | +|Lucene Spatial |7.7.3 |Apache 2.0 | +|Lucene Spatial 3D |7.7.3 |Apache 2.0 | +|Lucene Spatial Extras |7.7.3 |Apache 2.0 | +|Lucene Suggest |7.7.3 |Apache 2.0 | |LZ4 and XxHash |1.7.1 |Apache 2.0 | |map-or-similar |1.5.0 |MIT | -|MariaDB |2.3.0 |LGPL v2.1 | +|MariaDB |3.0.8 |LGPL v2.1 | |marked |0.7.0 |MIT | |material-icons |0.1.0 |MIT | |material-ui-popup-state |3.1.1 |MIT | @@ -629,49 +678,77 @@ |Metrics Integration For Jetty 9.3 and Higher |4.1.19 |Apache 2.0 | |Metrics Integration with JMX |4.1.19 |Apache 2.0 | |micro-memoize |4.0.9 |MIT | -|Microsoft Azure Active Directory Authentication Library (ADAL) for Java|1.6.4 |MIT License | +|micrometer-commons |1.10.6 |Apache 2.0 | +|micrometer-core |1.10.6 |Apache 2.0 | +|micrometer-observation |1.10.6 |Apache 2.0 | +|microprofile-openapi-api |3.1 |Apache 2.0 | +|Microsoft Azure Active Directory Authentication Library (ADAL) for Java|1.6.7 |MIT License | |Microsoft JDBC Driver For SQL Server |7.0.0.jre8 |MIT License | -|MIME streaming extension |1.9.11 |GPL v2 w/ CPE | +|MIME streaming extension |1.9.15 |GPL v2 w/ CPE | |MinLog |1.3.0 |BSD 3-clause | |ModelMapper |2.3.0 |Apache 2.0 | |ModelMapper Protobuf Extension |2.3.0 |Apache 2.0 | |moize |6.1.0 |MIT | +|moize |6.1.3 |MIT | |monaco-editor |0.10.0 |MIT | |MongoDB Java Driver |3.12.0 |Apache 2.0 | +|mongodb-driver-core |4.3.4 |Apache 2.0 | +|mongodb-driver-legacy |4.3.4 |Apache 2.0 | +|mongodb-driver-sync |4.3.4 |Apache 2.0 | |mousetrap |1.6.1 |Apache-2.0 | |ms |2.1.2 |MIT | +|msw |1.2.0 |MIT | |mumath |3.3.4 |Unlicense | |nanoclone |0.2.1 |MIT | |nanoid |3.3.4 |MIT | |Native Library Loader |2.3.4 |Simplified BSD License| -|Nessie API |0.4.0 |Apache 2.0 | -|Nessie Server Store |0.4.0 |Apache 2.0 | -|Nessie Services |0.4.0 |Apache 2.0 | +|Nessie API |0.58.0 |Apache 2.0 | +|Nessie Server Store |0.58.0 |Apache 2.0 | +|Nessie Services |0.58.0 |Apache 2.0 | |Nessie Versioned Memory Store |0.4.0 |Apache 2.0 | -|Nessie Versioned Store SPI |0.4.0 |Apache 2.0 | +|Nessie Versioned Store SPI |0.58.0 |Apache 2.0 | +|nessie-client |0.58.0 |Apache 2.0 | 
+|nessie-protobuf-relocated |0.58.0 |Apache 2.0 | +|nessie-rest-services |0.58.0 |Apache 2.0 | +|nessie-server-store-proto |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-adapter |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-in-memory |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-non-transactional |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-serialize |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-serialize-proto |0.58.0 |Apache 2.0 | +|nessie-versioned-persist-store |0.58.0 |Apache 2.0 | |Netty Reactive Streams HTTP Support |2.0.5 |Apache 2.0 | |Netty Reactive Streams Implementation |2.0.5 |Apache 2.0 | -|Netty/Buffer |4.1.68.Final |Apache 2.0 | -|Netty/Codec |4.1.68.Final |Apache 2.0 | -|Netty/Codec/HTTP |4.1.68.Final |Apache 2.0 | -|Netty/Codec/HTTP2 |4.1.48.Final |Apache 2.0 | -|Netty/Codec/Socks |4.1.48.Final |Apache 2.0 | -|Netty/Common |4.1.68.Final |Apache 2.0 | -|Netty/Handler |4.1.68.Final |Apache 2.0 | -|Netty/Handler/Proxy |4.1.48.Final |Apache 2.0 | -|Netty/Resolver |4.1.68.Final |Apache 2.0 | -|Netty/TomcatNative [BoringSSL - Static] |2.0.28.Final |Apache 2.0 | -|Netty/Transport |4.1.68.Final |Apache 2.0 | -|Netty/Transport/Native/Epoll |4.1.48.Final-linux-x86_64|Apache 2.0 | -|Netty/Transport/Native/Unix/Common |4.1.48.Final |Apache 2.0 | +|netty-tcnative-classes |2.0.56.Final |Apache 2.0 | +|netty-transport-classes-epoll |4.1.89.Final |Apache 2.0 | +|netty-transport-classes-kqueue |4.1.89.Final |Apache 2.0 | +|netty-transport-native-kqueue |4.1.89.Final |Apache 2.0 | +|Netty/Buffer |4.1.89.Final |Apache 2.0 | +|Netty/Codec |4.1.89.Final |Apache 2.0 | +|Netty/Codec/HTTP |4.1.89.Final |Apache 2.0 | +|Netty/Codec/HTTP2 |4.1.89.Final |Apache 2.0 | +|Netty/Codec/Socks |4.1.89.Final |Apache 2.0 | +|Netty/Common |4.1.89.Final |Apache 2.0 | +|Netty/Handler |4.1.89.Final |Apache 2.0 | +|Netty/Handler/Proxy |4.1.89.Final |Apache 2.0 | +|Netty/Resolver |4.1.89.Final |Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final |Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-linux-aarch_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-linux-x86_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-osx-aarch_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-osx-x86_64.jar|Apache 2.0 | +|Netty/TomcatNative [BoringSSL - Static] |2.0.56.Final-windows-x86_64.jar|Apache 2.0 | +|Netty/Transport |4.1.89.Final |Apache 2.0 | +|Netty/Transport/Native/Epoll |4.1.89.Final |Apache 2.0 | +|Netty/Transport/Native/Unix/Common |4.1.89.Final |Apache 2.0 | |Nimbus JOSE+JWT |8.8 |Apache 2.0 | -|Nimbus LangTag |1.5 |Apache 2.0 | +|Nimbus LangTag |1.4.4 |Apache 2.0 | |node-fetch |1.7.3 |MIT | |node-releases |2.0.5 |MIT | |Noggit |0.6 |Apache 2.0 | |normalizr |2.3.1 |MIT | -|OAuth 2.0 SDK with OpenID Connect Extensions |6.5 |Apache 2.0 | -|OAuth2 Client |2.3 |CDDL 1.1 | +|OAuth 2.0 SDK with OpenID Connect Extensions |9.3 |Apache 2.0 | +|OAuth2 Client |2.39.1 |CDDL 1.1 | |object-assign |4.1.1 |MIT | |object-inspect |1.12.0 |MIT | |object-is |1.1.5 |MIT | @@ -679,36 +756,44 @@ |object.assign |4.1.2 |MIT | |Objenesis |2.4 |Apache 2.0 | |Ojdbc8 |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| +|OkHttp |2.7.5 |Apache 2.0 | |OkHttp |4.9.0 |Apache 2.0 | |Okio |2.8.0 |Apache 2.0 | +|Okio-jvm |3.0.0 |Apache 2.0 | |Ons |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| -|OpenCensus API |0.24.0 |Apache 2.0 | -|OpenCensus DropWizard Util for Java |0.24.0 |Apache 2.0 | +|OpenCensus API |0.31.1 
|Apache 2.0 | +|OpenCensus DropWizard Util for Java |0.31.1 |Apache 2.0 | |OpenCensus Exemplar Util |0.24.0 |Apache 2.0 | |OpenCensus HTTP Util |0.24.0 |Apache 2.0 | -|OpenCensus implementation |0.24.0 |Apache 2.0 | -|OpenCensus Java implementation |0.24.0 |Apache 2.0 | +|OpenCensus implementation |0.31.1 |Apache 2.0 | +|OpenCensus Java implementation |0.31.1 |Apache 2.0 | |OpenCensus Java Metrics Exporter Util |0.24.0 |Apache 2.0 | |OpenCensus Resources Util |0.24.0 |Apache 2.0 | |OpenCensus Stackdriver Stats Exporter |0.24.0 |Apache 2.0 | |OpenCensus Stackdriver Trace Exporter |0.24.0 |Apache 2.0 | |OpenHFT/Java-Thread-Affinity/affinity |3.1.7 |Apache 2.0 | +|OpenSearch SQL JDBC Driver |1.1.0.1 |Apache 2.0 | |OpenSSL toolkit |1.1.1d |OpenSSL and SSLeay license| -|OpenTelemetry - Jaeger Remote sampler |1.0.1 |Apache 2.0 | -|OpenTelemetry API |1.0.1 |Apache 2.0 | -|OpenTelemetry Context (Incubator) |1.0.1 |Apache 2.0 | -|OpenTelemetry Metrics API |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry OpenTracing Bridge |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Proto |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Protocol Exporter |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol Exporters |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol JSON Logging Exporters |1.0.1 |Apache 2.0 | -|OpenTelemetry Protocol Trace Exporter |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK Common |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK For Tracing |1.0.1 |Apache 2.0 | -|OpenTelemetry SDK Metrics |1.0.1-alpha |Apache 2.0 | -|OpenTelemetry Semantic Conventions |1.0.1-alpha |Apache 2.0 | +|OpenTelemetry - Jaeger Remote sampler |1.25.0 |Apache 2.0 | +|OpenTelemetry API |1.25.0 |Apache 2.0 | +|OpenTelemetry Context (Incubator) |1.25.0 |Apache 2.0 | +|OpenTelemetry Metrics API Events |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Metrics API Logs |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry OpenTracing Bridge |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Proto |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Protocol Exporter |1.25.0 |Apache 2.0 | +|OpenTelemetry Protocol Exporters |1.25.0 |Apache 2.0 | +|OpenTelemetry Protocol JSON Logging Exporters |1.25.0 |Apache 2.0 | +|OpenTelemetry Protocol Trace Exporter |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK Common |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK For Tracing |1.25.0 |Apache 2.0 | +|OpenTelemetry SDK Metrics |1.25.0 |Apache 2.0 | +|OpenTelemetry Semantic Conventions |1.25.0-alpha |Apache 2.0 | +|OpenTelemetry Semantic Conventions |1.25.0-alpha |Apache 2.0 | +|opentelemetry-exporter-common |1.25.0 |Apache 2.0 | +|opentelemetry-extension-trace-propagators |1.25.0 |Apache 2.0 | +|opentelemetry-instrumentation-annotations |1.25.0 |Apache 2.0 | |OpenTracing Java API |0.33.0 |Apache 2.0 | |OpenTracing NoOp |0.33.0 |Apache 2.0 | |OpenTracing utilities |0.33.0 |Apache 2.0 | @@ -719,26 +804,32 @@ |OSGi resource locator bundle - used by various API providers that rely on META-INF/services mechanism to locate providers.|1.0.3 |CDDL 1.1 | |ParaNamer Core |2.5.6 |BSD 3-clause | |parent-module |1.0.1 |MIT | +|parquet-avro |1.12.3 |Apache 2.0 | |parse-entities |2.0.0 |MIT | |parse-json |5.2.0 |MIT | |path-browserify |1.0.1 |MIT | |path-parse |1.0.7 |MIT | |path-type |4.0.0 |MIT | -|Perfmark:perfmark API |0.19.0 |Apache 2.0 | +|Perfmark:perfmark API |0.25.0 |Apache 2.0 | |performance-now |0.2.0 |MIT | |performance-now |2.1.0 |MIT | |picocolors |1.0.0 |ISC | -|Plugin Framework for Java |3.0.1 |Apache 2.0 | +|Plugin Framework for Java 
|3.6.0 |Apache 2.0 | |popper.js |1.14.7 |MIT | -|PostgreSQL JDBC Driver |42.3.4 |BSD 2-clause | +|PostgreSQL JDBC Driver |42.4.1 |BSD 2-clause | |prismjs |1.27.0 |MIT | |prismjs |1.28.0 |MIT | -|Prometheus Java Simpleclient |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Common |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Dropwizard |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Hotspot |0.7.0 |Apache 2.0 | -|Prometheus Java Simpleclient Servlet |0.7.0 |Apache 2.0 | +|Prometheus Java Simpleclient |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Common |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Dropwizard |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Hotspot |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | +|Prometheus Java Simpleclient Servlet |0.16.0 |Apache 2.0 | |promise |7.3.1 |MIT | +|prop-types |15.5.10 |BSD-3-Clause | |prop-types |15.5.8 |BSD-3-Clause | |prop-types |15.7.2 |MIT | |prop-types |15.8.1 |MIT | @@ -747,11 +838,12 @@ |property-information |5.6.0 |MIT | |Proto Google Cloud Trace V1 |0.65.0 |Apache 2.0 | |Proto Google Cloud Trace V2 |0.65.0 |Apache 2.0 | -|Proto Google Common Protos |1.17.0 |Apache 2.0 | +|Proto Google Common Protos |2.9.0 |Apache 2.0 | |Proto Google IAM V1 |0.12.0 |Apache 2.0 | |Protobuf Jackson |1.2.0 |MIT License | -|Protocol Buffer [Util] |3.9.1 |BSD 3-clause | -|Protocol Buffer Java API |3.9.1 |BSD 3-clause | +|Protocol Buffer [Util] |3.21.9 |BSD 3-clause | +|Protocol Buffer Java API |3.21.9 |BSD 3-clause | +|protoparser |4.0.3 |Apache 2.0 | |protostuff :: api |1.4.4 |Apache 2.0 | |protostuff :: collectionschema |1.4.4 |Apache 2.0 | |protostuff :: core |1.4.4 |Apache 2.0 | @@ -783,6 +875,7 @@ |react-gtm-module |2.0.11 |MIT | |react-hook-form |7.34.0 |MIT | |react-immutable-proptypes |2.1.0 |MIT | +|react-intl |2.9.0 |BSD-3-Clause | |react-intl |5.20.10 |BSD-3-Clause | |react-is |16.13.1 |MIT | |react-is |18.2.0 |MIT | @@ -811,6 +904,7 @@ |react-syntax-highlighter |15.5.0 |MIT | |react-textarea-autosize |8.3.4 |MIT | |react-transition-group |4.4.2 |BSD-3-Clause | +|react-transition-group |4.4.5 |BSD-3-Clause | |react-virtualized |9.22.3 |MIT | |react-virtualized-tree |3.4.1 |MIT | |Reactive Streams |1.0.3 |CC0 | @@ -823,20 +917,22 @@ |redux-saga |0.15.6 |MIT | |redux-thunk |2.3.0 |MIT | |ReflectASM |1.11.3 |BSD 3-clause | -|Reflections |0.9.10 |WTFPL | +|Reflections |0.10.2 |WTFPL | |refractor |3.6.0 |MIT | |regenerator-runtime |0.11.1 |MIT | |regenerator-runtime |0.13.9 |MIT | |regexp.prototype.flags |1.4.3 |MIT | -|Rendezvous Hash |1 |BSD 3-clause | +|Rendezvous Hash |1.0 |BSD 3-clause | |requires-port |1.0.0 |MIT | |reselect |2.5.4 |MIT | |reselect |3.0.1 |MIT | |reselect |4.0.0 |MIT | |resolve |1.20.0 |MIT | |resolve-from |4.0.0 |MIT | +|RoaringBitmap |0.9.22 |Apache 2.0 | |RocksDB JNI |5.14.2 |Apache 2.0 | |RSQL-parser |2.1.0 |MIT | +|rxjs |7.5.5 |Apache-2.0 | |S2 Geometry Library |0.9.0 |Apache 2.0 | |safe-buffer |5.1.1 |MIT | |safe-buffer |5.2.1 |MIT | @@ -848,19 +944,22 @@ |setimmediate |1.0.5 |MIT | |shallow-equal |1.2.1 |MIT | |shallowequal |1.1.0 |MIT | +|shims |0.9.22 |Apache 2.0 | |side-channel |1.0.4 |MIT | |Simplefan |19.3.0.0 |Oracle Free Use Terms and Conditions (FUTC)| |simplemde |1.11.2 |MIT | |Sketches Core |0.9.0 |Apache 2.0 | |SLF4J API Module |1.7.36 |MIT | |Slugify - Core |2.1.7 
-|smart-resource |0.3.6 |Apache-2.0 |
+|smart-icon |1.4.3 |Apache-2.0 |
+|smart-resource |0.3.8 |Apache-2.0 |
+|smart-resource |1.0.0 |Apache-2.0 |
|SnakeYAML |1.15 |Apache 2.0 |
-|SnakeYAML |1.26 |Apache 2.0 |
+|SnakeYAML |1.33 |Apache 2.0 |
|Snappy for Java |1.1.4 |Apache 2.0 |
-|Snowflake JDBC |3.13.24 |Apache 2.0 |
+|Snowflake JDBC |3.13.30 |Apache 2.0 |
|software.amazon.ion:ion-java |1.0.2 |Apache 2.0 |
-|solid-js |1.4.8 |MIT |
+|solid-js |1.6.2 |MIT |
|source-map |0.5.7 |BSD-3-Clause |
|space-separated-tokens |1.1.5 |MIT |
|SparseBitSet |1.2 |Apache 2.0 |
@@ -869,16 +968,17 @@
|string.prototype.trimend |1.0.5 |MIT |
|string.prototype.trimstart |1.0.5 |MIT |
|stylis |4.0.13 |MIT |
-|Sulky ULID |8.2.0 |Apache 2.0 |
-|sulky-codec |8.2.0 |Apache 2.0 |
-|sulky-formatting |8.2.0 |Apache 2.0 |
-|sulky-io |8.2.0 |Apache 2.0 |
+|Sulky ULID |8.3.0 |Apache 2.0 |
+|sulky-codec |8.3.0 |Apache 2.0 |
+|sulky-formatting |8.3.0 |Apache 2.0 |
+|sulky-io |8.3.0 |Apache 2.0 |
|supports-color |5.5.0 |MIT |
|symbol-observable |1.2.0 |MIT |
|symbol-observable |2.0.3 |MIT |
|T-Digest |3.2 |Apache 2.0 |
|The Netty Project |3.10.6.Final-nohttp |Apache 2.0 |
|The Netty Project |3.2.2.Final |Apache 2.0 |
+|third-party-jackson-core |2.17.295 |Apache 2.0 |
|ThreeTen backport |1.4.1 |BSD 3-clause |
|tiny-warning |1.0.3 |MIT |
|to-fast-properties |2.0.0 |MIT |
@@ -913,6 +1013,8 @@
|use-sync-external-store |1.2.0 |MIT |
|util |0.12.4 |MIT |
|uuid |2.0.3 |MIT |
+|uuid |8.3.2 |MIT |
+|value-annotations |2.9.3 |Apache 2.0 |
|warning |3.0.0 |BSD-3-Clause |
|warning |4.0.3 |MIT |
|whatwg-fetch |2.0.3 |MIT |
@@ -921,7 +1023,7 @@
|ws |1.1.5 |MIT |
|Xalan Java |2.7.2 |Apache 2.0 |
|Xalan Java Serializer |2.7.2 |Apache 2.0 |
-|Xerces2-j |2.12.0 |Apache 2.0 |
+|Xerces2-j |2.12.2 |Apache 2.0 |
|XML Commons External Components XML APIs |1.4.01 |Apache 2.0 |
|Xml Compatibility Extensions For Jackson |1.9.13 |Apache 2.0 |
|XmlBeans |3.1.0 |Apache 2.0 |
@@ -931,8 +1033,10 @@
|yup |0.32.9 |MIT |
|ZkClient |0.1 |Apache 2.0 |
|zod |3.17.10 |MIT |
-|zookeeper |3.4.5-mapr-1710 |Apache 2.0 |
+|zookeeper |3.5.6.0-mapr-2009 |Apache 2.0 |
+|zookeeper-jute |3.5.6.0-mapr-2009 |Apache 2.0 |
|zrender |5.3.1 |BSD-3-Clause |
+|zstd-jni |1.5.0-1 |https://github.com/luben/zstd-jni/blob/master/LICENSE|
# License Texts
## Apache 2.0
@@ -5091,4 +5195,4 @@
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-```
+```
\ No newline at end of file
diff --git a/distribution/resources/src/main/resources-mapr/licenses/NOTICE b/distribution/resources/src/main/resources-mapr/licenses/NOTICE
index 0769c81b38..95d5859ddb 100644
--- a/distribution/resources/src/main/resources-mapr/licenses/NOTICE
+++ b/distribution/resources/src/main/resources-mapr/licenses/NOTICE
@@ -7,92 +7,112 @@
The Apache Software Foundation (http://www.apache.org/). This product incorporates and/or depends on software that falls under the following notices and/or trademarks:
-# List of contributors Red Hat Inc. Akira Kawauchi Davide D'Alto Dhanji R. Prasanna Emmanuel Bernard Gavin King Gerhard Petracek Guillaume Smet Gunnar Morling Hardy Ferentschik Hendrik Ebbers Kevin Pollet Sebastian Thomschke
+ ========================================================================= == NOTICE file corresponding to section 4(d) of the Apache License, == == Version 2.0, in this case for MicroProfile OpenAPI == ========================================================================= The majority of this software were originally based on the following: * Swagger Core https://github.com/swagger-api/swagger-core under Apache License, v2.0
-
-        Copyright © 2022 Sami Samhuri, http://samhuri.net <[email protected]>
-
+ Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This product includes parquet-tools, initially developed at ARRIS, Inc. with the following copyright notice: Copyright 2013 ARRIS, Inc. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -------------------------------------------------------------------------------- This product includes parquet-protobuf, initially developed by Lukas Nalezenc with the following copyright notice: Copyright 2013 Lukas Nalezenec. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -------------------------------------------------------------------------------- This product includes code from Apache Avro, which includes the following in its NOTICE file: Apache Avro Copyright 2010-2015 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. -------------------------------------------------------------------------------- This project includes code from Netflix, Inc. with the following copyright notice: | Copyright 2016 Netflix, Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License.
-ASM: a very small and fast Java bytecode manipulation framework Copyright (c) 2000-2011 INRIA, France Telecom All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# List of contributors Red Hat Inc. Akira Kawauchi Davide D'Alto Dhanji R. Prasanna Emmanuel Bernard Gavin King Gerhard Petracek Guillaume Smet Gunnar Morling Hardy Ferentschik Hendrik Ebbers Kevin Pollet Sebastian Thomschke -AWS EventStream for Java Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. +* This code was Written by Gil Tene of Azul Systems, and released to the * public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -AWS SDK for Java 2.0 Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt +// Copyright (c) 2013 The Chromium Authors. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt +
+        Copyright © 2023 Sami Samhuri, http://samhuri.net <[email protected]>
+
-Adam Stawicki Alaa Nassef Andrey Derevyanko Andrey Rodionov Benson Margulies Brent Douglas Carlos Vara Dag Hovland Davide Marchignoli Carlo de Wolf Chris Beckey Christian Ivan Denis Tiago Doug Lea Emmanuel Bernard Efthymis Sarbanis Federico Federico Mancini Gavin King George Gastaldi Gerhard Petracek Guillaume Husta Guillaume Smet Gunnar Morling Hardy Ferentschik Henno Vermeulen Jan-Willem Willebrands Jason T. Greene Julien May Julien Furgerot Juraci Krohling Justin Nauman Kathryn Killebrew Kevin Pollet Khalid Alqinyah Lee KyoungIl Leonardo Loch Zanivan Lucas Pouzac Lukas Niemeier Mark Hobson Marko Bekhta Mert Çalışkan Paolo Perrotta Pete Muir Sanne Grinovero Sebastian Bayerl Shane Bryzak Shelly McGowan Steve Ebersole Strong Liu Victor Rezende dos Santos Willi Schönborn Yoann Rodière
+ASM: a very small and fast Java bytecode manipulation framework Copyright (c) 2000-2011 INRIA, France Telecom All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-Aggregate Designer Copyright 2006 - 2013 Pentaho Corporation. All rights reserved. Copyright 2000-2005, 2014-2016 Julian Hyde
+AWS EventStream for Java Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

-Amazon Ion Java Copyright 2007-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt

-Apache Arrow Copyright 2016 The Apache Software Foundation
+AWS SDK for Java Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/).
********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt

-Apache Arrow Copyright 2016-2019 The Apache Software Foundation
+AWS SDK for Java 2.0 Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt

-Apache Avro Copyright 2010-2019 The Apache Software Foundation
+AWS SDK for Java 2.0 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies, Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. - Apache Commons Lang - https://github.com/apache/commons-lang - Netty Reactive Streams - https://github.com/playframework/netty-reactive-streams - Jackson-core - https://github.com/FasterXML/jackson-core - Jackson-dataformat-cbor - https://github.com/FasterXML/jackson-dataformats-binary The licenses for these third party components are included in LICENSE.txt - For Apache Commons Lang see also this required NOTICE: Apache Commons Lang Copyright 2001-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/).

-Apache Calcite -- Avatica Copyright 2012-2021 The Apache Software Foundation
+Adam Stawicki Alaa Nassef Andrey Derevyanko Andrey Rodionov Benson Margulies Brent Douglas Carlos Vara Dag Hovland Davide Marchignoli Carlo de Wolf Chris Beckey Christian Ivan Denis Tiago Doug Lea Emmanuel Bernard Efthymis Sarbanis Federico Federico Mancini Gavin King George Gastaldi Gerhard Petracek Guillaume Husta Guillaume Smet Gunnar Morling Hardy Ferentschik Henno Vermeulen Jan-Willem Willebrands Jason T. Greene Julien May Julien Furgerot Juraci Krohling Justin Nauman Kathryn Killebrew Kevin Pollet Khalid Alqinyah Lee KyoungIl Leonardo Loch Zanivan Lucas Pouzac Lukas Niemeier Mark Hobson Marko Bekhta Mert Çalışkan Paolo Perrotta Pete Muir Sanne Grinovero Sebastian Bayerl Shane Bryzak Shelly McGowan Steve Ebersole Strong Liu Victor Rezende dos Santos Willi Schönborn Yoann Rodière

-Apache Calcite Copyright 2012-2017 The Apache Software Foundation
+Aggregate Designer Copyright 2006 - 2013 Pentaho Corporation. All rights reserved. Copyright 2000-2005, 2014-2016 Julian Hyde

-Apache Commons BeanUtils Copyright 2000-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/).
+Amazon Ion Java Copyright 2007-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. -Apache Commons Collections Copyright 2001-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). +Apache Arrow Copyright 2016 The Apache Software Foundation -Apache Commons Compress Copyright 2002-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/). +Apache Arrow Copyright 2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software from the SFrame project (BSD, 3-clause). * Copyright (C) 2015 Dato, Inc. * Copyright (c) 2009 Carnegie Mellon University. This product includes software from the Feather project (Apache 2.0) https://github.com/wesm/feather This product includes software from the DyND project (BSD 2-clause) https://github.com/libdynd This product includes software from the LLVM project * distributed under the University of Illinois Open Source This product includes software from the google-lint project * Copyright (c) 2009 Google Inc. All rights reserved. This product includes software from the mman-win32 project * Copyright https://code.google.com/p/mman-win32/ * Licensed under the MIT License; This product includes software from the LevelDB project * Copyright (c) 2011 The LevelDB Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * Moved from Kudu http://github.com/cloudera/kudu This product includes software from the CMake project * Copyright 2001-2009 Kitware, Inc. * Copyright 2012-2014 Continuum Analytics, Inc. * All rights reserved. This product includes software from https://github.com/matthew-brett/multibuild (BSD 2-clause) * Copyright (c) 2013-2016, Matt Terry and Matthew Brett; all rights reserved. This product includes software from the Ibis project (Apache 2.0) * Copyright (c) 2015 Cloudera, Inc. * https://github.com/cloudera/ibis The web site includes files generated by Jekyll. -------------------------------------------------------------------------------- This product includes code from Apache Kudu, which includes the following in its NOTICE file: Apache Kudu Copyright 2016 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Portions of this software were developed at Cloudera, Inc (http://www.cloudera.com/). -Apache Commons IO Copyright 2002-2012 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). +Apache Arrow Copyright 2016-2019 The Apache Software Foundation -Apache Commons Lang Copyright 2001-2011 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). 
+Apache Avro Copyright 2010-2019 The Apache Software Foundation -Apache Commons Lang Copyright 2001-2017 The Apache Software Foundation +Apache Calcite Copyright 2012-2017 The Apache Software Foundation -Apache Commons Math Copyright 2001-2016 The Apache Software Foundation +Apache Calcite -- Avatica Copyright 2012-2021 The Apache Software Foundation -Apache Commons Pool Copyright 2001-2016 The Apache Software Foundation +Apache Commons BeanUtils Copyright 2000-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache Curator Copyright 2013-2014 The Apache Software Foundation +Apache Commons Collections Copyright 2001-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache DataSketches Java Copyright 2021 The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). +Apache Commons Compress Copyright 2002-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (https://www.apache.org/). -Apache DataSketches Memory Copyright 2021 - The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). +Apache Commons IO Copyright 2002-2012 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). -Apache Directory LDAP API Copyright 2003-2013 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Additional copyright notices and license terms applicable are present in the distribution/src/main/release/licenses directory. +Apache Commons Lang Copyright 2001-2011 The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). + +Apache Commons Lang Copyright 2001-2017 The Apache Software Foundation + +Apache Commons Math Copyright 2001-2016 The Apache Software Foundation + +Apache Commons Pool Copyright 2001-2016 The Apache Software Foundation + +Apache Curator Copyright 2013-2014 The Apache Software Foundation + +Apache DataSketches Java Copyright 2021 The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). + +Apache DataSketches Memory Copyright 2021 - The Apache Software Foundation Copyright 2015-2018 Yahoo Copyright 2019 Verizon Media This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Prior to moving to ASF, the software for this project was developed at Yahoo (now Verizon Media) (https://developer.yahoo.com). 
+ +Apache Directory LDAP API Copyright 2003-2013 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Additional copyright notices and license terms applicable are present in the distribution/src/main/release/licenses directory. Apache Drill Copyright 2013-2014 The Apache Software Foundation -Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation +Apache Hadoop Third-party Libs Copyright 2020 and onwards The Apache Software Foundation. This product includes software developed at The Apache Software Foundation (http://www.apache.org/). + +Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation + +Apache HttpComponents Client Copyright 1999-2017 The Apache Software Foundation -Apache HttpComponents Client Copyright 1999-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). +Apache HttpComponents Client Copyright 1999-2020 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -Apache HttpComponents Core Copyright 2005-2017 The Apache Software Foundation +Apache HttpComponents Core Copyright 2005-2017 The Apache Software Foundation -Apache Iceberg (incubating) Copyright 2017-2018 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. +Apache Iceberg Copyright 2017-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. -Apache Kafka Copyright 2012 The Apache Software Foundation. +Apache Iceberg (incubating) Copyright 2017-2018 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
-------------------------------------------------------------------------------- This project includes code from Kite, developed at Cloudera, Inc. with the following copyright notice: | Copyright 2013 Cloudera Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. -Apache Log4j Copyright 1999-2017 Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). ResolverUtil.java Copyright 2005-2006 Tim Fennell Dumbster SMTP test server Copyright 2004 Jason Paul Kitchen TypeUtil.java Copyright 2002-2012 Ramnivas Laddad, Juergen Hoeller, Chris Beams picocli (http://picocli.info) Copyright 2017 Remko Popma +Apache Kafka Copyright 2012 The Apache Software Foundation. -Apache Lucene Copyright 2001-2017 The Apache Software Foundation +Apache Log4j Copyright 1999-2017 Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). ResolverUtil.java Copyright 2005-2006 Tim Fennell Dumbster SMTP test server Copyright 2004 Jason Paul Kitchen TypeUtil.java Copyright 2002-2012 Ramnivas Laddad, Juergen Hoeller, Chris Beams picocli (http://picocli.info) Copyright 2017 Remko Popma -Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation +Apache Lucene Copyright 2001-2017 The Apache Software Foundation -Apache Ranger Copyright 2014-2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed by Spring Security Project (http://www.springframework.org/security) +Apache Parquet MR (Incubating) Copyright 2014 The Apache Software Foundation -Apache Twill Copyright 2013-2017 The Apache Software Foundation +Apache Ranger Copyright 2014-2017 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). This product includes software developed by Spring Security Project (http://www.springframework.org/security) -Apache Yetus Copyright 2008-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). --- Additional licenses for the Apache Yetus Source/Website: --- See LICENSE for terms. +Apache Thrift Copyright 2006-2010 The Apache Software Foundation. -Apache ZooKeeper Copyright 2009-2014 The Apache Software Foundation +Apache Twill Copyright 2013-2017 The Apache Software Foundation -ApacheDS Copyright 2003-2013 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Copyright 1998-2008 The OpenLDAP Foundation All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted only as authorized by the OpenLDAP Public License. A copy of this license is available in the file LICENSE in the top-level directory of the distribution or, alternatively, at . 
OpenLDAP is a registered trademark of the OpenLDAP Foundation. Individual files and/or contributed packages may be copyright by other parties and/or subject to additional restrictions. This work is derived from the University of Michigan LDAP v3.3 distribution. Information concerning this software is available at . This work also contains materials derived from public sources. Additional information about OpenLDAP can be obtained at . --- Portions Copyright 1998-2008 Kurt D. Zeilenga. Portions Copyright 1998-2006 Net Boolean Incorporated. Portions Copyright 2001-2006 IBM Corporation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted only as authorized by the OpenLDAP Public License. --- Portions Copyright 1999-2007 Howard Y.H. Chu. Portions Copyright 1999-2007 Symas Corporation. Portions Copyright 1998-2003 Hallvard B. Furuseth. Portions Copyright 2007 Gavin Henry Portions Copyright 2007 Suretec Systems All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that this notice is preserved. The names of the copyright holders may not be used to endorse or promote products derived from this software without their specific prior written permission. This software is provided ``as is'' without express or implied warranty. --- Portions Copyright (c) 1992-1996 Regents of the University of Michigan. All rights reserved. Redistribution and use in source and binary forms are permitted provided that this notice is preserved and that due credit is given to the University of Michigan at Ann Arbor. The name of the University may not be used to endorse or promote products derived from this software without specific prior written permission. This software is provided ``as is'' without express or implied warranty. --- Portions Copyright (c) 2001-2011 Vladimir Lysyy Licensed under the Apache License, Version 2.0 (the License); you may not use this source code except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. +Apache Yetus Copyright 2008-2019 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). --- Additional licenses for the Apache Yetus Source/Website: --- See LICENSE for terms. + +Apache ZooKeeper Copyright 2009-2014 The Apache Software Foundation + +ApacheDS Copyright 2003-2013 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). Copyright 1998-2008 The OpenLDAP Foundation All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted only as authorized by the OpenLDAP Public License. A copy of this license is available in the file LICENSE in the top-level directory of the distribution or, alternatively, at . OpenLDAP is a registered trademark of the OpenLDAP Foundation. Individual files and/or contributed packages may be copyright by other parties and/or subject to additional restrictions. This work is derived from the University of Michigan LDAP v3.3 distribution. 
Information concerning this software is available at . This work also contains materials derived from public sources. Additional information about OpenLDAP can be obtained at . --- Portions Copyright 1998-2008 Kurt D. Zeilenga. Portions Copyright 1998-2006 Net Boolean Incorporated. Portions Copyright 2001-2006 IBM Corporation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted only as authorized by the OpenLDAP Public License. --- Portions Copyright 1999-2007 Howard Y.H. Chu. Portions Copyright 1999-2007 Symas Corporation. Portions Copyright 1998-2003 Hallvard B. Furuseth. Portions Copyright 2007 Gavin Henry Portions Copyright 2007 Suretec Systems All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that this notice is preserved. The names of the copyright holders may not be used to endorse or promote products derived from this software without their specific prior written permission. This software is provided ``as is'' without express or implied warranty. --- Portions Copyright (c) 1992-1996 Regents of the University of Michigan. All rights reserved. Redistribution and use in source and binary forms are permitted provided that this notice is preserved and that due credit is given to the University of Michigan at Ann Arbor. The name of the University may not be used to endorse or promote products derived from this software without specific prior written permission. This software is provided ``as is'' without express or implied warranty. --- Portions Copyright (c) 2001-2011 Vladimir Lysyy Licensed under the Apache License, Version 2.0 (the License); you may not use this source code except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Copyright (C) 2004 Sam Hocevar Copyright (C) 2009 The JSR-330 Expert Group -Copyright (C) 2012 Google, Inc. - Copyright (C) 2012-2014 by various contributors (see AUTHORS) Copyright (C) 2013 Jordan Harband @@ -107,19 +127,19 @@ Copyright (C) 2018 The Guava Authors Copyright (C) 2020 Dremio -Copyright (c) 1997, 2018 Oracle and/or its affiliates and others. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 +Copyright (c) 1997, 2018 Oracle and/or its affiliates and others. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. 
This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 -Copyright (c) 1997, PostgreSQL Global Development Group +Copyright (c) 1997, PostgreSQL Global Development Group -Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Copyright (c) 2000-2011 INRIA, France Telecom -Copyright (c) 2001-2016, Arno Unkrig Copyright (c) 2015-2016 TIBCO Software Inc. +Copyright (c) 2001-2016, Arno Unkrig Copyright (c) 2015-2016 TIBCO Software Inc. Copyright (c) 2002-2015 Atsuhiko Yamanaka, JCraft,Inc. All rights reserved. -Copyright (c) 2002-2017 EPFL Copyright (c) 2011-2017 Lightbend, Inc. +Copyright (c) 2002-2017 EPFL Copyright (c) 2011-2017 Lightbend, Inc. Copyright (c) 2003-2013, Objenesis Team and all contributors @@ -129,21 +149,21 @@ Copyright (c) 2004-2016 Paul R. Holser, Jr. Copyright (c) 2004-2017 QOS.ch -Copyright (c) 2005, Graph Builder All rights reserved. +Copyright (c) 2005, Graph Builder All rights reserved. Copyright (c) 2006 Google, Inc. All rights reserved. 
-Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Copyright (c) 2006, Ivan Sagalaev. All rights reserved. -Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos

-Copyright (c) 2008, 2013 Sun Microsystems, Oracle Corporation. All rights reserved.
This program and the accompanying materials are made available under the terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 which accompanies this distribution. The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html and the Eclipse Distribution License is available at http://www.eclipse.org/org/documents/edl-v10.php.
Contributors: Linda DeMichiel - Java Persistence 2.1 Specification available from http://jcp.org/en/jsr/detail?id=338
Oracle Committers - EclipseLink specific implementations and OSGi support
Oracle Committers - Misc Bugfixes
Java(TM) Persistence API, Version 2.1

-Copyright (c) 2008, Nathan Sweet All rights reserved.
+Copyright (c) 2008, Nathan Sweet All rights reserved.

Copyright (c) 2009-2011, Mozilla Foundation and contributors All rights reserved.

@@ -155,7 +175,7 @@ Copyright (c) 2010 - 2015, Board of Regents of the University of Wisconsin-Madis
Copyright (c) 2010-2012 Robert Kieffer
MIT License - http://opensource.org/licenses/mit-license.php

-Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at http://glassfish.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder.
+Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved. The contents of this file are subject to the terms of either the GNU General Public License Version 2 only (GPL) or the Common Development and Distribution License(CDDL) (collectively, the License). You may not use this file except in compliance with the License. You can obtain a copy of the License at http://glassfish.java.net/public/CDDL+GPL_1_1.html or packager/legal/LICENSE.txt. See the License for the specific language governing permissions and limitations under the License. When distributing the software, include this License Header Notice in each file and include the License file at packager/legal/LICENSE.txt. GPL Classpath Exception: Oracle designates this particular file as subject to the Classpath exception as provided by Oracle in the GPL Version 2 section of the License file that accompanied this code. Modifications: If applicable, add the following below the License Header, with the fields enclosed by brackets [] replaced by your own identifying information: Portions Copyright [year] [name of copyright owner] Contributor(s): If you wish your version of this file to be governed by only the CDDL or only the GPL Version 2, indicate your decision by adding [Contributor] elects to include this software in this distribution under the [CDDL or GPL Version 2] license. If you don't indicate a single choice of license, a recipient has the option to distribute your version of this file under either the CDDL, the GPL Version 2 or to extend the choice of license to its licensees as provided above. However, if you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then the option applies only if the new code is made subject to such option by the copyright holder.

Copyright (c) 2010-2015, Michael Bostock All rights reserved.

@@ -165,13 +185,15 @@ Copyright (c) 2010-2016, Michael Bostock All rights reserved.
Copyright (c) 2010-2018, Michael Bostock All rights reserved.

+Copyright (c) 2010-2020 Robert Kieffer and other contributors
+
Copyright (c) 2011 Alexander Shtuchkin

Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>

-Copyright (c) 2011 FuseSource Corp. All rights reserved.
+Copyright (c) 2011 FuseSource Corp. All rights reserved.

-Copyright (c) 2011 Oracle and/or its affiliates. All rights reserved.
+Copyright (c) 2011 Oracle and/or its affiliates. All rights reserved.

Copyright (c) 2011, Christopher Finke All rights reserved.

@@ -185,7 +207,7 @@ Copyright (c) 2011-2018, Christopher Jeffrey (https://github.com/chjj/)
Copyright (c) 2011-2021 Phillip Clark

-Copyright (c) 2011-present, Facebook, Inc. Copyright (c) 2011 The LevelDB Authors. All rights reserved.
+Copyright (c) 2011-present, Facebook, Inc. Copyright (c) 2011 The LevelDB Authors. All rights reserved.

Copyright (c) 2012 Barnesandnoble.com, llc, Donavon West, and Domenic Denicola

@@ -201,15 +223,15 @@ Copyright (c) 2012 Nicholas Fisher
Copyright (c) 2012 Raynos.

-Copyright (c) 2012, 2018 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0
+Copyright (c) 2012, 2018 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0

-Copyright (c) 2012, 2019 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0
+Copyright (c) 2012, 2019 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0, which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception, which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0

Copyright (c) 2012-2014 Andris Reinman

Copyright (c) 2012-2014 Chris Pettitt

-Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
+Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.

Copyright (c) 2012-2018 Aseem Kishore, and [others].

@@ -259,7 +281,7 @@ Copyright (c) 2014 Petka Antonov 2015 Sindre Sorhus
Copyright (c) 2014, 2015, 2016, 2017, 2018 Simon Lydell

-Copyright (c) 2014, Chris Lohfink All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the {organization} nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2014, Chris Lohfink All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the {organization} nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2014-2016 GitHub, Inc.

@@ -353,6 +375,8 @@ Copyright (c) 2015-2017 Rubén Norte
Copyright (c) 2015-2018 Andrew Clark

+Copyright (c) 2015-2018 Google, Inc., Netflix, Inc., Microsoft Corp. and contributors
+
Copyright (c) 2015-2018 Reselect Contributors

Copyright (c) 2015-present Dan Abramov

@@ -389,11 +413,13 @@ Copyright (c) 2016 Your Name.
Copyright (c) 2016 Zeit, Inc.

+Copyright (c) 2016, 2017, Mike Lischke
+
Copyright (c) 2016, Claudéric Demers

Copyright (c) 2016, Schrodinger, Inc. All rights reserved.

-Copyright (c) 2016, gRPC EcosystemAll rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of grpc-opentracing nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2016, gRPC EcosystemAll rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of grpc-opentracing nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2016-2019 Ryan Carniato

@@ -425,7 +451,9 @@ Copyright (c) 2017 Titus Wormer
Copyright (c) 2017 Tony Quetano

-Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2017 sudodoki
+
+Copyright (c) 2017, 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2017, Baidu Inc. All rights reserved.

@@ -445,7 +473,7 @@ Copyright (c) 2018 Jordan Harband
Copyright (c) 2018 Nikita Skovoroda

-Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2018 React Popper authors

@@ -459,6 +487,8 @@ Copyright (c) 2018, React Community Forked from React (https://github.com/facebo
Copyright (c) 2018-present, iamkun

+Copyright (c) 2018–present Artem Zakharchenko
+
Copyright (c) 2019 Ademola Adegbuyi

Copyright (c) 2019 Alexander Reardon

@@ -473,9 +503,9 @@ Copyright (c) 2019 FormatJS
Copyright (c) 2019 Jordan Harband

-Copyright (c) 2019, James Clarke
+Copyright (c) 2019 Sentry (https://sentry.io) and individual contributors. All rights reserved.

-Copyright (c) 2019, Sentry All rights reserved.
+Copyright (c) 2019, James Clarke

Copyright (c) 2019-present Beier(Bill) Luo

@@ -503,6 +533,8 @@ Copyright (c) 2021, Oath Inc.
Copyright (c) 2022 Inspect JS

+Copyright (c) 2022 Sentry (https://sentry.io) and individual contributors. All rights reserved.
+
Copyright (c) 2022 WorkOS

Copyright (c) Microsoft Corporation.

@@ -511,15 +543,13 @@ Copyright (c) React Training 2015-2019
Copyright (c) Remix Software 2020-2022

Copyright 2000-2016 JetBrains s.r.o.

-Copyright 2003-2005, Ernst de Haan All rights reserved.
-
-Copyright 2006 SUN MICROSYSTEMS, INC
+Copyright 2003-2005, Ernst de Haan All rights reserved.

Copyright 2008 Google Inc.

-Copyright 2008 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license.
+Copyright 2008 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license.

-Copyright 2008, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2008, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright 2008-2011 Google Inc.

@@ -557,23 +587,27 @@ Copyright 2014 Google Inc.
Copyright 2014 The gRPC Authors

-Copyright 2014, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2014 Yahoo Inc. All rights reserved.
+
+Copyright 2014, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright 2014-2018 Chronicle Software Ltd

-Copyright 2015, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2014-2023 Real Logic Limited. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
+Copyright 2015, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright 2015-2019 The Jaeger Project Authors

Copyright 2015-2020 Ping Identity Corporation

-Copyright 2016 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of `Netty`, a configurable network stack in Java, which can be obtained at: * LICENSE: * licenses/LICENSE.netty.txt (Apache License 2.0) * HOMEPAGE: * http://netty.io/ This product contains a modified portion of `Apache Harmony`, modular Java runtime, which can be obtained at: * LICENSE: * licenses/LICENSE.harmony.txt (Apache License 2.0) * HOMEPAGE: * https://harmony.apache.org/
+Copyright 2016 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of `Netty`, a configurable network stack in Java, which can be obtained at: * LICENSE: * licenses/LICENSE.netty.txt (Apache License 2.0) * HOMEPAGE: * http://netty.io/ This product contains a modified portion of `Apache Harmony`, modular Java runtime, which can be obtained at: * LICENSE: * licenses/LICENSE.harmony.txt (Apache License 2.0) * HOMEPAGE: * https://harmony.apache.org/

Copyright 2016 Vladimir Bukhtoyarov.

-Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright 2016, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright 2016-2019 The OpenTracing Authors

@@ -589,7 +623,7 @@ Copyright 2018 Kilian Valkhof
Copyright 2018 Logan Smyth

-Copyright 2019 Carl Mastrangelo Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of 'Catapult', an open source Trace Event viewer for Chome, Linux, and Android applications, which can be obtained at: * LICENSE: * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) * HOMEPAGE: * https://github.com/catapult-project/catapult
+Copyright 2019 Carl Mastrangelo Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ----------------------------------------------------------------------- This product contains a modified portion of 'Catapult', an open source Trace Event viewer for Chome, Linux, and Android applications, which can be obtained at: * LICENSE: * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) * HOMEPAGE: * https://github.com/catapult-project/catapult

Copyright 2019 Eric Mantooth

@@ -625,7 +659,9 @@ Copyright Joyent, Inc. and other Node contributors. All rights reserved. Permiss
Copyright OpenJS Foundation and other contributors

-Copyright The OpenTelemetry Authors SPDX-License-Identifier: Apache-2.0
+Copyright OpenSearch Contributors Copyright 2010-2014 Amazon.com,Inc. or its affiliates. All Rights Reserved. This product includes software developed by Amazon Technologies,Inc (http://www.amazon.com/). ********************** THIRD PARTY COMPONENTS ********************** This software includes third party software subject to the following copyrights: - XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty. - PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. The licenses for these third party components are included in LICENSE.txt
+
+Copyright The OpenTelemetry Authors SPDX-License-Identifier: Apache-2.0

Copyright jQuery Foundation and other contributors

@@ -651,35 +687,43 @@ Copyright ©2009-2011 FasterXML, LLC. All rights reserved unless otherwise indic
Copyright ©2010-2019 Oracle Corporation. All Rights Reserved.

-Copyright(c) 2018 Microsoft Corporation All rights reserved.
+Copyright(c) 2018 Microsoft Corporation All rights reserved.
+
+Dremio Copyright 2015-2017 Dremio Corporation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- This project includes code from Iceberg, developed at Netflix, Inc. with the following copyright notice: | Copyright 2017 Netflix, Inc. | | Licensed under the Apache License, Version 2.0 (the License); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an AS IS BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License.

-Elasticsearch Copyright 2009-2017 Elasticsearch
+Elasticsearch Copyright 2009-2017 Elasticsearch

FreeMarker is Free software, licensed under the Apache License, Version 2.0.

-JBoss, Home of Professional Open Source Copyright 2010, Red Hat, Inc., and individual contributors
+JBoss, Home of Professional Open Source Copyright 2010, Red Hat, Inc., and individual contributors

-Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
+Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.

-Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.
+Jetty Web Container Copyright 1995-2017 Mort Bay Consulting Pty Ltd.

-LICENSE ISSUES ============== The OpenSSL toolkit stays under a double license, i.e. both the conditions of the OpenSSL License and the original SSLeay license apply to the toolkit. See below for the actual license texts. OpenSSL License --------------- /* ==================================================================== * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. All advertising materials mentioning features or use of this * software must display the following acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit. (http://www.openssl.org/) * * 4. The names OpenSSL Toolkit and OpenSSL Project must not be used to * endorse or promote products derived from this software without * prior written permission. For written permission, please contact * openssl-core@openssl.org. * * 5. Products derived from this software may not be called OpenSSL * nor may OpenSSL appear in their names without prior written * permission of the OpenSSL Project. * * 6. Redistributions of any form whatsoever must retain the following * acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit (http://www.openssl.org/) * * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * ==================================================================== * * This product includes cryptographic software written by Eric Young * (eay@cryptsoft.com). This product includes software written by Tim * Hudson (tjh@cryptsoft.com). * */ Original SSLeay License ----------------------- /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com) * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * This product includes software written by Tim Hudson (tjh@cryptsoft.com) * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] */
+LICENSE INFORMATION The Programs listed below are licensed under the following License Information terms and conditions in addition to the Program license terms previously agreed to by Client and IBM. If Client does not have previously agreed to license terms in effect for the Program, the International Program License Agreement (i125-3301-15) applies. Program Name (Program Number): IBM Data Server Driver for JDBC and SQLJ v4.32 (11.5.8) (Tool) The following standard terms apply to Licensee's use of the Program. Modifiable Third Party Code To the extent, if any, in the NOTICES file IBM identifies third party code as Modifiable Third Party Code, IBM authorizes Licensee to 1) modify the Modifiable Third Party Code and 2) reverse engineer the Program modules that directly interface with the Modifiable Third Party Code provided that it is only for the purpose of debugging Licensee's modifications to such third party code. IBM's service and support obligations, if any, apply only to the unmodified Program. Redistributables The Program includes components that are Redistributable and they are listed below. Redistributables may be distributed, in object-code form, only as part of Licensee's value-added application that was developed using the Program (Licensee's Application) and only to support use of Licensee's Application. If the Redistributables include a Java Runtime Environment, Licensee must also include other non-Java Redistributables with Licensee's Application. Licensee may not remove any copyright or notice files contained in the Redistributables or use IBM's, it's suppliers' or distributors' names or trademarks in connection with the marketing of Licensee's Application without IBM's or that supplier's or distributor's prior written consent. Licensee's license agreement with the end user must be at least as protective of IBM as the terms of this Agreement. IBM, its suppliers and distributors provide the Redistributables and related documentation without obligation of support and AS IS, WITH NO WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING THE WARRANTY OF TITLE, NON-INFRINGEMENT OR NON-INTERFERENCE AND THE IMPLIED WARRANTIES AND CONDITIONS OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. The following are Redistributables: IBM Data Server Driver for JDBC and SQLJ V4.32 Redistributables: The following list includes files that are provided to Licensee pursuant to the Redistributables section of the IBM International Program License Agreements License Information that applies to this Program: db2jcc4.jar sqlj4.zip L/N: L-KHAI-CASRX7 D/N: L-KHAI-CASRX7 P/N: L-KHAI-CASRX7
+
+LICENSE ISSUES ============== The OpenSSL toolkit stays under a double license, i.e. both the conditions of the OpenSSL License and the original SSLeay license apply to the toolkit. See below for the actual license texts. OpenSSL License --------------- /* ==================================================================== * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. All advertising materials mentioning features or use of this * software must display the following acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit. (http://www.openssl.org/) * * 4. The names OpenSSL Toolkit and OpenSSL Project must not be used to * endorse or promote products derived from this software without * prior written permission. For written permission, please contact * openssl-core@openssl.org. * * 5. Products derived from this software may not be called OpenSSL * nor may OpenSSL appear in their names without prior written * permission of the OpenSSL Project. * * 6. Redistributions of any form whatsoever must retain the following * acknowledgment: * This product includes software developed by the OpenSSL Project * for use in the OpenSSL Toolkit (http://www.openssl.org/) * * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * ==================================================================== * * This product includes cryptographic software written by Eric Young * (eay@cryptsoft.com). This product includes software written by Tim * Hudson (tjh@cryptsoft.com). * */ Original SSLeay License ----------------------- /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com) * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * This product includes software written by Tim Hudson (tjh@cryptsoft.com) * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] */

Logback: the reliable, generic, fast and flexible logging framework. Copyright (C) 1999-2017, QOS.ch. All rights reserved.

-MIT License Copyright (c) Microsoft Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+MIT License Copyright (c) Microsoft Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Micrometer Copyright (c) 2017-Present VMware, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------------------- This product contains a modified portion of 'io.netty.util.internal.logging', in the Netty/Common library distributed by The Netty Project: * Copyright 2013 The Netty Project * License: Apache License v2.0 * Homepage: https://netty.io This product contains a modified portion of 'StringUtils.isBlank()', in the Commons Lang library distributed by The Apache Software Foundation: * Copyright 2001-2019 The Apache Software Foundation * License: Apache License v2.0 * Homepage: https://commons.apache.org/proper/commons-lang/ This product contains a modified portion of 'JsonUtf8Writer', in the Moshi library distributed by Square, Inc: * Copyright 2010 Google Inc. * License: Apache License v2.0 * Homepage: https://github.com/square/moshi This product contains a modified portion of the 'org.springframework.lang' package in the Spring Framework library, distributed by VMware, Inc: * Copyright 2002-2019 the original author or authors. * License: Apache License v2.0 * Homepage: https://spring.io/projects/spring-framework

-Nimbus JOSE + JWT Copyright 2012 - 2020, Connect2id Ltd.
+Nimbus JOSE + JWT Copyright 2012 - 2020, Connect2id Ltd.

-Nimbus Language Tags Copyright 2012-2016, Connect2id Ltd.
+Nimbus Language Tags Copyright 2012-2016, Connect2id Ltd.

-Nimbus OAuth 2.0 SDK with OpenID Connect extensions Copyright (c) Connect2id Ltd., 2012 - 2019
+Nimbus OAuth 2.0 SDK with OpenID Connect extensions Copyright (c) Connect2id Ltd., 2012 - 2019

-Prometheus instrumentation library for JVM applications Copyright 2012-2015 The Prometheus Authors This product includes software developed at Boxever Ltd. (http://www.boxever.com/). This product includes software developed at SoundCloud Ltd. (http://soundcloud.com/). This product includes software developed as part of the Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/).
+Notices for Eclipse Project for Interceptors This content is produced and maintained by the Eclipse Project for Interceptors project. Project home: https://projects.eclipse.org/projects/ee4j.interceptors Trademarks Eclipse Project for Interceptors is a trademark of the Eclipse Foundation. Copyright All content is the property of the respective authors or their employers. For more information regarding authorship of content, please consult the listed source code repository logs. Declared Project Licenses This program and the accompanying materials are made available under the terms of the Eclipse Public License v. 2.0 which is available at http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU General Public License, version 2 with the GNU Classpath Exception which is available at https://www.gnu.org/software/classpath/license.html. SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 Source Code The project maintains the following source code repositories: https://github.com/eclipse-ee4j/interceptor-api Third-party Content Cryptography Content may contain encryption software. The country in which you are currently may have restrictions on the import, possession, and use, and/or re-export to another country, of encryption software. BEFORE using any encryption software, please check the country's laws, regulations and policies concerning the import, possession, or use, and re-export of encryption software, to see if this is permitted.

-The Checker Framework Copyright 2004-present by the Checker Framework developers Most of the Checker Framework is licensed under the GNU General Public License, version 2 (GPL2), with the classpath exception. The text of this license appears below. This is the same license used for OpenJDK. A few parts of the Checker Framework have more permissive licenses. * The annotations are licensed under the MIT License. (The text of this license appears below.) More specifically, all the parts of the Checker Framework that you might want to include with your own program use the MIT License. This is the checker-qual.jar file and all the files that appear in it: every file in a qual/ directory, plus utility files such as NullnessUtil.java, RegexUtil.java, SignednessUtil.java, etc. In addition, the cleanroom implementations of third-party annotations, which the Checker Framework recognizes as aliases for its own annotations, are licensed under the MIT License. Some external libraries that are included with the Checker Framework have different licenses. * javaparser is dual licensed under the LGPL or the Apache license -- you may use it under whichever one you want. (The javaparser source code contains a file with the text of the GPL, but it is not clear why, since javaparser does not use the GPL.) See file stubparser/LICENSE and the source code of all its files. * JUnit is licensed under the Common Public License v1.0 (see http://www.junit.org/license), with parts (Hamcrest) licensed under the BSD License (see http://hamcrest.org/JavaHamcrest/). * Libraries in plume-lib (https://github.com/plume-lib/) are licensed under the MIT License. The Checker Framework includes annotations for the JDK in directory checker/jdk/, and for some other libraries. Each annotated library uses the same license as the unannotated version of the library.
+Prometheus instrumentation library for JVM applications Copyright 2012-2015 The Prometheus Authors This product includes software developed at Boxever Ltd. (http://www.boxever.com/). This product includes software developed at SoundCloud Ltd. (http://soundcloud.com/). This product includes software developed as part of the Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/).

-The MIT License Copyright (c) 2009 codehaus.org. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +The Checker Framework Copyright 2004-present by the Checker Framework developers Most of the Checker Framework is licensed under the GNU General Public License, version 2 (GPL2), with the classpath exception. The text of this license appears below. This is the same license used for OpenJDK. A few parts of the Checker Framework have more permissive licenses. * The annotations are licensed under the MIT License. (The text of this license appears below.) More specifically, all the parts of the Checker Framework that you might want to include with your own program use the MIT License. This is the checker-qual.jar file and all the files that appear in it: every file in a qual/ directory, plus utility files such as NullnessUtil.java, RegexUtil.java, SignednessUtil.java, etc. In addition, the cleanroom implementations of third-party annotations, which the Checker Framework recognizes as aliases for its own annotations, are licensed under the MIT License. Some external libraries that are included with the Checker Framework have different licenses. * javaparser is dual licensed under the LGPL or the Apache license -- you may use it under whichever one you want. (The javaparser source code contains a file with the text of the GPL, but it is not clear why, since javaparser does not use the GPL.) See file stubparser/LICENSE and the source code of all its files. * JUnit is licensed under the Common Public License v1.0 (see http://www.junit.org/license), with parts (Hamcrest) licensed under the BSD License (see http://hamcrest.org/JavaHamcrest/). * Libraries in plume-lib (https://github.com/plume-lib/) are licensed under the MIT License. The Checker Framework includes annotations for the JDK in directory checker/jdk/, and for some other libraries. Each annotated library uses the same license as the unannotated version of the library. + +The MIT License Copyright (c) 2009 codehaus.org. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. The MIT License (MIT) Copyright (c) 2012-2014 Raynos. 
@@ -689,26 +733,36 @@ The MIT License (MIT) Copyright (c) 2015-2020 Adphorus Copyright (c) 2020- H The MIT License (MIT) Copyright (c) 2017 Kent C. Dodds -The Xerces-J 2.12.0 release is available in source code and precompiled binary (JAR files) form. Both Xerces-J packages are made available under the Apache Software License. +The MIT License (MIT) Copyright (c) 2017 PayPal + +The MongoDB Java Driver uses third-party libraries or other resources that may be distributed under licenses different than the MongoDB Java Driver software. In the event that we accidentally failed to list a required notice, please bring it to our attention through any of the ways detailed here: https://jira.mongodb.org/browse/JAVA The attached notices are provided for information only. For any licenses that require disclosure of source, sources are available at https://github.com/mongodb/mongo-java-driver. 1) The following files: Immutable.java, NotThreadSafe.java, ThreadSafe.java Copyright (c) 2005 Brian Goetz and Tim Peierls Released under the Creative Commons Attribution License (http://creativecommons.org/licenses/by/2.5) Official home: http://www.jcip.net Any republication or derived work distributed in source code form must include this copyright and license notice. 2) The following files: Assertions.java, AbstractCopyOnWriteMap.java, CopyOnWriteMap.java Copyright (c) 2008-2014 Atlassian Pty Ltd Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 3) The following files: Beta.java, UnsignedLongs.java, UnsignedLongsTest.java Copyright 2010 The Guava Authors Copyright 2011 The Guava Authors Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 4) The following files: ReadTimeoutHandler.java Copyright 2008-present MongoDB, Inc. Copyright 2012 The Netty Project Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 5) The following files: InstantCodec.java, Jsr310CodecProvider.java, LocalDateCodec.java, LocalDateTimeCodec.java, LocalTimeCodec.java Copyright 2008-present MongoDB, Inc. Copyright 2018 Cezary Bartosiak Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 6) The following files: SaslPrep.java Copyright 2008-present MongoDB, Inc. Copyright 2017 Tom Bentley Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 7) The following files (originally from https://github.com/marianobarrios/tls-channel): AsynchronousTlsChannel.java AsynchronousTlsChannelGroup.java BufferAllocator.java BufferHolder.java ByteBufferSet.java ByteBufferUtil.java ClientTlsChannel.java DirectBufferAllocator.java DirectBufferDeallocator.java ExtendedAsynchronousByteChannel.java HeapBufferAllocator.java NeedsReadException.java NeedsTaskException.java NeedsWriteException.java ServerTlsChannel.java SniSslContextFactory.java TlsChannel.java TlsChannelBuilder.java TlsChannelCallbackException.java TlsChannelFlowControlException.java TlsChannelImpl.java TlsExplorer.java TrackingAllocator.java Util.java WouldBlockException.java Copyright (c) [2015-2020] all contributors MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -The code for the t-digest was originally authored by Ted Dunning A number of small but very helpful changes have been contributed by Adrien Grand (https://github.com/jpountz) +The Xerces-J 2.12.0 release is available in source code and precompiled binary (JAR files) form. Both Xerces-J packages are made available under the Apache Software License. 
-The code in this repository code was Written by Gil Tene, Michael Barker, and Matt Warren, and released to the public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene Copyright (c) 2014 Michael Barker Copyright (c) 2014 Matt Warren All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +The code for the t-digest was originally authored by Ted Dunning A number of small but very helpful changes have been contributed by Adrien Grand (https://github.com/jpountz) -The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. +The code in this repository code was Written by Gil Tene, Michael Barker, and Matt Warren, and released to the public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ For users of this code who wish to consume it under the BSD license rather than under the public domain or CC0 contribution text mentioned above, the code found under this directory is *also* provided under the following license (commonly referred to as the BSD 2-Clause License). This license does not detract from the above stated release of the code into the public domain, and simply represents an additional license granted by the Author. ----------------------------------------------------------------------------- ** Beginning of BSD 2-Clause License text. ** Copyright (c) 2012, 2013, 2014 Gil Tene Copyright (c) 2014 Michael Barker Copyright (c) 2014 Matt Warren All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -This product includes software developed by Google Snappy: http://code.google.com/p/snappy/ (New BSD License) This product includes software developed by Apache PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/ (Apache 2.0 license) This library containd statically linked libstdc++. This inclusion is allowed by GCC RUntime Library Exception http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html == Contributors == * Tatu Saloranta * Providing benchmark suite * Alec Wysoker * Performance and memory usage improvement +The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. + +This code is licensed under Apache License, Version 2.0 (AL2.0). + +This product includes software developed by Google Snappy: http://code.google.com/p/snappy/ (New BSD License) This product includes software developed by Apache PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/ (Apache 2.0 license) This library containd statically linked libstdc++. This inclusion is allowed by GCC RUntime Library Exception http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html == Contributors == * Tatu Saloranta * Providing benchmark suite * Alec Wysoker * Performance and memory usage improvement This product includes software developed by Joda.org (http://www.joda.org/). +This product includes software developed by The Apache Software Foundation (http://www.apache.org/). In addition, this product includes software dependencies. See the accompanying LICENSE.txt for a listing of dependencies that are NOT Apache licensed (with pointers to their licensing) Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin is a distributed tracing system that is Apache 2.0 Licensed. Copyright 2012 Twitter, Inc. + This product includes software developed by The Apache Software Foundation (http://www.apache.org/). -This product includes software developed by The Apache Software Foundation (http://www.apache.org/). In addition, this product includes software dependencies. 
See the accompanying LICENSE.txt for a listing of dependencies that are NOT Apache licensed (with pointers to their licensing) Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin is a distributed tracing system that is Apache 2.0 Licensed. Copyright 2012 Twitter, Inc. +This product includes software developed by The Apache Software Foundation (http://www.apache.org/). Toposort - Topological sorting for node.js Copyright (c) 2012 by Marcel Klehr MIT LICENSE Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -foodmart-data-json Foodmart data set in JSON format Copyright (C) 2013-2013 Pentaho Copyright (C) 2015-2015 Julian Hyde All Rights Reserved. Based upon the Pentaho mondrian-data-foodmart-json artifact developed as part of the Pentaho Mondrian OLAP engine (http://mondrian.pentaho.com). +[The BSD license] Copyright (c) 2016 The ANTLR Project All rights reserved. + +foodmart-data-json Foodmart data set in JSON format Copyright (C) 2013-2013 Pentaho Copyright (C) 2015-2015 Julian Hyde All Rights Reserved. Based upon the Pentaho mondrian-data-foodmart-json artifact developed as part of the Pentaho Mondrian OLAP engine (http://mondrian.pentaho.com). -protostuff Copyright 2009 David Yu dyuproject@gmail.com +protostuff Copyright 2009 David Yu dyuproject@gmail.com -sulky-modules - several general-purpose modules. Copyright (C) 2007-2018 Joern Huxhorn \ No newline at end of file +sulky-modules - several general-purpose modules. Copyright (C) 2007-2018 Joern Huxhorn \ No newline at end of file diff --git a/distribution/resources/src/main/resources/bin/dremio b/distribution/resources/src/main/resources/bin/dremio index d63a78815c..0f9d18ada8 100755 --- a/distribution/resources/src/main/resources/bin/dremio +++ b/distribution/resources/src/main/resources/bin/dremio @@ -48,6 +48,7 @@ fi export DREMIO_IN_CONTAINER export DREMIO_LOG_TO_CONSOLE export DREMIO_ENV_SCRIPT="dremio-env" +export DREMIO_ADMIN_ONLY=false . "$bin"/dremio-config # get arguments @@ -120,6 +121,8 @@ dremio_internal_start() { DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS $DREMIO_GC_OPTS" # User options DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS $DREMIO_JAVA_SERVER_EXTRA_OPTS $DREMIO_JAVA_EXTRA_OPTS " + # Start in administrator-only mode + DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS -Ddremio.admin-only-mode=${DREMIO_ADMIN_ONLY}" # Add this option only if it is Java 8 if [ "$JAVA_MAJOR_VERSION" -eq 8 ]; then DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS -XX:+PrintClassHistogramBeforeFullGC" @@ -223,7 +226,10 @@ fi DREMIO_NICENESS="${DREMIO_NICENESS:-0}" thiscmd=$0 + args=$@ +# args may be empty as they could be stripped out. Rebuild them here. +args=$([ "${DREMIO_ADMIN_ONLY}" == "true" ] && echo "--admin-only") mkdir -p "${DREMIO_LOG_DIR}" @@ -232,7 +238,7 @@ case $startStopStatus in (start) check_before_start echo Starting $command, logging to $logout - nohup $thiscmd internal_start $args < /dev/null >> ${logout} 2>&1 & + nohup $thiscmd $args internal_start < /dev/null >> ${logout} 2>&1 & sleep 1; echo Check the log file to make sure $command started successfully and did not run into any issues.
;; diff --git a/distribution/resources/src/main/resources/bin/dremio-config b/distribution/resources/src/main/resources/bin/dremio-config index 4d427af99e..754efb2aa2 100644 --- a/distribution/resources/src/main/resources/bin/dremio-config +++ b/distribution/resources/src/main/resources/bin/dremio-config @@ -58,6 +58,9 @@ while [ $# -gt 1 ]; do confdir=$1 shift DREMIO_CONF_DIR=$confdir + elif [ "--admin-only" = "$1" ]; then + DREMIO_ADMIN_ONLY=true + shift else # Presume we are at end of options and break break @@ -196,7 +199,7 @@ if [ ! -x "$JAVA" ]; then exit 1 fi # Ensure that Java version is at least 1.8 -JAVA_VERSION_STRING=`"$JAVA" -version 2>&1 | grep "version" | sed 's/.*"\(.*\)".*/\1/g'` +JAVA_VERSION_STRING=`"$JAVA" -version 2>&1 | sed -n -E 's/(java|openjdk) version "([0-9\\._]+)".*/\2/p'` JAVA_MAJOR_VERSION= if `echo $JAVA_VERSION_STRING | egrep -q -e "^1\.[0-9]{1,}\."`; then JAVA_MAJOR_VERSION=`echo $JAVA_VERSION_STRING | sed 's/^1\.\([0-9]\{1,\}\)\..*/\1/g'` @@ -237,7 +240,7 @@ then echo "Dremio requires at least 4 GB memory to run. ${DREMIO_MAX_MEMORY_SIZE_MB} MB is too low." exit 1 fi - ROLES=`$JAVA -cp "$DREMIO_CLASSPATH" -Ddremio.log.path=${DREMIO_LOG_DIR} com.dremio.dac.daemon.GetRolesCommand` + ROLES=`$JAVA -cp "$DREMIO_CLASSPATH" $DREMIO_JAVA_EXTRA_OPTS -Ddremio.log.path=${DREMIO_LOG_DIR} com.dremio.dac.daemon.GetRolesCommand` if ( echo $ROLES | grep -q "executor" ); then DEFAULT_MAX_HEAP_MEMORY_EXECUTOR_MB=2048 if [ 6144 -le $DREMIO_MAX_MEMORY_SIZE_MB ]; then @@ -350,6 +353,42 @@ case $startStopStatus in ;; esac +if [ "$JAVA_MAJOR_VERSION" -ge 9 ]; then + DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED" +fi + +# Force -XX:UseAVX=2 on Intel Skylake and Cascade Lake CPUs to improve +# performance. See https://bugs.openjdk.org/browse/JDK-8286823. +# +# Skylake and Cascade Lake CPUs can be detected by checking for family 6 and +# model=85 in /proc/cpuinfo. +# +# When family is equal to 6, the value of `model` in /proc/cpuinfo is the +# concatenation of the 'Extended Model' (4 leftmost bits) and the CPU 'Model' +# (4 rightmost bits). +# +# - Family=6 and model=85 denotes Cascade Lake and Skylake CPUs (Extended +# Model=0x5 and Model=0x5) +# - Family=6 and model=106 denotes Ice Lake (Extended Model=0x6 and Model=0xA) +# - ... +# +# Source: https://en.wikichip.org/wiki/intel/cpuid +# +# The code below adds -XX:UseAVX=2 to Dremio Java opts if at *least one* of the +# CPU sockets on the server is running the Skylake or Cascade Lake +# architecture. I.e. it also works on servers with multiple, heterogeneous +# CPUs. +CPU_VENDOR=$((grep vendor_id /proc/cpuinfo 2>/dev/null || echo '- - None') | sort -u | awk '{print $3}') +IS_CPU_FAMILY_6=$(grep -Eq '^cpu family\s*: 6$' /proc/cpuinfo 2>/dev/null && echo true || echo false) +IS_CPU_SKYLAKE_OR_CASCADELAKE=$(grep -Eq '^model\s*: 85$' /proc/cpuinfo 2>/dev/null && echo true || echo false) +if [ "$JAVA_MAJOR_VERSION" -ge 11 ] && \ + [ "$CPU_VENDOR" = "GenuineIntel" ] && \ + [ "$IS_CPU_FAMILY_6" = "true" ] && \ + [ "$IS_CPU_SKYLAKE_OR_CASCADELAKE" = "true" ] +then + DREMIO_JAVA_OPTS="$DREMIO_JAVA_OPTS -XX:UseAVX=2" +fi + # make sure allocator chunks are done as mmap'd memory (and reduce arena overhead) # Newer versions of glibc use an arena memory allocator that causes virtual # memory usage to explode. Tune the variable down to prevent vmem explosion. 
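Two behavioral changes in the launcher scripts above deserve a usage note. A minimal sketch, not a definitive procedure: the --admin-only flag and the CPU-gate conditions are taken from the hunks above, while the invocation from the distribution's bin directory is an assumption for illustration.

    # Start the Dremio daemon in administrator-only mode: dremio-config consumes
    # --admin-only and sets DREMIO_ADMIN_ONLY=true, then bin/dremio rebuilds the
    # flag into $args and passes -Ddremio.admin-only-mode=true to the JVM.
    bin/dremio --admin-only start

    # Show the /proc/cpuinfo fields dremio-config inspects for the -XX:UseAVX=2
    # gate (vendor GenuineIntel, cpu family 6, model 85 = Skylake/Cascade Lake).
    grep -E '^(vendor_id|cpu family|model)\s*:' /proc/cpuinfo | sort -u

The grep only reports the relevant fields; dremio-config itself appends the JVM flag, and only when all three conditions hold and Java 11 or newer is in use.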
diff --git a/distribution/server/pom.xml b/distribution/server/pom.xml index 79be15866d..5a30dac470 100644 --- a/distribution/server/pom.xml +++ b/distribution/server/pom.xml @@ -22,7 +22,7 @@ com.dremio.distribution dremio-distribution-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-distribution @@ -36,8 +36,8 @@ com.dremio.plugins dremio - 780000000 - 760000000 + 820000000 + 800000000 @@ -569,8 +569,8 @@ exit 0 - 910000000 - 900000000 + 928000000 + 920000000 diff --git a/mvnw b/mvnw index b7f064624f..8d937f4c14 100755 --- a/mvnw +++ b/mvnw @@ -19,7 +19,7 @@ # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- -# Apache Maven Wrapper startup batch script, version 3.1.1 +# Apache Maven Wrapper startup batch script, version 3.2.0 # # Required ENV vars: # ------------------ @@ -53,7 +53,7 @@ fi cygwin=false; darwin=false; mingw=false -case "`uname`" in +case "$(uname)" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true @@ -61,7 +61,7 @@ case "`uname`" in # See https://developer.apple.com/library/mac/qa/qa1170/_index.html if [ -z "$JAVA_HOME" ]; then if [ -x "/usr/libexec/java_home" ]; then - JAVA_HOME="`/usr/libexec/java_home`"; export JAVA_HOME + JAVA_HOME="$(/usr/libexec/java_home)"; export JAVA_HOME else JAVA_HOME="/Library/Java/Home"; export JAVA_HOME fi @@ -71,38 +71,38 @@ esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then - JAVA_HOME=`java-config --jre-home` + JAVA_HOME=$(java-config --jre-home) fi fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$JAVA_HOME" ] && - JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + JAVA_HOME=$(cygpath --unix "$JAVA_HOME") [ -n "$CLASSPATH" ] && - CLASSPATH=`cygpath --path --unix "$CLASSPATH"` + CLASSPATH=$(cygpath --path --unix "$CLASSPATH") fi # For Mingw, ensure paths are in UNIX format before anything is touched if $mingw ; then - [ -n "$JAVA_HOME" ] && - JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && [ -d "$JAVA_HOME" ] && + JAVA_HOME="$(cd "$JAVA_HOME" || (echo "cannot cd into $JAVA_HOME."; exit 1); pwd)" fi if [ -z "$JAVA_HOME" ]; then - javaExecutable="`which javac`" - if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + javaExecutable="$(which javac)" + if [ -n "$javaExecutable" ] && ! [ "$(expr "\"$javaExecutable\"" : '\([^ ]*\)')" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. - readLink=`which readlink` - if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + readLink=$(which readlink) + if [ ! 
"$(expr "$readLink" : '\([^ ]*\)')" = "no" ]; then if $darwin ; then - javaHome="`dirname \"$javaExecutable\"`" - javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + javaHome="$(dirname "\"$javaExecutable\"")" + javaExecutable="$(cd "\"$javaHome\"" && pwd -P)/javac" else - javaExecutable="`readlink -f \"$javaExecutable\"`" + javaExecutable="$(readlink -f "\"$javaExecutable\"")" fi - javaHome="`dirname \"$javaExecutable\"`" - javaHome=`expr "$javaHome" : '\(.*\)/bin'` + javaHome="$(dirname "\"$javaExecutable\"")" + javaHome=$(expr "$javaHome" : '\(.*\)/bin') JAVA_HOME="$javaHome" export JAVA_HOME fi @@ -118,7 +118,7 @@ if [ -z "$JAVACMD" ] ; then JAVACMD="$JAVA_HOME/bin/java" fi else - JAVACMD="`\\unset -f command; \\command -v java`" + JAVACMD="$(\unset -f command 2>/dev/null; \command -v java)" fi fi @@ -150,108 +150,99 @@ find_maven_basedir() { fi # workaround for JBEAP-8937 (on Solaris 10/Sparc) if [ -d "${wdir}" ]; then - wdir=`cd "$wdir/.."; pwd` + wdir=$(cd "$wdir/.." || exit 1; pwd) fi # end of workaround done - printf '%s' "$(cd "$basedir"; pwd)" + printf '%s' "$(cd "$basedir" || exit 1; pwd)" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then - echo "$(tr -s '\n' ' ' < "$1")" + # Remove \r in case we run on Windows within Git Bash + # and check out the repository with auto CRLF management + # enabled. Otherwise, we may read lines that are delimited with + # \r\n and produce $'-Xarg\r' rather than -Xarg due to word + # splitting rules. + tr -s '\r\n' ' ' < "$1" fi } -BASE_DIR=$(find_maven_basedir "$(dirname $0)") +log() { + if [ "$MVNW_VERBOSE" = true ]; then + printf '%s\n' "$1" + fi +} + +BASE_DIR=$(find_maven_basedir "$(dirname "$0")") if [ -z "$BASE_DIR" ]; then exit 1; fi MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}; export MAVEN_PROJECTBASEDIR -if [ "$MVNW_VERBOSE" = true ]; then - echo $MAVEN_PROJECTBASEDIR -fi +log "$MAVEN_PROJECTBASEDIR" ########################################################################################## # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central # This allows using the maven wrapper in projects that prohibit checking in binary data. ########################################################################################## -if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found .mvn/wrapper/maven-wrapper.jar" - fi +wrapperJarPath="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" +if [ -r "$wrapperJarPath" ]; then + log "Found $wrapperJarPath" else - if [ "$MVNW_VERBOSE" = true ]; then - echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." - fi + log "Couldn't find $wrapperJarPath, downloading it ..." 
+ if [ -n "$MVNW_REPOURL" ]; then - wrapperUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar" + wrapperUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" else - wrapperUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar" + wrapperUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" fi - while IFS="=" read key value; do - case "$key" in (wrapperUrl) wrapperUrl="$value"; break ;; + while IFS="=" read -r key value; do + # Remove '\r' from value to allow usage on windows as IFS does not consider '\r' as a separator ( considers space, tab, new line ('\n'), and custom '=' ) + safeValue=$(echo "$value" | tr -d '\r') + case "$key" in (wrapperUrl) wrapperUrl="$safeValue"; break ;; esac - done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" - if [ "$MVNW_VERBOSE" = true ]; then - echo "Downloading from: $wrapperUrl" - fi - wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" + log "Downloading from: $wrapperUrl" + if $cygwin; then - wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") fi if command -v wget > /dev/null; then - QUIET="--quiet" - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found wget ... using wget" - QUIET="" - fi + log "Found wget ... using wget" + [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--quiet" if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then - wget $QUIET "$wrapperUrl" -O "$wrapperJarPath" + wget $QUIET "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" else - wget $QUIET --http-user="$MVNW_USERNAME" --http-password="$MVNW_PASSWORD" "$wrapperUrl" -O "$wrapperJarPath" + wget $QUIET --http-user="$MVNW_USERNAME" --http-password="$MVNW_PASSWORD" "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" fi - [ $? -eq 0 ] || rm -f "$wrapperJarPath" elif command -v curl > /dev/null; then - QUIET="--silent" - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found curl ... using curl" - QUIET="" - fi + log "Found curl ... using curl" + [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--silent" if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then - curl $QUIET -o "$wrapperJarPath" "$wrapperUrl" -f -L + curl $QUIET -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" else - curl $QUIET --user "$MVNW_USERNAME:$MVNW_PASSWORD" -o "$wrapperJarPath" "$wrapperUrl" -f -L + curl $QUIET --user "$MVNW_USERNAME:$MVNW_PASSWORD" -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" fi - [ $? -eq 0 ] || rm -f "$wrapperJarPath" else - if [ "$MVNW_VERBOSE" = true ]; then - echo "Falling back to using Java to download" - fi - javaSource="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" - javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" + log "Falling back to using Java to download" + javaSource="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.java" + javaClass="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.class" # For Cygwin, switch paths to Windows format before running javac if $cygwin; then - javaSource=`cygpath --path --windows "$javaSource"` - javaClass=`cygpath --path --windows "$javaClass"` + javaSource=$(cygpath --path --windows "$javaSource") + javaClass=$(cygpath --path --windows "$javaClass") fi if [ -e "$javaSource" ]; then if [ ! 
-e "$javaClass" ]; then - if [ "$MVNW_VERBOSE" = true ]; then - echo " - Compiling MavenWrapperDownloader.java ..." - fi - # Compiling the Java class + log " - Compiling MavenWrapperDownloader.java ..." ("$JAVA_HOME/bin/javac" "$javaSource") fi if [ -e "$javaClass" ]; then - # Running the downloader - if [ "$MVNW_VERBOSE" = true ]; then - echo " - Running MavenWrapperDownloader.java ..." - fi - ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + log " - Running MavenWrapperDownloader.java ..." + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$wrapperUrl" "$wrapperJarPath") || rm -f "$wrapperJarPath" fi fi fi @@ -260,25 +251,55 @@ fi # End of extension ########################################################################################## +# If specified, validate the SHA-256 sum of the Maven wrapper jar file +wrapperSha256Sum="" +while IFS="=" read -r key value; do + case "$key" in (wrapperSha256Sum) wrapperSha256Sum=$value; break ;; + esac +done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" +if [ -n "$wrapperSha256Sum" ]; then + wrapperSha256Result=false + if command -v sha256sum > /dev/null; then + if echo "$wrapperSha256Sum $wrapperJarPath" | sha256sum -c > /dev/null 2>&1; then + wrapperSha256Result=true + fi + elif command -v shasum > /dev/null; then + if echo "$wrapperSha256Sum $wrapperJarPath" | shasum -a 256 -c > /dev/null 2>&1; then + wrapperSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." + echo "Please install either command, or disable validation by removing 'wrapperSha256Sum' from your maven-wrapper.properties." + exit 1 + fi + if [ $wrapperSha256Result = false ]; then + echo "Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised." >&2 + echo "Investigate or delete $wrapperJarPath to attempt a clean download." >&2 + echo "If you updated your Maven version, you need to update the specified wrapperSha256Sum property." >&2 + exit 1 + fi +fi + MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$JAVA_HOME" ] && - JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") [ -n "$CLASSPATH" ] && - CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + CLASSPATH=$(cygpath --path --windows "$CLASSPATH") [ -n "$MAVEN_PROJECTBASEDIR" ] && - MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` + MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") fi # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
-MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $*" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain +# shellcheck disable=SC2086 # safe args exec "$JAVACMD" \ $MAVEN_OPTS \ $MAVEN_DEBUG_OPTS \ diff --git a/mvnw.cmd b/mvnw.cmd index cba1f040dc..f80fbad3e7 100644 --- a/mvnw.cmd +++ b/mvnw.cmd @@ -18,7 +18,7 @@ @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- -@REM Apache Maven Wrapper startup batch script, version 3.1.1 +@REM Apache Maven Wrapper startup batch script, version 3.2.0 @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @@ -119,7 +119,7 @@ SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain -set WRAPPER_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar" +set WRAPPER_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( IF "%%A"=="wrapperUrl" SET WRAPPER_URL=%%B @@ -133,7 +133,7 @@ if exist %WRAPPER_JAR% ( ) ) else ( if not "%MVNW_REPOURL%" == "" ( - SET WRAPPER_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar" + SET WRAPPER_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" ) if "%MVNW_VERBOSE%" == "true" ( echo Couldn't find %WRAPPER_JAR%, downloading it ... @@ -153,6 +153,24 @@ if exist %WRAPPER_JAR% ( ) @REM End of extension +@REM If specified, validate the SHA-256 sum of the Maven wrapper jar file +SET WRAPPER_SHA_256_SUM="" +FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperSha256Sum" SET WRAPPER_SHA_256_SUM=%%B +) +IF NOT %WRAPPER_SHA_256_SUM%=="" ( + powershell -Command "&{"^ + "$hash = (Get-FileHash \"%WRAPPER_JAR%\" -Algorithm SHA256).Hash.ToLower();"^ + "If('%WRAPPER_SHA_256_SUM%' -ne $hash){"^ + " Write-Output 'Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised.';"^ + " Write-Output 'Investigate or delete %WRAPPER_JAR% to attempt a clean download.';"^ + " Write-Output 'If you updated your Maven version, you need to update the specified wrapperSha256Sum property.';"^ + " exit 1;"^ + "}"^ + "}" + if ERRORLEVEL 1 goto error +) + @REM Provide a "standardized" way to retrieve the CLI args that will @REM work with both Windows and non-Windows executions. 
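Both wrapper scripts above now honor an optional wrapperSha256Sum property that pins the wrapper jar's checksum. A minimal sketch of how a project might set it, assuming a POSIX shell with sha256sum available; the property name and file paths come from the hunks above:

    # Record the current wrapper jar's digest in maven-wrapper.properties;
    # mvnw (via sha256sum/shasum) and mvnw.cmd (via Get-FileHash) will then
    # refuse to run if the jar on disk no longer matches it.
    printf 'wrapperSha256Sum=%s\n' \
      "$(sha256sum .mvn/wrapper/maven-wrapper.jar | cut -d' ' -f1)" \
      >> .mvn/wrapper/maven-wrapper.properties

As the failure message above notes, the property must be updated whenever the wrapper version (and hence the jar) changes.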
set MAVEN_CMD_LINE_ARGS=%* diff --git a/plugins/adls/pom.xml b/plugins/adls/pom.xml index 3f3ab2b13b..79cf6d8dc3 100644 --- a/plugins/adls/pom.xml +++ b/plugins/adls/pom.xml @@ -25,7 +25,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-adls-plugin diff --git a/plugins/adls/src/main/java/com/dremio/plugins/adl/store/AzureDataLakeStoragePlugin.java b/plugins/adls/src/main/java/com/dremio/plugins/adl/store/AzureDataLakeStoragePlugin.java index 69801db20f..0d08a4e1b4 100644 --- a/plugins/adls/src/main/java/com/dremio/plugins/adl/store/AzureDataLakeStoragePlugin.java +++ b/plugins/adls/src/main/java/com/dremio/plugins/adl/store/AzureDataLakeStoragePlugin.java @@ -102,6 +102,7 @@ protected boolean isAsyncEnabledForQuery(OperatorContext context) { return context != null && context.getOptions().getOption(AzureDataLakeOptions.ASYNC_READS); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { return false; } diff --git a/plugins/adls/src/main/java/com/dremio/plugins/adl/store/DremioAdlFileSystem.java b/plugins/adls/src/main/java/com/dremio/plugins/adl/store/DremioAdlFileSystem.java index 80c5e64960..8ba2d6f1eb 100644 --- a/plugins/adls/src/main/java/com/dremio/plugins/adl/store/DremioAdlFileSystem.java +++ b/plugins/adls/src/main/java/com/dremio/plugins/adl/store/DremioAdlFileSystem.java @@ -56,6 +56,7 @@ public boolean getAclBit() { return this.aclBit; } + @Override public boolean equals(Object obj) { if (!(obj instanceof FsPermission)) { return false; @@ -65,6 +66,7 @@ public boolean equals(Object obj) { } } + @Override public int hashCode() { return this.toShort(); } diff --git a/plugins/adls/src/main/resources/adl-layout.json b/plugins/adls/src/main/resources/adl-layout.json index c65a35c72e..4cd2e71150 100644 --- a/plugins/adls/src/main/resources/adl-layout.json +++ b/plugins/adls/src/main/resources/adl-layout.json @@ -4,7 +4,7 @@ "beta" ], "metadataRefresh": { - "datasetDiscovery": true, + "datasetDiscovery": false, "isFileSystemSource": true }, "form": { diff --git a/plugins/awsauth/pom.xml b/plugins/awsauth/pom.xml index d01688ea0b..8723f2dbf5 100644 --- a/plugins/awsauth/pom.xml +++ b/plugins/awsauth/pom.xml @@ -26,7 +26,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-plugin-awsauth @@ -44,12 +44,6 @@ software.amazon.awssdk apache-client - - - commons-logging - commons-logging - - com.amazonaws diff --git a/plugins/awsglue/pom.xml b/plugins/awsglue/pom.xml index 89dbc3e405..769c3b1af7 100644 --- a/plugins/awsglue/pom.xml +++ b/plugins/awsglue/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-awsglue-plugin @@ -140,30 +140,6 @@ - - org.codehaus.mojo - build-helper-maven-plugin - - - reserve-network-port - - reserve-network-port - - process-resources - - - - 16000 - 16999 - - s3mock.reserved.port - - - - - maven-dependency-plugin @@ -190,14 +166,6 @@ true - - maven-surefire-plugin - - - ${s3mock.reserved.port} - - - maven-failsafe-plugin diff --git a/plugins/awsglue/src/main/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePlugin.java b/plugins/awsglue/src/main/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePlugin.java index ebcc7d1882..3cb3e143ce 100644 --- a/plugins/awsglue/src/main/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePlugin.java +++ 
b/plugins/awsglue/src/main/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePlugin.java @@ -25,7 +25,6 @@ import java.util.Map; import java.util.Optional; import java.util.function.Predicate; -import java.util.function.Supplier; import javax.inject.Provider; @@ -38,9 +37,9 @@ import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.TableOperations; -import org.apache.iceberg.aws.AwsProperties; import org.apache.iceberg.aws.glue.DremioGlueTableOperations; import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.util.LockManagers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,7 +70,6 @@ import com.dremio.exec.catalog.RollbackOption; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.TableMutationOptions; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.catalog.conf.Property; import com.dremio.exec.dotfile.View; import com.dremio.exec.physical.base.OpProps; @@ -102,7 +100,6 @@ import com.dremio.exec.store.dfs.FormatPlugin; import com.dremio.exec.store.dfs.IcebergTableProps; import com.dremio.exec.store.hive.Hive2StoragePluginConfig; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; @@ -301,11 +298,6 @@ private Configuration getConfForGlue() { return config; } - @Override - public Supplier getHadoopFsSupplier(String path, Iterable> conf, String queryUser) { - return ((MutablePlugin)hiveStoragePlugin).getHadoopFsSupplier(path, conf, queryUser); - } - @Override public Configuration getFsConfCopy() { return ((SupportsIcebergRootPointer) hiveStoragePlugin).getFsConfCopy(); @@ -338,20 +330,24 @@ public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, Na @Override public TableOperations createIcebergTableOperations(FileSystem fs, String queryUserName, IcebergTableIdentifier tableIdentifier) { - Map properties = new HashMap<>(); for (Map.Entry property : glueConfTableOperations) { properties.put(property.getKey(), property.getValue()); } IcebergGlueTableIdentifier glueTableIdentifier = (IcebergGlueTableIdentifier) tableIdentifier; - DremioFileIO fileIO = new DremioFileIO(fs, glueConfTableOperations, this); + FileIO fileIO = createIcebergFileIO(fs, null, null, null, null); return new DremioGlueTableOperations(getGlueClient(), LockManagers.from(properties), - IcebergGlueModel.GLUE, new AwsProperties(properties), fileIO, + IcebergGlueModel.GLUE, properties, fileIO, TableIdentifier.of(glueTableIdentifier.getNamespace(), glueTableIdentifier.getTableName())); + } - + @Override + public FileIO createIcebergFileIO(FileSystem fs, OperatorContext context, List dataset, + String datasourcePluginUID, Long fileLength) { + return ((SupportsIcebergRootPointer) hiveStoragePlugin).createIcebergFileIO(fs, context, dataset, + datasourcePluginUID, fileLength); } @Override @@ -434,9 +430,9 @@ public IcebergModel getIcebergModel(String location, NamespaceKey key, String us } catch (IOException e) { throw UserException.validationError(e).message("Failure creating File System instance for path %s", location).buildSilently(); } - Preconditions.checkArgument(key.size() >= 2, "key must be at least two parts"); - String tableName = key.getName(); - String dbName = key.getPathComponents().get(1); + List dbAndTableName = 
resolveTableNameToValidPath(key.getPathComponents()); + String dbName = dbAndTableName.get(0); + String tableName = dbAndTableName.get(1); return new IcebergGlueModel(dbName, tableName, fs, userName, null, this); } @@ -452,7 +448,7 @@ public IcebergModel getIcebergModel(IcebergTableProps tableProps, String userNam return new IcebergGlueModel(tableProps.getDatabaseName(), tableProps.getTableName(), fs, userName, null, this); } - private String resolveTableLocation(NamespaceKey tableSchemaPath, WriterOptions writerOptions) { + private String resolveTableLocation(String dbName, String tableName, WriterOptions writerOptions) { String queryLocation = writerOptions.getTableLocation(); if (StringUtils.isNotEmpty(queryLocation)) { return PathUtils.removeTrailingSlash(queryLocation); @@ -462,10 +458,6 @@ private String resolveTableLocation(NamespaceKey tableSchemaPath, WriterOptions return null; } - Preconditions.checkArgument(tableSchemaPath.size() >= 2, "tableSchemaPath must be at least two parts"); - String tableName = tableSchemaPath.getName(); - String dbName = tableSchemaPath.getPathComponents().get(1); - try { GetDatabaseResponse response = getGlueClient().getDatabase(GetDatabaseRequest.builder().name(dbName).build()); if (response == null || response.database() == null || StringUtils.isEmpty(response.database().locationUri())) { @@ -487,7 +479,11 @@ private String resolveTableLocation(NamespaceKey tableSchemaPath, WriterOptions @Override public void createEmptyTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, BatchSchema batchSchema, WriterOptions writerOptions) { - String tableLocation = resolveTableLocation(tableSchemaPath, writerOptions); + List dbAndTableName = resolveTableNameToValidPath(tableSchemaPath.getPathComponents()); + String dbName = dbAndTableName.get(0); + String tableName = dbAndTableName.get(1); + + String tableLocation = resolveTableLocation(dbName, tableName, writerOptions); if (StringUtils.isEmpty(tableLocation)) { String warehouseLocation = PathUtils.removeTrailingSlash(glueConfTableOperations.get(CatalogProperties.WAREHOUSE_LOCATION)); if (StringUtils.isEmpty(warehouseLocation) || HiveConf.ConfVars.METASTOREWAREHOUSE.getDefaultValue().equals(warehouseLocation)) { @@ -499,7 +495,6 @@ public void createEmptyTable(NamespaceKey tableSchemaPath, SchemaConfig schemaCo } IcebergModel icebergModel = getIcebergModel(tableLocation, tableSchemaPath, schemaConfig.getUserName()); - String tableName = tableSchemaPath.getName(); PartitionSpec partitionSpec = Optional.ofNullable(writerOptions.getTableFormatOptions().getIcebergSpecificOptions() .getIcebergTableProps()).map(props -> props.getDeserializedPartitionSpec()).orElse(null); IcebergOpCommitter icebergOpCommitter = icebergModel.getCreateTableCommitter(tableName, icebergModel.getTableIdentifier(tableLocation), batchSchema, @@ -554,18 +549,6 @@ public void rollbackTable(NamespaceKey tableSchemaPath, icebergModel.rollbackTable(icebergModel.getTableIdentifier(metadataLocation), rollbackOption); } - @Override - public void vacuumTable(NamespaceKey tableSchemaPath, - DatasetConfig datasetConfig, - SchemaConfig schemaConfig, - VacuumOption vacuumOption, - TableMutationOptions tableMutationOptions) { - SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); - String metadataLocation = IcebergUtils.getMetadataLocation(datasetConfig, splits.getPartitionChunks().iterator()); - IcebergModel icebergModel = getIcebergModel(metadataLocation, tableSchemaPath, 
schemaConfig.getUserName()); - icebergModel.vacuumTable(icebergModel.getTableIdentifier(metadataLocation), vacuumOption); - } - @Override public void addColumns(NamespaceKey key, DatasetConfig datasetConfig, @@ -656,7 +639,7 @@ public List getPrimaryKeyFromMetadata(NamespaceKey table, final String path; if (DatasetHelper.isInternalIcebergTable(datasetConfig)) { final FileSystemPlugin metaStoragePlugin = context.getCatalogService().getSource(METADATA_STORAGE_PLUGIN_NAME); - icebergModel = metaStoragePlugin.getIcebergModel(metaStoragePlugin.getSystemUserFS()); + icebergModel = metaStoragePlugin.getIcebergModel(); String metadataTableName = datasetConfig.getPhysicalDataset().getIcebergMetadata().getTableUuid(); path = metaStoragePlugin.resolveTablePathToValidPath(metadataTableName).toString(); } else if (DatasetHelper.isIcebergDataset(datasetConfig)) { diff --git a/plugins/awsglue/src/main/java/org/apache/iceberg/aws/glue/DremioGlueTableOperations.java b/plugins/awsglue/src/main/java/org/apache/iceberg/aws/glue/DremioGlueTableOperations.java index 8c14a28066..8ca5cf020b 100644 --- a/plugins/awsglue/src/main/java/org/apache/iceberg/aws/glue/DremioGlueTableOperations.java +++ b/plugins/awsglue/src/main/java/org/apache/iceberg/aws/glue/DremioGlueTableOperations.java @@ -15,6 +15,8 @@ */ package org.apache.iceberg.aws.glue; +import java.util.Map; + import org.apache.iceberg.LockManager; import org.apache.iceberg.aws.AwsProperties; import org.apache.iceberg.catalog.TableIdentifier; @@ -25,10 +27,24 @@ /** * Glue table operations */ -public class DremioGlueTableOperations extends GlueTableOperations{ +public class DremioGlueTableOperations extends GlueTableOperations { + + private final FileIO dremioFileIO; + public DremioGlueTableOperations(GlueClient glue, LockManager lockManager, - String catalogName, AwsProperties awsProperties, + String catalogName, Map properties, FileIO fileIO, TableIdentifier tableIdentifier) { - super(glue, lockManager, catalogName, awsProperties, fileIO, tableIdentifier); + super(glue, lockManager, catalogName, new AwsProperties(properties), properties, null, tableIdentifier); + this.dremioFileIO = fileIO; + } + + @Override + public FileIO io() { + // After https://github.com/apache/iceberg/pull/5756, + // `FileIO` is constructed in the parent class using catalog properties and hadoop conf. + // `DremioFileIO` cannot be constructed without a `Plugin` in the parent class. + // Hence, by default construct `S3FileIO` (as io-impl is not configured) + // and override it with `DremioFileIO`. 
+ return dremioFileIO; } } diff --git a/plugins/awsglue/src/main/resources/awsglue-layout.json b/plugins/awsglue/src/main/resources/awsglue-layout.json index 050afe4203..15c41407b3 100644 --- a/plugins/awsglue/src/main/resources/awsglue-layout.json +++ b/plugins/awsglue/src/main/resources/awsglue-layout.json @@ -72,11 +72,11 @@ "container": {} }, { - "value": "CA_NORTH_1", + "value": "CN_NORTH_1", "container": {} }, { - "value": "CA_NORTHWEST_1", + "value": "CN_NORTHWEST_1", "container": {} }, { diff --git a/plugins/awsglue/src/test/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePluginTest.java b/plugins/awsglue/src/test/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePluginTest.java index d43726f255..b250de9678 100644 --- a/plugins/awsglue/src/test/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePluginTest.java +++ b/plugins/awsglue/src/test/java/com/dremio/plugins/awsglue/store/AWSGlueStoragePluginTest.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.List; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -36,6 +37,7 @@ import com.dremio.exec.store.CatalogService; import com.dremio.service.InitializerRegistry; import com.dremio.service.namespace.source.proto.SourceConfig; +import com.google.common.base.Preconditions; import io.findify.s3mock.S3Mock; @@ -47,7 +49,7 @@ public class AWSGlueStoragePluginTest extends BaseTestQuery { @ClassRule public static final TemporaryFolder folder = new TemporaryFolder(); - private static Integer port; + private static int port; private static S3Mock s3Mock; @BeforeClass @@ -67,6 +69,14 @@ public static void setupDefaultTestCluster() throws Exception { } + @AfterClass + public static void teardownDefaultTestCluster() throws Exception { + if (s3Mock != null) { + s3Mock.shutdown(); + s3Mock = null; + } + } + @Test public void testParquet() throws Exception { testBuilder() @@ -143,18 +153,25 @@ public void testNoInputFormat() throws Exception { .hasMessageContaining("DATA_READ ERROR: Unable to get Hive table InputFormat class."); } - private static void setupS3Mock() { - port = Integer.getInteger("s3mock.reserved.port"); - if (port == null) { - throw new RuntimeException("Can't start test since s3mock.reserved.port property is not available."); - } + @Test + public void testCreateLongPathShouldThrow() throws Exception { + assertThatThrownBy(() -> test("CREATE TABLE \"testglue\".\"default\".long.path.should.throw (c1 int)")) + .hasMessageContaining("Dataset path '[testglue, default, long, path, should, throw]' is invalid"); + } + + @Test + public void testCreateAsLongPathShouldThrow() throws Exception { + assertThatThrownBy(() -> test("CREATE TABLE \"testglue\".\"default\".long.ctas.path.should.throw (c1 int) as values (1), (2)")) + .hasMessageContaining("Dataset path '[testglue, default, long, ctas, path, should, throw]' is invalid"); + } + private static void setupS3Mock() { + Preconditions.checkState(s3Mock == null); s3Mock = new S3Mock.Builder() - .withPort(port) + .withPort(0) .withFileBackend(folder.getRoot().getAbsolutePath()) .build(); - - s3Mock.start(); + port = s3Mock.start().localAddress().getPort(); } private static void setupBucketAndFile() throws IOException { diff --git a/plugins/azure/pom.xml b/plugins/azure/pom.xml index 7243d39670..6755dd7aa2 100644 --- a/plugins/azure/pom.xml +++ b/plugins/azure/pom.xml @@ -24,7 +24,7 @@ com.dremio.plugins dremio-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 
dremio-azure-storage-plugin diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AbstractAzureStorageConf.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AbstractAzureStorageConf.java new file mode 100644 index 0000000000..e86db17bb8 --- /dev/null +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AbstractAzureStorageConf.java @@ -0,0 +1,247 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.azure; + +import java.util.List; + +import javax.inject.Provider; +import javax.validation.constraints.Max; +import javax.validation.constraints.Min; + +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.conf.DefaultCtasFormatSelection; +import com.dremio.exec.catalog.conf.DisplayMetadata; +import com.dremio.exec.catalog.conf.NotMetadataImpacting; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.catalog.conf.Secret; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.dfs.CacheProperties; +import com.dremio.exec.store.dfs.FileSystemConf; +import com.dremio.exec.store.dfs.SchemaMutability; +import com.dremio.io.file.Path; +import com.dremio.options.OptionManager; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; + +import io.protostuff.Tag; + +/** + * Abstract class to hold Azure Storage (including datalake v2) plugin conf + */ +public abstract class AbstractAzureStorageConf extends FileSystemConf { + + /** + * @return secret type for SharedAccess authentication + */ + public abstract SharedAccessSecretType getSharedAccessSecretType(); + + /** + * @return Azure Key Vault/VaultURI for SharedAccess authentication + */ + public abstract String getAccessKeyUri(); + + /** + * @return secret type for Azure Active Directory authentication + */ + public abstract AzureActiveDirectorySecretType getAzureADSecretType(); + + /** + * @return Azure Key Vault/VaultURI for Azure Active Directory authentication + */ + public abstract String getClientSecretUri(); + + public static final List KEY_AUTH_PROPS = ImmutableList.of( + AzureStorageFileSystem.ACCOUNT, + AzureStorageFileSystem.SECURE, + AzureStorageFileSystem.CONTAINER_LIST, + AzureStorageFileSystem.KEY + ); + + public static final List AZURE_AD_PROPS = ImmutableList.of( + AzureStorageFileSystem.ACCOUNT, + AzureStorageFileSystem.SECURE, + AzureStorageFileSystem.CONTAINER_LIST, + AzureStorageFileSystem.CLIENT_ID, + AzureStorageFileSystem.CLIENT_SECRET, + AzureStorageFileSystem.TOKEN_ENDPOINT + ); + + /** + * Type of Storage + */ + public enum AccountKind { + @Tag(1) + @DisplayMetadata(label = "StorageV1") + STORAGE_V1, + + @Tag(2) + @DisplayMetadata(label = "StorageV2") + STORAGE_V2 + ; + + @JsonIgnore + public Prototype getPrototype(boolean enableSSL) { + if(this == AccountKind.STORAGE_V1) { + return enableSSL ? 
Prototype.WASBS : Prototype.WASB; + } else { + return enableSSL ? Prototype.ABFSS: Prototype.ABFS; + } + } + } + + @Tag(1) + @DisplayMetadata(label = "Account Version") + public AccountKind accountKind = AccountKind.STORAGE_V2; + + @Tag(2) + @DisplayMetadata(label = "Account Name") + public String accountName; + + @Tag(3) + @Secret + @DisplayMetadata() + public String accessKey; + + @Tag(4) + @DisplayMetadata(label = "Root Path") + public String rootPath = "/"; + + @Tag(5) + @DisplayMetadata(label = "Advanced Properties") + public List propertyList; + + @Tag(6) + @DisplayMetadata(label = "Blob Containers & Filesystem Allowlist") + public List containers; + + @Tag(7) + @NotMetadataImpacting + @DisplayMetadata(label = "Encrypt connection") + public boolean enableSSL = true; + + @Tag(8) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable exports into the source (CTAS and DROP)") + @JsonIgnore + public boolean allowCreateDrop = false; + + @Tag(9) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable asynchronous access when possible") + public boolean enableAsync = true; + + @Tag(10) + @DisplayMetadata(label = "Application ID") + public String clientId; + + @Tag(11) + @DisplayMetadata(label = "OAuth 2.0 Token Endpoint") + public String tokenEndpoint; + + @Tag(12) + @Secret + @DisplayMetadata() + public String clientSecret; + + @Tag(13) + @DisplayMetadata(label = "Authentication Type") + public AzureAuthenticationType credentialsType = AzureAuthenticationType.ACCESS_KEY; + + @Tag(14) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable local caching when possible") + public boolean isCachingEnabled = true; + + @Tag(15) + @NotMetadataImpacting + @Min(value = 1, message = "Max percent of total available cache space must be between 1 and 100") + @Max(value = 100, message = "Max percent of total available cache space must be between 1 and 100") + @DisplayMetadata(label = "Max percent of total available cache space to use when possible") + public int maxCacheSpacePct = 100; + + @Tag(16) + @NotMetadataImpacting + @DisplayMetadata(label = "Default CTAS Format") + public DefaultCtasFormatSelection defaultCtasFormat = DefaultCtasFormatSelection.ICEBERG; + + @Tag(17) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable partition column inference") + public boolean isPartitionInferenceEnabled = false; + + @Override + public AzureStoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { + Preconditions.checkNotNull(accountName, "Account name must be provided."); + return new AzureStoragePlugin(this, context, name, pluginIdProvider); + } + + @Override + public String getDefaultCtasFormat() { + return defaultCtasFormat.getDefaultCtasFormat(); + } + + @Override + public Path getPath() { + return Path.of(rootPath); + } + + @Override + public boolean isImpersonationEnabled() { + return false; + } + + @Override + public List getProperties() { + return propertyList; + } + + @Override + public String getConnection() { + return String.format("%s:///", CloudFileSystemScheme.AZURE_STORAGE_FILE_SYSTEM_SCHEME.getScheme()); + } + + @Override + public SchemaMutability getSchemaMutability() { + return SchemaMutability.USER_TABLE; + } + + @Override + public boolean isPartitionInferenceEnabled() { + return isPartitionInferenceEnabled; + } + + @Override + public boolean isAsyncEnabled() { + return enableAsync; + } + + @Override + public CacheProperties getCacheProperties() { + return new CacheProperties() { + @Override + public boolean isCachingEnabled(final OptionManager 
optionManager) { + return isCachingEnabled; + } + + @Override + public int cacheMaxSpaceLimitPct() { + return maxCacheSpacePct; + } + }; + } + +} diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureActiveDirectorySecretType.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureActiveDirectorySecretType.java new file mode 100644 index 0000000000..601e935d7c --- /dev/null +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureActiveDirectorySecretType.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.azure; + +import com.dremio.exec.catalog.conf.DisplayMetadata; + +import io.protostuff.Tag; + +public enum AzureActiveDirectorySecretType { + + @Tag(1) + @DisplayMetadata(label = "Dremio") + AZURE_ACTIVE_DIRECTORY_SECRET_KEY, + + @Tag(2) + @DisplayMetadata(label = "Azure Key Vault") + AZURE_ACTIVE_DIRECTORY_KEY_VAULT +} diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncContainerProvider.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncContainerProvider.java index 1185e10591..d93e42cbf2 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncContainerProvider.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncContainerProvider.java @@ -98,7 +98,7 @@ public class AzureAsyncContainerProvider implements ContainerProvider { this.isSecure = isSecure; this.asyncHttpClient = asyncHttpClient; this.rootPath = rootPath; - this.retryer = new Retryer.Builder() + this.retryer = Retryer.newBuilder() .retryIfExceptionOfType(RuntimeException.class) .setWaitStrategy(Retryer.WaitStrategy.EXPONENTIAL, BASE_MILLIS_TO_WAIT, MAX_MILLIS_TO_WAIT) .setMaxRetries(10).build(); diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncReader.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncReader.java index db592d8226..9822350c45 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncReader.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAsyncReader.java @@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory; import com.dremio.exec.hadoop.DremioHadoopUtils; +import com.dremio.http.BufferBasedCompletionHandler; import com.dremio.io.ExponentialBackoff; import com.dremio.io.ReusableAsyncByteReader; import com.dremio.plugins.async.utils.AsyncReadWithRetry; @@ -45,6 +46,9 @@ public class AzureAsyncReader extends ReusableAsyncByteReader implements AutoClo private static final int BASE_MILLIS_TO_WAIT = 250; // set to the average latency of an async read private static final int MAX_MILLIS_TO_WAIT = 10 * BASE_MILLIS_TO_WAIT; + /** The maximum range size for which azure storage provides MD5 checksums. 
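+   * Azure range GETs (requested via the x-ms-range-get-content-md5 header) only
+   * return a Content-MD5 header when the range is at most 4 MiB (1 << 22 bytes),
+   * so longer reads skip checksum verification.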
*/ + private static final int MAX_LEN_FOR_MD5_CHECKSUM = 1 << 22; + private static final Logger logger = LoggerFactory.getLogger(AzureAsyncReader.class); private final AsyncHttpClient asyncHttpClient; private final AzureAuthTokenProvider authProvider; @@ -53,6 +57,7 @@ public class AzureAsyncReader extends ReusableAsyncByteReader implements AutoClo private final String url; private final String threadName; private final AsyncReadWithRetry asyncReaderWithRetry; + private final boolean enableMD5Checksum; private final ExponentialBackoff backoff = new ExponentialBackoff() { @Override public int getBaseMillis() { return BASE_MILLIS_TO_WAIT; } @Override public int getMaxMillis() { return MAX_MILLIS_TO_WAIT; } @@ -64,8 +69,9 @@ public AzureAsyncReader(final String azureEndpoint, final AzureAuthTokenProvider authProvider, final String version, final boolean isSecure, - final AsyncHttpClient asyncHttpClient) { - this(azureEndpoint, accountName, path, authProvider, version, isSecure, asyncHttpClient, new AsyncReadWithRetry(throwable -> { + final AsyncHttpClient asyncHttpClient, + final boolean enableMD5Checksum) { + this(azureEndpoint, accountName, path, authProvider, version, isSecure, asyncHttpClient, enableMD5Checksum, new AsyncReadWithRetry(throwable -> { if (throwable.getMessage().contains("ConditionNotMet")) { return AsyncReadWithRetry.Error.PRECONDITION_NOT_MET; } else if (throwable.getMessage().contains("PathNotFound")) { @@ -83,6 +89,7 @@ public AzureAsyncReader(final String azureEndpoint, final String version, final boolean isSecure, final AsyncHttpClient asyncHttpClient, + final boolean enableMD5Checksum, AsyncReadWithRetry asyncReadWithRetry) { this.authProvider = authProvider; this.path = path; @@ -95,25 +102,42 @@ public AzureAsyncReader(final String azureEndpoint, this.url = String.format("%s/%s/%s", baseURL, container, AzureAsyncHttpClientUtils.encodeUrl(subPath)); this.threadName = Thread.currentThread().getName(); this.asyncReaderWithRetry = asyncReadWithRetry; + this.enableMD5Checksum = enableMD5Checksum; } @Override public CompletableFuture readFully(long offset, ByteBuf dst, int dstOffset, int len) { - return read(offset, dst, dstOffset, len, 0); + return read(offset, len, this.createResponseHandler(dst, dstOffset, len), 0); } - public CompletableFuture read(long offset, ByteBuf dst, int dstOffset, long len, int retryAttemptNum) { + public CompletableFuture read(long offset, long len, BufferBasedCompletionHandler responseHandler, int retryAttemptNum) { MetricsLogger metrics = getMetricLogger(); java.util.function.Function requestBuilderFunction = getRequestBuilderFunction(offset, len, metrics); + return asyncReaderWithRetry.read(asyncHttpClient, requestBuilderFunction, - metrics, path, threadName, dst, dstOffset, retryAttemptNum, backoff); + metrics, path, threadName, responseHandler, retryAttemptNum, backoff); + } + + private BufferBasedCompletionHandler createResponseHandler(ByteBuf buf, int dstOffset, int len) { + if (this.requireChecksum(len)) { + return new ChecksumVerifyingCompletionHandler(buf, dstOffset); + } else { + return new BufferBasedCompletionHandler(buf, dstOffset); + } + } + + @VisibleForTesting + boolean requireChecksum(long len) { + return enableMD5Checksum && len <= MAX_LEN_FOR_MD5_CHECKSUM; } java.util.function.Function getRequestBuilderFunction(long offset, long len, MetricsLogger metrics) { java.util.function.Function requestBuilderFunction = (Function) unused -> { long rangeEnd = offset + len - 1L; + boolean requestChecksum = this.requireChecksum(len); 
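+      // Request a Content-MD5 header only for ranges small enough to be checksummed
+      // (see MAX_LEN_FOR_MD5_CHECKSUM); ChecksumVerifyingCompletionHandler then
+      // verifies the body against it.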
RequestBuilder requestBuilder = AzureAsyncHttpClientUtils.newDefaultRequestBuilder() .addHeader("Range", String.format("bytes=%d-%d", offset, rangeEnd)) + .addHeader("x-ms-range-get-content-md5", requestChecksum ? "true" : "false") .setUrl(url); if (version != null) { requestBuilder.addHeader("If-Unmodified-Since", version); diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAuthenticationType.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAuthenticationType.java index a4a051ab9d..a7f3a8ed2e 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAuthenticationType.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureAuthenticationType.java @@ -27,7 +27,7 @@ */ public enum AzureAuthenticationType { - @Tag(1) @DisplayMetadata(label = "Shared Access Key") + @Tag(1) @DisplayMetadata(label = "Shared access key") ACCESS_KEY { @Override public List getUniqueProperties() { diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageConf.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageConf.java index 1dfa40589f..e3f1eef379 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageConf.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageConf.java @@ -15,28 +15,10 @@ */ package com.dremio.plugins.azure; -import java.util.List; +import static com.dremio.hadoop.security.alias.DremioCredentialProvider.PROTOCOL_PREFIX; -import javax.inject.Provider; -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; - -import com.dremio.exec.catalog.StoragePluginId; -import com.dremio.exec.catalog.conf.DefaultCtasFormatSelection; import com.dremio.exec.catalog.conf.DisplayMetadata; -import com.dremio.exec.catalog.conf.NotMetadataImpacting; -import com.dremio.exec.catalog.conf.Property; -import com.dremio.exec.catalog.conf.Secret; import com.dremio.exec.catalog.conf.SourceType; -import com.dremio.exec.server.SabotContext; -import com.dremio.exec.store.dfs.CacheProperties; -import com.dremio.exec.store.dfs.FileSystemConf; -import com.dremio.exec.store.dfs.SchemaMutability; -import com.dremio.io.file.Path; -import com.dremio.options.OptionManager; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; import io.protostuff.Tag; @@ -45,184 +27,74 @@ */ @CheckAzureConf @SourceType(value = "AZURE_STORAGE", label = "Azure Storage", uiConfig = "azure-storage-layout.json") -public class AzureStorageConf extends FileSystemConf { - - public static final List KEY_AUTH_PROPS = ImmutableList.of( - AzureStorageFileSystem.ACCOUNT, - AzureStorageFileSystem.SECURE, - AzureStorageFileSystem.CONTAINER_LIST, - AzureStorageFileSystem.KEY - ); - - public static final List AZURE_AD_PROPS = ImmutableList.of( - AzureStorageFileSystem.ACCOUNT, - AzureStorageFileSystem.SECURE, - AzureStorageFileSystem.CONTAINER_LIST, - AzureStorageFileSystem.CLIENT_ID, - AzureStorageFileSystem.CLIENT_SECRET, - AzureStorageFileSystem.TOKEN_ENDPOINT - ); +public class AzureStorageConf extends AbstractAzureStorageConf { /** - * Type of Storage + * Secret Key <-> Azure Key Vault selector for 'Shared Access Key' authenticationType */ - public enum AccountKind { - @Tag(1) - @DisplayMetadata(label = "StorageV1") - STORAGE_V1, - - @Tag(2) - @DisplayMetadata(label = "StorageV2") - STORAGE_V2 - ; - - @JsonIgnore - public Prototype getPrototype(boolean enableSSL) { - if(this == 
AccountKind.STORAGE_V1) { - return enableSSL ? Prototype.WASBS : Prototype.WASB; - } else { - return enableSSL ? Prototype.ABFSS: Prototype.ABFS; - } - } - } - - @Tag(1) - @DisplayMetadata(label = "Account Kind") - public AccountKind accountKind = AccountKind.STORAGE_V2; - - @Tag(2) - @DisplayMetadata(label = "Account Name") - public String accountName; - - @Tag(3) - @Secret - @DisplayMetadata(label = "Shared Access Key") - public String accessKey; - - @Tag(4) - @DisplayMetadata(label = "Root Path") - public String rootPath = "/"; - - @Tag(5) - @DisplayMetadata(label = "Advanced Properties") - public List propertyList; - - @Tag(6) - @DisplayMetadata(label = "Blob Containers & Filesystem Allowlist") - public List containers; - - @Tag(7) - @NotMetadataImpacting - @DisplayMetadata(label = "Encrypt connection") - public boolean enableSSL = true; - - @Tag(8) - @NotMetadataImpacting - @DisplayMetadata(label = "Enable exports into the source (CTAS and DROP)") - @JsonIgnore - public boolean allowCreateDrop = false; + @Tag(18) + @DisplayMetadata(label = "Secret Store") + public SharedAccessSecretType sharedAccessSecretType = SharedAccessSecretType.SHARED_ACCESS_SECRET_KEY; - @Tag(9) - @NotMetadataImpacting - @DisplayMetadata(label = "Enable asynchronous access when possible") - public boolean enableAsync = true; - - @Tag(10) - @DisplayMetadata(label = "Application ID") - public String clientId; - - @Tag(11) - @DisplayMetadata(label = "OAuth 2.0 Token Endpoint") - public String tokenEndpoint; - - @Tag(12) - @Secret - @DisplayMetadata(label = "Client Secret") - public String clientSecret; - - @Tag(13) - public AzureAuthenticationType credentialsType = AzureAuthenticationType.ACCESS_KEY; - - @Tag(14) - @NotMetadataImpacting - @DisplayMetadata(label = "Enable local caching when possible") - public boolean isCachingEnabled = true; - - @Tag(15) - @NotMetadataImpacting - @Min(value = 1, message = "Max percent of total available cache space must be between 1 and 100") - @Max(value = 100, message = "Max percent of total available cache space must be between 1 and 100") - @DisplayMetadata(label = "Max percent of total available cache space to use when possible") - public int maxCacheSpacePct = 100; - - @Tag(16) - @NotMetadataImpacting - @DisplayMetadata(label = "Default CTAS Format") - public DefaultCtasFormatSelection defaultCtasFormat = DefaultCtasFormatSelection.ICEBERG; - - @Tag(17) - @NotMetadataImpacting - @DisplayMetadata(label = "Enable partition column inference") - public boolean isPartitionInferenceEnabled = false; - - @Override - public AzureStoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { - Preconditions.checkNotNull(accountName, "Account name must be provided."); - return new AzureStoragePlugin(this, context, name, pluginIdProvider); - } - - @Override - public String getDefaultCtasFormat() { - return defaultCtasFormat.getDefaultCtasFormat(); - } - - @Override - public Path getPath() { - return Path.of(rootPath); - } + /** + * Secret Key <-> Azure Key Vault selector for 'Azure Active Directory' authenticationType + */ + @Tag(19) + @DisplayMetadata(label = "Application Secret Store") + public AzureActiveDirectorySecretType azureADSecretType = AzureActiveDirectorySecretType.AZURE_ACTIVE_DIRECTORY_SECRET_KEY; - @Override - public boolean isImpersonationEnabled() { - return false; - } + /** + * vault uri for 'Shared Access Key' authenticationType + */ + @Tag(20) + @DisplayMetadata() + public String accessKeyUri; - @Override - public List getProperties() { - 
return propertyList; - } + /** + * vault uri for 'Azure Active Directory' authenticationType + */ + @Tag(21) + @DisplayMetadata() + public String clientSecretUri; + /** + * @return the secret type (Secret Key 'dremio' vs. Azure Vault URI) for Shared Access authentication type + */ @Override - public String getConnection() { - return String.format("%s:///", CloudFileSystemScheme.AZURE_STORAGE_FILE_SYSTEM_SCHEME.getScheme()); + public SharedAccessSecretType getSharedAccessSecretType() { + return sharedAccessSecretType; } + /** + * @return the Shared Access Key Vault URI + */ @Override - public SchemaMutability getSchemaMutability() { - return SchemaMutability.USER_TABLE; + public String getAccessKeyUri() { + accessKeyUri = prependProtocolIfNotExist(accessKeyUri, PROTOCOL_PREFIX); + return accessKeyUri; } + /** + * @return the secret type (Secret Key 'dremio' vs. Azure Vault URI) for Azure Active Directory authentication type + */ @Override - public boolean isPartitionInferenceEnabled() { - return isPartitionInferenceEnabled; + public AzureActiveDirectorySecretType getAzureADSecretType() { + return azureADSecretType; } + /** + * @return the Azure Active Directory Key Vault URI + */ @Override - public boolean isAsyncEnabled() { - return enableAsync; + public String getClientSecretUri() { + clientSecretUri = prependProtocolIfNotExist(clientSecretUri, PROTOCOL_PREFIX); + return clientSecretUri; } - @Override - public CacheProperties getCacheProperties() { - return new CacheProperties() { - @Override - public boolean isCachingEnabled(final OptionManager optionManager) { - return isCachingEnabled; - } - - @Override - public int cacheMaxSpaceLimitPct() { - return maxCacheSpacePct; - } - }; + /** + * @return the prepended uri (if protocol not yet present) + */ + private String prependProtocolIfNotExist(String uri, String protocol) { + return uri.toLowerCase().contains(protocol) ? uri : protocol.concat(uri); } } diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageFileSystem.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageFileSystem.java index fe1694d864..53ead5575c 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageFileSystem.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageFileSystem.java @@ -32,16 +32,19 @@ import org.asynchttpclient.AsyncHttpClient; import com.dremio.common.AutoCloseables; +import com.dremio.common.exceptions.UserException; import com.dremio.common.util.Retryer; import com.dremio.exec.hadoop.MayProvideAsyncStream; import com.dremio.exec.store.dfs.DremioFileSystemCache; import com.dremio.exec.store.dfs.FileSystemConf; import com.dremio.http.AsyncHttpClientProvider; import com.dremio.io.AsyncByteReader; -import com.dremio.plugins.azure.AzureStorageConf.AccountKind; +import com.dremio.plugins.azure.AbstractAzureStorageConf.AccountKind; import com.dremio.plugins.util.ContainerFileSystem; +import com.dremio.services.credentials.CredentialsException; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; +import com.google.common.base.Throwables; /** * A container file system implementation for Azure Storage. 
@@ -51,7 +54,7 @@ public class AzureStorageFileSystem extends ContainerFileSystem implements MayProvideAsyncStream { private static final String CONTAINER_HUMAN_NAME = "Container"; static final String ACCOUNT = "dremio.azure.account"; - static final String KEY = "dremio.azure.key"; + public static final String KEY = "dremio.azure.key"; static final String MODE = "dremio.azure.mode"; static final String SECURE = "dremio.azure.secure"; static final String CONTAINER_LIST = "dremio.azure.container_list"; @@ -61,7 +64,7 @@ public class AzureStorageFileSystem extends ContainerFileSystem implements MayPr static final String CLIENT_ID = "dremio.azure.clientId"; static final String TOKEN_ENDPOINT = "dremio.azure.tokenEndpoint"; - static final String CLIENT_SECRET = "dremio.azure.clientSecret"; + public static final String CLIENT_SECRET = "dremio.azure.clientSecret"; static final String AZURE_ENDPOINT = "fs.azure.endpoint"; private String azureEndpoint; @@ -80,6 +83,7 @@ public class AzureStorageFileSystem extends ContainerFileSystem implements MayPr private Configuration parentConf; private ContainerProvider containerProvider; private AzureAuthTokenProvider authProvider; + private boolean enableMD5Checksum; private final DremioFileSystemCache fsCache = new DremioFileSystemCache(); @@ -112,6 +116,7 @@ protected void setup(Configuration conf) throws IOException { account = Objects.requireNonNull(conf.get(ACCOUNT)); asyncHttpClient = AsyncHttpClientProvider.getInstance(); + enableMD5Checksum = conf.getBoolean(AzureStorageOptions.ENABLE_CHECKSUM.getOptionName(), true); if (credentialsType == AZURE_ACTIVE_DIRECTORY) { clientID = Objects.requireNonNull(conf.get(CLIENT_ID)); @@ -119,7 +124,7 @@ protected void setup(Configuration conf) throws IOException { clientSecret = new String(Objects.requireNonNull(conf.getPassword(CLIENT_SECRET))); this.authProvider = new AzureOAuthTokenProvider(tokenEndpoint, clientID, clientSecret); } else if (credentialsType == ACCESS_KEY) { - key = new String(Objects.requireNonNull(conf.getPassword(KEY))); + key = getSharedAccessKey(conf); this.authProvider = new AzureSharedKeyAuthTokenProvider(account, key); } else { throw new IOException("Unrecognized credential type"); @@ -162,6 +167,30 @@ protected void setup(Configuration conf) throws IOException { containerProvider.verfiyContainersExist(); } + /** + * Get the shared access key from configuration. Unpacks exceptions + * such that cause of CredentialsException is surfaced. + */ + private String getSharedAccessKey(Configuration conf) throws IOException { + try { + return new String(Objects.requireNonNull(conf.getPassword(KEY))); + } catch (IOException e) { + // Hadoop does a lot of exception wrapping, so dig to find actual error. + // Throw UserException to surface problems with credentials config to the user + for (Throwable throwable : Throwables.getCausalChain(e)) { + if (throwable instanceof CredentialsException) { + final String message = throwable.getCause() == null ? 
throwable.getMessage() : throwable.getCause().getMessage(); + throw UserException.permissionError(throwable) + .message("Failed to resolve credentials: " + message) + .buildSilently(); + } + } + throw UserException.permissionError(e) + .message("Failed to resolve credentials.") + .buildSilently(); + } + } + private String[] getContainerNames(String value) { if (value == null) { return null; @@ -234,11 +263,11 @@ public FileSystem create() throws IOException { if (credentialsType == AZURE_ACTIVE_DIRECTORY) { parent.proto.setImpl(conf, parent.account, parent.clientID, parent.tokenEndpoint, parent.clientSecret, parent.azureEndpoint); - return parent.fsCache.get(new Path(location).toUri(), conf, AzureStorageConf.AZURE_AD_PROPS); + return parent.fsCache.get(new Path(location).toUri(), conf, AbstractAzureStorageConf.AZURE_AD_PROPS); } parent.proto.setImpl(conf, parent.account, parent.key, parent.azureEndpoint); - return parent.fsCache.get(new Path(location).toUri(), conf, AzureStorageConf.KEY_AUTH_PROPS); + return parent.fsCache.get(new Path(location).toUri(), conf, AbstractAzureStorageConf.KEY_AUTH_PROPS); } } } @@ -260,6 +289,6 @@ public boolean supportsAsync() { @Override public AsyncByteReader getAsyncByteReader(Path path, String version, Map options) { - return new AzureAsyncReader(azureEndpoint, account, path, authProvider, version, secure, asyncHttpClient); + return new AzureAsyncReader(azureEndpoint, account, path, authProvider, version, secure, asyncHttpClient, enableMD5Checksum); } } diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageOptions.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageOptions.java index d46c2905d6..b280aaa4b4 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageOptions.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStorageOptions.java @@ -27,4 +27,6 @@ public class AzureStorageOptions { // If enabled, use the asynchronous interface for files. If disabled, use the hadoop file interface. public static final BooleanValidator ASYNC_READS = new BooleanValidator("store.azure.async", true); + // If enabled, compute MD5 checksums when reading data from V2 or Blob storage. 
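+  // Off by default; assuming Dremio's usual support-key syntax, it can be toggled at runtime, e.g.
+  //   ALTER SYSTEM SET "store.azure.checksum" = true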
+ public static final BooleanValidator ENABLE_CHECKSUM = new BooleanValidator("store.azure.checksum", false); } diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStoragePlugin.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStoragePlugin.java index 72ad68abc0..b63bd9aed0 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStoragePlugin.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/AzureStoragePlugin.java @@ -15,6 +15,8 @@ */ package com.dremio.plugins.azure; +import static com.dremio.hadoop.security.alias.DremioCredentialProvider.DREMIO_SCHEME_PREFIX; + import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -48,11 +50,12 @@ /** * Storage plugin for Microsoft Azure Storage */ -class AzureStoragePlugin extends DirectorySupportLackingFileSystemPlugin { +public class AzureStoragePlugin extends DirectorySupportLackingFileSystemPlugin { private static final Logger logger = LoggerFactory.getLogger(AzureStoragePlugin.class); + private static final String DREMIO_PLUS_AZURE_VAULT_SCHEME_PREFIX = DREMIO_SCHEME_PREFIX.concat("azure-key-vault+"); - public AzureStoragePlugin(AzureStorageConf config, SabotContext context, String name, Provider idProvider) { + public AzureStoragePlugin(AbstractAzureStorageConf config, SabotContext context, String name, Provider idProvider) { super(config, context, name, idProvider); } @@ -78,7 +81,7 @@ public SourceState getState() { } catch (Exception e) { return SourceState.badState( - String.format("Could not connect to %s. Check your settings and credentials", sourceName), e); + String.format("Could not connect to %s. Check your settings and credentials", getName()), e); } } @@ -92,9 +95,19 @@ public MetadataValidity validateMetadata(BytesOutput signature, DatasetHandle da return MetadataValidity.INVALID; } + private static String addPrefixIfNotExist(String prefix, String input) { + if (prefix == null) { + return input; + } + if (input == null) { + return null; + } + return input.toLowerCase().startsWith(prefix)? 
input : prefix.concat(input); + } + @Override protected List getProperties() { - final AzureStorageConf config = getConfig(); + final AbstractAzureStorageConf config = getConfig(); final List properties = new ArrayList<>(); // configure hadoop fs implementation @@ -105,17 +118,32 @@ protected List getProperties() { properties.add(new Property(AzureStorageFileSystem.ACCOUNT, config.accountName)); properties.add(new Property(AzureStorageFileSystem.SECURE, Boolean.toString(config.enableSSL))); properties.add(new Property(AzureStorageFileSystem.MODE, config.accountKind.name())); + properties.add(new Property(AzureStorageOptions.ENABLE_CHECKSUM.getOptionName(), + Boolean.toString(getContext().getOptionManager().getOption(AzureStorageOptions.ENABLE_CHECKSUM)))); AzureAuthenticationType credentialsType = config.credentialsType; - if(credentialsType == AzureAuthenticationType.AZURE_ACTIVE_DIRECTORY) { + if (credentialsType == AzureAuthenticationType.AZURE_ACTIVE_DIRECTORY) { properties.add(new Property(AzureStorageFileSystem.CREDENTIALS_TYPE, AzureAuthenticationType.AZURE_ACTIVE_DIRECTORY.name())); properties.add(new Property(AzureStorageFileSystem.CLIENT_ID, config.clientId)); - properties.add(new Property(AzureStorageFileSystem.CLIENT_SECRET, config.clientSecret)); + + + if (config.getAzureADSecretType() == AzureActiveDirectorySecretType.AZURE_ACTIVE_DIRECTORY_SECRET_KEY) { + properties.add(new Property(AzureStorageFileSystem.CLIENT_SECRET, addPrefixIfNotExist(DREMIO_SCHEME_PREFIX, config.clientSecret))); + } else { // Azure Key Vault + properties.add(new Property(AzureStorageFileSystem.CLIENT_SECRET, addPrefixIfNotExist(DREMIO_PLUS_AZURE_VAULT_SCHEME_PREFIX, config.getClientSecretUri()))); + } + properties.add(new Property(AzureStorageFileSystem.TOKEN_ENDPOINT, config.tokenEndpoint)); } else { properties.add(new Property(AzureStorageFileSystem.CREDENTIALS_TYPE, AzureAuthenticationType.ACCESS_KEY.name())); - properties.add(new Property(AzureStorageFileSystem.KEY, config.accessKey)); + + if (config.getSharedAccessSecretType() == SharedAccessSecretType.SHARED_ACCESS_SECRET_KEY) { + properties.add(new Property(AzureStorageFileSystem.KEY, addPrefixIfNotExist(DREMIO_SCHEME_PREFIX, config.accessKey))); + } else { // Azure Key Vault + properties.add(new Property(AzureStorageFileSystem.KEY, addPrefixIfNotExist(DREMIO_PLUS_AZURE_VAULT_SCHEME_PREFIX, config.getAccessKeyUri()))); + } + } if(config.containers != null && config.containers.size() > 0) { @@ -193,6 +221,7 @@ protected boolean isAsyncEnabledForQuery(OperatorContext context) { return context != null && context.getOptions().getOption(AzureStorageOptions.ASYNC_READS); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { return false; } diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/BlobContainerProvider.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/BlobContainerProvider.java index 151ee189b3..be82b92104 100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/BlobContainerProvider.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/BlobContainerProvider.java @@ -92,6 +92,7 @@ public Stream getContainerCreators() throws IOException { } } + @Override public void verfiyContainersExist() { List list = containers.asList(); for(String c : list) { diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/CheckAzureConfValidator.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/CheckAzureConfValidator.java index f984456b3f..e6c2ca2aa9 
100644 --- a/plugins/azure/src/main/java/com/dremio/plugins/azure/CheckAzureConfValidator.java +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/CheckAzureConfValidator.java @@ -27,16 +27,21 @@ /** * Validator for the {@code CheckAzureConf} annotation. */ -public class CheckAzureConfValidator implements ConstraintValidator<CheckAzureConf, AzureStorageConf> { +public class CheckAzureConfValidator implements ConstraintValidator<CheckAzureConf, AbstractAzureStorageConf> { @Override - public boolean isValid(AzureStorageConf value, ConstraintValidatorContext context) { + public boolean isValid(AbstractAzureStorageConf value, ConstraintValidatorContext context) { if (value == null || value.credentialsType == AzureAuthenticationType.AZURE_ACTIVE_DIRECTORY ) { return true; } context.disableDefaultConstraintViolation(); - final String key = value.accessKey; + final String key; + if (value.getSharedAccessSecretType() == SharedAccessSecretType.SHARED_ACCESS_SECRET_KEY) { + key = value.accessKey; + } else { // Azure Key Vault + key = value.getAccessKeyUri(); + } final String account = value.accountName; boolean credentialsPresent = true; diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/ChecksumVerifyingCompletionHandler.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/ChecksumVerifyingCompletionHandler.java new file mode 100644 index 0000000000..73f241ee26 --- /dev/null +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/ChecksumVerifyingCompletionHandler.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.plugins.azure; + +import java.io.IOException; +import java.security.MessageDigest; +import java.util.Base64; + +import org.asynchttpclient.Response; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.http.BufferBasedCompletionHandler; + +import io.netty.buffer.ByteBuf; + +/** + * Verifies the response body's MD5 checksum against the Content-MD5 HTTP header.
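+ * This handler is only installed when the request explicitly asked for a checksum
+ * (see AzureAsyncReader#requireChecksum), so a missing Content-MD5 header on an
+ * otherwise successful response is treated as an error rather than ignored.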
+ */ +public class ChecksumVerifyingCompletionHandler extends BufferBasedCompletionHandler { + + private static final Logger logger = LoggerFactory.getLogger(ChecksumVerifyingCompletionHandler.class); + public static final String CHECKSUM_RESPONSE_HEADER = "Content-MD5"; + public static final String HASH_ALGORITHM = "MD5"; + + public ChecksumVerifyingCompletionHandler(ByteBuf outputBuffer, int dstOffset) { + super(outputBuffer, dstOffset); + } + + @Override + public Response onCompleted(Response response) throws Exception { + if (isRequestFailed()) { + logger.error("Error response received {} {}", response.getStatusCode(), response.getResponseBody()); + throw new RuntimeException(response.getResponseBody()); + } + String expectedMD5 = response.getHeader(CHECKSUM_RESPONSE_HEADER); + if (expectedMD5 != null) { + MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM); + digest.update(getBodyBytes()); + String checksum = Base64.getEncoder().encodeToString(digest.digest()); + if (!checksum.equals(expectedMD5)) { + throw new IOException(String.format("mismatched MD5 checksum: got %s, expected %s", checksum, expectedMD5)); + } + } else { + throw new IOException("MD5 checksum requested, but response header missing"); + } + return response; + } +} diff --git a/plugins/azure/src/main/java/com/dremio/plugins/azure/SharedAccessSecretType.java b/plugins/azure/src/main/java/com/dremio/plugins/azure/SharedAccessSecretType.java new file mode 100644 index 0000000000..446d7e06e3 --- /dev/null +++ b/plugins/azure/src/main/java/com/dremio/plugins/azure/SharedAccessSecretType.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.plugins.azure; + + +import com.dremio.exec.catalog.conf.DisplayMetadata; + +import io.protostuff.Tag; + +public enum SharedAccessSecretType { + + @Tag(1) + @DisplayMetadata(label = "Dremio") + SHARED_ACCESS_SECRET_KEY, + + @Tag(2) + @DisplayMetadata(label = "Azure Key Vault") + SHARED_ACCESS_AZURE_KEY_VAULT +} diff --git a/plugins/azure/src/main/resources/azure-storage-layout.json b/plugins/azure/src/main/resources/azure-storage-layout.json index 5243b18743..84c8f34f74 100644 --- a/plugins/azure/src/main/resources/azure-storage-layout.json +++ b/plugins/azure/src/main/resources/azure-storage-layout.json @@ -1,7 +1,7 @@ { "sourceType": "AZURE_STORAGE", "metadataRefresh": { - "datasetDiscovery": true, + "datasetDiscovery": false, "isFileSystemSource": true }, "form": { @@ -38,10 +38,39 @@ "container": { "elements": [ { - "propName": "config.accessKey", - "size": "half", - "errMsg": "Access key is required for Azure storage accounts", - "secure": true + "propName": "config.sharedAccessSecretType", + "uiType": "container_selection", + "options": [ + { + "value": "SHARED_ACCESS_SECRET_KEY", + "container": { + "elements": [ + { + "propName": "config.accessKey", + "placeholder": "Type secret here", + "size": "half", + "errMsg": "Access key is required for Azure storage accounts", + "secure": true + } + ] + } + }, + { + "value": "SHARED_ACCESS_AZURE_KEY_VAULT", + "container": { + "elements": [ + { + "name": "Azure Key Vault", + "propName": "config.accessKeyUri", + "prefix": "https://", + "placeholder": "/secrets/", + "size": "half", + "errMsg": "Vault URI" + } + ] + } + } + ] } ] } @@ -61,10 +90,38 @@ "errMsg": "Token endpoint is required for Azure Active Directory OAuth 2.0" }, { - "propName": "config.clientSecret", - "size": "half", - "errMsg": "Client secret is required for Azure Active Directory OAuth 2.0", - "secure": true + "propName": "config.azureADSecretType", + "uiType": "container_selection", + "options": [ + { + "value": "AZURE_ACTIVE_DIRECTORY_SECRET_KEY", + "container": { + "elements": [ + { + "propName": "config.clientSecret", + "placeholder": "Type secret here", + "size": "half", + "errMsg": "Client secret is required for Azure Active Directory OAuth 2.0", + "secure": true + } + ] + } + }, + { + "value": "AZURE_ACTIVE_DIRECTORY_KEY_VAULT", + "container": { + "elements": [ + { + "propName": "config.clientSecretUri", + "prefix": "https://", + "placeholder": "/secrets/", + "size": "half", + "errMsg": "Vault URI" + } + ] + } + } + ] } ] } diff --git a/plugins/azure/src/test/java/com/dremio/plugins/azure/TestAzureAsyncReader.java b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestAzureAsyncReader.java index bf914f6f27..487a16689a 100644 --- a/plugins/azure/src/test/java/com/dremio/plugins/azure/TestAzureAsyncReader.java +++ b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestAzureAsyncReader.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -33,11 +34,13 @@ import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; +import java.util.Base64; import java.util.Locale; import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.apache.commons.codec.digest.DigestUtils; import
org.apache.hadoop.fs.Path; import org.asynchttpclient.AsyncCompletionHandler; import org.asynchttpclient.AsyncHttpClient; @@ -48,7 +51,6 @@ import org.asynchttpclient.Response; import org.junit.Test; -import com.dremio.http.BufferBasedCompletionHandler; import com.dremio.plugins.async.utils.AsyncReadWithRetry; import com.dremio.plugins.async.utils.MetricsLogger; @@ -84,7 +86,7 @@ void verifyTestFileVersionChanged(boolean checkVersion) { assertEquals(FileNotFoundException.class, e.getCause().getClass()); assertTrue(e.getCause().getMessage().contains("Version of file has changed")); } finally { - verify(azureAsyncReader, times(1)).read(0, buf, 0, 20, 0); + verify(azureAsyncReader, times(1)).read(eq(0L), eq(20L), any(ChecksumVerifyingCompletionHandler.class), eq(0)); } } @@ -93,6 +95,30 @@ public void testFileVersionChanged() { verifyTestFileVersionChanged(true); } + + // Chunks larger than 4MB cannot use checksums due to limits in the Azure Blob Storage API. + @Test + public void testNoChecksumRequiredForLargeChunk() { + AzureAsyncReader reader = new AzureAsyncReader(AZURE_ENDPOINT, + "account", new Path("container/directory/file_00.parquet"), + getMockAuthTokenProvider(), "0", true, mock(AsyncHttpClient.class), + true, + mock(AsyncReadWithRetry.class)); + assertTrue(reader.requireChecksum(4194304)); + assertTrue(!reader.requireChecksum(4194305)); + } + + @Test + public void testRespectNoChecksumArg() { + AzureAsyncReader reader = new AzureAsyncReader(AZURE_ENDPOINT, + "account", new Path("container/directory/file_00.parquet"), + getMockAuthTokenProvider(), "0", true, mock(AsyncHttpClient.class), + false, + mock(AsyncReadWithRetry.class)); + assertTrue(!reader.requireChecksum(4194304)); + assertTrue(!reader.requireChecksum(4194305)); + } + void verifyTestPathNotFound(boolean checkVersion) { final String responseBody = "{\"error\":{\"code\":\"PathNotFound\",\"message\":\"The specified path does not exist.
+ "\\nRequestId:5b544bd0-c01f-0048-03f0-16bacd000000\\nTime:2020-04-20T08:51:53.7856703Z\"}}"; @@ -107,7 +133,7 @@ void verifyTestPathNotFound(boolean checkVersion) { assertEquals(FileNotFoundException.class, e.getCause().getClass()); assertTrue(e.getMessage().contains("PathNotFound")); } finally { - verify(azureAsyncReader, times(1)).read(0, buf, 0, 20, 0); + verify(azureAsyncReader, times(1)).read(eq(0L), eq(20L), any(ChecksumVerifyingCompletionHandler.class), eq(0)); } } @@ -122,10 +148,12 @@ void verifyTestServerErrorsAndRetries(boolean checkVersion) { "\\nRequestId:5b544bd0-c01f-0048-03f0-16bacd000000\\nTime:2020-04-20T08:51:53.7856703Z\"}}"; final int responseCode = 500; AzureAsyncReader azureAsyncReader = prepareAsyncReader(responseBody, responseCode, checkVersion); - ByteBuf buf = Unpooled.buffer(20); + int len = 20; + ChecksumVerifyingCompletionHandler responseHandler = + new ChecksumVerifyingCompletionHandler(Unpooled.buffer(len), len); try { - azureAsyncReader.readFully(0, buf, 0, 20).get(); + azureAsyncReader.read(0, len, responseHandler, 0).get(); fail("Should fail because of failing condition match"); } catch (Exception e) { assertEquals(RuntimeException.class, e.getCause().getClass()); @@ -136,9 +164,9 @@ void verifyTestServerErrorsAndRetries(boolean checkVersion) { for (int retryAttempt = 0; retryAttempt < expectedRetries; retryAttempt++) { AsyncReadWithRetry asyncReadWithRetry = azureAsyncReader.getAsyncReaderWithRetry(); verify(asyncReadWithRetry).read(azureAsyncReader.getAsyncHttpClient(), - azureAsyncReader.getRequestBuilderFunction(0, 20, azureAsyncReader.getMetricLogger()), + azureAsyncReader.getRequestBuilderFunction(0, len, azureAsyncReader.getMetricLogger()), azureAsyncReader.getMetricLogger(), azureAsyncReader.getPath(), azureAsyncReader.getThreadName(), - buf, 0, retryAttempt, azureAsyncReader.getBackoff()); + responseHandler, retryAttempt, azureAsyncReader.getBackoff()); } } } @@ -174,6 +202,8 @@ private void testSuccessHttpMode(boolean isSecure, boolean checkVersion) { LocalDateTime versionDate = LocalDateTime.now(ZoneId.of("GMT")).minusDays(2); byte[] responseBytes = getRandomBytes(20); + when(response.getHeader(ChecksumVerifyingCompletionHandler.CHECKSUM_RESPONSE_HEADER)) + .thenReturn(md5Checksum(responseBytes)); HttpResponseBodyPart responsePart = mock(HttpResponseBodyPart.class); when(responsePart.getBodyByteBuffer()).thenReturn(ByteBuffer.wrap(responseBytes)); @@ -200,7 +230,7 @@ private void testSuccessHttpMode(boolean isSecure, boolean checkVersion) { // Fill in response AsyncCompletionHandler responseHandler = invocationOnMock.getArgument(1, AsyncCompletionHandler.class); - assertEquals(responseHandler.getClass(), BufferBasedCompletionHandler.class); + assertEquals(responseHandler.getClass(), ChecksumVerifyingCompletionHandler.class); responseHandler.onBodyPartReceived(responsePart); responseHandler.onStatusReceived(status); @@ -215,14 +245,11 @@ private void testSuccessHttpMode(boolean isSecure, boolean checkVersion) { azureAsyncReader = getReader("0", isSecure, client); } - try { ByteBuf buf = Unpooled.buffer(20); - azureAsyncReader.readFully(0, buf, 0, 20).get(); + int len = 20; + azureAsyncReader.readFully(0, buf, 0, len).join(); assertEquals(new String(buf.array()), new String(responseBytes)); - verify(azureAsyncReader).read(0, buf, 0, 20, 0); - } catch (Exception e) { - fail(e.getMessage()); - } + verify(azureAsyncReader).read(eq(0L), eq(20L), any(ChecksumVerifyingCompletionHandler.class), eq(0)); } AzureAsyncReader getReader(String version, 
boolean isSecure, AsyncHttpClient client) { @@ -237,23 +264,19 @@ AzureAsyncReader getReader(String version, boolean isSecure, AsyncHttpClient cli })); return spy(new AzureAsyncReader(AZURE_ENDPOINT, "account", new Path("container/directory/file_00.parquet"), - getMockAuthTokenProvider(), version, isSecure, client, asyncReadWithRetry + getMockAuthTokenProvider(), version, isSecure, client, true, asyncReadWithRetry )); } @Test public void testAsyncReaderWithRandomCharacterInPath() { AsyncHttpClient client = mock(AsyncHttpClient.class); - try { - LocalDateTime versionDate = LocalDateTime.now(ZoneId.of("GMT")).minusDays(2); - AzureAsyncReader azureAsyncReader = new AzureAsyncReader(AZURE_ENDPOINT, - "account", new Path("/testdir/$#%&New Folder to test abc 123/0_0_0.parquet"), - getMockAuthTokenProvider(), String.valueOf(versionDate.atZone(ZoneId.of("GMT")).toInstant().toEpochMilli()), - false, client - ); - } catch (Exception e) { - fail(e.getMessage()); - } + LocalDateTime versionDate = LocalDateTime.now(ZoneId.of("GMT")).minusDays(2); + AzureAsyncReader azureAsyncReader = new AzureAsyncReader(AZURE_ENDPOINT, + "account", new Path("/testdir/$#%&New Folder to test abc 123/0_0_0.parquet"), + getMockAuthTokenProvider(), String.valueOf(versionDate.atZone(ZoneId.of("GMT")).toInstant().toEpochMilli()), + false, client, true + ); } @Test @@ -265,7 +288,7 @@ public void testAsyncHttpClientClosedError() { AzureAsyncReader azureAsyncReader = spy(new AzureAsyncReader(AZURE_ENDPOINT, "account", new Path("container/directory/file_00.parquet"), getMockAuthTokenProvider(), String.valueOf(versionDate.atZone(ZoneId.of("GMT")).toInstant().toEpochMilli()), - false, client + false, client, true )); try { @@ -276,7 +299,6 @@ public void testAsyncHttpClientClosedError() { } } - private AzureAsyncReader prepareAsyncReader(final String responseBody, final int responseCode, boolean checkVersion) { // Prepare response AsyncHttpClient client = mock(AsyncHttpClient.class); @@ -288,11 +310,10 @@ private AzureAsyncReader prepareAsyncReader(final String responseBody, final int CompletableFuture future = new CompletableFuture<>(); //CompletableFuture.completedFuture(response); ListenableFuture resFuture = mock(ListenableFuture.class); when(resFuture.toCompletableFuture()).thenReturn(future); - LocalDateTime versionDate = LocalDateTime.now(ZoneId.of("GMT")).minusDays(2); when(client.executeRequest(any(Request.class), any(AsyncCompletionHandler.class))).then(invocationOnMock -> { AsyncCompletionHandler responseHandler = invocationOnMock.getArgument(1, AsyncCompletionHandler.class); - assertEquals(responseHandler.getClass(), BufferBasedCompletionHandler.class); + assertEquals(responseHandler.getClass(), ChecksumVerifyingCompletionHandler.class); responseHandler.onStatusReceived(status); try { responseHandler.onCompleted(response); @@ -304,16 +325,14 @@ private AzureAsyncReader prepareAsyncReader(final String responseBody, final int AzureAsyncReader azureAsyncReader; if (checkVersion) { + LocalDateTime versionDate = LocalDateTime.now(ZoneId.of("GMT")).minusDays(2); azureAsyncReader = getReader(String.valueOf(versionDate.atZone(ZoneId.of("GMT")).toInstant().toEpochMilli()), true, client); } else { azureAsyncReader = getReader("0", true, client); } MetricsLogger metricsLogger = mock(MetricsLogger.class); when(azureAsyncReader.getMetricLogger()).thenReturn(metricsLogger); - Function requestFunction = unused -> { - Request request = mock(Request.class); - return request; - }; + Function requestFunction = unused -> 
mock(Request.class); when(azureAsyncReader.getRequestBuilderFunction(0, 20, metricsLogger)).thenReturn(requestFunction); return azureAsyncReader; @@ -331,4 +350,8 @@ private AzureAuthTokenProvider getMockAuthTokenProvider() { when(authTokenProvider.getAuthzHeaderValue(any(Request.class))).thenReturn("Bearer testtoken"); return authTokenProvider; } + + private String md5Checksum(byte[] bytes) { + return Base64.getEncoder().encodeToString(DigestUtils.md5(bytes)); + } } diff --git a/plugins/azure/src/test/java/com/dremio/plugins/azure/TestChecksumVerifyingCompletionHandler.java b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestChecksumVerifyingCompletionHandler.java new file mode 100644 index 0000000000..82e1370664 --- /dev/null +++ b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestChecksumVerifyingCompletionHandler.java @@ -0,0 +1,144 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.plugins.azure; + +import static com.dremio.plugins.azure.ChecksumVerifyingCompletionHandler.CHECKSUM_RESPONSE_HEADER; +import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Base64; + +import org.apache.commons.codec.digest.DigestUtils; +import org.asynchttpclient.HttpResponseBodyPart; +import org.asynchttpclient.HttpResponseStatus; +import org.asynchttpclient.Response; +import org.junit.jupiter.api.Test; + +import io.netty.buffer.Unpooled; + +class TestChecksumVerifyingCompletionHandler { + + @Test + void failedRequestIgnoresMD5() throws Exception { + ChecksumVerifyingCompletionHandler handler = + new ChecksumVerifyingCompletionHandler(Unpooled.buffer(32), 0); + HttpResponseStatus status = mock(HttpResponseStatus.class); + String responseBody = "some error message"; + when(status.getStatusCode()).thenReturn(500); + handler.onStatusReceived(status); + Response response = mock(Response.class); + when(response.getResponseBody()).thenReturn(responseBody); + when(response.getHeader(CHECKSUM_RESPONSE_HEADER)).thenReturn("0"); + try { + handler.onCompleted(response); + fail("expected RuntimeException"); + } catch (RuntimeException re) { + assertEquals(re.getMessage(), responseBody); + } + } + + @Test + void mismatchedMD5HeaderFails() throws Exception { + HttpResponseStatus status = mock(HttpResponseStatus.class); + when(status.getStatusCode()).thenReturn(206); + + String responseBody = "totally valid parquet"; + HttpResponseBodyPart responsePart = mock(HttpResponseBodyPart.class); + when(responsePart.getBodyByteBuffer()).thenReturn(ByteBuffer.wrap(responseBody.getBytes())); + + Response response = mock(Response.class); + when(response.getResponseBody()).thenReturn(responseBody); + when(response.getHeader(CHECKSUM_RESPONSE_HEADER)).thenReturn("invalid"); + + ChecksumVerifyingCompletionHandler handler = + new ChecksumVerifyingCompletionHandler(Unpooled.buffer(32), 0); + handler.onStatusReceived(status);
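+    // No body part is ever fed to this handler, so it digests an empty buffer;
+    // the empty-body MD5 (1B2M2Y8AsgTpgAmY7PhCfg==) can never match the bogus header.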
+ try { + handler.onCompleted(response); + } catch (IOException ioE) { + assertEquals(ioE.getMessage(), + "mismatched MD5 checksum: got 1B2M2Y8AsgTpgAmY7PhCfg==, expected invalid"); + } + } + + @Test + void missingMD5HeaderFails() throws Exception { + HttpResponseStatus status = mock(HttpResponseStatus.class); + when(status.getStatusCode()).thenReturn(206); + + String responseBody = "totally valid parquet"; + HttpResponseBodyPart responsePart = mock(HttpResponseBodyPart.class); + when(responsePart.getBodyByteBuffer()).thenReturn(ByteBuffer.wrap(responseBody.getBytes())); + + Response response = mock(Response.class); + when(response.getResponseBody()).thenReturn(responseBody); + + ChecksumVerifyingCompletionHandler handler = + new ChecksumVerifyingCompletionHandler(Unpooled.buffer(32), 0); + handler.onStatusReceived(status); + + try { + handler.onCompleted(response); + } catch (IOException ioE) { + assertEquals(ioE.getMessage(), + "MD5 checksum requested, but response header missing"); + } + } + + @Test + void ignorePreviouslyReceivedBodyPartsOnReset() throws Exception { + HttpResponseStatus initialStatus = mock(HttpResponseStatus.class); + when(initialStatus.getStatusCode()).thenReturn(500); + + HttpResponseStatus secondTryStatus = mock(HttpResponseStatus.class); + when(secondTryStatus.getStatusCode()).thenReturn(206); + + String initialBodyBytes = "some bytes we don't want to see "; + String secondTryBodyBytes = "some bytes we do want to see "; + + HttpResponseBodyPart initialResponsePart = mock(HttpResponseBodyPart.class); + when(initialResponsePart.getBodyByteBuffer()).thenReturn( + ByteBuffer.wrap(initialBodyBytes.getBytes())); + + HttpResponseBodyPart secondTryResponsePart = mock(HttpResponseBodyPart.class); + when(secondTryResponsePart.getBodyByteBuffer()).thenReturn( + ByteBuffer.wrap(secondTryBodyBytes.getBytes())); + + Response response = mock(Response.class); + when(response.getHeader(CHECKSUM_RESPONSE_HEADER)).thenReturn( + Base64.getEncoder().encodeToString(DigestUtils.md5(secondTryBodyBytes.getBytes()))); + + ChecksumVerifyingCompletionHandler handler = + new ChecksumVerifyingCompletionHandler(Unpooled.buffer(32), 0); + handler.onBodyPartReceived(initialResponsePart); + handler.onStatusReceived(initialStatus); + handler.reset(); + handler.onStatusReceived(secondTryStatus); + handler.onBodyPartReceived(secondTryResponsePart); + handler.onCompleted(response); + } + +} diff --git a/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java new file mode 100644 index 0000000000..d677e5305b --- /dev/null +++ b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
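The assertions above fully describe the handler's observable contract: a failed request surfaces the service's error body, a successful response must carry a digest header matching the bytes accumulated in the buffer, and reset() discards anything buffered before a retry. For illustration only, here is a minimal sketch consistent with those assertions; the header name "Content-MD5" and everything inside the method body are assumptions, not the shipped ChecksumVerifyingCompletionHandler:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.util.Base64;

    import org.apache.commons.codec.digest.DigestUtils;
    import org.asynchttpclient.Response;

    import com.dremio.http.BufferBasedCompletionHandler;

    import io.netty.buffer.ByteBuf;

    // Sketch only: reconstructed from the tests, not the production class.
    public class ChecksumVerifyingCompletionHandlerSketch extends BufferBasedCompletionHandler {
      public static final String CHECKSUM_RESPONSE_HEADER = "Content-MD5"; // assumed header name

      public ChecksumVerifyingCompletionHandlerSketch(ByteBuf outputBuffer, int dstOffset) {
        super(outputBuffer, dstOffset);
      }

      @Override
      public Response onCompleted(Response response) throws Exception {
        if (isRequestFailed()) {
          // failed request: report the service's error body, skip checksum verification
          throw new RuntimeException(response.getResponseBody());
        }
        String expected = response.getHeader(CHECKSUM_RESPONSE_HEADER);
        if (expected == null) {
          throw new IOException("MD5 checksum requested, but response header missing");
        }
        ByteBuffer body = getBodyBytes(); // bytes accumulated since construction or the last reset()
        byte[] bytes = new byte[body.remaining()];
        body.get(bytes);
        String got = Base64.getEncoder().encodeToString(DigestUtils.md5(bytes));
        if (!got.equals(expected)) {
          throw new IOException("mismatched MD5 checksum: got " + got + ", expected " + expected);
        }
        return response;
      }
    }

diff --git a/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java
new file mode 100644
index 0000000000..d677e5305b
--- /dev/null
+++ b/plugins/azure/src/test/java/com/dremio/plugins/azure/TestProtocolPrefixIfNotExists.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.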
+ */
+package com.dremio.plugins.azure;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestProtocolPrefixIfNotExists {
+
+  @Test
+  public void testProtocolPrefix1() {
+    String input = "dremio+azure-key-vault+https:///secrets/";
+    final String updatedStringSA = testProtocolPrefixForSharedAccessKey(input);
+    Assert.assertEquals(input, updatedStringSA);
+    final String updatedStringAAD = testProtocolPrefixForActiveDirectory(input);
+    Assert.assertEquals(input, updatedStringAAD);
+
+  }
+
+  @Test
+  public void testProtocolPrefix2() {
+    String input = "https:///secrets/";
+    final String updatedStringSA = testProtocolPrefixForSharedAccessKey(input);
+    Assert.assertEquals(input, updatedStringSA);
+    final String updatedStringAAD = testProtocolPrefixForActiveDirectory(input);
+    Assert.assertEquals(input, updatedStringAAD);
+
+  }
+
+  @Test
+  public void testProtocolPrefix3() {
+    String input = "HtTpS:///secrets/";
+    String updatedStringSA = testProtocolPrefixForSharedAccessKey(input);
+    Assert.assertEquals(input, updatedStringSA);
+    final String updatedStringAAD = testProtocolPrefixForActiveDirectory(input);
+    Assert.assertEquals(input, updatedStringAAD);
+
+  }
+
+  @Test
+  public void testProtocolPrefix4() {
+    String input = "HtTpS/secrets/";
+    String updatedStringSA = testProtocolPrefixForSharedAccessKey(input);
+    Assert.assertEquals("https://HtTpS/secrets/", updatedStringSA);
+    final String updatedStringAAD = testProtocolPrefixForActiveDirectory(input);
+    Assert.assertEquals("https://HtTpS/secrets/", updatedStringAAD);
+  }
+
+  @Test
+  public void testProtocolPrefix5() {
+    String input = "/secrets/";
+    final String updatedStringSA = testProtocolPrefixForSharedAccessKey(input);
+    Assert.assertEquals("https:///secrets/", updatedStringSA);
+    final String updatedStringAAD = testProtocolPrefixForActiveDirectory(input);
+    Assert.assertEquals("https:///secrets/", updatedStringAAD);
+  }
+
+  /**
+   * Exercises the prependProtocolIfNotExist logic invoked inside the getter for the Shared Access Key vault URI.
+   * @param vaultUri the raw user input for the vault URI
+   * @return the AzureStorageConf field 'accessKeyUri' with the protocol prepended (if not already present)
+   */
+  private String testProtocolPrefixForSharedAccessKey(String vaultUri) {
+    AzureStorageConf config = new AzureStorageConf();
+    config.accessKeyUri = vaultUri;
+    return config.getAccessKeyUri();
+  }
+
+  /**
+   * Exercises the prependProtocolIfNotExist logic invoked inside the getter for the Azure Active Directory vault URI.
+   * @param vaultUri the raw user input for the vault URI
+   * @return the AzureStorageConf field 'clientSecretUri' with the protocol prepended (if not already present)
+   */
+  private String testProtocolPrefixForActiveDirectory(String vaultUri) {
+    AzureStorageConf config = new AzureStorageConf();
+    config.clientSecretUri = vaultUri;
+    return config.getClientSecretUri();
+  }
+}
diff --git a/plugins/common/pom.xml b/plugins/common/pom.xml
index b8bb1bfcd0..9a9fd24954 100644
--- a/plugins/common/pom.xml
+++ b/plugins/common/pom.xml
@@ -25,7 +25,7 @@
   <parent>
     <groupId>com.dremio.plugins</groupId>
     <artifactId>dremio-plugin-parent</artifactId>
-    <version>24.0.0-202302100528110223-3a169b7c</version>
+    <version>24.1.0-202306130653310132-d30779f6</version>
   </parent>
 
   <artifactId>dremio-plugin-common</artifactId>
@@ -44,11 +44,27 @@
     <dependency>
       <groupId>com.dremio</groupId>
       <artifactId>dremio-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.dremio.services</groupId>
+      <artifactId>dremio-services-options</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.pf4j</groupId>
+          <artifactId>pf4j</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
+    <dependency>
+      <groupId>com.dremio</groupId>
+      <artifactId>dremio-common</artifactId>
+      <classifier>tests</classifier>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
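The five test cases above pin down how both vault-URI getters normalize raw input. A sketch of logic consistent with all five assertions follows; this is hypothetical, and the real prependProtocolIfNotExist inside AzureStorageConf may be written differently:

    import java.util.Locale;

    final class ProtocolPrefixSketch {
      // Consistent with the tests: anything already containing an "https:" scheme
      // (any case, including the "dremio+azure-key-vault+https:" form) passes
      // through unchanged; everything else gets "https://" prepended.
      static String prependProtocolIfNotExist(String vaultUri) {
        if (vaultUri == null || vaultUri.isEmpty()) {
          return vaultUri;
        }
        if (vaultUri.toLowerCase(Locale.ROOT).contains("https:")) {
          return vaultUri;
        }
        return "https://" + vaultUri;
      }

      public static void main(String[] args) {
        System.out.println(prependProtocolIfNotExist("HtTpS:///secrets/")); // unchanged
        System.out.println(prependProtocolIfNotExist("HtTpS/secrets/"));    // https://HtTpS/secrets/
        System.out.println(prependProtocolIfNotExist("/secrets/"));         // https:///secrets/
      }
    }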
diff --git a/plugins/common/src/main/java/com/dremio/http/BufferBasedCompletionHandler.java b/plugins/common/src/main/java/com/dremio/http/BufferBasedCompletionHandler.java
index f617c99084..14145c2d27 100644
--- a/plugins/common/src/main/java/com/dremio/http/BufferBasedCompletionHandler.java
+++ b/plugins/common/src/main/java/com/dremio/http/BufferBasedCompletionHandler.java
@@ -16,6 +16,8 @@
 package com.dremio.http;
 
+import java.nio.ByteBuffer;
+
 import org.asynchttpclient.AsyncCompletionHandlerBase;
 import org.asynchttpclient.AsyncHandler;
 import org.asynchttpclient.HttpResponseBodyPart;
@@ -30,18 +32,33 @@
  * Response processor for async http
  */
 public class BufferBasedCompletionHandler extends AsyncCompletionHandlerBase {
-  private final Logger logger = LoggerFactory.getLogger(BufferBasedCompletionHandler.class);
+  private static final Logger logger = LoggerFactory.getLogger(BufferBasedCompletionHandler.class);
 
   private final ByteBuf outputBuffer;
+  private final int dstOffset;
   private boolean requestFailed = false;
 
-  public BufferBasedCompletionHandler(ByteBuf outputBuffer) {
+  public BufferBasedCompletionHandler(ByteBuf outputBuffer, int dstOffset) {
     this.outputBuffer = outputBuffer;
+    this.outputBuffer.writerIndex(dstOffset);
+    this.dstOffset = dstOffset;
   }
 
   public boolean isRequestFailed() {
     return requestFailed;
   }
 
+  protected ByteBuffer getBodyBytes() {
+    return outputBuffer.nioBuffer(dstOffset, outputBuffer.writerIndex() - dstOffset);
+  }
+
+  /**
+   * Reset the buffer to its original state to make it usable for a retry.
+   */
+  public void reset() {
+    this.requestFailed = false;
+    this.outputBuffer.writerIndex(this.dstOffset);
+  }
+
   @Override
   public State onStatusReceived(HttpResponseStatus status) throws Exception {
     // The REST service provides error information as part of the response
diff --git a/plugins/common/src/main/java/com/dremio/plugins/async/utils/AsyncReadWithRetry.java b/plugins/common/src/main/java/com/dremio/plugins/async/utils/AsyncReadWithRetry.java
index 8cc11a4520..9a7ac313bf 100644
--- a/plugins/common/src/main/java/com/dremio/plugins/async/utils/AsyncReadWithRetry.java
+++ b/plugins/common/src/main/java/com/dremio/plugins/async/utils/AsyncReadWithRetry.java
@@ -28,8 +28,6 @@
 import com.dremio.http.BufferBasedCompletionHandler;
 import com.dremio.io.ExponentialBackoff;
 
-import io.netty.buffer.ByteBuf;
-
 /**
  * Utility class that does read with retry
  */
@@ -54,8 +52,7 @@ public CompletableFuture<Void> read(AsyncHttpClient asyncHttpClient,
                                      MetricsLogger metrics,
                                      Path path,
                                      String threadName,
-                                     ByteBuf dst,
-                                     int dstOffset,
+                                     BufferBasedCompletionHandler responseHandler,
                                      int retryAttemptNum,
                                      ExponentialBackoff backoff) {
@@ -68,8 +65,7 @@ public CompletableFuture<Void> read(AsyncHttpClient asyncHttpClient,
     Request req = requestBuilderFunction.apply(null);
     metrics.startTimer("request");
-    dst.writerIndex(dstOffset);
-    return asyncHttpClient.executeRequest(req, new BufferBasedCompletionHandler(dst))
+    return asyncHttpClient.executeRequest(req, responseHandler)
       .toCompletableFuture()
       .whenComplete((response, throwable) -> {
         metrics.endTimer("request");
@@ -108,7 +104,8 @@ public CompletableFuture<Void> read(AsyncHttpClient asyncHttpClient,
         metrics.endTimer("total");
         metrics.logAllMetrics();
 
-        return read(asyncHttpClient, requestBuilderFunction, metrics, path, threadName, dst, dstOffset, retryAttemptNum + 1, backoff);
+        responseHandler.reset();
+        return read(asyncHttpClient, requestBuilderFunction, metrics, path, threadName, responseHandler, retryAttemptNum + 1, backoff);
       }).thenCompose(Function.identity());
   }
 }
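Taken together, the two changes above move buffer ownership out of the retry loop: the caller now constructs one BufferBasedCompletionHandler up front, and read() rewinds it via reset() before each retry instead of rebuilding a handler per attempt. A hypothetical call-site sketch under those assumptions (the wrapper method, thread name, and some import paths are illustrative, not taken from the commit):

    import java.util.concurrent.CompletableFuture;
    import java.util.function.Function;

    import org.asynchttpclient.AsyncHttpClient;
    import org.asynchttpclient.Request;

    import com.dremio.http.BufferBasedCompletionHandler;
    import com.dremio.io.ExponentialBackoff;
    import com.dremio.io.file.Path;
    import com.dremio.plugins.async.utils.AsyncReadWithRetry;
    import com.dremio.plugins.async.utils.MetricsLogger;

    import io.netty.buffer.ByteBuf;

    final class ReadWithRetryUsageSketch {
      static CompletableFuture<Void> readRange(AsyncReadWithRetry retry,
                                               AsyncHttpClient client,
                                               Function<Void, Request> requestBuilder,
                                               MetricsLogger metrics,
                                               Path path,
                                               ByteBuf dst,
                                               int dstOffset,
                                               ExponentialBackoff backoff) {
        // One handler per logical read; read() itself calls reset() before a retry,
        // which clears the failure flag and rewinds writerIndex to dstOffset so a
        // second attempt overwrites any partially written body.
        BufferBasedCompletionHandler handler = new BufferBasedCompletionHandler(dst, dstOffset);
        return retry.read(client, requestBuilder, metrics, path, "azure-read", handler, 0, backoff);
      }
    }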
diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/WhitelistClassLoader.java b/plugins/common/src/main/java/com/dremio/plugins/pf4j/AllowlistClassLoader.java
similarity index 73%
rename from plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/WhitelistClassLoader.java
rename to plugins/common/src/main/java/com/dremio/plugins/pf4j/AllowlistClassLoader.java
index bb33d17f97..3b1163b541 100644
--- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/WhitelistClassLoader.java
+++ b/plugins/common/src/main/java/com/dremio/plugins/pf4j/AllowlistClassLoader.java
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.dremio.exec.store.hive.pf4j;
+package com.dremio.plugins.pf4j;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,28 +25,28 @@
 import com.google.common.collect.ImmutableList;
 
 /**
- * A classloader delegating to its parent if and only if a class or a resouce
- * belong to the whitelist
+ * A classloader delegating to its parent if and only if a class or a resource
+ * belongs to the allow list
  */
-final class WhitelistClassLoader extends ClassLoader {
+final class AllowlistClassLoader extends ClassLoader {
 
-  private final ImmutableList<String> whitelist;
+  private final ImmutableList<String> allowlist;
 
-  private WhitelistClassLoader(ClassLoader parent, List<String> whitelist) {
+  private AllowlistClassLoader(ClassLoader parent, List<String> allowlist) {
     super(parent);
-    this.whitelist = ImmutableList.copyOf(whitelist);
+    this.allowlist = ImmutableList.copyOf(allowlist);
   }
 
   /**
-   * Wrap an existing classloader to only allow resources and classes present in the whitelist
+   * Wrap an existing classloader to only allow resources and classes present in the allowlist
    * to be found
    *
    * @param parent the parent classloader
-   * @param whitelist the white list of class/resource names prefixes. Prefixes should use '/' character as the delimiter
+   * @param allowlist the allowed list of class/resource name prefixes. Prefixes should use the '/' character as the delimiter
    * @return
    */
-  public static ClassLoader of(ClassLoader parent, List<String> whitelist) {
-    return new WhitelistClassLoader(parent, whitelist);
+  public static ClassLoader of(ClassLoader parent, List<String> allowlist) {
+    return new AllowlistClassLoader(parent, allowlist);
   }
 
   private static boolean matchPackage(List<String> packages, String resourceName)
@@ -55,7 +55,7 @@
   @Override
   public URL getResource(String name) {
-    if (!matchPackage(whitelist, name)) {
+    if (!matchPackage(allowlist, name)) {
       return null;
     }
     return super.getResource(name);
@@ -63,7 +63,7 @@ public URL getResource(String name) {
   @Override
   public Enumeration<URL> getResources(String name) throws IOException {
-    if (!matchPackage(whitelist, name)) {
+    if (!matchPackage(allowlist, name)) {
       return Collections.emptyEnumeration();
     }
     return super.getResources(name);
@@ -71,7 +71,7 @@ public Enumeration<URL> getResources(String name) throws IOException {
   @Override
   public InputStream getResourceAsStream(String name) {
-    if (!matchPackage(whitelist, name)) {
+    if (!matchPackage(allowlist, name)) {
       return null;
     }
     return super.getResourceAsStream(name);
@@ -80,11 +80,9 @@ public InputStream getResourceAsStream(String name) {
   @Override
   public Class<?> loadClass(String name) throws ClassNotFoundException {
     final String resourceName = name.replace('.', '/');
-    if (!matchPackage(whitelist, resourceName)) {
+    if (!matchPackage(allowlist, resourceName)) {
       throw new ClassNotFoundException(name);
     }
     return super.loadClass(name);
   }
-
-
 }
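A small usage sketch makes the renamed loader's contract concrete (illustrative only: AllowlistClassLoader is package-private, so this is written as if inside com.dremio.plugins.pf4j, and the prefix list is arbitrary):

    static void demo() throws Exception {
      // Prefixes use '/' as the delimiter, per the javadoc above.
      ClassLoader parent = Thread.currentThread().getContextClassLoader();
      ClassLoader filtered = AllowlistClassLoader.of(parent, ImmutableList.of("java/", "com/dremio/"));

      Class<?> ok = filtered.loadClass("java.lang.String");   // "java/lang/String" matches "java/"
      // filtered.loadClass("org.example.Hidden");            // would throw ClassNotFoundException
      Enumeration<URL> none = filtered.getResources("org/example/resource.txt"); // empty enumeration
    }

diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibJarPluginLoader.java b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibJarPluginLoader.java
similarity index 86%
rename from plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibJarPluginLoader.java
rename to plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibJarPluginLoader.java
index c29f684d0a..d6cb89febf 100644
--- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibJarPluginLoader.java
+++ b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibJarPluginLoader.java
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.dremio.exec.store.hive.pf4j;
+package com.dremio.plugins.pf4j;
 
 import java.io.IOException;
 import java.net.URL;
@@ -29,6 +29,8 @@
 import org.pf4j.PluginManager;
 import org.slf4j.Logger;
 
+import com.dremio.options.OptionResolver;
+
 /**
  * Customized plugin loader to create a classloader that extracts native libraries before loading them from a plugin
  * bundle.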
@@ -36,13 +38,17 @@ public class NativeLibJarPluginLoader extends JarPluginLoader {
   private static final Logger logger = org.slf4j.LoggerFactory.getLogger(NativeLibJarPluginLoader.class);
 
-  public NativeLibJarPluginLoader(PluginManager pluginManager) {
+  private final OptionResolver optionResolver;
+
+  public NativeLibJarPluginLoader(PluginManager pluginManager, OptionResolver optionResolver) {
     super(pluginManager);
+    this.optionResolver = optionResolver;
   }
 
   @Override
   public ClassLoader loadPlugin(Path pluginPath, PluginDescriptor pluginDescriptor) {
-    final PluginClassLoader pluginClassLoader = new NativeLibPluginClassLoader(pluginPath, this.pluginManager, pluginDescriptor, this.getClass().getClassLoader());
+    final PluginClassLoader pluginClassLoader = new NativeLibPluginClassLoader(pluginPath, this.pluginManager,
+      pluginDescriptor, this.getClass().getClassLoader(), optionResolver);
     pluginClassLoader.addFile(pluginPath.toFile());
 
     // Add the subdirectory for any customer added dependencies.
diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginClassLoader.java b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginClassLoader.java
similarity index 73%
rename from plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginClassLoader.java
rename to plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginClassLoader.java
index aba7b54dbf..23d556a3b9 100644
--- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginClassLoader.java
+++ b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginClassLoader.java
@@ -13,14 +13,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.dremio.exec.store.hive.pf4j;
+package com.dremio.plugins.pf4j;
 
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.List;
+import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
@@ -29,6 +32,7 @@
 import org.pf4j.PluginManager;
 import org.pf4j.util.FileUtils;
 
+import com.dremio.options.OptionResolver;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
@@ -40,7 +44,7 @@ public class NativeLibPluginClassLoader extends PluginClassLoader {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NativeLibPluginClassLoader.class);
 
-  private static ImmutableList<String> PACKAGE_WHITELIST = ImmutableList.<String>builder()
+  private static final List<String> BASE_PACKAGE_ALLOWLIST = ImmutableList.<String>builder()
     .add("java/")
     .add("javax/") // Too broad of a package but until we have a proper API/SDK
@@ -63,14 +67,27 @@ public class NativeLibPluginClassLoader extends PluginClassLoader {
     .build();
 
   private final Path pluginPath;
+  private final List<String> sharedPrefixes;
   private volatile Path tempDirectory;
 
-  public NativeLibPluginClassLoader(Path pluginPath, PluginManager pluginManager,
-      PluginDescriptor pluginDescriptor, ClassLoader parent) {
-    super(pluginManager, pluginDescriptor, WhitelistClassLoader.of(parent, PACKAGE_WHITELIST), false);
+  public NativeLibPluginClassLoader(Path pluginPath, PluginManager pluginManager, PluginDescriptor pluginDescriptor,
+      ClassLoader parent, OptionResolver optionResolver) {
+    super(pluginManager, pluginDescriptor, AllowlistClassLoader.of(parent, getPackageAllowlist(optionResolver)), false);
     this.pluginPath = pluginPath;
+    this.sharedPrefixes = getSharedPrefixes(optionResolver);
   }
 
+  @Override
+  public Class<?> loadClass(String className) throws ClassNotFoundException {
+    synchronized (getClassLoadingLock(className)) {
+      // check if this class starts with one of the shared prefixes - if so use the parent classloader to load it
+      if (sharedPrefixes.stream().anyMatch(className::startsWith)) {
+        return getParent().loadClass(className);
+      }
+
+      return super.loadClass(className);
+    }
+  }
 
   @Override
   protected String findLibrary(String libname) {
@@ -142,4 +159,22 @@ static void validateZipDirectory(Path tempDirectory, final ZipEntry entry) throw
       throw new IOException(String.format("JAR entry %s is outside of the target directory %s. ", entry.getName(), tempDirectory));
     }
   }
+
+  private static List<String> getSharedPrefixes(OptionResolver optionResolver) {
+    String sharedPrefixes = optionResolver != null ?
+      optionResolver.getOption(Pf4jPluginOptions.CLASSLOADER_SHARED_PREFIXES) :
+      Pf4jPluginOptions.CLASSLOADER_SHARED_PREFIXES.getDefault().getStringVal();
+    // ensure each prefix ends with a "." so that only full package names are matched
+    return Arrays.stream(sharedPrefixes.split(","))
+      .map(p -> p + ".")
+      .collect(Collectors.toList());
+  }
+
+  private static List<String> getPackageAllowlist(OptionResolver optionResolver) {
+    return ImmutableList.<String>builder()
+      .addAll(BASE_PACKAGE_ALLOWLIST)
+      .addAll(getSharedPrefixes(optionResolver).stream()
+        .map(p -> p.replace(".", "/")).iterator())
+      .build();
+  }
 }
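Tracing the default option value through the two helpers above makes the transformation concrete. This standalone snippet (not plugin code) reproduces the mapping applied by getSharedPrefixes and getPackageAllowlist to the default from Pf4jPluginOptions, which appears later in this diff:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class SharedPrefixDemo {
      public static void main(String[] args) {
        String option = "javax.xml,org.w3c,org.xml"; // default CLASSLOADER_SHARED_PREFIXES value
        List<String> prefixes = Arrays.stream(option.split(","))
            .map(p -> p + ".")                       // trailing "." so only whole packages match
            .collect(Collectors.toList());
        List<String> allowlist = prefixes.stream()
            .map(p -> p.replace(".", "/"))           // resource-name form for the allowlist
            .collect(Collectors.toList());
        System.out.println(prefixes);  // [javax.xml., org.w3c., org.xml.]
        System.out.println(allowlist); // [javax/xml/, org/w3c/, org/xml/]
      }
    }

diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginManager.java b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginManager.java
similarity index 90%
rename from plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginManager.java
rename to plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginManager.java
index 0c9f2c512a..9ad2a64f41 100644
--- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/pf4j/NativeLibPluginManager.java
+++ b/plugins/common/src/main/java/com/dremio/plugins/pf4j/NativeLibPluginManager.java
@@ -13,7 +13,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.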
*/ -package com.dremio.exec.store.hive.pf4j; +package com.dremio.plugins.pf4j; import java.nio.file.Path; import java.nio.file.Paths; @@ -30,6 +30,7 @@ import com.dremio.common.AutoCloseables; import com.dremio.config.DremioConfig; +import com.dremio.options.OptionResolver; /** * Customized plugin manager to create a classloader that extracts native libraries before loading them from a plugin @@ -41,9 +42,15 @@ public class NativeLibPluginManager extends DefaultPluginManager { private static final String PLUGINS_PATH_DEV_MODE = "../plugins"; + private final OptionResolver optionResolver; + + public NativeLibPluginManager(OptionResolver optionResolver) { + this.optionResolver = optionResolver; + } + @Override protected PluginLoader createPluginLoader() { - return new NativeLibJarPluginLoader(this); + return new NativeLibJarPluginLoader(this, optionResolver); } @Override diff --git a/plugins/common/src/main/java/com/dremio/plugins/pf4j/Pf4jPluginOptions.java b/plugins/common/src/main/java/com/dremio/plugins/pf4j/Pf4jPluginOptions.java new file mode 100644 index 0000000000..f822059b77 --- /dev/null +++ b/plugins/common/src/main/java/com/dremio/plugins/pf4j/Pf4jPluginOptions.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.pf4j; + +import com.dremio.options.Options; +import com.dremio.options.TypeValidators; + +@Options +public interface Pf4jPluginOptions { + /** + * A comma-separated list of class name prefixes that will be loaded using the application classloader instead of + * the plugin classloader. + */ + String CLASSLOADER_SHARED_PREFIXES_KEY = "plugin.classloader.shared_prefixes"; + TypeValidators.StringValidator CLASSLOADER_SHARED_PREFIXES = + new TypeValidators.StringValidator(CLASSLOADER_SHARED_PREFIXES_KEY, "javax.xml,org.w3c,org.xml"); +} diff --git a/plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginClassLoader.java b/plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginClassLoader.java similarity index 99% rename from plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginClassLoader.java rename to plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginClassLoader.java index a3f37c13bc..d5870b6eda 100644 --- a/plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginClassLoader.java +++ b/plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginClassLoader.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.exec.store.hive.pf4j; +package com.dremio.plugins.pf4j; import java.io.IOException; import java.nio.file.Path; diff --git a/plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginManager.java b/plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginManager.java similarity index 83% rename from plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginManager.java rename to plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginManager.java index b1916b2adc..eeb36e2d9a 100644 --- a/plugins/hive-common/src/test/java/com/dremio/exec/store/hive/pf4j/TestNativeLibPluginManager.java +++ b/plugins/common/src/test/java/com/dremio/plugins/pf4j/TestNativeLibPluginManager.java @@ -13,7 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.exec.store.hive.pf4j; +package com.dremio.plugins.pf4j; + +import static org.mockito.Mockito.mock; import java.nio.file.Path; import java.nio.file.Paths; @@ -23,6 +25,7 @@ import org.junit.Test; import com.dremio.config.DremioConfig; +import com.dremio.options.OptionResolver; import com.dremio.test.TemporarySystemProperties; /** @@ -30,8 +33,6 @@ */ public class TestNativeLibPluginManager { - private final NativeLibPluginManager nativeLibPluginManager = new NativeLibPluginManager(); - @Rule public TemporarySystemProperties properties = new TemporarySystemProperties(); @@ -41,6 +42,8 @@ public void testShouldReturnPluginRoot() { properties.set(DremioConfig.PLUGINS_ROOT_PATH_PROPERTY, "/tmp/plugins"); Path expectedPath = Paths.get("/tmp/plugins/connectors"); + OptionResolver optionResolver = mock(OptionResolver.class); + NativeLibPluginManager nativeLibPluginManager = new NativeLibPluginManager(optionResolver); // when Path actualPath = nativeLibPluginManager.createPluginsRoot().get(0); diff --git a/plugins/dataplane/pom.xml b/plugins/dataplane/pom.xml new file mode 100644 index 0000000000..e3606e9efc --- /dev/null +++ b/plugins/dataplane/pom.xml @@ -0,0 +1,87 @@ + + + + + dremio-plugin-parent + com.dremio.plugins + 24.1.0-202306130653310132-d30779f6 + + 4.0.0 + + dremio-dataplane-plugin + + Plugins - Dataplane + + + + com.dremio.sabot + dremio-sabot-kernel + + + com.dremio.plugins + dremio-s3-plugin + ${project.version} + + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + + + org.projectnessie.nessie + nessie-model + + + + + io.findify + s3mock_2.12 + test + + + org.projectnessie.nessie + nessie-compatibility-common + test + + + com.dremio.services + dremio-credential-provider + ${project.version} + test + + + com.dremio + dremio-common + ${project.version} + tests + test-jar + test + + + + + + + ${project.basedir}/src/test/resources + true + + + + diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/CredentialsProviderConstants.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/CredentialsProviderConstants.java new file mode 100644 index 0000000000..b2b5f1b146 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/CredentialsProviderConstants.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane; + +public final class CredentialsProviderConstants { + + public static final String ACCESS_KEY_PROVIDER = "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"; + public static final String NONE_PROVIDER = "org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider"; + public static final String ASSUME_ROLE_PROVIDER = "com.dremio.plugins.s3.store.STSCredentialProviderV1"; + public static final String EC2_METADATA_PROVIDER = "com.amazonaws.auth.InstanceProfileCredentialsProvider"; + public static final String AWS_PROFILE_PROVIDER = "com.dremio.plugins.s3.store.AWSProfileCredentialsProviderV1"; + + private CredentialsProviderConstants() {} + +} diff --git a/dac/ui-lib/themes/dremio-light/components/skeleton.scss b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/NessiePluginConfigConstants.java similarity index 74% rename from dac/ui-lib/themes/dremio-light/components/skeleton.scss rename to plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/NessiePluginConfigConstants.java index 8cc77b1897..799967adb3 100644 --- a/dac/ui-lib/themes/dremio-light/components/skeleton.scss +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/NessiePluginConfigConstants.java @@ -13,10 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package com.dremio.plugins.dataplane; -@use "../../base/components/skeleton"; +public final class NessiePluginConfigConstants { + public static final String MINIMUM_NESSIE_SPECIFICATION_VERSION = "2.0.0"; + private NessiePluginConfigConstants() {} -.dremio-skeleton { - --dremio--skeleton--background: var(--dremio--color--neutral--150); - --dremio--skeleton--border-radius: var(--dremio--radius--1); } diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/AbstractDataplanePluginConfig.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/AbstractDataplanePluginConfig.java new file mode 100644 index 0000000000..484d028592 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/AbstractDataplanePluginConfig.java @@ -0,0 +1,260 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.ACCESS_KEY_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.ASSUME_ROLE_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.NONE_PROVIDER; + +import java.net.URI; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.inject.Provider; +import javax.validation.constraints.Max; +import javax.validation.constraints.Min; + +import org.apache.hadoop.fs.s3a.Constants; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.auth.BearerAuthenticationProvider; +import org.projectnessie.client.http.HttpClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.conf.DefaultCtasFormatSelection; +import com.dremio.exec.catalog.conf.DisplayMetadata; +import com.dremio.exec.catalog.conf.DoNotDisplay; +import com.dremio.exec.catalog.conf.NotMetadataImpacting; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.catalog.conf.Secret; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.VersionedStoragePluginConfig; +import com.dremio.exec.store.dfs.CacheProperties; +import com.dremio.exec.store.dfs.FileSystemConf; +import com.dremio.exec.store.dfs.SchemaMutability; +import com.dremio.io.file.Path; +import com.dremio.options.OptionManager; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.util.awsauth.AWSCredentialsConfigurator; +import com.dremio.service.namespace.SourceState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Strings; + +import io.protostuff.Tag; + +public abstract class AbstractDataplanePluginConfig + extends FileSystemConf + implements VersionedStoragePluginConfig { + // @Tag(1) is used for nessieEndpoint only in Nessie + // @Tag(2) is used for nessieAccessToken only in Nessie + @Tag(3) + @DisplayMetadata(label = "AWS Access Key") + @NotMetadataImpacting + public String awsAccessKey = ""; + + @Tag(4) + @Secret + @DisplayMetadata(label = "AWS Access Secret") + @NotMetadataImpacting + public String awsAccessSecret = ""; + + @Tag(5) + @DisplayMetadata(label = "AWS Root Path") + @NotMetadataImpacting + public String awsRootPath = ""; + + @Tag(6) + @DisplayMetadata(label = "Connection Properties") + @NotMetadataImpacting + public List propertyList; + + @Tag(7) + @DisplayMetadata(label = "IAM Role to Assume") + public String assumedRoleARN; + + // @Tag(8) is used for credentialType in subclasses + + @Tag(9) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable asynchronous access when possible") + public boolean asyncEnabled = true; + + @Tag(10) + @NotMetadataImpacting + @DisplayMetadata(label = "Enable local caching when possible") + public boolean isCachingEnabled = true; + + @Tag(11) + @NotMetadataImpacting + @Min(value = 1, message = "Max percent of total available cache space must be between 1 and 100") + @Max(value = 100, message = "Max percent of total available cache space must be between 1 and 100") + @DisplayMetadata(label = "Max percent of total available cache space to use when possible") + public int maxCacheSpacePct = 100; + + @Tag(12) + @NotMetadataImpacting + @DoNotDisplay + @DisplayMetadata(label = "Default 
CTAS Format") + public DefaultCtasFormatSelection defaultCtasFormat = DefaultCtasFormatSelection.ICEBERG; + // @Tag(13) is reserved + + // @Tag(14) is used for nessieAuthType only in Nessie + + // @Tag(15) is used for awsProfile only in Nessie + + // @Tag(16) is used for secure only in Nessie + @Override + public CacheProperties getCacheProperties() { + return new CacheProperties() { + @Override + public boolean isCachingEnabled(OptionManager optionManager) { + return isCachingEnabled; + } + + @Override + public int cacheMaxSpaceLimitPct() { + return maxCacheSpacePct; + } + }; + } + + private static final Logger logger = LoggerFactory.getLogger(AbstractDataplanePluginConfig.class); + + @Override + public abstract DataplanePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider); + + @Override + public boolean isAsyncEnabled() { + return asyncEnabled; + } + + @Override + public Path getPath() { + validateAWSRootPath(awsRootPath); + return Path.of(awsRootPath); + } + + @Override + public boolean isImpersonationEnabled() { + return false; + } + + @Override + public String getConnection() { + return CloudFileSystemScheme.S3_FILE_SYSTEM_SCHEME.getScheme() + ":///"; + } + + @Override + public boolean isPartitionInferenceEnabled() { + return false; + } + + @Override + public SchemaMutability getSchemaMutability() { + return SchemaMutability.USER_TABLE; + } + + @Override + public List getProperties() { + return propertyList != null ? propertyList : Collections.emptyList(); + } + + @Override + public String getDefaultCtasFormat() { + return defaultCtasFormat.getDefaultCtasFormat(); + } + + protected static String getAccessKeyProvider(List properties, + String accessKey, + String accessSecret) { + if (("".equals(accessKey)) || ("".equals(accessSecret))) { + throw UserException.validationError() + .message("Failure creating S3 connection. 
You must provide AWS Access Key and AWS Access Secret.")
+        .build(logger);
+    }
+    properties.add(new Property(Constants.ACCESS_KEY, accessKey));
+    properties.add(new Property(Constants.SECRET_KEY, accessSecret));
+    return ACCESS_KEY_PROVIDER;
+  }
+
+  protected NessieApiV1 getNessieRestClient(String name, String nessieEndpoint, String nessieAccessToken) {
+    final HttpClientBuilder builder = HttpClientBuilder.builder()
+      .withUri(URI.create(nessieEndpoint));
+
+    if (!Strings.isNullOrEmpty(nessieAccessToken)) {
+      builder.withAuthentication(BearerAuthenticationProvider.create(nessieAccessToken));
+    }
+
+    try {
+      return builder.withTracing(true).build(NessieApiV1.class);
+    } catch (IllegalArgumentException e) {
+      throw UserException.resourceError().message("Unable to create source [%s], " +
+        "%s must be a valid http or https address", name, nessieEndpoint).build();
+    }
+  }
+
+  public boolean hasAssumedRoleARN() {
+    return !Strings.isNullOrEmpty(assumedRoleARN);
+  }
+
+  public AWSCredentialsConfigurator wrapAssumedRoleToProvider(AWSCredentialsConfigurator configurator) {
+    return properties -> {
+      String mainAWSCredProvider = configurator.configureCredentials(properties);
+      if (hasAssumedRoleARN() && !NONE_PROVIDER.equals(mainAWSCredProvider)) {
+        properties.add(new Property(Constants.ASSUMED_ROLE_ARN, assumedRoleARN));
+        properties.add(new Property(Constants.ASSUMED_ROLE_CREDENTIALS_PROVIDER, mainAWSCredProvider));
+        mainAWSCredProvider = ASSUME_ROLE_PROVIDER;
+      }
+      return mainAWSCredProvider;
+    };
+  }
+
+  public void validateAWSRootPath(String path) {
+    if (!isValidAWSRootPath(path)) {
+      throw UserException.validationError()
+        .message("Failure creating or updating Nessie source. Invalid AWS Root Path. You must provide a valid AWS S3 path."
+          + " Example: /bucket-name/path")
+        .build(logger);
+    }
+  }
+
+  @VisibleForTesting
+  protected boolean isValidAWSRootPath(String rootLocation) {
+    //TODO: DX-64664 - Verify the regex used for AWS Path.
+    // Refer to https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html for bucket naming rules
+    Pattern pathPattern = Pattern.compile("^(/?[a-z0-9.-]+)(/[^/]+)*/?$");
+    Matcher m = pathPattern.matcher(rootLocation);
+    return m.find();
+  }
+
+  public abstract void validateNessieAuthSettings(String name);
+
+  public abstract void validatePluginEnabled(SabotContext context);
+
+  public abstract void validateConnectionToNessieRepository(NessieClient nessieClient, String name, SabotContext context);
+
+  public abstract void validateNessieSpecificationVersion(NessieClient nessieClient, String name);
+
+  public abstract Optional<Property> encryptConnection();
+
+  public abstract SourceState getState(NessieClient nessieClient, String name, SabotContext context);
+}
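Since DX-64664 leaves the root-path regex under review, a quick standalone check of what the current pattern accepts and rejects can be useful (sample paths chosen purely for illustration):

    import java.util.regex.Pattern;

    public class RootPathRegexDemo {
      private static final Pattern PATH = Pattern.compile("^(/?[a-z0-9.-]+)(/[^/]+)*/?$");

      public static void main(String[] args) {
        for (String p : new String[] {"/bucket-name/path", "bucket", "/bucket/", "/Bad_Bucket"}) {
          // mirrors isValidAWSRootPath(): anchored find() on the candidate path
          System.out.println(p + " -> " + PATH.matcher(p).find());
        }
        // /bucket-name/path -> true, bucket -> true, /bucket/ -> true, /Bad_Bucket -> false
        // (uppercase letters and '_' fall outside [a-z0-9.-], so the last path is rejected)
      }
    }

diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/DataplanePlugin.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/DataplanePlugin.java
new file mode 100644
index 0000000000..3a5c0f958a
--- /dev/null
+++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/DataplanePlugin.java
@@ -0,0 +1,1799 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.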
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.plugins.dataplane.NessiePluginConfigConstants.MINIMUM_NESSIE_SPECIFICATION_VERSION; +import static com.dremio.plugins.dataplane.store.InformationSchemaCelFilter.getInformationSchemaFilter; +import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import javax.annotation.Nullable; +import javax.inject.Provider; + +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.hadoop.conf.Configuration; +import org.apache.iceberg.BaseTable; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.StaticTableOperations; +import org.apache.iceberg.Table; +import org.apache.iceberg.TableOperations; +import org.apache.iceberg.io.FileIO; +import org.apache.iceberg.types.Types; +import org.apache.iceberg.view.ViewDefinition; +import org.apache.iceberg.view.ViewVersionMetadata; +import org.apache.iceberg.view.ViewVersionMetadataParser; +import org.projectnessie.client.api.NessieApi; + +import com.dremio.common.AutoCloseables; +import com.dremio.common.exceptions.UserException; +import com.dremio.common.logical.FormatPluginConfig; +import com.dremio.common.utils.PathUtils; +import com.dremio.connector.metadata.DatasetHandle; +import com.dremio.connector.metadata.DatasetMetadata; +import com.dremio.connector.metadata.EntityPath; +import com.dremio.connector.metadata.GetDatasetOption; +import com.dremio.connector.metadata.GetMetadataOption; +import com.dremio.connector.metadata.ListPartitionChunkOption; +import com.dremio.connector.metadata.PartitionChunkListing; +import com.dremio.connector.metadata.options.TimeTravelOption; +import com.dremio.exec.ExecConstants; +import com.dremio.exec.catalog.AlterTableOption; +import com.dremio.exec.catalog.DataplaneTableInfo; +import com.dremio.exec.catalog.DataplaneViewInfo; +import com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.RollbackOption; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.TableMutationOptions; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.dotfile.View; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.base.PhysicalOperator; +import com.dremio.exec.physical.base.ViewOptions; +import com.dremio.exec.physical.base.Writer; +import com.dremio.exec.physical.base.WriterOptions; +import 
com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.planner.logical.CreateTableEntry; +import com.dremio.exec.planner.logical.ViewTable; +import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.refresh.AbstractRefreshPlanBuilder; +import com.dremio.exec.planner.sql.handlers.refresh.UnlimitedSplitsMetadataProvider; +import com.dremio.exec.planner.sql.parser.SqlRefreshDataset; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.BlockBasedSplitGenerator; +import com.dremio.exec.store.ChangeInfo; +import com.dremio.exec.store.ConnectionRefusedException; +import com.dremio.exec.store.HttpClientRequestException; +import com.dremio.exec.store.InvalidSpecificationVersionException; +import com.dremio.exec.store.InvalidURLException; +import com.dremio.exec.store.NessieApiProvider; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.SchemaConfig; +import com.dremio.exec.store.SemanticVersionParserException; +import com.dremio.exec.store.StoragePluginRulesFactory; +import com.dremio.exec.store.UnAuthenticatedException; +import com.dremio.exec.store.VersionedDatasetAccessOptions; +import com.dremio.exec.store.dfs.AddPrimaryKey; +import com.dremio.exec.store.dfs.CreateParquetTableEntry; +import com.dremio.exec.store.dfs.DropPrimaryKey; +import com.dremio.exec.store.dfs.FileSystemPlugin; +import com.dremio.exec.store.dfs.FileSystemRulesFactory; +import com.dremio.exec.store.dfs.FormatPlugin; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.exec.store.iceberg.IcebergFormatConfig; +import com.dremio.exec.store.iceberg.IcebergFormatPlugin; +import com.dremio.exec.store.iceberg.IcebergUtils; +import com.dremio.exec.store.iceberg.SchemaConverter; +import com.dremio.exec.store.iceberg.TableSchemaProvider; +import com.dremio.exec.store.iceberg.TableSnapshotProvider; +import com.dremio.exec.store.iceberg.TimeTravelProcessors; +import com.dremio.exec.store.iceberg.ViewHandle; +import com.dremio.exec.store.iceberg.model.IcebergModel; +import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; +import com.dremio.exec.store.iceberg.nessie.IcebergNessieVersionedModel; +import com.dremio.exec.store.iceberg.nessie.IcebergNessieVersionedViews; +import com.dremio.exec.store.metadatarefresh.footerread.FooterReadTableFunction; +import com.dremio.exec.store.parquet.ParquetFormatConfig; +import com.dremio.exec.store.parquet.ParquetScanTableFunction; +import com.dremio.exec.store.parquet.ParquetSplitCreator; +import com.dremio.exec.store.parquet.ScanTableFunction; +import com.dremio.io.file.FileSystem; +import com.dremio.io.file.Path; +import com.dremio.plugins.ExternalNamespaceEntry; +import com.dremio.plugins.ExternalNamespaceEntry.Type; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.NessieClient.NestingMode; +import com.dremio.plugins.NessieClientTableMetadata; +import com.dremio.plugins.s3.store.S3FileSystem; +import com.dremio.plugins.util.ContainerAccessDeniedException; +import com.dremio.plugins.util.awsauth.AWSCredentialsConfigurator; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.dremio.service.catalog.SchemaType; +import 
com.dremio.service.catalog.SearchQuery; +import com.dremio.service.catalog.TableType; +import com.dremio.service.namespace.DatasetHelper; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceService; +import com.dremio.service.namespace.SourceState; +import com.dremio.service.namespace.capabilities.SourceCapabilities; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.telemetry.api.metrics.MetricsInstrumenter; +import com.github.benmanes.caffeine.cache.CacheLoader; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.LoadingCache; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Joiner; +import com.google.common.base.Preconditions; +import com.google.common.base.Suppliers; +import com.google.common.util.concurrent.UncheckedExecutionException; +import com.google.protobuf.ByteString; + +import io.opentelemetry.instrumentation.annotations.WithSpan; + +/** + * Plugin to represent Dremio Dataplane (DDP) Catalog in Dremio Query Engine (DQE). + */ +public class DataplanePlugin extends FileSystemPlugin + implements VersionedPlugin, MutablePlugin, NessieApiProvider { + + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DataplanePlugin.class); + private static final MetricsInstrumenter metrics = new MetricsInstrumenter(DataplanePlugin.class); + private static final String DEFAULT_CATALOG_NAME = "DREMIO"; + private final AbstractDataplanePluginConfig pluginConfig; + private final SabotContext context; + private final String name; + + private final NessieClient nessieClient; + private final Configuration fileSystemConfig; + private final AWSCredentialsConfigurator awsCredentialConfigurator; + + /** + * The cached DremioFileIO instance for the plugin. This is created on-demand - consumers should access this only + * via the getFileIO() method which handles the creation. 
+ */ + private FileIO fileIO; + + private static final Joiner DOT_JOINER = Joiner.on('.'); + + private final LoadingCache, Table> tableLoadingCache = Caffeine + .newBuilder() + .maximumSize(1000) // items + .softValues() + .expireAfterAccess(1, TimeUnit.HOURS) + .build(new TableCacheLoader()); + + private final LoadingCache viewLoadingCache = Caffeine + .newBuilder() + .maximumSize(1000) // items + .softValues() + .expireAfterAccess(1, TimeUnit.HOURS) + .build(new ViewCacheLoader()); + + public DataplanePlugin(AbstractDataplanePluginConfig pluginConfig, + SabotContext context, + String name, + Provider idProvider, + AWSCredentialsConfigurator awsCredentialsConfigurator, + NessieClient nessieClient) { + super(pluginConfig, context, name, idProvider); + this.pluginConfig = pluginConfig; + this.context = context; + this.name = name; + this.awsCredentialConfigurator = awsCredentialsConfigurator; + + this.nessieClient = nessieClient; + + this.fileSystemConfig = initializeFileSystemConfig(); + } + + @Override + public ResolvedVersionContext resolveVersionContext(VersionContext versionContext) { + ResolvedVersionContext resolvedVersionContext = nessieClient.resolveVersionContext(versionContext); + logger.debug("VersionContext '{}' resolved to '{}'", versionContext, resolvedVersionContext); + return resolvedVersionContext; + } + + @Override + @WithSpan + public boolean commitExists(String commitHash) { + return metrics.log("commitExists", () -> nessieClient.commitExists(commitHash)); + } + + @Override + @WithSpan + public Stream listBranches() { + return metrics.log("listBranches", nessieClient::listBranches); + } + + @Override + @WithSpan + public Stream listTags() { + return metrics.log("listTags", nessieClient::listTags); + } + + @Override + @WithSpan + public Stream listReferences() { + return metrics.log("listReferences", nessieClient::listReferences); + } + + @Override + @WithSpan + public Stream listChanges(VersionContext version) { + return metrics.log("listChanges", () -> nessieClient.listChanges(version)); + } + + @Override + @WithSpan + public Stream listEntries(List catalogPath, VersionContext version) { + return metrics.log("listEntries", () -> { + ResolvedVersionContext resolvedVersion = resolveVersionContext(version); + return nessieClient.listEntries(catalogPath, resolvedVersion, + NestingMode.SAME_DEPTH_ONLY, null, null); + }); + } + + @VisibleForTesting + @Override + public Stream listEntriesIncludeNested(List catalogPath, VersionContext version) { + return metrics.log("listEntriesIncludeNested", + () -> listEntriesIncludeNestedHelper(catalogPath, version, null)); + } + + @Override + @WithSpan + public Stream listTablesIncludeNested(List catalogPath, VersionContext version) + { + return metrics.log("listTablesIncludeNested", + () -> listEntriesIncludeNestedHelper(catalogPath, version, EnumSet.of(Type.ICEBERG_TABLE))); + } + + @Override + @WithSpan + public Stream listViewsIncludeNested(List catalogPath, VersionContext version) + { + return metrics.log("listViewsIncludeNested", + () -> listEntriesIncludeNestedHelper(catalogPath, version, EnumSet.of(Type.ICEBERG_VIEW))); + } + + private Stream listEntriesIncludeNestedHelper( + List catalogPath, + VersionContext version, + @Nullable Set contentFilter + ) { + ResolvedVersionContext resolvedVersion = resolveVersionContext(version); + return nessieClient.listEntries(catalogPath, resolvedVersion, + NestingMode.INCLUDE_NESTED, contentFilter, null); + } + + @Override + @WithSpan + public EntityType getType(List tableKey, 
ResolvedVersionContext version) { + return metrics.log("getType()", () -> nessieClient.getVersionedEntityType(tableKey, version)); + } + + @Override + @WithSpan + public void createNamespace(NamespaceKey namespaceKey, VersionContext version) { + logger.debug("Creating namespace '{}' from '{}'", namespaceKey, version); + metrics.log("createNamespace", () -> nessieClient.createNamespace(schemaComponentsWithoutPluginName(namespaceKey), version)); + } + + @Override + @WithSpan + public void deleteFolder(NamespaceKey namespaceKey, VersionContext version) { + logger.debug("Deleting Folder '{}' from '{}'", namespaceKey, version); + metrics.log("deleteFolder", () -> nessieClient.deleteNamespace(schemaComponentsWithoutPluginName(namespaceKey), version)); + } + + @Override + @WithSpan + public void createBranch(String branchName, VersionContext sourceVersion) { + logger.debug("Creating branch '{}' from '{}'", branchName, sourceVersion); + metrics.log("createBranch", () -> nessieClient.createBranch(branchName, sourceVersion)); + } + + @Override + @WithSpan + public void createTag(String tagName, VersionContext sourceVersion) { + logger.debug("Creating tag '{}' from '{}'", tagName, sourceVersion); + metrics.log("createTag", () -> nessieClient.createTag(tagName, sourceVersion)); + } + + @Override + @WithSpan + public void dropBranch(String branchName, String branchHash) { + logger.debug("Drop branch '{}' at '{}'", branchName, branchHash); + metrics.log("dropBranch", () -> nessieClient.dropBranch(branchName, branchHash)); + } + + @Override + @WithSpan + public void dropTag(String tagName, String tagHash) { + logger.debug("Dropping tag '{}' at '{}'", tagName, tagHash); + metrics.log("dropTag", () -> nessieClient.dropTag(tagName, tagHash)); + } + + @Override + @WithSpan + public void mergeBranch(String sourceBranchName, String targetBranchName) { + logger.debug("Merging branch '{}' into '{}'", sourceBranchName, targetBranchName); + metrics.log("mergeBranch", () -> nessieClient.mergeBranch(sourceBranchName, targetBranchName)); + } + + @Override + @WithSpan + public void assignBranch(String branchName, VersionContext sourceVersion) + throws ReferenceConflictException, ReferenceNotFoundException { + logger.debug("Assign branch '{}' to {}", branchName, sourceVersion); + metrics.log("assignBranch", () -> nessieClient.assignBranch(branchName, sourceVersion)); + } + + @Override + @WithSpan + public void assignTag(String tagName, VersionContext sourceVersion) + throws ReferenceConflictException, ReferenceNotFoundException { + logger.debug("Assign tag '{}' to {}", tagName, sourceVersion); + metrics.log("assignTag", () -> nessieClient.assignTag(tagName, sourceVersion)); + } + + @Override + @WithSpan + public Optional getDatasetHandle( + EntityPath datasetPath, + GetDatasetOption... 
options + ) { + + return metrics.log("getDatasetHandle", () -> { + try{ + return getDatasetHandleHelper(datasetPath, options); + } catch (UncheckedExecutionException e) { + throw failedToLoadIcebergTableException(e); + } + }); + } + + private Optional getDatasetHandleHelper(EntityPath datasetPath, GetDatasetOption[] options) { + final ResolvedVersionContext version = Preconditions.checkNotNull( + VersionedDatasetAccessOptions + .getVersionedDatasetAccessOptions(options) + .getVersionContext()); + logger.debug("Getting dataset handle for '{}' at version {} ", + datasetPath, + version); + + List versionedTableKey = datasetPath.getComponents().subList(1, datasetPath.size()); + EntityType entityType = getType(versionedTableKey, version); + final String metadataLocation = nessieClient.getMetadataLocation( + versionedTableKey, + version, + null); + logger.debug("Retrieving Iceberg metadata from location '{}' ", metadataLocation); + if (metadataLocation == null) { + return Optional.empty(); + } + + final String contentId = nessieClient.getContentId(versionedTableKey, version, null) ; + final String uniqueId = getUUIDFromMetadataLocation(metadataLocation); + + switch(entityType) { + case ICEBERG_TABLE: + final Table table = getIcebergTable(datasetPath, metadataLocation); + logger.debug("Retrieved Iceberg table : name {} , location {}, schema {}, current snapshot {}, partition spec {} ", + table.name(), + table.location(), + table.schema(), + table.currentSnapshot(), + table.spec()); + + final TimeTravelOption travelOption = TimeTravelOption.getTimeTravelOption(options); + final TimeTravelOption.TimeTravelRequest timeTravelRequest = + travelOption != null ? travelOption.getTimeTravelRequest() : null; + final TableSnapshotProvider tableSnapshotProvider = + TimeTravelProcessors.getTableSnapshotProvider(datasetPath.getComponents(), timeTravelRequest); + logger.debug("Time travel request {} ", timeTravelRequest); + final TableSchemaProvider tableSchemaProvider = + TimeTravelProcessors.getTableSchemaProvider(timeTravelRequest); + return Optional.of(new TransientIcebergMetadataProvider(datasetPath, + Suppliers.ofInstance(table), + fileSystemConfig, + tableSnapshotProvider, + this, + tableSchemaProvider, + context.getOptionManager(), + contentId, + uniqueId)); + + case ICEBERG_VIEW: + final Optional dialect = nessieClient.getViewDialect(versionedTableKey, version); + if (!dialect.isPresent() || !dialect.get().equals(IcebergNessieVersionedViews.DIALECT)) { + throw UserException.validationError() + .message( + "Dialect is %s and %s is expected", + dialect.isPresent() ? dialect.get() : null, IcebergNessieVersionedViews.DIALECT) + .build(); + } + + final ViewVersionMetadata viewVersionMetadata = readViewMetadata(metadataLocation); + + return Optional.of(ViewHandle + .newBuilder() + .datasetpath(datasetPath) + .viewVersionMetadata(viewVersionMetadata) + .id(contentId) + .uniqueId(uniqueId) + .build()); + default: + return Optional.empty(); + } + } + + private String getUUIDFromMetadataLocation(String metadataLocation){ + return metadataLocation.substring(metadataLocation.lastIndexOf("/")+1, metadataLocation.lastIndexOf(".metadata.json")); + } + + private UserException failedToLoadIcebergTableException(Exception e) { + return UserException.ioExceptionError(e) + .message("Failed to load the Iceberg table. 
The underlying metadata and/or data files may not exist, " + + "or you do not have permission to access them.") + .buildSilently(); + } + + private Table getIcebergTable(EntityPath datasetPath, String metadataLocation) { + return metrics.log("loadIcebergTable", () -> { + LoadingCache, Table> cache = getTableLoadingCache(); + return cache.get(ImmutablePair.of(metadataLocation, datasetPath)); + }); + } + + private LoadingCache, Table> getTableLoadingCache() { + return tableLoadingCache; + } + + @Override + @WithSpan + public PartitionChunkListing listPartitionChunks( + DatasetHandle datasetHandle, + ListPartitionChunkOption... options + ) { + return metrics.log("listPartitionChunks", () -> { + TransientIcebergMetadataProvider icebergMetadataProvider = + datasetHandle.unwrap(TransientIcebergMetadataProvider.class); + return icebergMetadataProvider.listPartitionChunks(options); + }); + } + + @Override + @WithSpan + public DatasetMetadata getDatasetMetadata( + DatasetHandle datasetHandle, + PartitionChunkListing chunkListing, + GetMetadataOption... options + ) { + return metrics.log("getDatasetMetadata", ()-> { + TransientIcebergMetadataProvider icebergMetadataProvider = + datasetHandle.unwrap(TransientIcebergMetadataProvider.class); + return icebergMetadataProvider.getDatasetMetadata(options); + }); + } + + @Override + public boolean containerExists(EntityPath containerPath) { + return false; + } + + @Override + public boolean hasAccessPermission(String user, + NamespaceKey key, + DatasetConfig datasetConfig) { + return true; + } + + @Override + public SourceState getState() { + return this.pluginConfig.getState(nessieClient, name, context); + } + + @Override + public SourceCapabilities getSourceCapabilities() { + return SourceCapabilities.NONE; // TODO(DX-43872) Are there any source capabilities we should add? + } + + @Override + public ViewTable getView( + List tableSchemaPath, + SchemaConfig schemaConfig + ) { + throw new UnsupportedOperationException("Views aren't supported"); + } + + @Override + public Class getRulesFactoryClass() { + return context.getConfig().getClass("dremio.plugins.dfs.rulesfactory", StoragePluginRulesFactory.class, FileSystemRulesFactory.class); + } + + @Override + public void start() throws IOException { + this.pluginConfig.validatePluginEnabled(context); + this.pluginConfig.validateNessieAuthSettings(name); + this.pluginConfig.validateAWSRootPath(getRootLocation()); + try { + this.pluginConfig.validateNessieSpecificationVersion(nessieClient, name); + } catch (InvalidURLException e) { + throw UserException.validationError(e).message("Unable to create source [%s], " + + "Make sure that Nessie endpoint URL is valid.", name).buildSilently(); + } catch (InvalidSpecificationVersionException e) { + throw UserException.validationError(e).message("Unable to create source [%s], Nessie Server should comply with Nessie specification version %s or later." + + " Also make sure that Nessie endpoint URL is valid.", name, MINIMUM_NESSIE_SPECIFICATION_VERSION).buildSilently(); + } catch (SemanticVersionParserException e) { + throw UserException.validationError(e).message("Unable to create source [%s], Cannot parse Nessie specification version." 
+ + " Nessie Server should comply with Nessie specification version %s or later.", name, MINIMUM_NESSIE_SPECIFICATION_VERSION).buildSilently(); + } + try { + this.pluginConfig.validateConnectionToNessieRepository(nessieClient, name, context); + } catch (NoDefaultBranchException e){ + throw UserException.resourceError().message("Unable to create source [%s], No default branch exists in Nessie Server", name).buildSilently(); + } catch (UnAuthenticatedException ex) { + throw UserException.resourceError().message("Unable to create source [%s], Unable to authenticate to the Nessie server. " + + "Make sure that the token is valid and not expired.", name).buildSilently(); + } catch (ConnectionRefusedException ex) { + throw UserException.resourceError().message("Unable to create source [%s], Connection refused while " + + "connecting to the Nessie Server.", name).buildSilently(); + } catch (HttpClientRequestException ex) { + throw UserException.resourceError().message("Unable to create source [%s], Failed to get the default branch from" + + " the Nessie server.", name).buildSilently(); + } + + super.start(); + } + + @Override + @WithSpan + public void createEmptyTable(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + BatchSchema batchSchema, + WriterOptions writerOptions) { + metrics.log("createEmptyTable", + () -> createEmptyTableHelper(tableSchemaPath, schemaConfig, batchSchema, writerOptions)); + } + + private void createEmptyTableHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + BatchSchema batchSchema, + WriterOptions writerOptions) { + final ResolvedVersionContext version = Preconditions.checkNotNull(writerOptions.getVersion()); + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, // Used to create DremioInputFile (valid only for insert/ctas) + version, + this, + schemaConfig.getUserName()); + + logger.debug("Creating empty table: '{}' with version '{}'", tableSchemaComponentsWithoutPluginName, version); + try { + PartitionSpec partitionSpec = Optional.ofNullable(writerOptions.getTableFormatOptions().getIcebergSpecificOptions() + .getIcebergTableProps()).map(props -> props.getDeserializedPartitionSpec()).orElse(null); + icebergModel + .getCreateTableCommitter( + String.join(".", tableSchemaComponentsWithoutPluginName), + icebergModel.getTableIdentifier(getRootLocation()), + batchSchema, + writerOptions.getPartitionColumns(), + null, + partitionSpec + ) + .commit(); + } catch (UncheckedIOException e){ + if(e.getCause() instanceof ContainerAccessDeniedException){ + throw UserException.permissionError(e.getCause()). + message("Access denied while creating table. 
%s", + e.getMessage() + ) + .buildSilently(); + } + throw e; + } + } + + @Override + @WithSpan + public CreateTableEntry createNewTable(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + IcebergTableProps icebergTableProps, + WriterOptions writerOptions, + Map storageOptions, + boolean isResultsTable) { + return metrics.log("createNewTable", + () -> createNewTableHelper(tableSchemaPath, schemaConfig, icebergTableProps, writerOptions)); + } + + private CreateTableEntry createNewTableHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + IcebergTableProps icebergTableProps, + WriterOptions writerOptions) { + Preconditions.checkNotNull(icebergTableProps); + Preconditions.checkNotNull(icebergTableProps.getVersion()); + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + + final String tableName = String.join(".", tableSchemaComponentsWithoutPluginName); + + //Iceberg tables have parquet as the underlying file format + final FormatPlugin formatPlugin = this.getFormatPlugin("parquet"); //This call uses FSPlugin formatCreator + + final String userName = schemaConfig.getUserName(); + + Path path = resolveTableNameToValidPath(tableSchemaPath.toString(), writerOptions.getVersion()); + icebergTableProps = new IcebergTableProps(icebergTableProps); + icebergTableProps.setTableLocation(path.toString()); + icebergTableProps.setTableName(tableName); + Preconditions.checkState(icebergTableProps.getUuid() != null && + !icebergTableProps.getUuid().isEmpty(), "Unexpected state. UUID must be set"); + path = path.resolve(icebergTableProps.getUuid()); + logger.debug("Creating new table '{}' with options '{}' IcebergTableProps '{}' ", + tableSchemaPath, + writerOptions, + icebergTableProps + ); + return new CreateParquetTableEntry( + userName, + this, // This requires FSPlugin features + path.toString(), + icebergTableProps, + writerOptions, + tableSchemaPath); + } + + /** + * Resolve given table path relative to source resolve it to a valid path in filesystem. + * If the table exists, fetch the path from the versioned store. If not, resolve under base location. + * If the resolved path refers to an entity not under the base of the source then a permission error is thrown. 
+   */
+  private Path resolveTableNameToValidPath(String tablePathWithPlugin, ResolvedVersionContext versionContext) {
+    List<String> tablePath = schemaComponentsWithoutPluginName(
+      new NamespaceKey(PathUtils.parseFullPath(tablePathWithPlugin)));
+    final String metadataLocation = nessieClient.getMetadataLocation(tablePath, versionContext, null);
+    logger.info("Retrieving Iceberg metadata from location '{}'", metadataLocation);
+
+    if (metadataLocation == null) {
+      // Table does not exist; resolve a new path under the AWS root folder location,
+      // i.e. the location where the Iceberg table folder will be created.
+      // Format: "<root path>"/"<folder path>/<table name>"
+      Path basePath = pluginConfig.getPath();
+      String relativePathClean = PathUtils.removeLeadingSlash(String.join("/", tablePath));
+      Path combined = basePath.resolve(relativePathClean);
+      PathUtils.verifyNoAccessOutsideBase(basePath, combined);
+      return combined;
+    }
+
+    final Table icebergTable = getIcebergTable(new EntityPath(tablePath), metadataLocation);
+    return Path.of(removeUriScheme(icebergTable.location()));
+  }
+
+  private String removeUriScheme(String uri) {
+    if (StringUtils.isBlank(uri)) {
+      return uri;
+    }
+
+    int urlSchemeIndex = uri.indexOf("://");
+    if (urlSchemeIndex > 0) {
+      uri = uri.substring(urlSchemeIndex + 2);
+    }
+    return uri;
+  }
+
+  @Override
+  public Writer getWriter(PhysicalOperator child,
+                          String location,
+                          WriterOptions options,
+                          OpProps props) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  @WithSpan
+  public void dropTable(NamespaceKey tableSchemaPath,
+                        SchemaConfig schemaConfig,
+                        TableMutationOptions tableMutationOptions) {
+    metrics.log("dropTable",
+      () -> dropTableHelper(tableSchemaPath, schemaConfig, tableMutationOptions));
+  }
+
+  private void dropTableHelper(NamespaceKey tableSchemaPath,
+                               SchemaConfig schemaConfig,
+                               TableMutationOptions tableMutationOptions) {
+    Preconditions.checkNotNull(tableMutationOptions);
+    final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext();
+    Preconditions.checkNotNull(version);
+
+    List<String> tableKeyWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath);
+    // Check if the entity is a table.
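+    // Only ICEBERG_TABLE entries can be dropped here; views are removed via dropView and folders via deleteFolder.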
+ VersionedPlugin.EntityType entityType = nessieClient.getVersionedEntityType(tableKeyWithoutPluginName, version); + if (entityType == EntityType.UNKNOWN ) { + throw UserException.validationError() + .message("%s does not exist ", tableKeyWithoutPluginName) + .buildSilently(); + } else if (entityType != EntityType.ICEBERG_TABLE) { + throw UserException.validationError() + .message("%s is not a TABLE ", tableKeyWithoutPluginName) + .buildSilently(); + } + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableKeyWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + + logger.debug("Dropping table '{}' at version '{}'", tableKeyWithoutPluginName, version); + icebergModel.deleteTable(icebergModel.getTableIdentifier(pluginConfig.awsRootPath)); + } + + @Override + @WithSpan + public void alterTable(NamespaceKey tableSchemaPath, DatasetConfig datasetConfig, AlterTableOption alterTableOption, + SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { + metrics.log("updateTable", + () -> alterTableHelper(tableSchemaPath, schemaConfig, alterTableOption, tableMutationOptions)); + } + + private void alterTableHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + AlterTableOption alterTableOption, + TableMutationOptions tableMutationOptions) { + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + logger.debug("Altering table partition spec for table {} at version {} with options {}", + tableSchemaComponentsWithoutPluginName, + version, + alterTableOption); + icebergModel.alterTable(icebergModel.getTableIdentifier(getRootLocation()), alterTableOption); + } + + @Override + @WithSpan + public void truncateTable(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { + metrics.log("truncateTable", + () -> truncateTableHelper(tableSchemaPath, schemaConfig, tableMutationOptions)); + } + + private void truncateTableHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { + Preconditions.checkNotNull(tableMutationOptions); + + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + logger.debug("Truncating table '{}' at version '{}'", tableSchemaComponentsWithoutPluginName, version); + IcebergTableIdentifier icebergTableIdentifier = icebergModel.getTableIdentifier(getRootLocation()); + icebergModel.truncateTable(icebergTableIdentifier); + } + + @Override + @WithSpan + public void rollbackTable(NamespaceKey tableSchemaPath, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + RollbackOption rollbackOption, + TableMutationOptions tableMutationOptions) { + + metrics.log("rollbackTable", + () -> 
rollbackTableHelper(tableSchemaPath, schemaConfig, rollbackOption, tableMutationOptions)); + } + + private void rollbackTableHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + RollbackOption rollbackOption, + TableMutationOptions tableMutationOptions) { + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + logger.debug("Rollback table {} at version {} with options {}", + tableSchemaComponentsWithoutPluginName, + version, + rollbackOption); + icebergModel.rollbackTable(icebergModel.getTableIdentifier(getRootLocation()), rollbackOption); + } + + @Override + @WithSpan + public boolean createOrUpdateView(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + View view, + ViewOptions viewOptions) { + return metrics.log("createOrUpdateView", + () -> createOrUpdateViewHelper(tableSchemaPath, schemaConfig, view, viewOptions)); + } + + private boolean createOrUpdateViewHelper( + NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + View view, + ViewOptions viewOptions) { + if (!viewOptions.getVersion().isBranch()) { + throw UserException.validationError() + .message("Cannot update a view on a tag or bareCommit") + .buildSilently(); + } + + final ResolvedVersionContext version = Objects.requireNonNull(viewOptions.getVersion()); + final SchemaConverter converter = SchemaConverter.getBuilder().build(); + final List viewKey = schemaComponentsWithoutPluginName(tableSchemaPath); + final org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(getRootLocation()); + + try { + final String metadata = + IcebergUtils.getValidIcebergPath( + path, + fileSystemConfig, + getSystemUserFS().getScheme()); + final IcebergNessieVersionedViews versionedViews = + new IcebergNessieVersionedViews(metadata, nessieClient, fileSystemConfig, this, schemaConfig.getUserName()); + final ViewDefinition viewDefinition = + viewOptions.isViewAlter() + ? 
versionedViews.loadDefinition(viewKey, version) + : ViewDefinition.of( + view.getSql(), + converter.toIcebergSchema(viewOptions.getBatchSchema()), + tableSchemaPath.getRoot(), + view.getWorkspaceSchemaPath()); + + logger.debug( + "{}: '{}' at source path '{}' with version '{}'", + viewOptions.getActionType().name(), + tableSchemaPath.getName(), + tableSchemaPath, + version); + + if (viewOptions.isViewCreate()) { + versionedViews.create(viewKey, viewDefinition, Collections.emptyMap(), version); + return true; + } + + final String metadataLocation = + Objects.requireNonNull(nessieClient.getMetadataLocation(viewKey, version, null)); + final ViewVersionMetadata viewVersionMetadata = readViewMetadata(metadataLocation); + final Map currentProperties = viewVersionMetadata.properties(); + + if (viewOptions.isViewUpdate()) { + versionedViews.replace(viewKey, viewDefinition, currentProperties, version); + return true; + } + + final Map properties = Objects.requireNonNull(viewOptions.getProperties()); + final boolean needUpdate = + properties.entrySet().stream() + .anyMatch(entry -> !entry.getValue().equals(currentProperties.get(entry.getKey()))); + + if (!needUpdate) { + logger.debug("No property need to be updated"); + return false; + } + + versionedViews.replace(viewKey, viewDefinition, properties, version); + + return true; + } catch (UserException e) { + throw e; + } catch (Exception ex) { + logger.debug("Exception while operating on the view", ex); + } + + return false; + } + + @Override + @WithSpan + public void dropView(NamespaceKey tableSchemaPath, + ViewOptions viewOptions, SchemaConfig schemaConfig) { + metrics.log("dropView", () -> dropViewHelper(tableSchemaPath, schemaConfig, viewOptions)); + } + + private void dropViewHelper(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, ViewOptions viewOptions) { + String location = getRootLocation(); + org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(location); + String metadata = IcebergUtils.getValidIcebergPath(path, + fileSystemConfig, + getSystemUserFS().getScheme()); + IcebergNessieVersionedViews versionedViews = + new IcebergNessieVersionedViews(metadata, nessieClient, fileSystemConfig, this, schemaConfig.getUserName()); + List viewKey = schemaComponentsWithoutPluginName(tableSchemaPath); + ResolvedVersionContext version = viewOptions.getVersion(); + logger.debug("Dropping view '{}' at version '{}'", viewKey, version); + versionedViews.drop(viewKey, version); + } + + @Override + @WithSpan + public void addColumns(NamespaceKey tableSchemaPath, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + List columnsToAdd, + TableMutationOptions tableMutationOptions) { + metrics.log("addColumns", + () -> addColumnsHelper(tableSchemaPath, schemaConfig, columnsToAdd, tableMutationOptions)); + } + + private void addColumnsHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + List columnsToAdd, + TableMutationOptions tableMutationOptions) { + Preconditions.checkNotNull(tableMutationOptions); + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + SchemaConverter schemaConverter = SchemaConverter.getBuilder().build(); + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + 
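+    // Note: the versioned model above carries the Nessie table key and commit context; the identifier
+    // below only resolves the table location against the source root.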
IcebergTableIdentifier icebergTableIdentifier = icebergModel.getTableIdentifier(getRootLocation()); + List icebergFields = schemaConverter.toIcebergFields(columnsToAdd); + + logger.debug("Adding columns '{}' to table '{}' at version '{}'", + columnsToAdd, tableSchemaComponentsWithoutPluginName, version); + icebergModel.addColumns(icebergTableIdentifier, icebergFields); + } + + @Override + @WithSpan + public void dropColumn(NamespaceKey tableSchemaPath, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + String columnToDrop, + TableMutationOptions tableMutationOptions) { + metrics.log("dropColumn", + () -> dropColumnHelper(tableSchemaPath, schemaConfig, columnToDrop, tableMutationOptions)); + } + + private void dropColumnHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + String columnToDrop, + TableMutationOptions tableMutationOptions) { + Preconditions.checkNotNull(tableMutationOptions); + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + IcebergTableIdentifier icebergTableIdentifier = icebergModel.getTableIdentifier(getRootLocation()); + logger.debug("Dropping column '{}' for table '{}' at version '{}'", + columnToDrop, tableSchemaComponentsWithoutPluginName, version); + icebergModel.dropColumn(icebergTableIdentifier, columnToDrop); + } + + @Override + @WithSpan + public void changeColumn(NamespaceKey tableSchemaPath, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + String columnToChange, + Field fieldFromSqlColDeclaration, + TableMutationOptions tableMutationOptions) { + metrics.log("changeColumn", + () -> changeColumnHelper(tableSchemaPath, schemaConfig, columnToChange, + fieldFromSqlColDeclaration, tableMutationOptions)); + } + + @Override + @WithSpan + public void addPrimaryKey(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + List columns, + ResolvedVersionContext versionContext) { + metrics.log("addPrimaryKey", + () -> addPrimaryKeyHelper(table, datasetConfig, schemaConfig, columns, versionContext)); + } + + private void addPrimaryKeyHelper(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + List columns, + ResolvedVersionContext versionContext) { + AddPrimaryKey op = new AddPrimaryKey(table, context, datasetConfig, schemaConfig, + getIcebergModelHelper( + table.getPathComponents().subList(1, table.size()), // The key to nessie should not contain the source name, stripping source name. 
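+        // e.g. a key of ["mySource", "folder", "tbl"] becomes ["folder", "tbl"] (illustrative names)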
+ versionContext, + null, + schemaConfig.getUserName()), + validateAndGetPath(table, schemaConfig.getUserName()), + this); + op.performOperation(columns); + } + + @Override + @WithSpan + public void dropPrimaryKey(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + ResolvedVersionContext versionContext) { + metrics.log("dropPrimaryKey", + () -> dropPrimaryKeyHelper(table, datasetConfig, schemaConfig, versionContext)); + } + + private void dropPrimaryKeyHelper(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + ResolvedVersionContext versionContext) { + DropPrimaryKey op = new DropPrimaryKey(table, context, datasetConfig, schemaConfig, + getIcebergModelHelper( + table.getPathComponents().subList(1, table.size()), // The key to nessie should not contain the source name, stripping source name. + versionContext, + null, + schemaConfig.getUserName()), + validateAndGetPath(table, schemaConfig.getUserName()), + this); + op.performOperation(); + } + + @Override + @WithSpan + public List getPrimaryKey(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + ResolvedVersionContext versionContext, + boolean saveInKvStore) { + return metrics.log("getPrimaryKey", + () -> getPrimaryKeyHelper(table, datasetConfig, schemaConfig, versionContext, saveInKvStore)); + } + + private List getPrimaryKeyHelper(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + ResolvedVersionContext versionContext, + boolean saveInKvStore) { + if (datasetConfig.getPhysicalDataset() == null || // PK only supported for physical datasets + datasetConfig.getPhysicalDataset().getIcebergMetadata() == null || // Physical dataset not Iceberg format + !DatasetHelper.isIcebergDataset(datasetConfig)) { // Not native iceberg + return null; + } + + return IcebergUtils.validateAndGeneratePrimaryKey(this, context, table, datasetConfig, schemaConfig, versionContext, saveInKvStore); + } + + @Override + @WithSpan + public List getPrimaryKeyFromMetadata(NamespaceKey table, + DatasetConfig datasetConfig, + SchemaConfig schemaConfig, + ResolvedVersionContext versionContext, + boolean saveInKvStore) { + // For versioned tables, we don't cache the PK in the KV store. Grab the iceberg table + // from the table cache. 
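+    // The first path component is the source name, which is not part of the key stored in Nessie.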
+ List versionedTableKey = table.getPathComponents().subList(1, table.getPathComponents().size()); + final String metadataLocation = nessieClient.getMetadataLocation(versionedTableKey, versionContext, null); + logger.debug("Retrieving Iceberg metadata from location '{}' ", metadataLocation); + if (metadataLocation == null) { + return null; + } + + final Table icebergTable = getIcebergTable(new EntityPath(table.getPathComponents()), metadataLocation); + return IcebergUtils.getPrimaryKeyFromTableMetadata(icebergTable); + } + + private void changeColumnHelper(NamespaceKey tableSchemaPath, + SchemaConfig schemaConfig, + String columnToChange, + Field fieldFromSqlColDeclaration, + TableMutationOptions tableMutationOptions) { + Preconditions.checkNotNull(tableMutationOptions); + final ResolvedVersionContext version = tableMutationOptions.getResolvedVersionContext(); + Preconditions.checkNotNull(version); + + List tableSchemaComponentsWithoutPluginName = schemaComponentsWithoutPluginName(tableSchemaPath); + IcebergModel icebergModel = new IcebergNessieVersionedModel( + tableSchemaComponentsWithoutPluginName, + fileSystemConfig, + getSystemUserFS(), + nessieClient, + null, + version, + this, + schemaConfig.getUserName()); + IcebergTableIdentifier icebergTableIdentifier = icebergModel.getTableIdentifier(getRootLocation()); + + logger.debug("Changing column '{}' to '{}' in table '{}' at version '{}'", + columnToChange, fieldFromSqlColDeclaration, tableSchemaComponentsWithoutPluginName, version); + + icebergModel.changeColumn(icebergTableIdentifier, columnToChange, fieldFromSqlColDeclaration); + } + + @Override + public boolean toggleSchemaLearning(NamespaceKey table, SchemaConfig schemaConfig, boolean enableSchemaLearning) { + throw new UnsupportedOperationException(); + } + + private Configuration initializeFileSystemConfig() { + final Configuration config = FileSystemPlugin.getNewFsConf(); + // We maintain a separate Configuration within DataplanePlugin to keep it isolated from the one + // in FileSystemPlugin. We may migrate away from extending FileSystemPlugin in the future. + updateConfiguration(config); + return config; + } + + private List schemaComponentsWithoutPluginName(NamespaceKey tableSchemaPath) { + Preconditions.checkArgument(tableSchemaPath.hasParent()); + Preconditions.checkArgument(name.equalsIgnoreCase(tableSchemaPath.getRoot())); + return tableSchemaPath.getPathWithoutRoot(); + } + + // This method is used to createFS out of the DataplanePluginConfig with appropriate AWS credential providers. + @Override + protected FileSystem newFileSystem(String userName, OperatorContext operatorContext) throws IOException { + updateConfiguration(getFsConf()); + return super.newFileSystem(userName, operatorContext); + } + + private void updateConfiguration(Configuration configuration) { + configuration.set("fs.dremioS3.impl", S3FileSystem.class.getName()); + + configuration.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, "dremioS3:///"); + + // Enabling the cache will fetch the incorrect S3 file system object from Hadoop File System class. 
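+    // Hadoop caches FileSystem instances by scheme and authority, so a cached instance created for one
+    // source could otherwise be handed to another source with different credentials.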
+ configuration.set("fs.dremioS3.impl.disable.cache", "true"); + + for (Property property : pluginConfig.getProperties()) { + configuration.set(property.name, property.value); + } + + List awsProviderProperties = new ArrayList<>(); + String awsProvider = awsCredentialConfigurator.configureCredentials(awsProviderProperties); + Optional property = pluginConfig.encryptConnection(); + if (property.isPresent()) { + awsProviderProperties.add(property.get()); + } + configuration.set(AWS_CREDENTIALS_PROVIDER, awsProvider); + awsProviderProperties.forEach(p -> configuration.set(p.name, p.value)); + } + + @Override + public Configuration getFsConfCopy() { + return new Configuration(fileSystemConfig); + } + + @Override + public FormatPlugin getFormatPlugin(FormatPluginConfig formatConfig) { + if (formatConfig instanceof ParquetFormatConfig) { // ParquetWriter requires this. + return super.getFormatPlugin(formatConfig); + } else { + return new IcebergFormatPlugin("iceberg", context, (IcebergFormatConfig) formatConfig, this); + } + } + + @Override + public List resolveTableNameToValidPath(List tableSchemaPath) { + List fullPath = new ArrayList<>(PathUtils.toPathComponents(pluginConfig.awsRootPath)); + for (String pathComponent : tableSchemaPath.subList(1 /* need to skip the source name */, tableSchemaPath.size())) { + fullPath.add(PathUtils.removeQuotes(pathComponent)); + } + PathUtils.verifyNoAccessOutsideBase(PathUtils.toFSPath(pluginConfig.awsRootPath), PathUtils.toFSPath(fullPath)); + return fullPath; + } + + @Override + public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, NamespaceService userNamespaceService) { + return true; + } + + @Override + public BlockBasedSplitGenerator.SplitCreator createSplitCreator( + OperatorContext context, + byte[] extendedBytes, + boolean isInternalIcebergTable + ) { + return new ParquetSplitCreator(context, true); + } + + @Override + public ScanTableFunction createScanTableFunction( + FragmentExecutionContext fec, + OperatorContext context, + OpProps props, + TableFunctionConfig functionConfig + ) { + return new ParquetScanTableFunction(fec, context, props, functionConfig); + } + + @Override + public FooterReadTableFunction getFooterReaderTableFunction( + FragmentExecutionContext fec, + OperatorContext context, + OpProps props, + TableFunctionConfig functionConfig + ) { + return new FooterReadTableFunction(fec, context, props, functionConfig); + } + + @Override + public AbstractRefreshPlanBuilder createRefreshDatasetPlanBuilder( + SqlHandlerConfig config, + SqlRefreshDataset sqlRefreshDataset, + UnlimitedSplitsMetadataProvider metadataProvider, + boolean isFullRefresh + ) { + throw new UnsupportedOperationException("Metadata refresh is not supported"); + } + + @Override + public IcebergModel getIcebergModel(IcebergTableProps icebergTableProps, String userName, OperatorContext operatorContext, FileSystem fileSystem) { + List tableKeyAsList = Arrays.asList(icebergTableProps.getTableName().split(Pattern.quote("."))); + ResolvedVersionContext version = icebergTableProps.getVersion(); + return getIcebergModelHelper(tableKeyAsList, version, operatorContext, userName); + } + + private IcebergModel getIcebergModelHelper(List tableKeyAsList, + ResolvedVersionContext version, + OperatorContext operatorContext, + String userName) { + Preconditions.checkNotNull(tableKeyAsList); + Preconditions.checkNotNull(version); + Preconditions.checkNotNull(nessieClient); + return new IcebergNessieVersionedModel( + tableKeyAsList, + getFsConfCopy(), + 
getSystemUserFS(), + nessieClient, + operatorContext, + version, + this, + userName); + } + + @Override + public String getRootLocation() { + return pluginConfig.getPath().toString(); + } + + @Override + public String getTableLocation(IcebergTableProps tableProps) { + return getRootLocation(); + } + + public void commitTableGrpcOperation(List catalogKey, + String metadataLocation, + NessieClientTableMetadata nessieClientTableMetadata, + ResolvedVersionContext resolvedVersionContext, + String baseContentId, + String jobId, + String userName) { + nessieClient.commitTable(catalogKey, metadataLocation, nessieClientTableMetadata, resolvedVersionContext, baseContentId, jobId, userName); + } + + public String getMetadataLocationGrpcOperation(List catalogKey, + ResolvedVersionContext resolvedVersionContext, String jobId) { + return nessieClient.getMetadataLocation(catalogKey, resolvedVersionContext, jobId); + } + + public String getContentGrpcOperation(List catalogKey, + ResolvedVersionContext resolvedVersionContext, String jobId) { + return nessieClient.getContentId(catalogKey, resolvedVersionContext, jobId); + } + + @Override + @WithSpan + public String getContentId(List key, ResolvedVersionContext resolvedVersionContext) { + return nessieClient.getContentId(key, resolvedVersionContext, null); + } + + @Override + public NessieApi getNessieApi() { + return nessieClient.getNessieApi(); + } + + private class TableCacheLoader implements CacheLoader, Table> { + @Override + public Table load(ImmutablePair pair) { + return metrics.log("tableLoad", () -> loadTable(pair)); + } + + private Table loadTable(ImmutablePair pair){ + String metadataLocation = pair.left; + EntityPath datasetPath = pair.right; + final TableOperations tableOperations = new StaticTableOperations(metadataLocation, getFileIO()); + if (tableOperations.current() == null) { + logger.warn("Iceberg table content at metadatalocation {} is null", metadataLocation); + throw failedToLoadIcebergTableException(null); + } + final Table table = new BaseTable(tableOperations, + String.join(".", datasetPath.getComponents().subList(1, datasetPath.size()))); + table.refresh(); + return table; + } + } + + private class ViewCacheLoader implements CacheLoader { + @Override + public ViewVersionMetadata load(String metadataLocation) { + return metrics.log("viewLoad", () -> loadView(metadataLocation)); + } + + private ViewVersionMetadata loadView(String metadataLocation){ + return ViewVersionMetadataParser.read(getFileIO().newInputFile(metadataLocation)); + } + } + + + private Optional determineSchemaId(ExternalNamespaceEntry entry, ResolvedVersionContext resolvedVersionContext) { + if (entry.getNamespace().isEmpty()) { + return Optional.empty(); + } + String schemaId = nessieClient.getContentId(entry.getNamespace(), resolvedVersionContext, null); + if (schemaId == null) { + logger.warn("Failed to retrieve schema information for entry: " + entry.getNameElements()); + } + return Optional.ofNullable(schemaId); + } + + private Optional dataplaneViewInfoRetriever(ExternalNamespaceEntry entry, ResolvedVersionContext resolvedVersionContext) { + try { + List catalogKey = entry.getNameElements(); + String metadataLocation = nessieClient.getMetadataLocation(catalogKey, resolvedVersionContext, null); + EntityPath keyPath = toEntityPath(entry); + Optional schemaId = determineSchemaId(entry, resolvedVersionContext); + ViewVersionMetadata viewVersionMetadata = readViewMetadata(metadataLocation); + + return Optional.of(new DataplaneViewInfo.newBuilder() + 
.viewId(nessieClient.getContentId(catalogKey, resolvedVersionContext, null)) + .spaceId(this.getId().getConfig().getId().getId()) + .viewName(entry.getName()) + .schemaId(schemaId.orElse("")) + .path(keyPath.toString()) + .tag(getUUIDFromMetadataLocation(metadataLocation)) + .createdAt(getViewCreatedAt(viewVersionMetadata)) + .sqlDefinition(getViewSqlDefinition(viewVersionMetadata)) + .sqlContext(getViewSqlContext(viewVersionMetadata)) + .build()); + } catch (Exception e) { + logger.warn("Failed to retrieve information while calling getAllViewInfo", e); + // There is no way to propagate an error in SYS.VIEW queries, + // so we must squash the error and not return results for the table. + return Optional.empty(); + } + } + + @Override + @WithSpan + public Stream getAllViewInfo() { + ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch(); + return listAllEntries(resolvedVersionContext, EnumSet.of(Type.ICEBERG_VIEW)) + .map(entry -> dataplaneViewInfoRetriever(entry, resolvedVersionContext)) + .filter(Optional::isPresent) + .map(Optional::get); + } + + private ViewVersionMetadata readViewMetadata(String metadataLocation) { + return viewLoadingCache.get(metadataLocation); + } + + private static long getViewCreatedAt(ViewVersionMetadata viewVersionMetadata) { + if (viewVersionMetadata != null) { + return viewVersionMetadata.history().get(0).timestampMillis(); + } + return 0L; + } + + private static String getViewSqlDefinition(ViewVersionMetadata viewVersionMetadata) { + if (viewVersionMetadata != null) { + return viewVersionMetadata.definition().sql(); + } + return ""; + } + + private static String getViewSqlContext(ViewVersionMetadata viewVersionMetadata) { + if (viewVersionMetadata != null) { + return viewVersionMetadata.definition().sessionNamespace().toString(); + } + return ""; + } + + private Optional dataplaneTableInfoRetriever(ExternalNamespaceEntry entry, ResolvedVersionContext resolvedVersionContext) { + try { + List catalogKey = entry.getNameElements(); + String metadataLocation = nessieClient.getMetadataLocation(catalogKey, resolvedVersionContext, null); + EntityPath keyPath = toEntityPath(entry); + Table table = getIcebergTable(keyPath, metadataLocation); + String tableId = nessieClient.getContentId(catalogKey, resolvedVersionContext, null); + Optional schemaId = determineSchemaId(entry, resolvedVersionContext); + + return Optional.of(new DataplaneTableInfo.newBuilder() + .tableId(tableId != null ? tableId : "") + .sourceId(this.getId().getConfig().getId().getId()) + .name(entry.getName()) + .schema(schemaId.orElse("")) + .path(keyPath.toString()) + .tag(getUUIDFromMetadataLocation(metadataLocation)) + .formatType(entry.getType()) + .createdAt(table.history().get(0).timestampMillis()) + .build()); + } catch (Exception e) { + logger.warn("Failed to retrieve information while calling getAllTableInfo", e); + // There is no way to propagate an error in SYS.TABLES queries, + // so we must squash the error and not return results for the table. + return Optional.empty(); + } + } + + @Override + @WithSpan + public Stream getAllTableInfo() { + ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch(); + return listAllEntries(resolvedVersionContext, EnumSet.of(Type.ICEBERG_TABLE)) + .map(entry -> dataplaneTableInfoRetriever(entry, resolvedVersionContext)) + .filter(Optional::isPresent) + .map(Optional::get); + } + + /** + * @return Stream of Tables. 
+   * If celFilter is null and searchQuery is also null, there is nothing to search for, so all (nested) entries are listed with no filter.
+   * If celFilter is null but searchQuery is non-null, a filter was requested but none could be derived, so Nessie is not called.
+   * If celFilter is non-null, Nessie is called with that filter.
+   * Returns Stream.empty() if there is nothing to return.
+   */
+  @Override
+  @WithSpan
+  public Stream<com.dremio.service.catalog.Table> getAllInformationSchemaTableInfo(SearchQuery searchQuery) {
+    String celFilter;
+    try {
+      celFilter = getInformationSchemaFilter(searchQuery, false, name);
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaTableInfo", e);
+      return Stream.empty();
+    }
+
+    if (celFilter == null && searchQuery != null) {
+      return Stream.empty();
+    }
+    try {
+      ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch();
+      return listAllEntries(resolvedVersionContext,
+          EnumSet.of(Type.ICEBERG_TABLE, Type.ICEBERG_VIEW), celFilter)
+        .map(this::informationSchemaTableInfoRetriever)
+        .filter(Optional::isPresent)
+        .map(Optional::get);
+    } catch (Exception e) {
+      // If retrieving data from Nessie fails (e.g. the query contains unsupported operations), the query should be ignored.
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaTableInfo", e);
+      return Stream.empty();
+    }
+  }
+
+  private Optional<com.dremio.service.catalog.Table> informationSchemaTableInfoRetriever(ExternalNamespaceEntry entry) {
+    try {
+      return Optional.of(
+        com.dremio.service.catalog.Table.newBuilder()
+          .setCatalogName(DEFAULT_CATALOG_NAME)
+          .setSchemaName(joinPathExcludeEntryWithDots(entry))
+          .setTableName(entry.getName())
+          .setTableType(entry.getType() == Type.ICEBERG_TABLE ? TableType.TABLE : TableType.VIEW)
+          .build());
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling informationSchemaTableInfoRetriever", e);
+      // There is no way to propagate an error in INFORMATION_SCHEMA queries,
+      // so we must squash the error and not return results for the table.
+      return Optional.empty();
+    }
+  }
+
+  /**
+   * @return Stream of Views.
+   * If celFilter is null and searchQuery is also null, there is nothing to search for, so all (nested) entries are listed with no filter.
+   * If celFilter is null but searchQuery is non-null, a filter was requested but none could be derived, so Nessie is not called.
+   * If celFilter is non-null, Nessie is called with that filter.
+   * Returns Stream.empty() if there is nothing to return.
+   */
+  @Override
+  @WithSpan
+  public Stream<com.dremio.service.catalog.View> getAllInformationSchemaViewInfo(SearchQuery searchQuery) {
+    String celFilter;
+    try {
+      celFilter = getInformationSchemaFilter(searchQuery, false, name);
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaViewInfo", e);
+      return Stream.empty();
+    }
+
+    try {
+      ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch();
+      return listAllEntries(resolvedVersionContext, EnumSet.of(Type.ICEBERG_VIEW), celFilter)
+        .map(entry -> informationSchemaViewInfoRetriever(entry, resolvedVersionContext))
+        .filter(Optional::isPresent)
+        .map(Optional::get);
+    } catch (Exception e) {
+      // If retrieving data from Nessie fails (e.g. the query contains unsupported operations), the query should be ignored.
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaViewInfo", e);
+      return Stream.empty();
+    }
+  }
+
+  private Optional<com.dremio.service.catalog.View> informationSchemaViewInfoRetriever(ExternalNamespaceEntry entry, ResolvedVersionContext resolvedVersionContext) {
+    try {
+      String metadataLocation = nessieClient.getMetadataLocation(entry.getNameElements(),
+        resolvedVersionContext, null);
+
+      ViewVersionMetadata viewVersionMetadata = readViewMetadata(metadataLocation);
+
+      return Optional.of(
+        com.dremio.service.catalog.View.newBuilder()
+          .setCatalogName(DEFAULT_CATALOG_NAME)
+          .setSchemaName(joinPathExcludeEntryWithDots(entry))
+          .setTableName(entry.getName())
+          .setViewDefinition(getViewSqlDefinition(viewVersionMetadata))
+          .build());
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling informationSchemaViewInfoRetriever", e);
+      // There is no way to propagate an error in INFORMATION_SCHEMA queries,
+      // so we must squash the error and not return results for the table.
+      return Optional.empty();
+    }
+  }
+
+  /**
+   * @return Stream of Columns.
+   * If celFilter is null and searchQuery is also null, there is nothing to search for, so all (nested) entries are listed with no filter.
+   * If celFilter is null but searchQuery is non-null, a filter was requested but none could be derived, so Nessie is not called.
+   * If celFilter is non-null, Nessie is called with that filter.
+   * Returns Stream.empty() if there is nothing to return.
+   */
+  @Override
+  @WithSpan
+  public Stream<com.dremio.service.catalog.TableSchema> getAllInformationSchemaColumnInfo(SearchQuery searchQuery) {
+    String celFilter;
+    try {
+      celFilter = getInformationSchemaFilter(searchQuery, false, name);
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaColumnInfo", e);
+      return Stream.empty();
+    }
+
+    try {
+      ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch();
+      return listAllEntries(resolvedVersionContext,
+          EnumSet.of(Type.ICEBERG_TABLE, Type.ICEBERG_VIEW), celFilter)
+        .map(entry -> informationSchemaColumnInfoRetriever(entry, resolvedVersionContext))
+        .filter(Optional::isPresent)
+        .map(Optional::get);
+    } catch (Exception e) {
+      // If retrieving data from Nessie fails (e.g. the query contains unsupported operations), the query should be ignored.
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaColumnInfo", e);
+      return Stream.empty();
+    }
+  }
+
+  private Optional<com.dremio.service.catalog.TableSchema> informationSchemaColumnInfoRetriever(ExternalNamespaceEntry entry, ResolvedVersionContext resolvedVersionContext) {
+    try {
+      String metadataLocation = nessieClient.getMetadataLocation(entry.getNameElements(), resolvedVersionContext, null);
+      Schema schema;
+      if (entry.getType() == Type.ICEBERG_TABLE) {
+        EntityPath keyPath = toEntityPath(entry);
+        Table table = getIcebergTable(keyPath, metadataLocation);
+        schema = table.schema();
+      } else if (entry.getType() == Type.ICEBERG_VIEW) {
+        ViewVersionMetadata viewVersionMetadata = readViewMetadata(metadataLocation);
+        schema = viewVersionMetadata.definition().schema();
+      } else {
+        throw new IllegalArgumentException("Unsupported entry type: " + entry.getType());
+      }
+
+      return Optional.of(
+        com.dremio.service.catalog.TableSchema.newBuilder()
+          .setCatalogName(DEFAULT_CATALOG_NAME)
+          .setSchemaName(joinPathExcludeEntryWithDots(entry))
+          .setTableName(entry.getName())
+          .setBatchSchema(serializeIcebergSchema(schema))
+          .build());
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling informationSchemaColumnInfoRetriever", e);
+      // There is no way to propagate an error in INFORMATION_SCHEMA queries,
+      // so we must squash the error and not return results for the table.
+      return Optional.empty();
+    }
+  }
+
+  private SchemaConverter newIcebergSchemaConverter() {
+    return SchemaConverter.getBuilder()
+      .setMapTypeEnabled(context.getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE))
+      .build();
+  }
+
+  private ByteString serializeIcebergSchema(Schema icebergSchema) {
+    SchemaConverter converter = newIcebergSchemaConverter();
+    return ByteString.copyFrom(converter.fromIceberg(icebergSchema).serialize());
+  }
+
+  /**
+   * @return Stream of Schemata.
+   * If celFilter is null and searchQuery is also null, there is nothing to search for, so all (nested) entries are listed with no filter.
+   * If celFilter is null but searchQuery is non-null, a filter was requested but none could be derived, so Nessie is not called.
+   * If celFilter is non-null, Nessie is called with that filter.
+   * Returns Stream.empty() if there is nothing to return.
+   */
+  @Override
+  @WithSpan
+  public Stream<com.dremio.service.catalog.Schema> getAllInformationSchemaSchemataInfo(SearchQuery searchQuery) {
+    String celFilter;
+    try {
+      celFilter = getInformationSchemaFilter(searchQuery, true, name);
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaSchemataInfo", e);
+      return Stream.empty();
+    }
+
+    try {
+      ResolvedVersionContext resolvedVersionContext = nessieClient.getDefaultBranch();
+      return listAllEntries(resolvedVersionContext, EnumSet.of(Type.FOLDER), celFilter)
+        .map(this::informationSchemaSchemataInfoRetriever)
+        .filter(Optional::isPresent)
+        .map(Optional::get);
+    } catch (Exception e) {
+      // If retrieving data from Nessie fails (e.g. the query contains unsupported operations), the query should be ignored.
+      logger.warn("Failed to retrieve information while calling getAllInformationSchemaSchemataInfo", e);
+      return Stream.empty();
+    }
+  }
+
+  @Override
+  public void close() {
+    AutoCloseables.close(
+      new RuntimeException("Error while closing DataplanePlugin."),
+      nessieClient,
+      super::close);
+  }
+
+  private Optional<com.dremio.service.catalog.Schema> informationSchemaSchemataInfoRetriever(ExternalNamespaceEntry entry) {
+    try {
+      return Optional.of(
+        com.dremio.service.catalog.Schema.newBuilder()
+          .setCatalogName(DEFAULT_CATALOG_NAME)
+          .setSchemaName(joinPathIncludeEntryWithDots(entry))
+          .setSchemaOwner("")
+          .setSchemaType(SchemaType.SIMPLE)
+          .setIsMutable(false)
+          .build());
+    } catch (Exception e) {
+      logger.warn("Failed to retrieve information while calling informationSchemaSchemataInfoRetriever", e);
+      // There is no way to propagate an error in INFORMATION_SCHEMA queries,
+      // so we must squash the error and not return results for the schema.
+ return Optional.empty(); + } + } + + private String joinPathExcludeEntryWithDots(ExternalNamespaceEntry entry) { + List path = new ArrayList<>(); + path.add(name); + path.addAll(entry.getNamespace()); + return DOT_JOINER.join(path); + } + + private String joinPathIncludeEntryWithDots(ExternalNamespaceEntry entry) { + List path = new ArrayList<>(); + path.add(name); + path.addAll(entry.getNameElements()); + return DOT_JOINER.join(path); + } + + private EntityPath toEntityPath(ExternalNamespaceEntry entry) { + List path = new ArrayList<>(); + path.add(name); + path.addAll(entry.getNameElements()); + return new EntityPath(path); + } + + private Stream listAllEntries( + ResolvedVersionContext resolvedVersionContext, + Set contentTypeFilter + ) { + return listAllEntries(resolvedVersionContext, contentTypeFilter, null); + } + + private Stream listAllEntries( + ResolvedVersionContext resolvedVersionContext, + Set contentTypeFilter, + @Nullable String celFilter + ) { + return nessieClient + .listEntries(null, resolvedVersionContext, + NestingMode.INCLUDE_NESTED, contentTypeFilter, celFilter); + } + + private FileIO getFileIO() { + if (fileIO == null) { + FileSystem fs = getSystemUserFS(); + Preconditions.checkState(fs != null, + "Plugin must be started before accessing the DremioFileIO instance"); + fileIO = createIcebergFileIO(fs, null, null, null, null); + } + return fileIO; + } +} diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/InformationSchemaCelFilter.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/InformationSchemaCelFilter.java new file mode 100644 index 0000000000..649ec27584 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/InformationSchemaCelFilter.java @@ -0,0 +1,225 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.dremio.plugins.dataplane.store;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dremio.service.catalog.SearchQuery;
+import com.google.common.base.Preconditions;
+
+/**
+ * Converts a SearchQuery into the INFORMATION_SCHEMA-specific CEL filter that is passed to Nessie.
+ * For example, a searchQuery with a structure like
+ * WHERE table_name = "table" AND table_schema LIKE '%nessie.folder%'
+ * is converted to "(entry.name == "table") && ((name + entry.namespace + entry.name).matches(^.*nessie_folder,*$))"
+ */
+public final class InformationSchemaCelFilter {
+
+  private InformationSchemaCelFilter() {}
+
+  private static final String SEARCH_NAME = "SEARCH_NAME";
+  private static final String SEARCH_SCHEMA = "SEARCH_SCHEMA";
+
+  private static String getEqualSearchSchema(boolean isSchemata, String equalsPattern, String path, String sourceName) {
+    if (splitPath(equalsPattern).length == 1) {
+      if (isSchemata) {
+        return null;
+      }
+      return String.format(
+        "entry.namespace == '' && (%s) == (%s)",
+        SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(sourceName),
+        SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(equalsPattern));
+    }
+    return String.format(
+      "(%s) == (%s)",
+      path,
+      SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(equalsPattern));
+  }
+
+  private static String getLikeSearchSchema(boolean isSchemata, String regexPattern, String sourceName) {
+    String regexPatternAsRawCelStringLiteral = SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(regexPattern);
+    String sourceNameAsCelRawStringLiteral = SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(sourceName);
+    String nameElements = sourceNameAsCelRawStringLiteral + " + '.' + entry.namespace + '.' + entry.name";
+    String namespace = sourceNameAsCelRawStringLiteral + " + '.' + entry.namespace";
+    String nameOnly = sourceNameAsCelRawStringLiteral + " + '.' + entry.name";
+    if (isSchemata) {
+      return String.format(
+        "(%s).matches(%s) || (%s).matches(%s) || (%s).matches(%s)",
+        sourceNameAsCelRawStringLiteral,
+        regexPatternAsRawCelStringLiteral,
+        nameElements,
+        regexPatternAsRawCelStringLiteral,
+        nameOnly,
+        regexPatternAsRawCelStringLiteral);
+    }
+    return String.format(
+      "(%s).matches(%s) || (%s).matches(%s)",
+      sourceNameAsCelRawStringLiteral,
+      regexPatternAsRawCelStringLiteral,
+      namespace,
+      regexPatternAsRawCelStringLiteral);
+  }
+
+  /**
+   * @param searchQuery the search query
+   * @param isSchemata whether this is being called for INFORMATION_SCHEMA.SCHEMATA
+   * @return CEL filter for the searchQuery (leaf case)
+   */
+  private static String leafSearchQueryToCelFilter(SearchQuery searchQuery, boolean isSchemata, String sourceName) {
+    String fieldName, path;
+    switch (searchQuery.getQueryCase()) {
+      case EQUALS:
+        String equalsPattern = searchQuery.getEquals().getStringValue();
+        fieldName = searchQuery.getEquals().getField();
+        path = resolvePath(equalsPattern, sourceName, isSchemata);
+        if (fieldName.equals(SEARCH_NAME)) {
+          return String.format("(%s) == entry.name", SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(equalsPattern));
+        } else if (fieldName.equals(SEARCH_SCHEMA)) {
+          return getEqualSearchSchema(isSchemata, equalsPattern, path, sourceName);
+        } else {
+          throw new IllegalStateException(String.format("Field should be SEARCH_NAME or SEARCH_SCHEMA. Provided = %s", fieldName));
+        }
+      case LIKE:
+        String regexPattern = SearchQueryToCelConversionUtilities.likeQueryToRe2Regex(searchQuery.getLike());
+        fieldName = searchQuery.getLike().getField();
+        path = resolvePath(regexPattern, sourceName, isSchemata);
+        if (fieldName.equals(SEARCH_NAME)) {
+          return String.format("entry.name.matches(%s)", SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(regexPattern));
+        } else if (fieldName.equals(SEARCH_SCHEMA)) {
+          if (!containsWildCard(searchQuery.getLike())) { // without a wildcard, LIKE behaves the same as EQUALS
+            return getEqualSearchSchema(isSchemata, searchQuery.getLike().getPattern(), path, sourceName);
+          } else {
+            return getLikeSearchSchema(isSchemata, regexPattern, sourceName);
+          }
+        } else {
+          throw new IllegalStateException(String.format("Field should be SEARCH_NAME or SEARCH_SCHEMA. Provided = %s", fieldName));
+        }
+      default:
+        throw new IllegalStateException(String.format("SearchQuery should be EQUALS or LIKE. Provided = %s", searchQuery.getQueryCase()));
+    }
+  }
+
+  /**
+   * @param searchQuery the search query
+   * @param includeTableNameInQuery whether this is being called for INFORMATION_SCHEMA.SCHEMATA, in which case the entry name is part of the schema path
+   * @return CEL filter for the searchQueries
+   */
+  public static String getInformationSchemaFilter(SearchQuery searchQuery, boolean includeTableNameInQuery, String sourceName) {
+    if (searchQuery == null) {
+      return null;
+    }
+    // check that the name of the source equals the source name
+    List<SearchQuery> searchQueries;
+    String delimiter;
+    switch (searchQuery.getQueryCase()) {
+      case EQUALS:
+        // Intentional fallthrough
+      case LIKE:
+        return leafSearchQueryToCelFilter(searchQuery, includeTableNameInQuery, sourceName);
+      case AND:
+        searchQueries = searchQuery.getAnd().getClausesList();
+        delimiter = " && ";
+        break;
+      case OR:
+        searchQueries = searchQuery.getOr().getClausesList();
+        delimiter = " || ";
+        break;
+      default:
+        throw new IllegalStateException(String.format("SearchQuery should be one of the following to get celFilter: EQUALS, LIKE, AND, OR. Provided = %s", searchQuery.getQueryCase()));
+    }
+
+    List<String> res = new ArrayList<>();
+    for (SearchQuery s : searchQueries) {
+      String subFilter = getInformationSchemaFilter(s, includeTableNameInQuery, sourceName);
+      if (subFilter != null) {
+        subFilter = "(" + subFilter + ")";
+        res.add(subFilter);
+      }
+    }
+
+    return res.isEmpty() ? null : "(" + String.join(delimiter, res) + ")";
+  }
+
+  /**
+   * @return whether entry.namespace needs to be added to the CEL filter. For SCHEMATA, entry.namespace is included
+   * if and only if the value has three or more elements. For example, for the path source.folder1.folder2,
+   * which has three elements, the filter path is source + '.' + entry.namespace + '.' + entry.name,
+   * while for the path source.folder1, which has two elements, the filter path is source + '.' + entry.name.
+   */
+  private static boolean shouldIncludeNamespace(String value) {
+    String[] path = splitPath(value);
+    return path.length > 2;
+  }
+
+  /**
+   * @return the value split on each '.' that is followed by an even number of double quotes, i.e. on dots outside quoted identifiers
+   */
+  private static String[] splitPath(String value) {
+    return value.split("\\.(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)");
+  }
+
+  /**
+   * TODO: DX-58997 - Clarify naming and usage
+   * Generates the CEL path expression for an EQUALS query ONLY.
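+   * For example (illustrative): for the value source.folder1.table with includeEntryName = true, the result is the
+   * source-name CEL literal followed by " + '.' + entry.namespace + '.' + entry.name".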
+ * There are several cases we should handle: + * if the value has a single element (the source itself), only the source name is returned; + * if we need to include the name of the entry, entry.name is appended (preceded by entry.namespace when the path has more than two elements); + * otherwise only entry.namespace is appended. + */ + private static String resolvePath(String value, String sourceName, boolean includeEntryName) { + StringBuilder path = new StringBuilder(SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(sourceName)); + if (splitPath(value).length != 1) { + if (includeEntryName) { + if (shouldIncludeNamespace(value)) { + path.append(" + '.' + entry.namespace + '.' + entry.name"); + } else { + path.append(" + '.' + entry.name"); + } + } else { + path.append(" + '.' + entry.namespace"); + } + } + return path.toString(); + } + + private static boolean containsWildCard(SearchQuery.Like likeQuery) { + String pattern = likeQuery.getPattern(); + String escape = likeQuery.getEscape(); + Preconditions.checkArgument("".equals(escape) || escape.length() == 1, "An escape must be a single character."); + final char e = "".equals(escape) ? '\\' : escape.charAt(0); + boolean escaped = false; + for (int i = 0; i < pattern.length(); i++) { + char c = pattern.charAt(i); + if (escaped) { + escaped = false; + continue; + } + + if (c == e) { + escaped = true; + continue; + } + + if (c == '%') { // an unescaped SQL LIKE wildcard + return true; + } + } + return false; + } +} diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/NessiePluginConfig.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/NessiePluginConfig.java new file mode 100644 index 0000000000..30df050617 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/NessiePluginConfig.java @@ -0,0 +1,271 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.AWS_PROFILE_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.EC2_METADATA_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.NONE_PROVIDER; +import static com.dremio.plugins.dataplane.NessiePluginConfigConstants.MINIMUM_NESSIE_SPECIFICATION_VERSION; +import static org.apache.hadoop.fs.s3a.Constants.SECURE_CONNECTIONS; + +import java.net.URI; +import java.util.Optional; + +import javax.inject.Provider; + +import org.apache.parquet.SemanticVersion; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.auth.BearerAuthenticationProvider; +import org.projectnessie.client.http.HttpClientBuilder; +import org.projectnessie.model.NessieConfiguration; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.conf.AWSAuthenticationType; +import com.dremio.exec.catalog.conf.DisplayMetadata; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.NotMetadataImpacting; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.catalog.conf.Secret; +import com.dremio.exec.catalog.conf.SourceType; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.ConnectionRefusedException; +import com.dremio.exec.store.InvalidSpecificationVersionException; +import com.dremio.exec.store.InvalidURLException; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.SemanticVersionParserException; +import com.dremio.exec.store.UnAuthenticatedException; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.NessieClientImpl; +import com.dremio.plugins.UsernameAwareNessieClientImpl; +import com.dremio.plugins.util.awsauth.AWSCredentialsConfigurator; +import com.dremio.service.namespace.SourceState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Strings; + +import io.protostuff.Tag; + +/** + * Connection configuration for Nessie source Plugin. 
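+ * <p>A minimal programmatic sketch of how the fields fit together (illustrative only; the endpoint and
+ * token below are placeholder values, and in practice the config is populated from the UI form):
+ * <pre>{@code
+ * NessiePluginConfig config = new NessiePluginConfig();
+ * config.nessieEndpoint = "http://localhost:19120/api/v2"; // any reachable Nessie v2 REST endpoint
+ * config.nessieAuthType = NessieAuthType.BEARER;
+ * config.nessieAccessToken = "my-bearer-token"; // required for BEARER, see validateNessieAuthSettings
+ * config.secure = false; // plain HTTP for a local test server
+ * }</pre>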
+ */ +@SourceType(value = "NESSIE", label = "Nessie (Preview)", uiConfig = "nessie-layout.json") +public class NessiePluginConfig extends AbstractDataplanePluginConfig { + @Tag(1) + @DisplayMetadata(label = "Nessie Endpoint URL") + @NotMetadataImpacting + public String nessieEndpoint; + + @Tag(2) + @Secret + @DisplayMetadata(label = "Bearer Token") + @NotMetadataImpacting + public String nessieAccessToken; + + @Tag(8) + public AWSAuthenticationType credentialType = AWSAuthenticationType.ACCESS_KEY; + + @Tag(14) + @NotMetadataImpacting + public NessieAuthType nessieAuthType = NessieAuthType.BEARER; + + @Tag(15) + @DisplayMetadata(label = "AWS Profile") + public String awsProfile; + + @Tag(16) + @NotMetadataImpacting + @DisplayMetadata(label = "Encrypt connection") + public boolean secure = true; + + @Override + public DataplanePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { + NessieClient nessieClient = getNessieClient(name, context); + + return new DataplanePlugin(this, context, name, pluginIdProvider, getAWSCredentialsProvider(), + nessieClient); + } + + @Override + public String getConnection() { + if (Strings.isNullOrEmpty(awsAccessKey) || + Strings.isNullOrEmpty(awsAccessSecret)) { + return "file:///"; + } + return super.getConnection(); + } + + @Override + public void validatePluginEnabled(SabotContext context) { + if (!context.getOptionManager().getOption(NESSIE_PLUGIN_ENABLED)) { + throw UserException.unsupportedError() + .message("Nessie Source is not supported.") + .buildSilently(); + } + } + + @Override + public void validateConnectionToNessieRepository(NessieClient nessieClient, String name, SabotContext context) { + nessieClient.getDefaultBranch(); + } + + @VisibleForTesting + NessieClient getNessieClient(String name, SabotContext context) { + NessieClientImpl nessieClient = + new NessieClientImpl(getNessieRestClient(name, nessieEndpoint, nessieAccessToken), false); + return new UsernameAwareNessieClientImpl(nessieClient, context.getUserService()); + } + + @Override + public Optional encryptConnection() { + return Optional.of(new Property(SECURE_CONNECTIONS, String.valueOf(secure))); + } + + @Override + public void validateNessieAuthSettings(String name) { + if (nessieAuthType == null) { + throw UserException.resourceError().message("Unable to create source [%s], " + + "Invalid Nessie Auth type", name).build(); + } + switch (nessieAuthType) { + case BEARER: + if (Strings.isNullOrEmpty(nessieAccessToken)) { + throw UserException.resourceError().message("Unable to create source [%s], " + + "bearer token provided is empty", name).build(); + } + break; + case NONE: + // Nothing to check for NONE type auth + break; + default: + throw new UnsupportedOperationException(); + } + } + + @Override + public void validateNessieSpecificationVersion(NessieClient nessieClient, String name) { + NessieApiV2 nessieApi = (NessieApiV2) nessieClient.getNessieApi(); + NessieConfiguration nessieConfiguration = getNessieConfig(nessieApi, name); + validateNessieSpecificationVersionHelper(nessieConfiguration.getSpecVersion()); + } + + @VisibleForTesting + NessieConfiguration getNessieConfig(NessieApiV2 nessieApiV2, String name) { + try { + return nessieApiV2.getConfig(); + } catch (Exception e) { + //IllegalArgumentException and NessieServerException are seen when we provide wrong urls in the Nessie endpoint + throw new InvalidURLException(e, "Make sure that Nessie endpoint URL [%s] is valid.", nessieEndpoint); + } + } + + @VisibleForTesting + void 
validateNessieSpecificationVersionHelper(String specificationVersion) { + if (specificationVersion == null) { + // This happens when you are using an older server, or you are trying to pass the v1 endpoint for a supported OSS Nessie server (which supports v2) + throw new InvalidSpecificationVersionException("Nessie Server should comply with Nessie specification version %s or later." + + " Also make sure that Nessie endpoint URL is valid.", MINIMUM_NESSIE_SPECIFICATION_VERSION); + } else { + int result; + try { + result = SemanticVersion.parse(specificationVersion).compareTo(SemanticVersion.parse(MINIMUM_NESSIE_SPECIFICATION_VERSION)); + } catch (SemanticVersion.SemanticVersionParseException ex) { + throw new SemanticVersionParserException(ex, "Cannot parse Nessie specification version %s. " + + "Nessie Server should comply with Nessie specification version %s or later.", specificationVersion, MINIMUM_NESSIE_SPECIFICATION_VERSION); + } + if (result < 0) { + throw new InvalidSpecificationVersionException("Nessie Server should comply with Nessie specification version %s or later." + + " Also make sure that Nessie endpoint URL is valid.", MINIMUM_NESSIE_SPECIFICATION_VERSION); + } + } + } + + @VisibleForTesting + AWSCredentialsConfigurator getAWSCredentialsProvider() { + AWSCredentialsConfigurator awsCredentialsConfigurator = getPrimaryAWSCredentialsProvider(); + return wrapAssumedRoleToProvider(awsCredentialsConfigurator); + } + + private AWSCredentialsConfigurator getPrimaryAWSCredentialsProvider() { + switch (credentialType) { + case ACCESS_KEY: + return properties -> getAccessKeyProvider(properties, awsAccessKey, awsAccessSecret); + case AWS_PROFILE: + return properties -> { + if (awsProfile != null) { + properties.add(new Property("com.dremio.awsProfile", awsProfile)); + } + return AWS_PROFILE_PROVIDER; + }; + case EC2_METADATA: + return properties -> EC2_METADATA_PROVIDER; + case NONE: + return properties -> NONE_PROVIDER; + default: + throw new UnsupportedOperationException("Failure creating S3 connection. Unsupported credential type: " + credentialType); + } + } + + @Override + protected NessieApiV2 getNessieRestClient(String name, String nessieEndpoint, String nessieAccessToken) { + final HttpClientBuilder builder = HttpClientBuilder.builder() + .withUri(URI.create(nessieEndpoint)); + + if (!Strings.isNullOrEmpty(nessieAccessToken)) { + builder.withAuthentication(BearerAuthenticationProvider.create(nessieAccessToken)); + } + + try { + return builder + .withTracing(true) + .withEnableApiCompatibilityCheck(false) + .build(NessieApiV2.class); + } catch (IllegalArgumentException e) { + throw UserException.resourceError().message("Unable to create source [%s], " + + "%s must be a valid http or https address", name, nessieEndpoint).build(); + } + } + + @Override + public SourceState getState(NessieClient nessieClient, String name, SabotContext context) { + try { + this.validateConnectionToNessieRepository(nessieClient, name, context); + this.validateNessieSpecificationVersion(nessieClient, name); + } catch (NoDefaultBranchException e) { + return SourceState.badState(String.format("Could not connect to [%s]. No default branch exists in the Nessie server. " + + "Check your Nessie server", name), e); + } catch (UnAuthenticatedException e) { + return SourceState.badState(String.format("Could not connect to [%s]. Unable to authenticate to the Nessie server. 
" + + "Make sure that the token is valid and not expired", name), e); + } catch (ConnectionRefusedException e) { + return SourceState.badState(String.format("Could not connect to [%s]. Connection refused while connecting to the " + + "Nessie Server. Make sure that the Nessie server is up and running", name), e); + } catch (InvalidURLException e) { + return SourceState.badState(String.format("Could not connect to [%s]. Make sure that Nessie endpoint URL [%s] is valid.", + name, nessieEndpoint), e); + } catch (InvalidSpecificationVersionException e) { + return SourceState.badState(String.format("Could not connect to [%s]. Nessie Server should comply with Nessie specification version %s or later. " + + "Also make sure that Nessie endpoint URL is valid.", name, MINIMUM_NESSIE_SPECIFICATION_VERSION), e); + } catch (SemanticVersionParserException e) { + return SourceState.badState(String.format("Could not connect to [%s]. Cannot parse Nessie specification version. " + + "Nessie Server should comply with Nessie specification version %s or later.", name, MINIMUM_NESSIE_SPECIFICATION_VERSION), e); + } catch (Exception e) { + //For any unknowns + return SourceState.badState(String.format("Could not connect to [%s]. Check your settings, credentials and Nessie server", name), e); + } + + return SourceState.GOOD; + } +} diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/SearchQueryToCelConversionUtilities.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/SearchQueryToCelConversionUtilities.java new file mode 100644 index 0000000000..a295828860 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/SearchQueryToCelConversionUtilities.java @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import java.util.Set; + +import com.dremio.service.catalog.SearchQuery; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +/** + * Helpers for converting from SearchQuery to CEL syntax + */ +final class SearchQueryToCelConversionUtilities { + + private SearchQueryToCelConversionUtilities() {} + + private static final ImmutableMap SEARCH_QUERY_SPECIAL_CHARACTERS_MAP = + ImmutableMap.builder() + .put('%', ".*") + .build(); + // CEL uses RE2 syntax: https://github.com/google/re2/wiki/Syntax + private static final Set RE2_SPECIAL_CHARACTERS = + ImmutableSet.of('*', '+', '?', '(', ')', '|', '[', ']', ':', '^', '\\', '.', '{', '}'); + private static final char RE2_ESCAPE = '\\'; + + /** + * Converts a string into the equivalent CEL raw string literal. By using a + * raw string wrapped in single quotes, only existing single quote characters + * need to be escaped. This is particularly helpful for passing regex patterns + * to a CEL matches filter. + *

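+ * <p>For example (illustrative): convertToRawCelStringLiteral("it's") yields r'it''s', while a regex
+ * pattern such as "^.*foo.*$" is wrapped unchanged as r'^.*foo.*$', with no backslash escaping needed
+ * inside the raw string.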
        + * See the spec. + */ + public static String convertToRawCelStringLiteral(String string) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < string.length(); i++) { + char c = string.charAt(i); + + if ('\'' == c) { + sb.append('\''); + } + sb.append(c); + } + + return String.format("r'%s'", sb); + } + + /** + * Converts a SearchQuery Like object into the equivalent RE2 regex pattern. + */ + public static String likeQueryToRe2Regex(SearchQuery.Like likeQuery) { + String pattern = likeQuery.getPattern(); + String escape = likeQuery.getEscape(); + Preconditions.checkArgument(escape.length() <= 1, "An escape must be a single character."); + + final boolean doEscaping = !escape.isEmpty(); + final char e = doEscaping + ? escape.charAt(0) + : '\0'; // Unused, but must have some value since primitive + + StringBuilder sb = new StringBuilder(); + boolean lastCharacterWasEscape = false; + for (int i = 0; i < pattern.length(); i++) { + char c = pattern.charAt(i); + + if (lastCharacterWasEscape) { + appendRawCharacter(sb, c); + lastCharacterWasEscape = false; + continue; + } + + if (doEscaping && c == e) { + lastCharacterWasEscape = true; + continue; + } + + if (SEARCH_QUERY_SPECIAL_CHARACTERS_MAP.containsKey(c)) { + sb.append(SEARCH_QUERY_SPECIAL_CHARACTERS_MAP.get(c)); + } else { + appendRawCharacter(sb, c); + } + } + + return "^" + sb + "$"; + } + + private static void appendRawCharacter(StringBuilder sb, char c) { + if (RE2_SPECIAL_CHARACTERS.contains(c)) { + sb.append(RE2_ESCAPE); + } + sb.append(c); + } +} diff --git a/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/TransientIcebergMetadataProvider.java b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/TransientIcebergMetadataProvider.java new file mode 100644 index 0000000000..c66ba775f7 --- /dev/null +++ b/plugins/dataplane/src/main/java/com/dremio/plugins/dataplane/store/TransientIcebergMetadataProvider.java @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.plugins.dataplane.store; + +import java.util.function.Supplier; + +import org.apache.hadoop.conf.Configuration; +import org.apache.iceberg.Table; + +import com.dremio.connector.metadata.BytesOutput; +import com.dremio.connector.metadata.DatasetMetadata; +import com.dremio.connector.metadata.EntityPath; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.catalog.VersionedDatasetAdapter; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.store.VersionedDatasetHandle; +import com.dremio.exec.store.iceberg.BaseIcebergExecutionDatasetAccessor; +import com.dremio.exec.store.iceberg.TableSchemaProvider; +import com.dremio.exec.store.iceberg.TableSnapshotProvider; +import com.dremio.options.OptionResolver; +import com.dremio.service.namespace.file.proto.FileConfig; +import com.dremio.service.namespace.file.proto.IcebergFileConfig; +import com.dremio.service.namespace.file.proto.ParquetFileConfig; + +import io.opentelemetry.instrumentation.annotations.WithSpan; + +/** + * Iceberg dataset accessor that provides metadata of an Iceberg table; the returned metadata should not be + * persisted. + */ +public class TransientIcebergMetadataProvider extends BaseIcebergExecutionDatasetAccessor implements VersionedDatasetHandle { + + private final Supplier

<Table> tableSupplier; + private String contentId; + private String uniqueInstanceId; + + public TransientIcebergMetadataProvider( + EntityPath datasetPath, + Supplier<Table>
        tableSupplier, + Configuration configuration, + TableSnapshotProvider tableSnapshotProvider, + MutablePlugin plugin, + TableSchemaProvider tableSchemaProvider, + OptionResolver optionResolver, + String contentId, // This ContentId in the Nessie ContentId and applies to iceberg tables with root pointers in nessie. In other cases can be null or a random string + String uniqueInstanceId // uuid extracted from the Iceberg metadata location + ) { + super(datasetPath, tableSupplier, configuration, tableSnapshotProvider, plugin, tableSchemaProvider, optionResolver); + this.tableSupplier = tableSupplier; + this.contentId = contentId; + this.uniqueInstanceId = uniqueInstanceId; + } + + @Override + protected FileConfig getFileConfig() { + return new IcebergFileConfig() + .setParquetDataFormat(new ParquetFileConfig()) + .asFileConfig() + .setLocation(tableSupplier.get().location()); + } + + @Override + public BytesOutput provideSignature(DatasetMetadata metadata) { + // metadata for dataplane table is not persisted + return BytesOutput.NONE; + } + + @Override + public VersionedPlugin.EntityType getType() { + return VersionedPlugin.EntityType.ICEBERG_TABLE; + } + + @WithSpan + @Override + public DremioTable translateToDremioTable(VersionedDatasetAdapter vda, String accesssUserName) { + return vda.translateIcebergTable(accesssUserName); + } + + @Override + public String getUniqueInstanceId() { + return uniqueInstanceId; + } + + @Override + public String getContentId() { + return contentId; + } +} diff --git a/plugins/dataplane/src/main/resources/nessie-layout.json b/plugins/dataplane/src/main/resources/nessie-layout.json new file mode 100644 index 0000000000..98eaa4e942 --- /dev/null +++ b/plugins/dataplane/src/main/resources/nessie-layout.json @@ -0,0 +1,168 @@ +{ + "sourceType": "NESSIE", + "tags": [ + ], + "metadataRefresh": { + "isFileSystemSource": true + }, + "form": { + "tabs": [ + { + "name": "General", + "isGeneral": true, + "sections": [ + { + "elements" : [ + { + "propName": "config.nessieEndpoint", + "errMsg": "Required" + }, + { + "propName": "config.nessieAuthType", + "label": "Nessie Authentication Type", + "uiType": "container_selection", + "defaultValue": "BEARER", + "options": [ + { + "value": "NONE", + "container": { + "help": { + "position": "top", + "text": "No authentication is enforced on Nessie server." + } + } + }, + { + "value": "BEARER", + "container": { + "elements": [ + { + "propName": "config.nessieAccessToken", + "size": "half", + "secure": true + } + ] + } + } + ] + } + ] + } + ] + }, + { + "name": "Storage", + "sections": [ + { + "name": "Authentication", + "elements": [ + { + "propName": "config.credentialType", + "label": "Authentication Type", + "uiType": "container_selection", + "options": [ + { + "value": "ACCESS_KEY", + "container": { + "elements": [ + { + "propName": "config.awsAccessKey", + "size": "full", + "errMsg": "Both access secret and key are required for Access Key type authentication." 
+ }, + { + "propName": "config.awsAccessSecret", + "size": "full", + "errMsg": "Both access secret and key are required for Access Key type authentication.", + "secure": true + }, + { + "propName": "config.assumedRoleARN", + "size": "full", + "validate": { + "isRequired": false + } + } + ] + } + }, + { + "value": "EC2_METADATA", + "container": { + "elements": [ + { + "propName": "config.assumedRoleARN", + "size": "full", + "validate": { + "isRequired": false + } + } + ] + } + }, + { + "value": "AWS_PROFILE", + "container": { + "elements": [ + { + "propName": "config.awsProfile", + "label": "AWS Profile (optional)", + "placeholder": "default", + "size": "full", + "validate": { + "isRequired": false + } + } + ] + } + }, + { + "value": "NONE", + "container": { + } + } + ] + }, + { + "propName": "config.awsRootPath", + "placeholder": "/bucket-name/optional/folder/path" + }, + { + "emptyLabel": "No properties added", + "addLabel": "Add property", + "propName": "config.propertyList" + }, + { + "propName": "config.secure" + } + ] + } + ] + }, + { + "name": "Advanced Options", + "sections": [ + { + "elements": [ + { + "propName": "config.asyncEnabled" + } + ] + }, + { + "name": "Cache Options", + "checkboxController": "asyncEnabled", + "elements": [ + { + "propName": "config.isCachingEnabled" + }, + { + "propName": "config.maxCacheSpacePct" + } + ] + } + ] + } + ] + } +} diff --git a/plugins/dataplane/src/main/resources/sabot-module.conf b/plugins/dataplane/src/main/resources/sabot-module.conf new file mode 100644 index 0000000000..1072235cec --- /dev/null +++ b/plugins/dataplane/src/main/resources/sabot-module.conf @@ -0,0 +1,22 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +// This file tells Dremio to consider this module when class path scanning. +// This file can also include any supplementary configuration information. +// This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information. + +dremio.classpath.scanning.packages += com.dremio.plugins.dataplane + diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin.java new file mode 100644 index 0000000000..a4a8ef6242 --- /dev/null +++ b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin.java @@ -0,0 +1,259 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.ACCESS_KEY_PROVIDER; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.stream.Collectors; + +import javax.inject.Provider; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.InvalidSpecificationVersionException; +import com.dremio.exec.store.InvalidURLException; +import com.dremio.exec.store.SemanticVersionParserException; +import com.dremio.io.file.Path; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.util.awsauth.AWSCredentialsConfigurator; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.SourceState; + +/** + * Unit tests for DataplanePlugin + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.STRICT_STUBS) +public class TestDataplanePlugin { + + private static final String DATAPLANE_PLUGIN_NAME = "test_dataplane"; + @Mock + private AbstractDataplanePluginConfig pluginConfig; + @Mock + private SabotContext sabotContext; + @Mock + private Provider idProvider; + @Mock + private AWSCredentialsConfigurator awsCredentialsConfigurator; + @Mock + private NessieClient nessieClient; + + // Can't @InjectMocks a String, so initialization is done in @BeforeEach + private DataplanePlugin dataplanePlugin; + + @BeforeEach + public void setup() { + when(awsCredentialsConfigurator.configureCredentials(any())) + .thenReturn(ACCESS_KEY_PROVIDER); + + dataplanePlugin = new DataplanePlugin( + pluginConfig, + sabotContext, + DATAPLANE_PLUGIN_NAME, + idProvider, + awsCredentialsConfigurator, + nessieClient); + } + + @Test + public void createTag() { + // Arrange + String tagName = "tagName"; + VersionContext sourceVersion = VersionContext.ofBranch("branchName"); + + // Act + dataplanePlugin.createTag(tagName, sourceVersion); + + // Assert + verify(nessieClient).createTag(tagName, sourceVersion); + } + + @Test + public void createNamespace() { + final String folderNameWithSpace = "folder with space"; + final String branchName = "branchName"; + // Arrange + NamespaceKey pathWithSourceName = new NamespaceKey( + Arrays.asList( + DATAPLANE_PLUGIN_NAME, + folderNameWithSpace)); + VersionContext sourceVersion = VersionContext.ofBranch(branchName); + + // Act + dataplanePlugin.createNamespace(pathWithSourceName, sourceVersion); + + // Assert + verify(nessieClient).createNamespace( + pathWithSourceName + .getPathComponents() + .stream().skip(1) + .collect(Collectors.toList()), + sourceVersion); + } + + @Test + public void deleteFolder() { + final String folderName = "folder"; + final String branchName = "branchName"; + // Arrange + NamespaceKey pathWithSourceName = new NamespaceKey( + 
Arrays.asList( + DATAPLANE_PLUGIN_NAME, + folderName)); + VersionContext sourceVersion = VersionContext.ofBranch(branchName); + + // Act + dataplanePlugin.deleteFolder(pathWithSourceName, sourceVersion); + + // Assert + verify(nessieClient).deleteNamespace( + pathWithSourceName + .getPathComponents() + .stream().skip(1) + .collect(Collectors.toList()), + sourceVersion); + } + + @Test + public void createNamespaceWithNestedFolder() { + final String rootFolderNameWithSpace = "folder with space"; + final String leafFolderNameWithSpace = "folder with another space"; + final String branchName = "branchName"; + // Arrange + NamespaceKey pathWithSourceName = new NamespaceKey( + Arrays.asList( + DATAPLANE_PLUGIN_NAME, + rootFolderNameWithSpace, + leafFolderNameWithSpace)); + VersionContext sourceVersion = VersionContext.ofBranch(branchName); + + // Act + dataplanePlugin.createNamespace(pathWithSourceName, sourceVersion); + + // Assert + verify(nessieClient).createNamespace( + pathWithSourceName + .getPathComponents() + .stream().skip(1) + .collect(Collectors.toList()), + sourceVersion); + } + + @Test + public void testNessieAuthTypeSettingsCallDuringSetup() { + when(pluginConfig.getPath()) + .thenReturn(Path.of("/test-bucket")); + + //Act + try { + dataplanePlugin.start(); + } catch (Exception e) { + //ignoring this exception as this happened due to super.start() which needs extra config probably + //This call is to verify if validateNessieAuthSettings gets called as in Assert + } + + // Assert + verify(pluginConfig).validateNessieAuthSettings( + DATAPLANE_PLUGIN_NAME); + } + + @Test + public void testValidateConnectionToNessieCallDuringSetup() { + when(pluginConfig.getPath()) + .thenReturn(Path.of("/test-bucket")); + + //Act + try { + dataplanePlugin.start(); + } catch (Exception e) { + //ignoring this exception as this happened due to super.start() which needs extra config probably + //This call is to verify if validateConnectionToNessieRepository gets called + } + + // Assert + verify(pluginConfig).validateConnectionToNessieRepository( + nessieClient, DATAPLANE_PLUGIN_NAME, sabotContext); + } + + @Test + public void testInvalidURLErrorWhileValidatingNessieSpecVersion() { + when(pluginConfig.getPath()) + .thenReturn(Path.of("/test-bucket")); + doThrow(new InvalidURLException()).when(pluginConfig).validateNessieSpecificationVersion(nessieClient, DATAPLANE_PLUGIN_NAME); + + //Act + Assert + assertThatThrownBy(() -> dataplanePlugin.start()) + .hasMessageContaining("Make sure that Nessie endpoint URL is valid"); + } + + @Test + public void testInvalidSpecificationVersionErrorWhileValidatingNessieSpecVersion() { + when(pluginConfig.getPath()) + .thenReturn(Path.of("/test-bucket")); + doThrow(new InvalidSpecificationVersionException()).when(pluginConfig).validateNessieSpecificationVersion(nessieClient, DATAPLANE_PLUGIN_NAME); + + //Act + Assert + assertThatThrownBy(() -> dataplanePlugin.start()) + .hasMessageContaining("Nessie Server should comply with Nessie specification version"); + } + + @Test + public void testSemanticParserErrorWhileValidatingNessieSpecVersion() { + when(pluginConfig.getPath()) + .thenReturn(Path.of("/test-bucket")); + doThrow(new SemanticVersionParserException()).when(pluginConfig).validateNessieSpecificationVersion(nessieClient, DATAPLANE_PLUGIN_NAME); + + //Act + Assert + assertThatThrownBy(() -> dataplanePlugin.start()) + .hasMessageContaining("Cannot parse Nessie specification version"); + } + + @Test + public void testNessieApiCloseCallDuringCleanup() { + //Act + 
dataplanePlugin.close(); + + // Assert + verify(nessieClient).close(); + } + + @Test + public void testPluginState() { + when(pluginConfig.getState(nessieClient, DATAPLANE_PLUGIN_NAME, sabotContext)).thenReturn(SourceState.GOOD); + + //Act and Assert + assertThat(dataplanePlugin.getState()).isEqualTo(SourceState.GOOD); + + //Assert + verify(pluginConfig).getState(nessieClient, DATAPLANE_PLUGIN_NAME, sabotContext); + } +} diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin2.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin2.java new file mode 100644 index 0000000000..d3987bcb6b --- /dev/null +++ b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestDataplanePlugin2.java @@ -0,0 +1,470 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.plugins.dataplane.NessiePluginConfigConstants.MINIMUM_NESSIE_SPECIFICATION_VERSION; +import static com.dremio.test.DremioTest.CLASSPATH_SCAN_RESULT; +import static com.dremio.test.DremioTest.DEFAULT_DREMIO_CONFIG; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.io.TempDir; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Content; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.FetchOption; +import org.projectnessie.model.IcebergTable; +import org.projectnessie.model.LogResponse.LogEntry; +import org.projectnessie.model.NessieConfiguration; +import org.projectnessie.model.Operation; +import org.projectnessie.tools.compatibility.api.NessieAPI; +import org.projectnessie.tools.compatibility.api.NessieBaseUri; +import org.projectnessie.tools.compatibility.api.NessieServerProperty; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; + +import com.dremio.common.AutoCloseables; +import 
com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.CatalogUser; +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.TableMutationOptions; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.physical.base.WriterOptions; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceNotFoundException; +import com.dremio.exec.store.SchemaConfig; +import com.dremio.exec.store.UnAuthenticatedException; +import com.dremio.hadoop.security.alias.DremioCredentialProviderFactory; +import com.dremio.options.OptionManager; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.s3.store.S3FileSystem; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.users.UserService; +import com.dremio.services.credentials.CredentialsService; +import com.google.common.base.Preconditions; + +import io.findify.s3mock.S3Mock; + +/** + * Tests for DataplanePlugin + * + * TODO DX-54476: This was originally intended to be unit tests for + * DataplanePlugin, but it has expanded and become a little cluttered. These + * should be cleaned up and moved to TestDataplanePlugin or + * TestIntegrationDataplanePlugin. + * + * This class should be considered legacy, new tests should not be added here. + * Instead, use one of: + * - TestDataplanePlugin for unit tests + * - TestIntegrationDataplanePlugin for integration tests + */ +@ExtendWith(OlderNessieServersExtension.class) +@NessieServerProperty(name = "nessie.store.validate.namespaces", value = "false") +public class TestDataplanePlugin2 { + // Constants + private static final String S3_PREFIX = "s3://"; + private static final String DATAPLANE_PLUGIN_NAME = "test_dataplane"; + private static final String BUCKET_NAME = "test.dataplane.bucket"; + private static final String METADATA_FOLDER = "metadata"; + private static final String DEFAULT_BRANCH_NAME = "main"; + private static final String USER_NAME = "dataplane-test-user"; + private static final List DEFAULT_TABLE_COMPONENTS = + Arrays.asList("folderA", "folderB", "table1"); + private static final NamespaceKey DEFAULT_NAMESPACE_KEY = + new NamespaceKey(Stream.concat( + Stream.of(DATAPLANE_PLUGIN_NAME), + DEFAULT_TABLE_COMPONENTS.stream()) + .collect(Collectors.toList())); + private static final VersionContext DEFAULT_VERSION_CONTEXT = + VersionContext.ofBranch(DEFAULT_BRANCH_NAME); + private static final BatchSchema DEFAULT_BATCH_SCHEMA = + BatchSchema.newBuilder().build(); + + private static S3Mock s3Mock; + private static int S3_PORT; + @TempDir + static File temporaryDirectory; + private static Path bucketPath; + + // Nessie + private static String nessieUri; + @NessieBaseUri + private static URI nessieBaseUri; + @NessieAPI + private static NessieApiV1 nessieClient; + + // Dataplane Plugin + private static DataplanePlugin dataplanePlugin; + private static DataplanePlugin dataplanePluginNotAuthorized; + private static DataplanePlugin dataplanePluginInvalidAWSBucket; + + @BeforeAll + public static void setUp() throws Exception { + DremioCredentialProviderFactory.configure(() -> + CredentialsService.newInstance(DEFAULT_DREMIO_CONFIG, CLASSPATH_SCAN_RESULT)); + setUpS3Mock(); + setUpNessie(); + setUpDataplanePlugin(); + 
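+ // Note: setUpDataplanePluginInvalidAWSBucket() is intentionally not part of this shared setup;
+ // testInvalidAWSRootPathErrorDuringSetup invokes it directly and asserts that plugin creation fails.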
setUpDataplanePluginNotAuthorized(); + } + + private static void setUpS3Mock() throws IOException { + bucketPath = Paths.get(temporaryDirectory.getAbsolutePath(), BUCKET_NAME); + Files.createDirectory(bucketPath); + + Preconditions.checkState(s3Mock == null); + s3Mock = new S3Mock.Builder() + .withPort(0) + .withFileBackend(temporaryDirectory.getAbsolutePath()) + .build(); + S3_PORT = s3Mock.start().localAddress().getPort(); + } + + private static void setUpNessie() { + nessieUri = nessieBaseUri.resolve("v2").toString(); + } + + private static void setUpDataplanePluginNotAuthorized() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = nessieUri; + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = BUCKET_NAME; + + SabotContext context = mock(SabotContext.class); + OptionManager optionManager = mock(OptionManager.class); + UserService userService = mock(UserService.class); + when(optionManager.getOption(NESSIE_PLUGIN_ENABLED)).thenReturn(true); + when(context.getOptionManager()).thenReturn(optionManager); + when(context.getUserService()).thenReturn(userService); + + // S3Mock settings + nessiePluginConfig.propertyList = Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true") + ); + + NessiePluginConfig mockNessiePluginConfig = spy(nessiePluginConfig); + NessieClient nessieClient = mock(NessieClient.class); + NessieApiV2 nessieApi = mock(NessieApiV2.class); + NessieConfiguration nessieConfiguration = mock(NessieConfiguration.class); + when(nessieClient.getNessieApi()).thenReturn(nessieApi); + when(nessieApi.getConfig()).thenReturn(nessieConfiguration); + when(nessieConfiguration.getSpecVersion()).thenReturn(MINIMUM_NESSIE_SPECIFICATION_VERSION); + when(nessieClient.getDefaultBranch()).thenThrow(UnAuthenticatedException.class); + when(mockNessiePluginConfig.getNessieClient(DATAPLANE_PLUGIN_NAME, context)).thenReturn(nessieClient); + + dataplanePluginNotAuthorized = spy(mockNessiePluginConfig.newPlugin( + context, + DATAPLANE_PLUGIN_NAME, + null)); + } + + private static void setUpDataplanePluginInvalidAWSBucket() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = nessieUri; + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = "/"; + + SabotContext context = mock(SabotContext.class); + OptionManager optionManager = mock(OptionManager.class); + UserService userService = mock(UserService.class); + when(optionManager.getOption(NESSIE_PLUGIN_ENABLED)).thenReturn(true); + when(context.getOptionManager()).thenReturn(optionManager); + when(context.getUserService()).thenReturn(userService); + + // S3Mock settings + nessiePluginConfig.propertyList = Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true") + ); + + dataplanePluginInvalidAWSBucket = spy(nessiePluginConfig.newPlugin( + context, 
+ DATAPLANE_PLUGIN_NAME, + null)); + } + + private static void setUpDataplanePlugin() throws Exception { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = nessieUri; + nessiePluginConfig.nessieAuthType = NessieAuthType.NONE; + nessiePluginConfig.secure = false; + nessiePluginConfig.awsAccessKey = "foo"; // Unused, just needs to be set + nessiePluginConfig.awsAccessSecret = "bar"; // Unused, just needs to be set + nessiePluginConfig.awsRootPath = BUCKET_NAME; + + SabotContext context = mock(SabotContext.class); + OptionManager optionManager = mock(OptionManager.class); + UserService userService = mock(UserService.class); + when(optionManager.getOption(NESSIE_PLUGIN_ENABLED)).thenReturn(true); + when(context.getUserService()).thenReturn(userService); + when(context.getOptionManager()).thenReturn(optionManager); + when(context.getClasspathScan()).thenReturn(CLASSPATH_SCAN_RESULT); + when(context.getFileSystemWrapper()).thenReturn((fs, storageId, conf, operatorContext, enableAsync, isMetadataEnabled) -> fs); + + // S3Mock settings + nessiePluginConfig.propertyList = Arrays.asList( + new Property("fs.s3a.endpoint", "localhost:" + S3_PORT), + new Property("fs.s3a.path.style.access", "true"), + new Property(S3FileSystem.COMPATIBILITY_MODE, "true") + ); + + dataplanePlugin = nessiePluginConfig.newPlugin( + context, + DATAPLANE_PLUGIN_NAME, + null); + dataplanePlugin.start(); + } + + @AfterAll + public static void tearDown() throws Exception { + AutoCloseables.close(dataplanePlugin, dataplanePluginNotAuthorized, dataplanePluginInvalidAWSBucket, nessieClient); + if (s3Mock != null) { + s3Mock.shutdown(); + s3Mock = null; + } + } + + @Test + public void testInvalidAWSRootPathErrorDuringSetup() { + assertThatThrownBy(TestDataplanePlugin2::setUpDataplanePluginInvalidAWSBucket) + .isInstanceOf(UserException.class) + .hasMessageContaining("Failure creating or updating Nessie source. 
Invalid AWS Root Path."); + } + + @Test + public void testNessieWrongToken() throws Exception { + // act+assert + assertThatThrownBy(()->dataplanePluginNotAuthorized.start()) + .isInstanceOf(UserException.class) + .hasMessageContaining("Unable to authenticate to the Nessie server"); + } + + @Test + public void createEmptyTable() + throws NessieNotFoundException, ReferenceNotFoundException, + NoDefaultBranchException, ReferenceConflictException, IOException { + // Arrange + + // Act + dataplanePlugin.createEmptyTable( + DEFAULT_NAMESPACE_KEY, + getSchemaConfig(), + DEFAULT_BATCH_SCHEMA, + makeWriterOptions()); + + // Assert + assertNessieHasCommitForTable(DEFAULT_TABLE_COMPONENTS, Operation.Put.class); + assertNessieHasTable(DEFAULT_TABLE_COMPONENTS); + assertIcebergTableExistsAtSubPath(DEFAULT_TABLE_COMPONENTS); + } + + @Test + public void createEmptyTableBadPluginName() { + // Arrange + NamespaceKey tableKeyWithPluginName = new NamespaceKey( + Stream.concat( + Stream.of("bad" + DATAPLANE_PLUGIN_NAME), + DEFAULT_TABLE_COMPONENTS.stream()) + .collect(Collectors.toList())); + + // Act + Assert + assertThatThrownBy(() -> + dataplanePlugin.createEmptyTable( + tableKeyWithPluginName, + null, + DEFAULT_BATCH_SCHEMA, + makeWriterOptions())) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void createEmptyTableOnlyPluginName() { + // Arrange + NamespaceKey justPluginName = new NamespaceKey(DATAPLANE_PLUGIN_NAME); + + // Act + Assert + assertThatThrownBy(() -> + dataplanePlugin.createEmptyTable( + justPluginName, + getSchemaConfig(), + DEFAULT_BATCH_SCHEMA, + makeWriterOptions())) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void createEmptyTableNoVersionContext() { + // Arrange + WriterOptions noVersionContext = WriterOptions.DEFAULT + .withPartitionColumns(null); + + // Act + Assert + assertThatThrownBy(() -> + dataplanePlugin.createEmptyTable( + DEFAULT_NAMESPACE_KEY, + getSchemaConfig(), + DEFAULT_BATCH_SCHEMA, + noVersionContext)) + .isInstanceOf(NullPointerException.class); + } + + @Test + public void dropTable() throws Exception { + + // Arrange + dataplanePlugin.createEmptyTable( + DEFAULT_NAMESPACE_KEY, + getSchemaConfig(), + DEFAULT_BATCH_SCHEMA, + makeWriterOptions()); + + // Act + dataplanePlugin.dropTable(DEFAULT_NAMESPACE_KEY, + getSchemaConfig(), + defaultTableOption()); + + // Assert + assertNessieHasCommitForTable(DEFAULT_TABLE_COMPONENTS, Operation.Delete.class); + assertNessieDoesNotHaveTable(DEFAULT_TABLE_COMPONENTS); + + // TODO For now, we aren't doing filesystem cleanup, so this check is correct. Might change in the future. 
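+ // dropTable removes the table from Nessie but, for now, leaves the Iceberg data and metadata files on the
+ // filesystem, which is why the metadata folder is still expected to exist on disk.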
+ assertIcebergTableExistsAtSubPath(DEFAULT_TABLE_COMPONENTS); + } + + private WriterOptions makeWriterOptions() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + return WriterOptions.DEFAULT + .withPartitionColumns(null) + .withVersion(dataplanePlugin.resolveVersionContext(DEFAULT_VERSION_CONTEXT)); + } + + private SchemaConfig getSchemaConfig() { + return SchemaConfig.newBuilder(new CatalogUser(USER_NAME)).build(); + } + + private TableMutationOptions defaultTableOption() + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException { + ResolvedVersionContext resolvedVersionContext = dataplanePlugin.resolveVersionContext(DEFAULT_VERSION_CONTEXT); + return TableMutationOptions.newBuilder() + .setResolvedVersionContext(resolvedVersionContext) + .build(); + } + + private void assertNessieHasCommitForTable( + List tableSchemaComponents, + Class operationType) + throws NessieNotFoundException { + final List logEntries = nessieClient + .getCommitLog() + .refName(DEFAULT_BRANCH_NAME) + .fetch(FetchOption.ALL) // Get extended data, including operations + .get() + .getLogEntries(); + assertTrue(logEntries.size() >= 1); + final LogEntry mostRecentLogEntry = logEntries.get(0); // Commits are ordered most recent to earliest + + assertThat(mostRecentLogEntry.getCommitMeta().getAuthor()).isEqualTo(USER_NAME); + + final List operations = mostRecentLogEntry.getOperations(); + assertEquals(1, operations.size()); + final Operation operation = operations.get(0); + assertTrue(operationType.isAssignableFrom(operation.getClass())); + + final ContentKey actualContentKey = operation.getKey(); + final ContentKey expectedContentKey = ContentKey.of(tableSchemaComponents); + assertEquals(expectedContentKey, actualContentKey); + } + + private void assertNessieHasTable(List tableSchemaComponents) + throws NessieNotFoundException { + Map contentsMap = nessieClient + .getContent() + .refName(DEFAULT_BRANCH_NAME) + .key(ContentKey.of(tableSchemaComponents)) + .get(); + + ContentKey expectedContentsKey = ContentKey.of(tableSchemaComponents); + assertTrue(contentsMap.containsKey(expectedContentsKey)); + + String expectedMetadataLocationPrefix = S3_PREFIX + BUCKET_NAME + "/" + + String.join("/", tableSchemaComponents) + "/" + METADATA_FOLDER; + Optional maybeIcebergTable = contentsMap + .get(expectedContentsKey) + .unwrap(IcebergTable.class); + assertTrue(maybeIcebergTable.isPresent()); + assertTrue(maybeIcebergTable.get() + .getMetadataLocation() + .startsWith(expectedMetadataLocationPrefix)); + } + + private void assertNessieDoesNotHaveTable(List tableSchemaComponents) + throws NessieNotFoundException { + Map contentsMap = nessieClient + .getContent() + .refName(DEFAULT_BRANCH_NAME) + .key(ContentKey.of(tableSchemaComponents)) + .get(); + + assertTrue(contentsMap.isEmpty()); + } + + private void assertIcebergTableExistsAtSubPath(List subPath) { + // Iceberg tables on disk have a "metadata" folder in their root, check for "metadata" folder too + Path pathToMetadataFolder = bucketPath + .resolve(String.join("/", subPath)) + .resolve(METADATA_FOLDER); + + assertTrue(Files.exists(pathToMetadataFolder)); + } +} diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestInformationSchemaCelFilter.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestInformationSchemaCelFilter.java new file mode 100644 index 0000000000..816669e5ab --- /dev/null +++ 
b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestInformationSchemaCelFilter.java @@ -0,0 +1,370 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.plugins.dataplane.store.InformationSchemaCelFilter.getInformationSchemaFilter; +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import com.dremio.service.catalog.SearchQuery; +import com.dremio.service.catalog.SearchQuery.And; +import com.dremio.service.catalog.SearchQuery.Equals; +import com.dremio.service.catalog.SearchQuery.Like; +import com.dremio.service.catalog.SearchQuery.Or; + +public class TestInformationSchemaCelFilter { + + private static final String DATAPLANE_PLUGIN_NAME = "test_dataplane"; + private static final String TABLE_NAME = "test_table"; + private static final String FULL_PATH = "test_dataplane.folder1.folder2"; + private static final String SEARCH_NAME = "SEARCH_NAME"; + private static final String SEARCH_SCHEMA = "SEARCH_SCHEMA"; + private static final String QUOTED_PATH = "\"this.is.a.single.folder\".subfolder.table"; + + @Test + public void testBasicEqualSearchQueryToCel() { + Equals equals = Equals.newBuilder() + .setField(SEARCH_NAME) + .setStringValue(TABLE_NAME) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setEquals(equals) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("(r'%s') == entry.name", TABLE_NAME)); + } + + @Test + public void testBasicLikeSearchQueryToCel() { + Like like = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%%%s%%", TABLE_NAME)) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("entry.name.matches(r'^.*%s.*$')", TABLE_NAME)); + } + + /** + * Same Query as we load all schema from Tableau + */ + @Test + public void testTableauGetAllSchemata() { + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(DATAPLANE_PLUGIN_NAME) + .setEscape("\\") + .build(); + + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, true, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(null); + + } + + @Test + public void testGetSchemataFullPath() { + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, true, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("(r'test_dataplane' + '.' + entry.namespace + '.' 
+ entry.name) == (r'%s')", FULL_PATH)); + } + + @Test + public void testQueryNotSchemata() { + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("(r'test_dataplane' + '.' + entry.namespace) == (r'%s')", FULL_PATH)); + } + + @Test + public void testTableauGetExactTableName() { + Like tableNameLike = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(TABLE_NAME) + .setEscape("\\") + .build(); + SearchQuery tableNameLikeQuery = SearchQuery.newBuilder() + .setLike(tableNameLike) + .build(); + Like tablePathLike = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + SearchQuery tablePathLikeQuery = SearchQuery.newBuilder() + .setLike(tablePathLike) + .build(); + And and = And.newBuilder() + .addClauses(tableNameLikeQuery) + .addClauses(tablePathLikeQuery) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setAnd(and) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("((entry.name.matches(r'^%s$')) && ((r'%s' + '.' + entry.namespace) == (r'%s')))", TABLE_NAME, DATAPLANE_PLUGIN_NAME, FULL_PATH)); + } + + @Test + public void testTableauStartWith() { + Like tableNameLike = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%s%%", TABLE_NAME)) + .setEscape("\\") + .build(); + SearchQuery tableNameLikeQuery = SearchQuery.newBuilder() + .setLike(tableNameLike) + .build(); + Like tablePathLike = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + SearchQuery tablePathLikeQuery = SearchQuery.newBuilder() + .setLike(tablePathLike) + .build(); + And and = And.newBuilder() + .addClauses(tableNameLikeQuery) + .addClauses(tablePathLikeQuery) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setAnd(and) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("((entry.name.matches(r'^%s.*$')) && ((r'%s' + '.' + entry.namespace) == (r'%s')))", TABLE_NAME, DATAPLANE_PLUGIN_NAME, FULL_PATH)); + } + + @Test + public void testTableauContains() { + Like tableNameLike = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%%%s%%", TABLE_NAME)) + .setEscape("\\") + .build(); + SearchQuery tableNameLikeQuery = SearchQuery.newBuilder() + .setLike(tableNameLike) + .build(); + Like tablePathLike = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + SearchQuery tablePathLikeQuery = SearchQuery.newBuilder() + .setLike(tablePathLike) + .build(); + And and = And.newBuilder() + .addClauses(tableNameLikeQuery) + .addClauses(tablePathLikeQuery) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setAnd(and) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("((entry.name.matches(r'^.*%s.*$')) && ((r'%s' + '.' 
+ entry.namespace) == (r'%s')))", TABLE_NAME, DATAPLANE_PLUGIN_NAME, FULL_PATH)); + } + + @Test + public void testComplexQuery() { + Like tableNameLike = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%%%s%%", TABLE_NAME)) + .setEscape("\\") + .build(); + SearchQuery tableNameLikeQuery = SearchQuery.newBuilder() + .setLike(tableNameLike) + .build(); + Like tablePathLike = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(FULL_PATH) + .setEscape("\\") + .build(); + SearchQuery tablePathLikeQuery = SearchQuery.newBuilder() + .setLike(tablePathLike) + .build(); + Equals tableNameEquals = Equals.newBuilder() + .setField(SEARCH_NAME) + .setStringValue(String.format("%%%s%%", TABLE_NAME)) + .build(); + SearchQuery tableNameEqualQuery = SearchQuery.newBuilder() + .setEquals(tableNameEquals) + .build(); + Equals tablePathEqual = Equals.newBuilder() + .setField(SEARCH_SCHEMA) + .setStringValue(FULL_PATH) + .build(); + SearchQuery tablePathEqualQuery = SearchQuery.newBuilder() + .setEquals(tablePathEqual) + .build(); + And and = And.newBuilder() + .addClauses(tableNameLikeQuery) + .addClauses(tablePathLikeQuery) + .build(); + And and2 = And.newBuilder() + .addClauses(tableNameEqualQuery) + .addClauses(tablePathEqualQuery) + .build(); + SearchQuery andQuery1 = SearchQuery.newBuilder() + .setAnd(and) + .build(); + SearchQuery andQuery2 = SearchQuery.newBuilder() + .setAnd(and2) + .build(); + Or or = Or.newBuilder() + .addClauses(andQuery1) + .addClauses(andQuery2) + .build(); + SearchQuery searchQuery2 = SearchQuery.newBuilder() + .setOr(or) + .build(); + assertThat(getInformationSchemaFilter(searchQuery2, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("((((entry.name.matches(r'^.*%s.*$')) && ((r'%s' + '.' + entry.namespace) == (r'%s')))) || ((((r'%%%s%%') == entry.name) && ((r'%s' + '.' + entry.namespace) == (r'%s')))))", TABLE_NAME, DATAPLANE_PLUGIN_NAME, FULL_PATH,TABLE_NAME, DATAPLANE_PLUGIN_NAME, FULL_PATH)); + } + + @Test + public void testQuotedPathEqualsQuery() { + Equals equals = Equals.newBuilder() + .setField(SEARCH_SCHEMA) + .setStringValue(QUOTED_PATH) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setEquals(equals) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo("(r'test_dataplane' + '.' + entry.namespace) == (r'\"this.is.a.single.folder\".subfolder.table')"); + } + + @Test + public void testQuotedPathLikeQuery() { + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(QUOTED_PATH) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo("(r'test_dataplane' + '.' 
+ entry.namespace) == (r'\"this.is.a.single.folder\".subfolder.table')"); + } + + @Test + public void testInvalidQuery() { + Equals equals = Equals.newBuilder() + .setField("WRONG_FIELD") + .setStringValue(QUOTED_PATH) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setEquals(equals) + .build(); + Assertions.assertThrows(IllegalStateException.class, () -> getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)); + } + + @Test + public void testNullQuery() { + assertThat(getInformationSchemaFilter(null, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(null); + } + + @Test + public void testStar() { + String name = "*starTable"; + String expected = "\\*starTable"; + Like like = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%%%s%%", name)) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("entry.name.matches(r'^.*%s.*$')", expected)); + } + + @Test + public void testQuestionMark() { + String name = "?question?table?"; + String expected = "\\?question\\?table\\?"; + Like like = Like.newBuilder() + .setField(SEARCH_NAME) + .setPattern(String.format("%%%s%%", name)) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, false, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("entry.name.matches(r'^.*%s.*$')", expected)); + } + + /** + * The folder has name "dot.dot.dot.dot" and it's under DATAPLANE_PLUGIN_NAME source. + */ + @Test + public void testDotsInQuotes() { + String tableWithDotsInQuotes = "\"dot.dot.dot.dot.table\""; + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(String.format("%s.%s", DATAPLANE_PLUGIN_NAME, tableWithDotsInQuotes)) + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + assertThat(getInformationSchemaFilter(searchQuery, true, DATAPLANE_PLUGIN_NAME)) + .isEqualTo(String.format("(r'%s' + '.' + entry.name) == (r'%s.%s')", DATAPLANE_PLUGIN_NAME, DATAPLANE_PLUGIN_NAME, tableWithDotsInQuotes)); + } + + @Test + public void testSchemataNested() { + String sourceName = "DATAPLANE_PLUGIN_NAME"; + String tableName = "table1"; + + Like like = Like.newBuilder() + .setField(SEARCH_SCHEMA) + .setPattern(String.format("'%s'.%s%%", sourceName, tableName)) + .setEscape("\\") + .build(); + SearchQuery searchQuery = SearchQuery.newBuilder() + .setLike(like) + .build(); + + String nameElements = String.format("r'%s' + '.' + entry.namespace + '.' + entry.name", sourceName); + String nameOnly = String.format("r'%s' + '.' 
+ entry.name", sourceName); + String value = String.format("^''%s''\\.%s.*$", sourceName, tableName); + + assertThat(getInformationSchemaFilter(searchQuery, true, sourceName)) + .isEqualTo(String.format("(r'%s').matches(r'%s') || (%s).matches(r'%s') || (%s).matches(r'%s')", sourceName, value, nameElements, value, nameOnly, value)); + } +} diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestNessiePluginConfig.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestNessiePluginConfig.java new file mode 100644 index 0000000000..08f46bce4d --- /dev/null +++ b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestNessiePluginConfig.java @@ -0,0 +1,366 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.exec.store.DataplanePluginOptions.NESSIE_PLUGIN_ENABLED; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.ACCESS_KEY_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.ASSUME_ROLE_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.AWS_PROFILE_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.EC2_METADATA_PROVIDER; +import static com.dremio.plugins.dataplane.CredentialsProviderConstants.NONE_PROVIDER; +import static com.dremio.plugins.dataplane.NessiePluginConfigConstants.MINIMUM_NESSIE_SPECIFICATION_VERSION; +import static org.apache.hadoop.fs.s3a.Constants.SECURE_CONNECTIONS; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.junit.Rule; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; +import org.mockito.quality.Strictness; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.model.NessieConfiguration; + +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.conf.AWSAuthenticationType; +import com.dremio.exec.catalog.conf.NessieAuthType; +import com.dremio.exec.catalog.conf.Property; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.InvalidSpecificationVersionException; +import com.dremio.exec.store.InvalidURLException; +import com.dremio.exec.store.SemanticVersionParserException; +import com.dremio.exec.store.UnAuthenticatedException; +import com.dremio.options.OptionManager; +import com.dremio.plugins.NessieClient; +import com.dremio.plugins.UsernameAwareNessieClientImpl; +import com.dremio.service.namespace.SourceState; +import com.dremio.service.users.UserService; + +public class TestNessiePluginConfig { + + @Rule + public MockitoRule rule 
= MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); + + @Mock + private SabotContext sabotContext; + + @Mock + private UserService userService; + + @Mock + private OptionManager optionManager; + + @Mock + private NessieApiV2 nessieApiV2; + + @Mock + private NessieConfiguration nessieConfiguration; + + @Mock + private NessieClient nessieClient; + + private static final String SOURCE_NAME = "testNessieSource"; + + @Test + public void testAWSCredentialsProviderWithAccessKey() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.awsAccessKey = "test-access-key"; + nessiePluginConfig.awsAccessSecret = "test-secret-key"; + nessiePluginConfig.credentialType = AWSAuthenticationType.ACCESS_KEY; + List<Property> awsProviderProperties = new ArrayList<>(); + + assertThat(nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)).isEqualTo(ACCESS_KEY_PROVIDER); + } + + @Test + public void testAWSCredentialsProviderWithAwsProfile() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + List<Property> awsProviderProperties = new ArrayList<>(); + nessiePluginConfig.credentialType = AWSAuthenticationType.AWS_PROFILE; + nessiePluginConfig.awsProfile = "test-awsProfile"; + + assertThat(nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)).isEqualTo(AWS_PROFILE_PROVIDER); + Property expectedProperty = new Property("com.dremio.awsProfile", nessiePluginConfig.awsProfile); + assertThat(awsProviderProperties.get(0).name).isEqualTo(expectedProperty.name); + assertThat(awsProviderProperties.get(0).value).isEqualTo(expectedProperty.value); + } + + @Test + public void testAWSCredentialsProviderWithNoneAuthentication() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + List<Property> awsProviderProperties = new ArrayList<>(); + nessiePluginConfig.credentialType = AWSAuthenticationType.NONE; + assertThat(nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)).isEqualTo(NONE_PROVIDER); + } + + @Test + public void testEmptyAWSAccessKey() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.awsAccessKey = ""; + nessiePluginConfig.awsAccessSecret = "test-secret-key"; + nessiePluginConfig.credentialType = AWSAuthenticationType.ACCESS_KEY; + List<Property> awsProviderProperties = new ArrayList<>(); + + assertThatThrownBy(() -> nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)) + .hasMessageContaining("Failure creating S3 connection. 
You must provide AWS Access Key and AWS Access Secret."); + } + + @Test + public void testAWSCredentialsProviderWithEC2Metadata() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + List<Property> awsProviderProperties = new ArrayList<>(); + nessiePluginConfig.credentialType = AWSAuthenticationType.EC2_METADATA; + assertThat(nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)).isEqualTo(EC2_METADATA_PROVIDER); + } + + @Test + public void testAWSCredentialsProviderWithEC2MetadataAndIAMRole() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + List<Property> awsProviderProperties = new ArrayList<>(); + nessiePluginConfig.assumedRoleARN = "test-assume-role-arn"; + nessiePluginConfig.credentialType = AWSAuthenticationType.EC2_METADATA; + assertThat(nessiePluginConfig.getAWSCredentialsProvider().configureCredentials(awsProviderProperties)).isEqualTo(ASSUME_ROLE_PROVIDER); + Property expectedProperty = new Property("fs.s3a.assumed.role.arn", nessiePluginConfig.assumedRoleARN); + assertThat(awsProviderProperties.get(0).name).isEqualTo(expectedProperty.name); + assertThat(awsProviderProperties.get(0).value).isEqualTo(expectedProperty.value); + } + + @Test + public void testMissingBearerToken() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "http://localhost:19120/"; + nessiePluginConfig.awsAccessKey = "test-access-key"; + nessiePluginConfig.awsAccessSecret = "test-secret-key"; + nessiePluginConfig.credentialType = AWSAuthenticationType.ACCESS_KEY; + nessiePluginConfig.secure = false; + nessiePluginConfig.nessieAuthType = NessieAuthType.BEARER; + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieAuthSettings("test_nessie_source")) + .hasMessageContaining("bearer token provided is empty"); + } + + @Test + public void testEmptyBearerToken() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieAccessToken = ""; + nessiePluginConfig.nessieEndpoint = "http://localhost:19120/"; + nessiePluginConfig.awsAccessKey = "test-access-key"; + nessiePluginConfig.awsAccessSecret = "test-secret-key"; + nessiePluginConfig.credentialType = AWSAuthenticationType.ACCESS_KEY; + nessiePluginConfig.secure = false; + nessiePluginConfig.nessieAuthType = NessieAuthType.BEARER; + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieAuthSettings("test_nessie_source")) + .hasMessageContaining("bearer token provided is empty"); + } + + @Test + public void testInvalidNessieAuthType() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "http://localhost:19120/"; + nessiePluginConfig.awsAccessKey = "test-access-key"; + nessiePluginConfig.awsAccessSecret = "test-secret-key"; + nessiePluginConfig.credentialType = AWSAuthenticationType.ACCESS_KEY; + nessiePluginConfig.secure = false; + nessiePluginConfig.nessieAuthType = null; + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieAuthSettings("test_nessie_source")) + .hasMessageContaining("Invalid Nessie Auth type"); + } + + @Test + public void testEncryptConnectionWithSecureAsFalse() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + Property expectedProperty = new Property(SECURE_CONNECTIONS, "false"); + nessiePluginConfig.secure = false; + Optional<Property> property = nessiePluginConfig.encryptConnection(); + + assertThat(property.get().name).isEqualTo(expectedProperty.name); + 
assertThat(property.get().value).isEqualTo(expectedProperty.value); + } + + @Test + public void testEncryptConnectionWithSecureAsTrue() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + Property expectedProperty = new Property(SECURE_CONNECTIONS, "true"); + nessiePluginConfig.secure = true; + Optional<Property> property = nessiePluginConfig.encryptConnection(); + + assertThat(property.get().name).isEqualTo(expectedProperty.name); + assertThat(property.get().value).isEqualTo(expectedProperty.value); + } + + @Test + public void testValidatePluginEnabled() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + when(sabotContext.getOptionManager()).thenReturn(optionManager); + when(optionManager.getOption(NESSIE_PLUGIN_ENABLED)).thenReturn(false); + + assertThatThrownBy(() -> nessiePluginConfig.validatePluginEnabled(sabotContext)) + .hasMessageContaining("Nessie Source is not supported"); + } + + @Test + public void testGetNessieClient() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "http://test-nessie"; + when(sabotContext.getUserService()).thenReturn(userService); + + // NessiePlugin currently wraps NessieClient in UsernameAwareNessieClientImpl. + // This verifies that NessiePlugin returns the expected NessieClient instance. + // If separate wrappers are ever used per service (e.g. one for the coordinator and another for the executor), + // this assertion might break. + assertThat(nessiePluginConfig.getNessieClient("NESSIE_SOURCE", sabotContext)).isInstanceOf(UsernameAwareNessieClientImpl.class); + } + + @Test + public void testGetNessieClientThrowsError() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "invalid://test-nessie"; + + assertThatThrownBy(() -> nessiePluginConfig.getNessieClient("NESSIE_SOURCE", sabotContext)) + .hasMessageContaining("must be a valid http or https address"); + } + + @Test + public void testInvalidNessieSpecificationVersion() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieSpecificationVersionHelper("x.y.z")) + .isInstanceOf(SemanticVersionParserException.class) + .hasMessageContaining("Cannot parse Nessie specification version"); + } + + @Test + public void testInvalidNessieEndpointURL() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "http://invalid/v0"; + when(nessieApiV2.getConfig()).thenThrow(IllegalArgumentException.class); + + assertThatThrownBy(() -> nessiePluginConfig.getNessieConfig(nessieApiV2, "NESSIE_SOURCE")) + .isInstanceOf(InvalidURLException.class) + .hasMessageContaining("Make sure that Nessie endpoint URL [http://invalid/v0] is valid"); + } + + @Test + public void testInvalidLowerNessieSpecificationVersion() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieSpecificationVersionHelper("1.0.0")) + .isInstanceOf(InvalidSpecificationVersionException.class) + .hasMessageContaining("Nessie Server should comply with Nessie specification version"); + } + + @Test + public void testValidEquivalentNessieSpecificationVersion() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + + assertThatCode(() -> nessiePluginConfig.validateNessieSpecificationVersionHelper("2.0.0")) + .doesNotThrowAnyException(); + } + + @Test + public void 
testInvalidLowerNessieSpecificationVersionFor0_58() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieSpecificationVersionHelper("2.0.0-beta.1")) + .isInstanceOf(InvalidSpecificationVersionException.class) + .hasMessageContaining("Nessie Server should comply with Nessie specification version"); + } + + @Test + public void testInvalidNullNessieSpecificationVersion() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + + assertThatThrownBy(() -> nessiePluginConfig.validateNessieSpecificationVersionHelper(null)) + .isInstanceOf(InvalidSpecificationVersionException.class) + .hasMessageContaining("Nessie Server should comply with Nessie specification version") + .hasMessageContaining("Also make sure that Nessie endpoint URL is valid."); + } + + @Test + public void testInvalidRootPathDuringSetup() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + nessiePluginConfig.nessieEndpoint = "http://localhost:19120/"; + + when(sabotContext.getUserService()).thenReturn(userService); + + assertThatThrownBy(() -> nessiePluginConfig.newPlugin(sabotContext, "NESSIE_SOURCE", null)) + .hasMessageContaining("Invalid AWS Root Path."); + } + + @Test + public void testValidAWSRootPath() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + assertThat(nessiePluginConfig.isValidAWSRootPath("bucket-name")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name/")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("bucket-name/folder/path")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("bucket-name/folder/path/")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name/folder/path")).isTrue(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name/folder/path/")).isTrue(); + } + + @Test + public void testInvalidAWSRootPath() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("//")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/ ")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath(" / ")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath(" ")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/s pace/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/\\\\\"/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("UPPERCASE")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("$%_INVALID")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name/folder/path///")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-name//folder/path/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("'/bucket-name/folder/path/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("\"/bucket-name/folder/path/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-n'ame/folder/path/")).isFalse(); + assertThat(nessiePluginConfig.isValidAWSRootPath("/bucket-\"name/folder/path/")).isFalse(); + } + + @Test + public void testHealthyGetStateCall() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + 
when(nessieClient.getDefaultBranch()).thenReturn(ResolvedVersionContext.ofBranch( + "testBranch", "2b3a38be1df114556a019986dcfbfedda593925f")); + when(nessieClient.getNessieApi()).thenReturn(nessieApiV2); + when(nessieApiV2.getConfig()).thenReturn(nessieConfiguration); + when(nessieConfiguration.getSpecVersion()).thenReturn(MINIMUM_NESSIE_SPECIFICATION_VERSION); + + assertThat(nessiePluginConfig.getState(nessieClient, SOURCE_NAME, sabotContext)).isEqualTo(SourceState.GOOD); + } + + @Test + public void testUnHealthyGetStateCall() { + NessiePluginConfig nessiePluginConfig = new NessiePluginConfig(); + when(nessieClient.getDefaultBranch()).thenThrow(new UnAuthenticatedException()); + + SourceState sourceState = nessiePluginConfig.getState(nessieClient, SOURCE_NAME, sabotContext); + assertThat(sourceState.getStatus()).isEqualTo(SourceState.SourceStatus.bad); + assertThat(sourceState.getSuggestedUserAction()).contains("Make sure that the token is valid and not expired"); + } +} diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSearchQueryToCelConversionUtilities.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSearchQueryToCelConversionUtilities.java new file mode 100644 index 0000000000..2c010e9f13 --- /dev/null +++ b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSearchQueryToCelConversionUtilities.java @@ -0,0 +1,316 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.plugins.dataplane.store; + +import static com.dremio.plugins.dataplane.store.SearchQueryToCelConversionUtilities.likeQueryToRe2Regex; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.Set; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import com.dremio.service.catalog.SearchQuery; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +class TestSearchQueryToCelConversionUtilities { + + private static final Set<Character> SOME_REGULAR_CHARACTERS = ImmutableSet.of('a', 'A', 'z', 'Z', '-', '_'); + private static final ImmutableMap<Character, String> SEARCH_QUERY_SPECIAL_CHARACTERS_MAP = + ImmutableMap.<Character, String>builder() + .put('%', ".*") + .build(); + // CEL uses RE2 syntax: https://github.com/google/re2/wiki/Syntax + private static final Set<Character> RE2_SPECIAL_CHARACTERS = + ImmutableSet.of('*', '+', '?', '(', ')', '|', '[', ']', ':', '^', '\\', '.', '{', '}'); + + private static Stream<Arguments> convertToRawCelStringLiteralArguments() { + return Stream.of( + Arguments.of( + "basicString", + "r'basicString'"), + Arguments.of( + "string'withRawSingleQuote", + "r'string''withRawSingleQuote'"), + Arguments.of( + "string%withSpecialCharacter", + "r'string%withSpecialCharacter'"), + Arguments.of( + "string\\withBackslash", + "r'string\\withBackslash'")); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("convertToRawCelStringLiteralArguments") + public void convertToRawCelStringLiteral(String pattern, String expected) { + assertThat(SearchQueryToCelConversionUtilities.convertToRawCelStringLiteral(pattern)) + .isEqualTo(expected); + } + + @Test + public void likeQueryToRe2RegexEmptyPattern() { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern("") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo("^$"); + } + + @Test + public void likeQueryToRe2RegexBasicString() { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern("basicString") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo("^basicString$"); + } + + @Test + public void likeQueryToRe2RegexOnlyEscape() { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern("\\") + .setEscape("\\") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo("^$"); + } + + private static Stream<Arguments> regularCharacters() { + return SOME_REGULAR_CHARACTERS.stream() + .map(c -> + Arguments.of( + c.toString(), // e.g. a + String.format("^%c$", c))); // e.g. ^a$ (keep the regular character) + } + + private static Stream<Arguments> escapedRegularCharacters(char escape) { + return SOME_REGULAR_CHARACTERS.stream() + .map(c -> + Arguments.of( + String.format("%c%c", escape, c), // e.g. \a + String.format("^%c$", c))); // e.g. ^a$ (ignore escape, keep the regular character) + } + + private static Stream<Arguments> escapedRegularCharactersBackslash() { + return escapedRegularCharacters('\\'); + } + + private static Stream<Arguments> escapedRegularCharactersBacktick() { + return escapedRegularCharacters('`'); + } + + private static Stream<Arguments> escapedRegularCharactersNoEscape() { + return SOME_REGULAR_CHARACTERS.stream() + .map(c -> + Arguments.of( + String.format("\\%c", c), // e.g. \a + String.format("^\\\\%c$", c))); // e.g. 
^\\a$ (escape backslash, keep the regular character) + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("regularCharacters") + public void likeQueryToRe2RegexRegularCharacters(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRegularCharactersBackslash") + public void likeQueryToRe2RegexEscapedRegularCharactersBackslash(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("\\") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRegularCharactersBacktick") + public void likeQueryToRe2RegexEscapedRegularCharactersBacktick(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("`") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRegularCharactersNoEscape") + public void likeQueryToRe2RegexEscapedRegularCharactersNoEscape(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + private static Stream<Arguments> searchQuerySpecialCharacters() { + return SEARCH_QUERY_SPECIAL_CHARACTERS_MAP.entrySet().stream() + .map(entry -> + Arguments.of( + String.format("%s", entry.getKey().toString()), // e.g. % + String.format("^%s$", entry.getValue()))); // e.g. ^.*$ (convert the SearchQuery special character) + } + + private static Stream<Arguments> escapedSearchQuerySpecialCharacters(char escape) { + return SEARCH_QUERY_SPECIAL_CHARACTERS_MAP.keySet().stream() + .map(key -> Arguments.of( + String.format("%c%s", escape, key.toString()), // e.g. \% + String.format("^%s$", key))); // e.g. ^%$ (include the raw SearchQuery special character) + /* Note: We only have a single SearchQuery special character (%). It's not + * one of the RE2 Regex special characters. If we ever have a special + * character that is in both SearchQuery and RE2 Regex special characters, + * then this test's expected value needs to change to accommodate. + */ + } + + private static Stream<Arguments> escapedSearchQuerySpecialCharactersBackslash() { + return escapedSearchQuerySpecialCharacters('\\'); + } + + private static Stream<Arguments> escapedSearchQuerySpecialCharactersBacktick() { + return escapedSearchQuerySpecialCharacters('`'); + } + + private static Stream<Arguments> escapedSearchQuerySpecialCharactersNoEscape() { + return SEARCH_QUERY_SPECIAL_CHARACTERS_MAP.entrySet().stream() + .map(entry -> + Arguments.of( + String.format("\\%s", entry.getKey().toString()), // e.g. \% + String.format("^\\\\%s$", entry.getValue()))); // e.g. 
^\\.*$ (escape backslash, convert the SearchQuery special character) + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("searchQuerySpecialCharacters") + public void likeQueryToRe2RegexSearchQuerySpecialCharacters(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedSearchQuerySpecialCharactersBackslash") + public void likeQueryToRe2RegexEscapedSearchQuerySpecialCharactersBackslash(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("\\") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedSearchQuerySpecialCharactersBacktick") + public void likeQueryToRe2RegexEscapedSearchQuerySpecialCharactersBacktick(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("`") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedSearchQuerySpecialCharactersNoEscape") + public void likeQueryToRe2RegexEscapedSearchQuerySpecialCharactersNoEscape(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + private static Stream<Arguments> re2SpecialCharacters() { + return RE2_SPECIAL_CHARACTERS.stream() + .map(c -> + Arguments.of( + String.format("%c", c), // e.g. * + String.format("^\\%c$", c))); // e.g. ^\*$ (escape the re2 special character) + } + + private static Stream<Arguments> escapedRe2SpecialCharacters(char escape) { + return RE2_SPECIAL_CHARACTERS.stream() + .map(c -> + Arguments.of( + String.format("%c%c", escape, c), // e.g. \* + String.format("^\\%c$", c))); // e.g. ^\*$ (ignore escape, escape the re2 special character) + } + + private static Stream<Arguments> escapedRe2SpecialCharactersBackslash() { + return escapedRe2SpecialCharacters('\\'); + } + + private static Stream<Arguments> escapedRe2SpecialCharactersBacktick() { + return escapedRe2SpecialCharacters('`'); + } + + private static Stream<Arguments> escapedRe2SpecialCharactersNoEscape() { + return RE2_SPECIAL_CHARACTERS.stream() + .map(c -> + Arguments.of( + String.format("\\%c", c), // e.g. \* + String.format("^\\\\\\%c$", c))); // e.g. 
^\\\*$ (escape backslash, escape the re2 special character) + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("re2SpecialCharacters") + public void likeQueryToRe2RegexRe2SpecialCharacters(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRe2SpecialCharactersBackslash") + public void likeQueryToRe2RegexEscapedRe2SpecialCharactersBackslash(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("\\") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRe2SpecialCharactersBacktick") + public void likeQueryToRe2RegexEscapedRe2SpecialCharactersBacktick(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .setEscape("`") + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @ParameterizedTest(name = "[{index}] pattern: [{0}], expected: [{1}]") + @MethodSource("escapedRe2SpecialCharactersNoEscape") + public void likeQueryToRe2RegexEscapedRe2SpecialCharactersNoEscape(String pattern, String expectedRegex) { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern(pattern) + .build(); + assertThat(likeQueryToRe2Regex(likeQuery)).isEqualTo(expectedRegex); + } + + @Test + public void likeQueryToRe2RegexInvalidEscape() { + final SearchQuery.Like likeQuery = SearchQuery.Like.newBuilder() + .setPattern("basicString") + .setEscape("12") // Two characters + .build(); + assertThatThrownBy(() -> likeQueryToRe2Regex(likeQuery)) + .isInstanceOf(IllegalArgumentException.class); + } +} diff --git a/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSemanticVersion.java b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSemanticVersion.java new file mode 100644 index 0000000000..75a06df069 --- /dev/null +++ b/plugins/dataplane/src/test/java/com/dremio/plugins/dataplane/store/TestSemanticVersion.java @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.plugins.dataplane.store; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.apache.commons.collections4.set.ListOrderedSet; +import org.apache.parquet.SemanticVersion; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import com.google.common.collect.Sets; + +public class TestSemanticVersion { + + private static final ListOrderedSet<String> orderedVersionsToCompare = ListOrderedSet.listOrderedSet( + Arrays.asList( + "1.0.0-alpha", + "1.0.0-alpha.1", + "1.0.0-alpha.beta", + "1.0.0-beta", + "1.0.0-beta.2", + "1.0.0-beta.11", + "1.0.0-rc.1", + "1.0.0", + "2.0.0", + "2.1.0", + "2.1.1")); + + private static Stream<Arguments> allPairsOfVersions() { + return Sets.cartesianProduct(orderedVersionsToCompare, orderedVersionsToCompare).stream() + .map(pair -> Arguments.of(pair.get(0), pair.get(1))); + } + + @ParameterizedTest(name = "{index} {0} {1}") + @MethodSource("allPairsOfVersions") + public void testPrecedenceWithDifferentVersions(String string1, String string2) throws Exception { + SemanticVersion semanticVersion1 = SemanticVersion.parse(string1); + SemanticVersion semanticVersion2 = SemanticVersion.parse(string2); + + Integer order1 = orderedVersionsToCompare.indexOf(string1); + Integer order2 = orderedVersionsToCompare.indexOf(string2); + + int semanticVersionOrder = Integer.signum(semanticVersion1.compareTo(semanticVersion2)); + int expectedOrder = Integer.signum(order1.compareTo(order2)); + + assertThat(semanticVersionOrder).isEqualTo(expectedOrder); + } + + @Test + public void testPrecedenceWithDifferentMetadata() throws Exception { + SemanticVersion a = SemanticVersion.parse("1.0.0-alpha+001"); + SemanticVersion b = SemanticVersion.parse("1.0.0-alpha+20130313144700"); + assertThat(a.compareTo(b)).isEqualTo(0); + } +} diff --git a/plugins/dataplane/src/test/resources/META-INF/nessie-compatibility.properties b/plugins/dataplane/src/test/resources/META-INF/nessie-compatibility.properties new file mode 100644 index 0000000000..86d97a1328 --- /dev/null +++ b/plugins/dataplane/src/test/resources/META-INF/nessie-compatibility.properties @@ -0,0 +1,18 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# Test against the current (build) version of Nessie +nessie.versions=@nessie.version@ diff --git a/plugins/elasticsearch/pom.xml b/plugins/elasticsearch/pom.xml index 8c6e16c3ec..dd2608fa98 100644 --- a/plugins/elasticsearch/pom.xml +++ b/plugins/elasticsearch/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-elasticsearch-plugin @@ -65,7 +65,6 @@ com.google.code.gson gson - 2.9.0 org.elasticsearch @@ -77,6 +76,14 @@ org.elasticsearch jna + + org.elasticsearch + elasticsearch-cli + + + org.elasticsearch + elasticsearch-secure-sm + com.carrotsearch hppc diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/AmazonElasticStoragePluginConfig.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/AmazonElasticStoragePluginConfig.java index f756745b8e..fcde9c2a90 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/AmazonElasticStoragePluginConfig.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/AmazonElasticStoragePluginConfig.java @@ -114,10 +114,11 @@ public AmazonElasticStoragePluginConfig( boolean usePainless, int scrollSize, boolean allowPushdownOnNormalizedOrAnalyzedFields, + boolean pushdownWithKeyword, boolean warnOnRowCountMismatch, EncryptionValidationMode encryptionValidationMode, boolean forceDoublePrecision) { super(scriptsEnabled, showHiddenIndices, showIdColumn, readTimeoutMillis, scrollTimeoutMillis, usePainless, - scrollSize, allowPushdownOnNormalizedOrAnalyzedFields, warnOnRowCountMismatch, encryptionValidationMode, forceDoublePrecision); + scrollSize, allowPushdownOnNormalizedOrAnalyzedFields, pushdownWithKeyword, warnOnRowCountMismatch, encryptionValidationMode, forceDoublePrecision); this.hostname = hostname; this.port = port; this.accessKey = accessKey; @@ -178,6 +179,7 @@ public static ElasticsearchConf createElasticsearchConf(AmazonElasticStoragePlug true, amazonOSStoragePluginConfig.scrollSize, amazonOSStoragePluginConfig.allowPushdownOnNormalizedOrAnalyzedFields, + amazonOSStoragePluginConfig.pushdownWithKeyword, amazonOSStoragePluginConfig.warnOnRowCountMismatch, amazonOSStoragePluginConfig.encryptionValidationMode, false); diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/BaseElasticStoragePluginConfig.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/BaseElasticStoragePluginConfig.java index 7615efb27e..bb1cf82a25 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/BaseElasticStoragePluginConfig.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/BaseElasticStoragePluginConfig.java @@ -88,6 +88,10 @@ public abstract class BaseElasticStoragePluginConfig indices){ return this; } + @Override public Result getResult(WebTarget target) { try { return new JsonResult(target.path(Joiner.on(",").join(indexes)).path("_search_shards").request().header(CONTENT_TYPE, APPLICATION_JSON).buildGet().invoke(byte[].class)); @@ -309,6 +311,7 @@ public Count addType(String type) { return this; } + @Override public Result getResult(WebTarget target) { try { return new CountResult(parse(target.path(Joiner.on(",").join(indexes)).path(Joiner.on(",").join(types)).path("_count").request().header(CONTENT_TYPE, APPLICATION_JSON).buildGet().invoke(String.class)).get("count").getAsLong()); diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticStoragePluginConfig.java 
b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticStoragePluginConfig.java index 1fb54d09b2..4427c83864 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticStoragePluginConfig.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticStoragePluginConfig.java @@ -104,10 +104,11 @@ public ElasticStoragePluginConfig( boolean useWhitelist, int scrollSize, boolean allowPushdownOnNormalizedOrAnalyzedFields, + boolean pushdownWithKeyword, boolean warnOnRowCountMismatch, EncryptionValidationMode encryptionValidationMode, boolean forceDoublePrecision) { super(scriptsEnabled, showHiddenIndices, showIdColumn, readTimeoutMillis, scrollTimeoutMillis, usePainless, - scrollSize, allowPushdownOnNormalizedOrAnalyzedFields, warnOnRowCountMismatch, encryptionValidationMode, forceDoublePrecision); + scrollSize, allowPushdownOnNormalizedOrAnalyzedFields, pushdownWithKeyword, warnOnRowCountMismatch, encryptionValidationMode, forceDoublePrecision); this.hostList = hostList; this.username = username; this.password = password; @@ -152,6 +153,7 @@ public static ElasticsearchConf createElasticsearchConf(ElasticStoragePluginConf elasticStoragePluginConfig.useWhitelist, elasticStoragePluginConfig.scrollSize, elasticStoragePluginConfig.allowPushdownOnNormalizedOrAnalyzedFields, + elasticStoragePluginConfig.pushdownWithKeyword, elasticStoragePluginConfig.warnOnRowCountMismatch, elasticStoragePluginConfig.encryptionValidationMode, elasticStoragePluginConfig.forceDoublePrecision); diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticsearchConf.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticsearchConf.java index 388a6a4811..ae9676034f 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticsearchConf.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/ElasticsearchConf.java @@ -56,6 +56,7 @@ public enum AuthenticationType { private final boolean useWhitelist; private final int scrollSize; private final boolean allowPushdownOnNormalizedOrAnalyzedFields; + private final boolean pushdownWithKeyword; private final boolean warnOnRowCountMismatch; private final EncryptionValidationMode encryptionValidationMode; private final boolean forceDoublePrecision; @@ -82,6 +83,7 @@ public ElasticsearchConf( boolean useWhitelist, int scrollSize, boolean allowPushdownOnNormalizedOrAnalyzedFields, + boolean pushdownWithKeyword, boolean warnOnRowCountMismatch, EncryptionValidationMode encryptionValidationMode, boolean forceDoublePrecision) { @@ -102,7 +104,8 @@ public ElasticsearchConf( this.usePainless = usePainless; this.useWhitelist = useWhitelist; this.scrollSize = scrollSize; - this.allowPushdownOnNormalizedOrAnalyzedFields = allowPushdownOnNormalizedOrAnalyzedFields; + this.allowPushdownOnNormalizedOrAnalyzedFields = allowPushdownOnNormalizedOrAnalyzedFields || pushdownWithKeyword; + this.pushdownWithKeyword = pushdownWithKeyword; this.warnOnRowCountMismatch = warnOnRowCountMismatch; this.encryptionValidationMode = encryptionValidationMode; this.forceDoublePrecision = forceDoublePrecision; @@ -164,6 +167,10 @@ public boolean isAllowPushdownOnNormalizedOrAnalyzedFields() { return allowPushdownOnNormalizedOrAnalyzedFields; } + public boolean isPushdownWithKeyword() { + return pushdownWithKeyword; + } + public boolean isWarnOnRowCountMismatch() { return warnOnRowCountMismatch; } diff --git 
a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/execution/WriteHolders.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/execution/WriteHolders.java index bc1461ff5a..38c439c5ee 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/execution/WriteHolders.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/execution/WriteHolders.java @@ -367,8 +367,7 @@ private static long getMillis(SchemaPath path, String value, DateFormats.Abstrac // Call to generic formatter parsing to handle specific scenarios. try { return DateFormats.AbstractFormatterAndType.getMillisGenericFormatter(value); - } - catch (Exception e) { + } catch (Exception e) { throw UserException.dataReadError() .message("Failed to parse date time value %s in field %s.", value, path.getAsUnescapedPath()) .build(logger); diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/ElasticMappingSet.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/ElasticMappingSet.java index e9c4028fdc..cb7e54f8cc 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/ElasticMappingSet.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/ElasticMappingSet.java @@ -220,32 +220,36 @@ public static class ElasticField implements Iterable<ElasticField> { private final List<String> formats; private final ImmutableList<ElasticField> children; private final boolean docValues; + private final ImmutableList<ElasticField> fields; private ElasticField( - String name, - Type type, - Indexing indexing, - boolean normalized, - List<String> formats, - boolean docValues, - List<ElasticField> children) { + String name, + Type type, + Indexing indexing, + boolean normalized, + List<String> formats, + boolean docValues, + List<ElasticField> children, + List<ElasticField> fields) { this.name = name; this.type = type; this.indexing = indexing; this.normalized = normalized; this.formats = formats; this.docValues = docValues; - this.children = ImmutableList.copyOf(children); + this.children = asList(children); + this.fields = asList(fields); } public ElasticField( - @JacksonInject(CurentNameInjectable.CURRENT_NAME) String name, - @JsonProperty("type") Type type, - @JsonProperty("index") Indexing indexing, - @JsonProperty("normalizer") String normalizer, - @JsonProperty("format") String format, - @JsonProperty("doc_values") Boolean docValues, - @JsonProperty("properties") Map<String, ElasticField> children){ + @JacksonInject(CurentNameInjectable.CURRENT_NAME) String name, + @JsonProperty("type") Type type, + @JsonProperty("index") Indexing indexing, + @JsonProperty("normalizer") String normalizer, + @JsonProperty("format") String format, + @JsonProperty("doc_values") Boolean docValues, + @JsonProperty("properties") Map<String, ElasticField> children, + @JsonProperty("fields") Map<String, ElasticField> fields){ this.name = Preconditions.checkNotNull(name, "Field didn't have name."); this.formats = DateFormats.getFormatList(format); @@ -290,7 +294,12 @@ public ElasticField( this.docValues = docValues; } this.children = asList(children); - + // Check if this is a string having a nested fields.keyword entry + if (this.type == Type.TEXT && fields != null && fields.get("keyword") != null) { + this.fields = asList(fields); + } else { + this.fields = null; + } } public void setTypeUnknown() { @@ -354,7 +363,7 @@ public ElasticField merge(ElasticMapping mapping, ElasticField field, String cur boolean normalized = this.normalized || field.normalized; // we just have different fields. Let's merge them. 
- return new ElasticField(name, mergedType, indexing, normalized, formats, docValues, mergeFields(mapping, children, field.children, curr_mapping, other_mapping, curr_index, other_index)); + return new ElasticField(name, mergedType, indexing, normalized, formats, docValues, mergeFields(mapping, children, field.children, curr_mapping, other_mapping, curr_index, other_index), fields); } public void logDataReadErrorHelper(ElasticField field, String curr_mapping, String other_mapping, String curr_index, String other_index, String diff, String first, String second) { @@ -414,6 +423,10 @@ public boolean hasDocValues(){ return docValues; } + public boolean hasFields() { + return fields != null && !fields.isEmpty(); + } + @Override public Iterator<ElasticField> iterator() { return children.iterator(); } @@ -760,6 +773,14 @@ private static ImmutableList<ElasticField> asList(Map<String, ElasticField> map){ } } + + private static ImmutableList<ElasticField> asList(List<ElasticField> list){ + if(list == null){ + return ImmutableList.of(); + } else { + return ImmutableList.copyOf(list); + } + } + public static class ClusterMetadata { private final String clusterName; diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/SchemaMerger.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/SchemaMerger.java index 0e4f227483..af69feb872 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/SchemaMerger.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/mapping/SchemaMerger.java @@ -336,6 +336,10 @@ private static void recordAnnotations(SchemaPath path, ElasticField elasticField resultToPopulate.hasNoDocValue(path); } + if (elasticField.hasFields()) { + resultToPopulate.isStringWithKeywordType(path); + } + // handle special types. switch(elasticField.getType()){ case GEO_POINT: @@ -428,6 +432,10 @@ public void isScaledType(SchemaPath path){ annotations.put(path, anno(path).setSpecialType(ElasticSpecialType.SCALED_FLOAT).build()); } + public void isStringWithKeywordType(SchemaPath path) { + annotations.put(path, anno(path).setSpecialType(ElasticSpecialType.STRING_WITH_KEYWORD).build()); + } + public void isAnalyzed(SchemaPath path){ annotations.put(path, anno(path).setAnalyzed(true).build()); } diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchFilter.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchFilter.java index 17b1bda90f..0bcd7500c0 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchFilter.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchFilter.java @@ -66,6 +66,7 @@ public ElasticsearchFilter copy(RelTraitSet relTraitSet, RelNode relNode, RexNod return new ElasticsearchFilter(getCluster(), relTraitSet, relNode, rexNode, pluginId); } + @Override public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) { return super.computeSelfCost(planner, mq).multiplyBy(0.1D); } diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchLimit.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchLimit.java index e06374dd44..816b2cec6d 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchLimit.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchLimit.java @@ -64,6 +64,7 @@ public 
ElasticsearchLimit(RelOptCluster cluster, RelTraitSet traits, RelNode chi assert offsetSize == 0; // currently do not support offset } + @Override public StoragePluginId getPluginId() { return pluginId; } diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchProject.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchProject.java index a80f8e32b3..91daf392be 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchProject.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rels/ElasticsearchProject.java @@ -115,6 +115,7 @@ public StoragePluginId getPluginId() { return pluginId; } + @Override public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) { if (needsScript && !scriptsEnabled) { return planner.getCostFactory().makeInfiniteCost(); diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/PredicateAnalyzer.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/PredicateAnalyzer.java index 976701161e..5a64e9d2e9 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/PredicateAnalyzer.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/PredicateAnalyzer.java @@ -67,6 +67,8 @@ import org.slf4j.LoggerFactory; import com.dremio.common.expression.CompleteType; +import com.dremio.common.expression.PathSegment; +import com.dremio.common.expression.SchemaPath; import com.dremio.common.types.TypeProtos.MajorType; import com.dremio.common.types.TypeProtos.MinorType; import com.dremio.common.types.Types; @@ -287,7 +289,7 @@ protected Visitor(RexBuilder rexBuilder, ElasticsearchConf config) { @Override public Expression visitInputRef(RexInputRef inputRef) { - return new NamedFieldExpression((SchemaField) inputRef); + return new NamedFieldExpression((SchemaField) inputRef, config.isPushdownWithKeyword()); } @Override @@ -361,7 +363,8 @@ private boolean supportedRexCall(RexCall call) { case IS_NOT_NULL: case IS_NULL: return true; - default: // fall through + default: + return false; } case FUNCTION_ID: case FUNCTION_STAR: @@ -401,8 +404,9 @@ public Expression visitCall(RexCall call) { operands.add(nodeExpr); } String query = convertQueryString(operands.subList(0, operands.size() - 1), operands.get(operands.size() - 1)); - return QueryExpression.create(new NamedFieldExpression(null)).queryString(query); + return QueryExpression.create(new NamedFieldExpression(null, false)).queryString(query); } + // fall through default: throw new PredicateAnalyzerException(format("Unsupported syntax [%s] for call: [%s]", syntax, call)); } @@ -1041,14 +1045,22 @@ public static final class NamedFieldExpression implements TerminalExpression { private final SchemaField schemaField; - public NamedFieldExpression(SchemaField schemaField) { + public NamedFieldExpression(SchemaField schemaField, boolean pushdownWithKeyword) { this.schemaField = schemaField; + if (schemaField != null && pushdownWithKeyword && schemaField.getCompleteType().isText() && !getUnescapedName().contains(".keyword") && + schemaField.getPath().isSimplePath() && schemaField.getAnnotation().getSpecialType() == ElasticSpecialType.STRING_WITH_KEYWORD) { + schemaField.setPath(new SchemaPath(new PathSegment.NameSegment(getUnescapedName() + ".keyword"))); + } } public String getRootName(){ return 
schemaField.getPath().getRootSegment().getPath(); } + public String getUnescapedName(){ + return schemaField.getPath().getAsUnescapedPath(); + } + public boolean isMetaField(){ return ElasticsearchConstants.META_COLUMNS.contains(getRootName()); } diff --git a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/SchemaField.java b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/SchemaField.java index 124d82b10f..2f36ce97e4 100644 --- a/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/SchemaField.java +++ b/plugins/elasticsearch/src/main/java/com/dremio/plugins/elastic/planning/rules/SchemaField.java @@ -78,7 +78,7 @@ public final class SchemaField extends RexInputRef { private static final String OPEN_BQ = "[\""; private static final String CLOSE_BQ = "\"]"; - private final SchemaPath path; + private SchemaPath path; private final CompleteType type; private final FieldAnnotation annotation; private final ElasticSpecialType specialType; @@ -101,6 +101,10 @@ public SchemaPath getPath() { return path; } + public void setPath(SchemaPath path) { + this.path = path; + } + public CompleteType getCompleteType() { return type; } diff --git a/plugins/elasticsearch/src/main/proto/elastic.proto b/plugins/elasticsearch/src/main/proto/elastic.proto index b90f25a84f..34c86a4145 100644 --- a/plugins/elasticsearch/src/main/proto/elastic.proto +++ b/plugins/elasticsearch/src/main/proto/elastic.proto @@ -39,6 +39,7 @@ enum ElasticSpecialType { GEO_POINT = 3; NESTED = 4; SCALED_FLOAT = 5; + STRING_WITH_KEYWORD = 6; } message ElasticTableXattr { diff --git a/plugins/elasticsearch/src/main/resources/elastic-storage-layout.json b/plugins/elasticsearch/src/main/resources/elastic-storage-layout.json index cf36e3dc13..27ce5a1b13 100644 --- a/plugins/elasticsearch/src/main/resources/elastic-storage-layout.json +++ b/plugins/elasticsearch/src/main/resources/elastic-storage-layout.json @@ -57,6 +57,9 @@ { "propName": "config.allowPushdownOnNormalizedOrAnalyzedFields" }, + { + "propName": "config.pushdownWithKeyword" + }, { "propName": "config.scriptsEnabled" }, diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticBaseTestQuery.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticBaseTestQuery.java index bc85247755..f9cec50f5f 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticBaseTestQuery.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticBaseTestQuery.java @@ -154,6 +154,12 @@ public TestBuilder testBuilder() { boolean enabled() default false; } + @Retention(RetentionPolicy.RUNTIME) + @Target({ElementType.TYPE}) + public @interface PushdownWithKeyword { + boolean enabled() default false; + } + @Before public void before() throws Exception { schema = schemaName(); @@ -210,11 +216,18 @@ public void setupElasticHelper(boolean forceDoublePrecision) throws IOException, allowPushdownNormalizedOrAnalyzedFields = pushdownAnalyzed.enabled(); } + PushdownWithKeyword pushdownKeyword = + this.getClass().getAnnotation(PushdownWithKeyword.class); + boolean pushdownWithKeyword = false; + if (pushdownKeyword != null) { + pushdownWithKeyword = pushdownKeyword.enabled(); + } + getSabotContext().getOptionManager().setOption(OptionValue.createLong(OptionValue.OptionType.SYSTEM, ExecConstants.ELASTIC_ACTION_RETRIES, 3)); elastic = new ElasticsearchCluster(scrollSize, new Random(), scriptsEnabled, showIDColumn, publishHost, sslEnabled, 
getSabotContext().getOptionManager().getOption(ELASTIC_ACTION_RETRIES_VALIDATOR), forceDoublePrecision); SourceConfig sc = new SourceConfig(); sc.setName("elasticsearch"); - sc.setConnectionConf(elastic.config(allowPushdownNormalizedOrAnalyzedFields)); + sc.setConnectionConf(elastic.config(allowPushdownNormalizedOrAnalyzedFields, pushdownWithKeyword)); sc.setMetadataPolicy(CatalogService.DEFAULT_METADATA_POLICY); createSourceWithRetry(sc); ElasticVersionBehaviorProvider elasticVersionBehaviorProvider = new ElasticVersionBehaviorProvider(elastic.getMinVersionInCluster()); diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticsearchCluster.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticsearchCluster.java index f7cce90ba3..976fe66c04 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticsearchCluster.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ElasticsearchCluster.java @@ -315,7 +315,7 @@ private static ContentSigner newSigner(PrivateKey privateKey, String algo) { * Creates a storage plugin config with values suitable for creating * connections to the embedded elasticsearch cluster. */ - public BaseElasticStoragePluginConfig config(boolean allowPushdownAnalyzedOrNormalizedFields) { + public BaseElasticStoragePluginConfig config(boolean allowPushdownAnalyzedOrNormalizedFields, boolean pushdownWithKeyword) { if ((this.authenticationType == ES_ACCOUNT) || (this.authenticationType == NONE)) { AuthenticationType authenticationType; if (this.authenticationType == ES_ACCOUNT) { @@ -338,6 +338,7 @@ public BaseElasticStoragePluginConfig config(boolean allowPushdownAnalyzedOrNorm useWhiteList, /* use whitelist */ scrollSize, allowPushdownAnalyzedOrNormalizedFields, /* allow group by on normalized fields */ + pushdownWithKeyword, /* allow .keyword in pushdowns */ false, /* warn on row count mismatch */ EncryptionValidationMode.NO_VALIDATION, forceDoublePrecision @@ -369,6 +370,7 @@ public BaseElasticStoragePluginConfig config(boolean allowPushdownAnalyzedOrNorm true, /* use painless */ scrollSize, allowPushdownAnalyzedOrNormalizedFields, /* allow group by on normalized fields */ + pushdownWithKeyword, /* allow .keyword in pushdowns */ false, /* warn on row count mismatch */ EncryptionValidationMode.NO_VALIDATION, forceDoublePrecision @@ -379,7 +381,7 @@ public BaseElasticStoragePluginConfig config(boolean allowPushdownAnalyzedOrNorm public BaseElasticStoragePluginConfig config() { - return config(false); + return config(false, false); } /** diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllDataTypes.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllDataTypes.java index ff302a8304..6114edb2a0 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllDataTypes.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllDataTypes.java @@ -178,8 +178,7 @@ public void loadTable() throws IOException, ParseException { ElasticsearchCluster.ColumnData[] dataNew = new ElasticsearchCluster.ColumnData[0]; dataNew = operatedList.toArray(dataNew); elastic.load(schema, table, dataNew); - } - else { + } else { elastic.load(schema, table, data); } } diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllElasticTypes.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllElasticTypes.java index f393740f01..61044ff6cb 100644 ---
a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllElasticTypes.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestAllElasticTypes.java @@ -38,7 +38,7 @@ public class ITTestAllElasticTypes extends ElasticBaseTestQuery { DateTimeFormatter formatter = DateFunctionsUtils.getISOFormatterForFormatString("YYYY-MM-DD HH:MI:SS"); // set in @Before method - private String ELASTIC_TABLE = null; + private String elasticTableName = null; @Before public void loadTable() throws IOException, ParseException { @@ -148,7 +148,7 @@ public void loadTable() throws IOException, ParseException { }; elastic.load(schema, table, data); - ELASTIC_TABLE = String.format("elasticsearch.%s.%s", schema, table); + elasticTableName = String.format("elasticsearch.%s.%s", schema, table); } // TODO - see if source data is what is getting to groovy @@ -173,7 +173,7 @@ public void testCastRoundTripsToVarchar() throws Exception { for (int i = 0; i < allCastTypes.length; i++) { String exprs = " cast(cast(%s as %s) as %s) = %s as outCol"; testBuilder() - .sqlQuery(String.format("select " + exprs + " from %s", sourceFields[i], LONG_VARCHAR, allCastTypes[i], sourceFields[i], ELASTIC_TABLE)) + .sqlQuery(String.format("select " + exprs + " from %s", sourceFields[i], LONG_VARCHAR, allCastTypes[i], sourceFields[i], elasticTableName)) .ordered() .baselineColumns("outCol") .baselineValues(true) diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestClient.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestClient.java index 0264e2e53a..535f525ca2 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestClient.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestClient.java @@ -54,6 +54,7 @@ public static void beforeStart() { Assume.assumeFalse(ElasticsearchCluster.USE_EXTERNAL_ES5); } + @Override @Before public void before() throws Exception { super.before(); diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestComplexMaps.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestComplexMaps.java index 73cc77c1b1..9deffa33cf 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestComplexMaps.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestComplexMaps.java @@ -29,6 +29,7 @@ public class ITTestComplexMaps extends ElasticBaseTestQuery { + @SuppressWarnings("checkstyle:MemberName") private String ELASTIC_TABLE = null; @Override @@ -650,6 +651,7 @@ public void testSelectComplexMapWithNestedList() throws Exception { .go(); } + @Override public void verifyJsonInPlan(String s, String[] s2){ } diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDate.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDate.java index 17ae00e0c2..d4b85a9153 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDate.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDate.java @@ -96,8 +96,7 @@ private void runTestBuilder(String sql, String value1, String value2) throws Exc .baselineValues(formatter.parse(value1 , formatterToBaselineJT).toString().replace("T", " ")+ ":00.000") .baselineValues(formatter.parse(value2 , formatterToBaselineJT).toString().replace("T", " ")+ ":00.000") .go(); - } - else { + } else { testBuilder() .sqlQuery(sql) .ordered() diff 
--git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDateTime.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDateTime.java index c1bef39536..d2694408b8 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDateTime.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestDateTypesMixDateTime.java @@ -96,8 +96,7 @@ private void runTestBuilder(String sql, String value1, String value2) throws Exc .baselineValues(formatter.parse(value1, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS")).toString().replace("T", " ") + ".000") .baselineValues(formatter.parse(value2, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS")).toString().replace("T", " ") + ".000") .go(); - } - else { + } else { testBuilder() .sqlQuery(sql) .ordered() diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestElasticsearchPushdownWithKeywordEnabled.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestElasticsearchPushdownWithKeywordEnabled.java new file mode 100644 index 0000000000..bda81b03a0 --- /dev/null +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestElasticsearchPushdownWithKeywordEnabled.java @@ -0,0 +1,201 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.plugins.elastic; + +import static com.dremio.plugins.elastic.ElasticsearchType.OBJECT; +import static com.dremio.plugins.elastic.ElasticsearchType.TEXT; + +import java.util.concurrent.TimeUnit; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestRule; + +import com.dremio.common.util.TestTools; +import com.dremio.plugins.elastic.ElasticBaseTestQuery.PushdownWithKeyword; +import com.google.common.collect.ImmutableMap; + +/** + * Tests Elasticsearch filter pushdown (LIKE, equality and IN) on text fields when the .keyword sub-field pushdown is enabled on the source. + */ +@PushdownWithKeyword(enabled = true) +public class ITTestElasticsearchPushdownWithKeywordEnabled extends ElasticBaseTestQuery { + @SuppressWarnings("checkstyle:MemberName") + protected String TABLENAME; + + @Rule + public final TestRule timeoutRule = TestTools.getTimeoutRule(150, TimeUnit.SECONDS); + + @Before + public void loadData() throws Exception { + + ElasticsearchCluster.ColumnData[] data = new ElasticsearchCluster.ColumnData[]{ + new ElasticsearchCluster.ColumnData("location", OBJECT, new Object[][]{ + {ImmutableMap.of("name", "Denmark-Norway")}, + {ImmutableMap.of("name", "Norway")}, + {ImmutableMap.of("name", "Denmark")} + }) + }; + + elastic.load(schema, table, data); + TABLENAME = "elasticsearch." + schema + "."
+ table; + } + + @Test + public void testLike() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name like 'Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Norway") + .go(); + } + + @Test + public void testLikeHyphen() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name like 'Denmark-Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark-Norway") + .go(); + } + + @Test + public void testLikeMulti() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name like 'Denmark' or l.location.name like 'Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark") + .baselineValues("Norway") + .go(); + } + + @Test + public void testLikeWithoutKeyword() throws Exception { + + ElasticsearchCluster.ColumnData[] data = new ElasticsearchCluster.ColumnData[]{ + new ElasticsearchCluster.ColumnData("name", TEXT, new Object[][]{ + {"Denmark-Norway"}, + {"Denmark"}, + {"Norway"} + }) + }; + + elastic.load(schema, table, data); + String sql = String.format("select name from %s where name like 'Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .expectsEmptyResultSet() + .go(); + } + + @Test + public void testLikePattern() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name like '%%Denmark%%'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark") + .baselineValues("Denmark-Norway") + .go(); + } + + @Test + public void testEquals() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name = 'Denmark'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark") + .go(); + } + + @Test + public void testEqualsHyphen() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name = 'Denmark-Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark-Norway") + .go(); + } + + @Test + public void testEqualsMulti() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name = 'Denmark' or l.location.name = 'Norway'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark") + .baselineValues("Norway") + .go(); + } + + @Test + public void testEqualsWithoutKeyword() throws Exception { + + ElasticsearchCluster.ColumnData[] data = new ElasticsearchCluster.ColumnData[]{ + new ElasticsearchCluster.ColumnData("name", TEXT, new Object[][]{ + {"Denmark-Norway"}, + {"Denmark"}, + {"Norway"} + }) + }; + + elastic.load(schema, table, data); + String sql = String.format("select name from %s where name = 'Norway' or name = 'Denmark'", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Norway") + .baselineValues("Denmark") + .baselineValues("Denmark-Norway") + .go(); + } + + @Test + public void testIn() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name in('Denmark')", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + 
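// Aside: with pushdownWithKeyword enabled, NamedFieldExpression (earlier in this
// patch) rewrites exact-match filters on analyzed text fields that the mapping
// flagged as STRING_WITH_KEYWORD so they target the un-analyzed .keyword
// sub-field. A minimal sketch of that rewrite; the method and parameters here
// are illustrative, not the actual Dremio API:
static String pushdownFieldName(String field, boolean pushdownWithKeyword,
                                boolean isText, boolean hasKeywordSubField) {
  if (pushdownWithKeyword && isText && hasKeywordSubField && !field.endsWith(".keyword")) {
    return field + ".keyword"; // term-level comparison, hyphens and case intact
  }
  return field; // falls back to the analyzed field
}
// Without the sub-field, the pushed-down query runs against analyzed tokens
// ("Denmark-Norway" is indexed as [denmark, norway]); that is presumably why
// testLikeWithoutKeyword expects no rows while testEqualsWithoutKeyword also
// matches "Denmark-Norway".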
.baselineValues("Denmark") + .go(); + } + + @Test + public void testInHyphen() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name in('Denmark-Norway')", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark-Norway") + .go(); + } + + @Test + public void testInMulti() throws Exception { + + String sql = String.format("select l.location.name from %s l where l.location.name in('Denmark', 'Norway')", TABLENAME); + testBuilder().sqlQuery(sql).unOrdered() + .baselineColumns("name") + .baselineValues("Denmark") + .baselineValues("Norway") + .go(); + } +} diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestHiddenIndices.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestHiddenIndices.java index e09d439d4e..9516dad57a 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestHiddenIndices.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestHiddenIndices.java @@ -43,6 +43,7 @@ public static void beforeStart() { } + @Override @Before public void before() throws Exception { super.before(); diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestLimit.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestLimit.java index 8b7437f17f..78f07238fe 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestLimit.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestLimit.java @@ -97,7 +97,7 @@ public void tearDown() throws Exception { AutoCloseables.close(jobTelemetryClient); } - String AGG_LIMIT = "=" + private static final String AGG_LIMIT = "=" + "[{\n" + " \"size\" : 0,\n" + " \"query\" : {\n" + diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestMetadataColumns.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestMetadataColumns.java index 5f7a4ff1f6..b653b653be 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestMetadataColumns.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestMetadataColumns.java @@ -41,6 +41,7 @@ @ShowIDColumn(enabled=true) public class ITTestMetadataColumns extends ElasticBaseTestQuery { + @SuppressWarnings("checkstyle:MemberName") protected String TABLENAME; protected String[] ids = new String[5]; protected String[] uids = new String[5]; diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectPushdown.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectPushdown.java index 30bcde0ccb..144dfd6a07 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectPushdown.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectPushdown.java @@ -28,7 +28,8 @@ import com.google.common.collect.Lists; public class ITTestProjectPushdown extends ElasticBaseTestQuery { - private String PARQUET_TABLE = null; + private static final String PARQUET_TABLE = "dfs.\"[WORKING_PATH]/src/test/resources/small_business.parquet\""; + @SuppressWarnings("checkstyle:MemberName") private String ELASTIC_TABLE = null; private static final String NEW_COLUMN_1 = "/json/new_column/file1.json"; @@ -41,9 +42,7 @@ public void before() throws Exception { super.before(); ColumnData[] data = getBusinessData(); load(schema, table, data); - PARQUET_TABLE = 
"dfs.\"[WORKING_PATH]/src/test/resources/small_business.parquet\""; ELASTIC_TABLE = String.format("elasticsearch.%s.%s", schema, table); - } /** diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectionsAndFilter.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectionsAndFilter.java index b3f7de87e6..34f1d429a8 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectionsAndFilter.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/ITTestProjectionsAndFilter.java @@ -1033,26 +1033,26 @@ public final void tesFilterWithCase() throws Exception { String sqlQuery = "select city from elasticsearch." + schema + "." + table + " where case when stars < 2 then city else state end = 'San Francisco'"; verifyJsonInPlan(sqlQuery, new String[]{ "[{\n" + - " \"from\" : 0,\n" + - " \"size\" : 4000,\n" + - " \"query\" : {\n" + - " \"script\" : {\n" + - " \"script\" : {\n" + - " \"source\" : \"(def) ((doc[\\\"stars\\\"].empty) ? false : ( ( ( ( doc[\\\"stars\\\"].value < 2 ) ) ? (def) ( (doc[\\\"city\\\"].empty) ? null : doc[\\\"city\\\"].value ) : (def) ( (doc[\\\"state\\\"].empty) ? null : doc[\\\"state\\\"].value ) ) == 'San Francisco' ))\",\n" + - " \"lang\" : \"painless\"\n" + - " },\n" + - " \"boost\" : 1.0\n" + - " }\n" + - " },\n" + - " \"_source\" : {\n" + - " \"includes\" : [\n" + - " \"city\",\n" + - " \"stars\",\n" + - " \"state\"\n" + + " \"from\":0,\n" + + " \"size\":4000,\n" + + " \"query\":{\n" + + " \"script\":{\n" + + " \"script\":{\n" + + " \"source\":\"(def) ((doc[\\\"stars\\\"].empty) ? false : ( ( ( doc[\\\"stars\\\"].value < 2 ) ) ? (def) ( (doc[\\\"city\\\"].empty) ? null : ( doc[\\\"city\\\"].value == 'San Francisco' ) ) : (def) ( (doc[\\\"state\\\"].empty) ? 
null : ( doc[\\\"state\\\"].value == 'San Francisco' ) ) ))\",\n" + + " \"lang\":\"painless\"\n" + + " },\n" + + " \"boost\":1.0\n" + + " }\n" + + " },\n" + + " \"_source\":{\n" + + " \"includes\":[\n" + + " \"city\",\n" + + " \"stars\",\n" + + " \"state\"\n" + " ],\n" + - " \"excludes\" : [ ]\n" + - " }\n" + - "}]" + " \"excludes\":[]\n" + + " }\n" + + " }]" }); testBuilder().sqlQuery(sqlQuery).unOrdered().baselineColumns("city") .baselineValues("San Francisco") diff --git a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/mapping/TestSchemaMerger.java b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/mapping/TestSchemaMerger.java index d3a986ce4d..8db1c43edc 100644 --- a/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/mapping/TestSchemaMerger.java +++ b/plugins/elasticsearch/src/test/java/com/dremio/plugins/elastic/mapping/TestSchemaMerger.java @@ -66,8 +66,8 @@ private void createMapping() { null, null, true, - innerFields2 - ); + innerFields2, + null); final Map innerFields1 = new HashMap<>(); innerFields1.put("1", createElasticTextField("stringLevel1")); @@ -81,8 +81,8 @@ private void createMapping() { null, null, true, - innerFields1 - ); + innerFields1, + null); final Map fields = new HashMap<>(); fields.put("1", createElasticTextField("stringLevel0")); @@ -101,8 +101,8 @@ private ElasticMappingSet.ElasticField createElasticTextField(String name) { null, null, true, - Collections.emptyMap() - ); + Collections.emptyMap(), + null); } private ElasticMappingSet.ElasticField createElasticField(String name, ElasticMappingSet.Type type, boolean normalized) { @@ -113,8 +113,8 @@ private ElasticMappingSet.ElasticField createElasticField(String name, ElasticMa normalized ? "true" : null, null, true, - Collections.emptyMap() - ); + Collections.emptyMap(), + null); } private ElasticMappingSet.ElasticField createElasticFloatField(String name) { @@ -125,8 +125,8 @@ private ElasticMappingSet.ElasticField createElasticFloatField(String name) { null, null, true, - Collections.emptyMap() - ); + Collections.emptyMap(), + null); } private void createSchema() { diff --git a/plugins/gcs/pom.xml b/plugins/gcs/pom.xml index 4400cceb24..391ad305ce 100644 --- a/plugins/gcs/pom.xml +++ b/plugins/gcs/pom.xml @@ -25,7 +25,7 @@ com.dremio.plugins dremio-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-gcs-plugin diff --git a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSAsyncFileReader.java b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSAsyncFileReader.java index c8e92ae616..f785c173ec 100644 --- a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSAsyncFileReader.java +++ b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSAsyncFileReader.java @@ -35,6 +35,7 @@ import org.asynchttpclient.uri.Uri; import com.dremio.exec.hadoop.DremioHadoopUtils; +import com.dremio.http.BufferBasedCompletionHandler; import com.dremio.io.ExponentialBackoff; import com.dremio.io.ReusableAsyncByteReader; import com.dremio.plugins.async.utils.AsyncReadWithRetry; @@ -119,7 +120,7 @@ public CompletableFuture readFully(long offset, ByteBuf dst, int dstOffset java.util.function.Function requestBuilderFunction = getRequestBuilderFunction(offset, len); return asyncReaderWithRetry.read(asyncHttpClient, requestBuilderFunction, - metrics, path, threadName, dst, dstOffset, 0, backoff); + metrics, path, threadName, new BufferBasedCompletionHandler(dst, dstOffset), 0, backoff); } private java.util.function.Function 
getRequestBuilderFunction(long offset, int len) { diff --git a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSConf.java b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSConf.java index d9bac6aa8b..49817640cb 100644 --- a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSConf.java +++ b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GCSConf.java @@ -178,6 +178,7 @@ public boolean isCachingEnabled(final OptionManager optionManager) { return GCSConf.this.cachingEnable; } + @Override public int cacheMaxSpaceLimitPct() { return GCSConf.this.cachePercent; } diff --git a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GoogleStoragePlugin.java b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GoogleStoragePlugin.java index bf3a58bff9..8bd0ffe243 100644 --- a/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GoogleStoragePlugin.java +++ b/plugins/gcs/src/main/java/com/dremio/plugins/gcs/GoogleStoragePlugin.java @@ -178,6 +178,7 @@ protected boolean isAsyncEnabledForQuery(OperatorContext context) { return context != null && context.getOptions().getOption(ASYNC_READS); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { return false; } diff --git a/plugins/gcs/src/main/resources/gcs-layout.json b/plugins/gcs/src/main/resources/gcs-layout.json index 921b6173fa..a5dae1d84a 100644 --- a/plugins/gcs/src/main/resources/gcs-layout.json +++ b/plugins/gcs/src/main/resources/gcs-layout.json @@ -3,7 +3,7 @@ "tags": [ ], "metadataRefresh": { - "datasetDiscovery": true, + "datasetDiscovery": false, "isFileSystemSource": true }, "form": { diff --git a/plugins/hdfs/pom.xml b/plugins/hdfs/pom.xml index 639e8ca83e..e3efb647e6 100644 --- a/plugins/hdfs/pom.xml +++ b/plugins/hdfs/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hdfs-plugin diff --git a/plugins/hive-common/pom.xml b/plugins/hive-common/pom.xml index 2f26a2dac5..e70cd41fda 100644 --- a/plugins/hive-common/pom.xml +++ b/plugins/hive-common/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive-plugin-common diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/BaseHiveStoragePluginConfig.java b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/BaseHiveStoragePluginConfig.java index 86aab53a16..514001b084 100644 --- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/BaseHiveStoragePluginConfig.java +++ b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/BaseHiveStoragePluginConfig.java @@ -102,6 +102,7 @@ public abstract class BaseHiveStoragePluginConfig @DisplayMetadata(label = "Default CTAS Format") public DefaultCtasFormatSelection defaultCtasFormat = DefaultCtasFormatSelection.ICEBERG; + @Override public String getDefaultCtasFormat() { return defaultCtasFormat.getDefaultCtasFormat(); } diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/ContextClassLoaderAware.java b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/ContextClassLoaderAware.java new file mode 100644 index 0000000000..495c7a0264 --- /dev/null +++ b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/ContextClassLoaderAware.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.hive; + +/** + * Marker interface that indicates that a plugin-based class ensures that the calling thread's context class loader + * is set appropriately before calling into any dependencies the class may have. Callers that also manage the + * context class loader can check for this interface and optimize out unnecessary context swaps. + */ +public interface ContextClassLoaderAware { +} diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/exec/HiveProxyingScanBatchCreator.java b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/exec/HiveProxyingScanBatchCreator.java index bb118b3bd6..919747bfe0 100644 --- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/exec/HiveProxyingScanBatchCreator.java +++ b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/exec/HiveProxyingScanBatchCreator.java @@ -27,6 +27,7 @@ */ public class HiveProxyingScanBatchCreator implements ProducerOperator.Creator { + @Override public ProducerOperator create(FragmentExecutionContext fragmentExecContext, OperatorContext context, HiveProxyingSubScan config) throws ExecutionSetupException { final SupportsPF4JStoragePlugin pf4JStoragePlugin = diff --git a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/proxy/HiveProxiedSubScan.java b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/proxy/HiveProxiedSubScan.java index 80e3a031db..355584f754 100644 --- a/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/proxy/HiveProxiedSubScan.java +++ b/plugins/hive-common/src/main/java/com/dremio/exec/store/hive/proxy/HiveProxiedSubScan.java @@ -92,5 +92,6 @@ public List getColumns() { public abstract boolean mayLearnSchema(); + @Override public abstract HiveProxiedSubScan clone(); } diff --git a/plugins/hive-function-registry/pom.xml b/plugins/hive-function-registry/pom.xml index 626aad1444..912ddabd32 100644 --- a/plugins/hive-function-registry/pom.xml +++ b/plugins/hive-function-registry/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive-function-registry diff --git a/plugins/hive/pom.xml b/plugins/hive/pom.xml index db18b3f0da..013fb45f31 100644 --- a/plugins/hive/pom.xml +++ b/plugins/hive/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive-plugin @@ -319,6 +319,12 @@ org.apache.directory.server apacheds-kerberos-codec + + + javax.servlet.jsp + jsp-api + @@ -347,13 +353,17 @@ org.apache.hadoop hadoop-azure - compile org.apache.hadoop hadoop-hdfs test + + com.dremio.plugins + dremio-plugin-common + ${project.version} + com.dremio.plugins dremio-hive-plugin-common diff --git a/plugins/hive/src/main/codegen/templates/HiveRecordReaders.java b/plugins/hive/src/main/codegen/templates/HiveRecordReaders.java index 451ec594a3..9ded7c9301 100644 --- a/plugins/hive/src/main/codegen/templates/HiveRecordReaders.java +++ b/plugins/hive/src/main/codegen/templates/HiveRecordReaders.java @@ -190,8 +190,7 @@ 
public void internalInit(InputSplit inputSplit, JobConf jobConf, ValueVector[] v } try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) { reader = ((OrcInputFormat)jobConf.getInputFormat()).getRecordReader(inputSplit, jobConf, Reporter.NULL, options); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } <#else> @@ -236,8 +235,7 @@ public int populateData() throws IOException, SerDeException { if (!hasNext) { break; } - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } Object deSerializedValue = partitionSerDe.deserialize((Writable) value); @@ -264,8 +262,7 @@ public void close() throws IOException { if (reader != null) { try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())){ reader.close(); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } reader = null; diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/Hive2PluginCreator.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/Hive2PluginCreator.java index ebfb9066bf..db23573077 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/Hive2PluginCreator.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/Hive2PluginCreator.java @@ -29,6 +29,7 @@ @Extension public class Hive2PluginCreator implements StoragePluginCreator { + @Override public HiveStoragePlugin createStoragePlugin(PluginManager pf4jManager, HiveStoragePluginConfig config, SabotContext context, String name, Provider pluginIdProvider) { final HiveConfFactory confFactory = new HiveConfFactory(); diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveClientImpl.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveClientImpl.java index 9f189ce12a..98d3769bd2 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveClientImpl.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveClientImpl.java @@ -169,7 +169,9 @@ void connect() throws MetaException { doAsCommand( (PrivilegedExceptionAction) () -> { try(Closeable ccls = HivePf4jPlugin.swapClassLoader()) { - client = Hive.get(hiveConf).getMSC(); + // skip registering Hive functions as this could be expensive, especially on Glue, and we don't have any + // need for them + client = Hive.getWithFastCheck(hiveConf, false).getMSC(); } return null; }, diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveConfFactory.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveConfFactory.java index ce4db50a83..7ff1c669dc 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveConfFactory.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveConfFactory.java @@ -114,7 +114,7 @@ private void disableFileSystemCache(HiveConf hiveConf) { protected HiveConf createBaseHiveConf(BaseHiveStoragePluginConfig config) { // Note: HiveConf tries to use the context classloader first, then uses the classloader that it itself - // is in. If the context classloader is non-null, it will prevnt using the PF4J classloader. + // is in. If the context classloader is non-null, it will prevent using the PF4J classloader. // We do not need synchronization when changing this, since it is per-thread anyway. 
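// Aside: the swap-and-restore idiom the comment above refers to. HiveConf
// consults the thread context classloader first, so the factory nulls it, and
// plugin code wraps Hive calls in try (Closeable ccls = HivePf4jPlugin.swapClassLoader()).
// A self-contained sketch of such a helper; the real implementation may differ:
import java.io.Closeable;

final class ContextClassLoaderSwap {
  static Closeable swap(ClassLoader target) {
    Thread t = Thread.currentThread();
    ClassLoader previous = t.getContextClassLoader();
    t.setContextClassLoader(target); // null forces HiveConf back onto its own classloader
    return () -> t.setContextClassLoader(previous); // restore on close()
  }
}
// Usage: try (Closeable ccls = ContextClassLoaderSwap.swap(null)) { new HiveConf(); }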
final ClassLoader contextLoader = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(null); @@ -171,6 +171,7 @@ protected static void addUserProperties(HiveConf hiveConf, BaseHiveStoragePlugin final Set userPropertyNames = new HashSet<>(); if(config.propertyList != null) { for(Property prop : config.propertyList) { + checkUnsupportedProps(prop.name, prop.value); userPropertyNames.add(prop.name); setConf(hiveConf, prop.name, prop.value); if(logger.isTraceEnabled()){ @@ -257,4 +258,10 @@ protected static void setConf(HiveConf hiveConf, String intProperty, int intValu protected static void setConf(HiveConf hiveConf, String propertyName, boolean booleanValue) { hiveConf.setBoolean(propertyName, booleanValue); } + + private static void checkUnsupportedProps(String name, String value) { + if ("parquet.column.index.access".equals(name) && Boolean.parseBoolean(value)) { + throw new IllegalArgumentException("Unsupported Hive config: " + name + '=' + value); + } + } } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveStoragePlugin.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveStoragePlugin.java index a7e8eec1fb..31694704c4 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveStoragePlugin.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveStoragePlugin.java @@ -67,6 +67,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.TableOperations; +import org.apache.iceberg.io.FileIO; import org.apache.orc.OrcConf; import org.pf4j.PluginManager; @@ -103,12 +104,9 @@ import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.ResolvedVersionContext; import com.dremio.exec.catalog.RollbackOption; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.TableMutationOptions; import com.dremio.exec.dotfile.View; -import com.dremio.exec.hadoop.HadoopFsCacheWrapperDremioClassLoader; -import com.dremio.exec.hadoop.HadoopFsSupplierProviderDremioClassLoader; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.physical.base.ViewOptions; @@ -262,7 +260,6 @@ public class HiveStoragePlugin extends BaseHiveStoragePlugin implements StorageP private int signatureValidationParallelism = 16; private long signatureValidationTimeoutMS = 2_000L; - private final HadoopFsSupplierProviderDremioClassLoader hadoopFsSupplierProviderDremioClassLoader = new HadoopFsCacheWrapperDremioClassLoader(); private final HadoopFsSupplierProviderPluginClassLoader hadoopFsSupplierProviderPluginClassLoader = new HadoopFsCacheWrapperPluginClassLoader(); @VisibleForTesting @@ -307,12 +304,7 @@ public boolean canGetDatasetMetadataInCoordinator() { return true; } - @Override - public Supplier getHadoopFsSupplier(String path, Iterable> conf, String queryUser) { - return hadoopFsSupplierProviderDremioClassLoader.getHadoopFsSupplierDremioClassLoader(path, conf); - } - - private FileSystem createFileSystem(String filePath, OperatorContext operatorContext, + private FileSystem createFileSystem(String filePath, String userName, OperatorContext operatorContext, boolean injectAsyncOptions, boolean disableHDFSCache) throws IOException { try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { Path path = new Path(filePath); @@ -324,7 +316,7 @@ private FileSystem createFileSystem(String filePath, OperatorContext 
operatorCon if (disableHDFSCache) { jobConf.setBoolean("fs.hdfs.impl.disable.cache", true); } - return createFS(new DremioHadoopFileSystemWrapper(new Path(uri), jobConf, operatorContext != null ? operatorContext.getStats() : null, cacheAndAsyncConf.isAsyncEnabled(), this.getHadoopFsSupplierPluginClassLoader(uri.toString(), jobConf).get()), + return createFS(new DremioHadoopFileSystemWrapper(new Path(uri), jobConf, operatorContext != null ? operatorContext.getStats() : null, cacheAndAsyncConf.isAsyncEnabled(), this.getHadoopFsSupplierPluginClassLoader(uri.toString(), jobConf, userName).get()), operatorContext, cacheAndAsyncConf); } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -333,17 +325,17 @@ private FileSystem createFileSystem(String filePath, OperatorContext operatorCon @Override public FileSystem createFS(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, false, false); + return createFileSystem(filePath, userName, operatorContext, false, false); } @Override public FileSystem createFSWithAsyncOptions(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, true, false); + return createFileSystem(filePath, userName, operatorContext, true, false); } @Override public FileSystem createFSWithoutHDFSCache(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, false, true); + return createFileSystem(filePath, userName, operatorContext, false, true); } @Override @@ -353,6 +345,7 @@ public Iterable> getConfigProperties() { } } + @Override public String getDefaultCtasFormatProperty() { return hiveConf.get(HIVE_DEFAULT_CTAS_FORMAT); } @@ -385,7 +378,7 @@ public boolean allowUnlimitedSplits(DatasetHandle handle, DatasetConfig datasetC try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tablePathComponents, true); + HiveMetadataUtils.resolveSchemaComponents(tablePathComponents); final Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { throw new ConnectorException(String.format("Dataset path '%s', table not found.", tablePathComponents)); @@ -406,6 +399,7 @@ public void runRefreshQuery(String refreshQuery, String user) throws Exception { runQuery(refreshQuery, user, QUERY_TYPE_METADATA_REFRESH); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { final HiveDatasetMetadata hiveDatasetMetadata = metadata.unwrap(HiveDatasetMetadata.class); try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { @@ -416,8 +410,9 @@ public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileData } } + @Override public List resolveTableNameToValidPath(List tableSchemaPath) { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath, true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath); return Arrays.asList(schemaComponents.getDbName(), schemaComponents.getTableName()); } @@ -439,11 +434,11 @@ public ScanTableFunction createScanTableFunction(FragmentExecutionContext fec, O } } + @Override public 
AbstractRefreshPlanBuilder createRefreshDatasetPlanBuilder(SqlHandlerConfig config, SqlRefreshDataset sqlRefreshDataset, UnlimitedSplitsMetadataProvider metadataProvider, boolean isFullRefresh) { if (isFullRefresh) { return new HiveFullRefreshDatasetPlanBuilder(config, sqlRefreshDataset, metadataProvider); - } - else { + } else { return new HiveIncrementalRefreshDatasetPlanBuilder(config, sqlRefreshDataset, metadataProvider); } } @@ -468,11 +463,9 @@ public ReadSignatureProvider createReadSignatureProvider(com.google.protobuf.Byt boolean isFullRefresh, boolean isPartialRefresh) { if (isFullRefresh) { return new HiveFullRefreshReadSignatureProvider(dataTableRoot, queryStartTime, partitionPaths, partitionExists); - } - else if (isPartialRefresh) { + } else if (isPartialRefresh) { return new HivePartialRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionPaths, partitionExists); - } - else { + } else { return new HiveIncrementalRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionPaths, partitionExists); } } @@ -481,7 +474,7 @@ else if (isPartialRefresh) { public TableOperations createIcebergTableOperations(FileSystem fs, String queryUserName, IcebergTableIdentifier tableIdentifier) { try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { IcebergHiveTableIdentifier hiveTableIdentifier = (IcebergHiveTableIdentifier) tableIdentifier; - DremioFileIO fileIO = new DremioFileIO(fs, (Iterable>)hiveConf, this); + FileIO fileIO = createIcebergFileIO(fs, null, null, null, null); if (hiveConf.getBoolean(HiveConfFactory.ENABLE_DML_TESTS_WITHOUT_LOCKING, false)) { return new NoOpHiveTableOperations(hiveConf, getClient(SystemUser.SYSTEM_USERNAME), fileIO, IcebergHiveModel.HIVE, hiveTableIdentifier.getNamespace(), hiveTableIdentifier.getTableName()); @@ -491,6 +484,13 @@ public TableOperations createIcebergTableOperations(FileSystem fs, String queryU } } + @Override + public FileIO createIcebergFileIO(FileSystem fs, OperatorContext context, List dataset, + String datasourcePluginUID, Long fileLength) { + return new DremioFileIO(fs, context, dataset, datasourcePluginUID, fileLength, + new HiveFileSystemConfigurationAdapter(hiveConf)); + } + @Override public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, NamespaceService userNamespaceService) { if (config.getPhysicalDataset().getIcebergMetadata() == null || @@ -500,7 +500,7 @@ public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, Na } String existingRootPointer = config.getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation(); try { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); Table table = getClient(SystemUser.SYSTEM_USERNAME).getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { @@ -546,7 +546,7 @@ public CreateTableEntry createNewTable(NamespaceKey tableSchemaPath, SchemaConfi Map storageOptions, boolean isResultsTable) { Preconditions.checkArgument(icebergTableProps != null, "Iceberg properties are not provided"); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); 
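// Aside: createIcebergFileIO above now builds the DremioFileIO over the plugin
// FileSystem plus a HiveFileSystemConfigurationAdapter, so Iceberg's FileIO can
// read Hive settings without depending on HiveConf directly. A sketch of that
// adapter shape; the ConfigurationAdapter interface shown here is illustrative,
// not Dremio's actual type:
interface ConfigurationAdapter {
  String get(String name);
}

final class HiveConfConfigurationAdapter implements ConfigurationAdapter {
  private final org.apache.hadoop.hive.conf.HiveConf conf;

  HiveConfConfigurationAdapter(org.apache.hadoop.hive.conf.HiveConf conf) {
    this.conf = conf;
  }

  @Override
  public String get(String name) {
    return conf.get(name); // delegate property lookups to HiveConf
  }
}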
com.dremio.io.file.Path tableFolderPath = null; String tableFolderLocation = null; HiveClient client = getClient(schemaConfig.getUserName()); @@ -566,6 +566,7 @@ public CreateTableEntry createNewTable(NamespaceKey tableSchemaPath, SchemaConfi case MERGE: case UPDATE: case OPTIMIZE: + case VACUUM: client.checkDmlPrivileges( schemaComponents.getDbName(), schemaComponents.getTableName(), @@ -614,7 +615,7 @@ String resolveTableLocation(HiveMetadataUtils.SchemaComponents schemaComponents, @Override public void createEmptyTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, BatchSchema batchSchema, WriterOptions writerOptions) { final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); String tableLocation = resolveTableLocation(schemaComponents, schemaConfig, writerOptions); tableLocation = HiveMetadataUtils.resolveCreateTableLocation(hiveConf, schemaComponents, tableLocation); @@ -659,7 +660,7 @@ public IcebergModel getIcebergModel(IcebergTableProps tableProps, String userNam public void dropTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { final HiveClient client = getClient(schemaConfig.getUserName()); - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); try { client.dropTable(schemaComponents.getDbName(), schemaComponents.getTableName(), false); } catch (NoSuchObjectException | UnknownTableException e) { @@ -680,7 +681,7 @@ public void alterTable(NamespaceKey tableSchemaPath, DatasetConfig datasetConfig SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -694,7 +695,7 @@ public void alterTable(NamespaceKey tableSchemaPath, DatasetConfig datasetConfig public void truncateTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); client.checkTruncateTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); DatasetConfig datasetConfig = null; @@ -718,7 +719,7 @@ public void rollbackTable(NamespaceKey tableSchemaPath, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); 
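// Aside: every resolveSchemaComponents(path, boolean) call in this file
// collapses to resolveSchemaComponents(path); the boolean that toggled
// throw-vs-null is replaced by a separate isValidPathSchema probe (used by
// getDatasetHandle further down). A sketch of the split, with assumed path
// semantics (source.db.table, or source.table for the default database):
static boolean isValidPathSchema(java.util.List<String> components) {
  return components != null && (components.size() == 2 || components.size() == 3);
}

static String[] resolveSchemaComponents(java.util.List<String> components) {
  if (!isValidPathSchema(components)) {
    throw new IllegalArgumentException("Invalid dataset path: " + components); // always throws now
  }
  String dbName = components.size() == 3 ? components.get(1) : "default";
  String tableName = components.get(components.size() - 1);
  return new String[]{dbName, tableName};
}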
client.checkDmlPrivileges( schemaComponents.getDbName(), schemaComponents.getTableName(), @@ -730,26 +731,6 @@ public void rollbackTable(NamespaceKey tableSchemaPath, icebergModel.rollbackTable(icebergModel.getTableIdentifier(metadataLocation), rollbackOption); } - @Override - public void vacuumTable(NamespaceKey tableSchemaPath, - DatasetConfig datasetConfig, - SchemaConfig schemaConfig, - VacuumOption vacuumOption, - TableMutationOptions tableMutationOptions) { - HiveClient client = getClient(schemaConfig.getUserName()); - final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); - client.checkDmlPrivileges( - schemaComponents.getDbName(), - schemaComponents.getTableName(), - getPrivilegeActionTypesForIcebergDml(IcebergCommandType.VACUUM)); - - SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); - String metadataLocation = IcebergUtils.getMetadataLocation(datasetConfig, splits.getPartitionChunks().iterator()); - IcebergModel icebergModel = getIcebergModel(metadataLocation, schemaComponents, schemaConfig.getUserName()); - icebergModel.vacuumTable(icebergModel.getTableIdentifier(metadataLocation), vacuumOption); - } - @Override public void addColumns(NamespaceKey key, DatasetConfig datasetConfig, @@ -758,7 +739,7 @@ public void addColumns(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -776,7 +757,7 @@ public void dropColumn(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -795,7 +776,7 @@ public void changeColumn(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -813,7 +794,7 @@ public void addPrimaryKey(NamespaceKey table, ResolvedVersionContext versionContext) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); 
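// Aside: VACUUM now joins DELETE/MERGE/UPDATE/OPTIMIZE in the privilege gate
// inside createNewTable, which is why the standalone vacuumTable override above
// could be dropped. The gate pattern, sketched; the actual mapping behind
// getPrivilegeActionTypesForIcebergDml is plugin-internal, so this is an
// assumption about its shape:
void checkIcebergDml(HiveClient client, String dbName, String tableName,
                     IcebergCommandType command) {
  switch (command) {
    case DELETE:
    case MERGE:
    case UPDATE:
    case OPTIMIZE:
    case VACUUM:
      client.checkDmlPrivileges(dbName, tableName,
          getPrivilegeActionTypesForIcebergDml(command)); // throws if the user lacks rights
      break;
    default:
      break; // other commands are checked on their own paths
  }
}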
client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -830,7 +811,7 @@ public void dropPrimaryKey(NamespaceKey table, ResolvedVersionContext versionContext) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -863,14 +844,14 @@ public List getPrimaryKeyFromMetadata(NamespaceKey table, final String userName = schemaConfig.getUserName(); HiveClient client = getClient(userName); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); final IcebergModel icebergModel; final String path; if (DatasetHelper.isInternalIcebergTable(datasetConfig)) { final FileSystemPlugin metaStoragePlugin = context.getCatalogService().getSource(METADATA_STORAGE_PLUGIN_NAME); - icebergModel = metaStoragePlugin.getIcebergModel(metaStoragePlugin.getSystemUserFS()); + icebergModel = metaStoragePlugin.getIcebergModel(); String metadataTableName = datasetConfig.getPhysicalDataset().getIcebergMetadata().getTableUuid(); path = metaStoragePlugin.resolveTablePathToValidPath(metadataTableName).toString(); } else if (DatasetHelper.isIcebergDataset(datasetConfig)) { @@ -916,7 +897,7 @@ public boolean hasAccessPermission(String user, NamespaceKey key, DatasetConfig } try { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); final Table table = clientsByUser .get(user).getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { @@ -1032,8 +1013,8 @@ public boolean isImpersonationEnabled() { } @Override - public Supplier getHadoopFsSupplierPluginClassLoader(String path, Iterable> conf) { - return hadoopFsSupplierProviderPluginClassLoader.getHadoopFsSupplierPluginClassLoader(path, conf); + public Supplier getHadoopFsSupplierPluginClassLoader(String path, Iterable> conf, String userName) { + return hadoopFsSupplierProviderPluginClassLoader.getHadoopFsSupplierPluginClassLoader(path, conf, isImpersonationEnabled() ? 
userName: SystemUser.SYSTEM_USERNAME); } private enum TaskType { @@ -1130,8 +1111,7 @@ private boolean hasChanged() throws IOException { " cached last modification time = {}, actual modified time = {}", cachedEntityPath, cachedEntity.getLastModificationTime(), fileStatus.getModificationTime()); return true; - } - else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && optionManager.getOption(ExecConstants.HIVE_SIGNATURE_CHANGE_RECURSIVE_LISTING) + } else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && optionManager.getOption(ExecConstants.HIVE_SIGNATURE_CHANGE_RECURSIVE_LISTING) && (cachedEntity.getPath() == null || cachedEntity.getPath().isEmpty())) { final RemoteIterator statuses = fs.listFiles(cachedEntityPath, true); while (statuses.hasNext()) { @@ -1160,7 +1140,7 @@ else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && op MetadataValidity checkHiveMetadata(HiveTableXattr tableXattr, EntityPath datasetPath, BatchSchema tableSchema, final HiveReadSignature readSignature) throws TException { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents()); Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); @@ -1193,11 +1173,10 @@ MetadataValidity checkHiveMetadata(HiveTableXattr tableXattr, EntityPath dataset return MetadataValidity.INVALID; } - boolean includeComplexTypes = optionManager.getOption(ExecConstants.HIVE_COMPLEXTYPES_ENABLED); - boolean isMapTypeEnabled = optionManager.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE); + HiveSchemaTypeOptions typeOptions = new HiveSchemaTypeOptions(optionManager); // cached schema may have $_dremio_update_$ column added, this should not be considered during schema comparisons BatchSchema tableSchemaWithoutInternalCols = tableSchema.dropField(IncrementalUpdateUtils.UPDATE_COLUMN); - BatchSchema hiveSchema = HiveMetadataUtils.getBatchSchema(table, hiveConf, includeComplexTypes, isMapTypeEnabled, this); + BatchSchema hiveSchema = HiveMetadataUtils.getBatchSchema(table, hiveConf, typeOptions, this); if (!hiveSchema.equalsTypesWithoutPositions(tableSchemaWithoutInternalCols)) { // refresh metadata if converted schema is not same as schema in kvstore logger.debug("{}: metadata INVALID - schema has changed, cached: {}, actual: {}", datasetPath, @@ -1353,12 +1332,13 @@ private StatsEstimationParameters getStatsParams() { @Override public Optional getDatasetHandle(EntityPath datasetPath, GetDatasetOption... 
options) throws ConnectorException { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); - final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents(), false); - if (schemaComponents == null) { + if (!HiveMetadataUtils.isValidPathSchema(datasetPath.getComponents())) { return Optional.empty(); } + final HiveMetadataUtils.SchemaComponents schemaComponents = + HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents()); + final boolean tableExists; try { tableExists = client.tableExists(schemaComponents.getDbName(), schemaComponents.getTableName()); @@ -1437,7 +1417,6 @@ public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, Li .enforceVarcharWidth(enforceVarcharWidth) .maxInputSplitsPerPartition(toIntExact(hiveSettings.getMaxInputSplitsPerPartition())) .optionManager(optionManager); - boolean includeComplexTypes = optionManager.getOption(ExecConstants.HIVE_COMPLEXTYPES_ENABLED); final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); final TableMetadata tableMetadata = HiveMetadataUtils.getTableMetadata( @@ -1447,7 +1426,7 @@ public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, Li HiveMetadataUtils.getMaxLeafFieldCount(options), HiveMetadataUtils.getMaxNestedFieldLevels(options), TimeTravelOption.getTimeTravelOption(options), - includeComplexTypes, + new HiveSchemaTypeOptions(optionManager), hiveConf, this); @@ -1484,7 +1463,7 @@ private HivePartitionChunkListing.Builder buildSplits(HivePartitionChunkListing. private List getDeltaSplits(TableMetadata tableMetadata) { try { - String tableLocation = tableMetadata.getTable().getSd().getLocation(); + String tableLocation = DeltaHiveInputFormat.getLocation(tableMetadata.getTable(), optionManager); FileSystem fs = createFS(tableLocation, SystemUser.SYSTEM_USERNAME, null); DeltaLakeTable deltaLakeTable = new DeltaLakeTable(getSabotContext(), fs, tableLocation); return deltaLakeTable.getAllSplits(); @@ -1498,7 +1477,7 @@ private HivePartitionChunkListing.SplitType getSplitType(TableMetadata tableMeta return ICEBERG_MANIFEST_SPLIT; } - if (DeltaHiveInputFormat.isDeltaTable(tableMetadata.getTable().getParameters().get(META_TABLE_STORAGE), optionManager)) { + if (DeltaHiveInputFormat.isDeltaTable(tableMetadata.getTable(), optionManager)) { return DELTA_COMMIT_LOGS; } @@ -1606,8 +1585,7 @@ public BytesOutput provideSignature(DatasetHandle datasetHandle, DatasetMetadata .setRootPointer(metadataAccumulator.getRootPointer()) .build() .toByteArray()); - } - else { + } else { return BytesOutput.NONE; } } @@ -1659,11 +1637,6 @@ public void close() { clientsByUser.cleanUp(); clientsByUser = null; } - try { - hadoopFsSupplierProviderDremioClassLoader.close(); - } catch (Exception e) { - logger.warn("Failed to close hadoopFsSupplierProviderDremioClassLoader", e); - } try { hadoopFsSupplierProviderPluginClassLoader.close(); @@ -1865,6 +1838,7 @@ private UserException buildAlreadyClosedException() { .buildSilently(); } + @Override public T getPF4JStoragePlugin() { return (T) this; } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveUtilities.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveUtilities.java index 6d9b7ce7a1..df3596d4ac 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveUtilities.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/HiveUtilities.java @@ -55,7 +55,6 @@ import com.dremio.common.types.TypeProtos.MajorType; import 
com.dremio.common.util.Closeable; import com.dremio.exec.planner.physical.PlannerSettings; -import com.dremio.exec.store.hive.deltalake.DeltaHiveInputFormat; import com.dremio.exec.work.ExecErrorConstants; import com.dremio.hive.proto.HiveReaderProto.Prop; import com.dremio.hive.proto.HiveReaderProto.SerializedInputSplit; @@ -122,7 +121,7 @@ public static final AbstractSerDe createSerDe(final JobConf jobConf, final Strin * @throws Exception */ public static final Class<? extends InputFormat<?, ?>> getInputFormatClass(final JobConf jobConf, Optional<String> inputFormat, - Optional<String> storageHandlerName, final OptionManager options) throws Exception { + Optional<String> storageHandlerName) throws Exception { if (inputFormat.isPresent()) { return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormat.get()); } @@ -131,9 +130,6 @@ public static final AbstractSerDe createSerDe(final JobConf jobConf, final Strin try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { // HiveUtils.getStorageHandler() depends on the current context classloader if you query an HBase table, // and don't have an HBase session open. - if (DeltaHiveInputFormat.isDeltaTable(storageHandlerName.get(), options)) { - return DeltaHiveInputFormat.class; - } final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(jobConf, storageHandlerName.get()); return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass(); } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java index 55b1377c24..a51dd20dd6 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java @@ -328,8 +328,7 @@ public DiskRangeList readFileData( if (zcr != null) { try { return readDiskRangesUsingZCR(fs, file, path, zcr, pool, baseOffset, range); - } - catch (UnsupportedOperationException ioe) { + } catch (UnsupportedOperationException ioe) { // zero copy read failed.
Clear all buffers and unset zero copy read if (pool != null) { pool.clear(); @@ -658,10 +657,12 @@ private static final class ByteBufferWrapper { this.byteBuffer = byteBuffer; } + @Override public boolean equals(Object rhs) { return (rhs instanceof ByteBufferWrapper) && (this.byteBuffer == ((ByteBufferWrapper) rhs).byteBuffer); } + @Override public int hashCode() { return System.identityHashCode(byteBuffer); } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveFieldConverter.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveFieldConverter.java index a390eb1f5e..d93fd2cec8 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveFieldConverter.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveFieldConverter.java @@ -18,13 +18,12 @@ import static com.dremio.exec.store.hive.HiveUtilities.throwUnsupportedHiveDataTypeError; import java.lang.reflect.InvocationTargetException; +import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Map; import java.util.concurrent.TimeUnit; -import com.dremio.common.exceptions.FieldSizeLimitExceptionHelper; -import com.dremio.exec.store.hive.exec.HiveAbstractReader.HiveOperatorContextOptions; - +import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.util.LargeMemoryUtil; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.BitVector; @@ -34,15 +33,37 @@ import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.TimeStampMilliVector; +import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; -import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.MapVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.NullableStructWriter; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.complex.impl.UnionMapWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.complex.writer.BigIntWriter; +import org.apache.arrow.vector.complex.writer.BitWriter; +import org.apache.arrow.vector.complex.writer.DateMilliWriter; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.complex.writer.Float4Writer; +import org.apache.arrow.vector.complex.writer.Float8Writer; +import org.apache.arrow.vector.complex.writer.IntWriter; +import org.apache.arrow.vector.complex.writer.TimeStampMilliWriter; +import org.apache.arrow.vector.complex.writer.VarBinaryWriter; +import org.apache.arrow.vector.complex.writer.VarCharWriter; import org.apache.arrow.vector.holders.DecimalHolder; +import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.util.DecimalUtility; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector; @@ -58,9 +79,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.Text; + +import com.dremio.common.exceptions.FieldSizeLimitExceptionHelper; +import com.dremio.exec.store.hive.exec.HiveAbstractReader.HiveOperatorContextOptions; import com.dremio.sabot.exec.context.OperatorContext; import com.google.common.collect.Maps; @@ -99,7 +126,6 @@ protected void checkSizeLimit(int size) { primMap.put(PrimitiveCategory.CHAR, Char.class); } - public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) throws IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException { switch (typeInfo.getCategory()) { @@ -122,38 +148,28 @@ public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext conte break; case LIST: { - Class clazz = List.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveList((ListTypeInfo) typeInfo, context, options); } - break; case STRUCT: { - Class clazz = Struct.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveStruct((StructTypeInfo) typeInfo, context, options); } - break; case MAP: { - Class clazz = HiveMap.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveMap((MapTypeInfo) typeInfo, context, options); } - break; case UNION: { Class clazz = Union.class; if (clazz != null) { return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); } } + break; default: throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString()); } return null; } + public static class Union extends HiveFieldConverter { public Union(HiveOperatorContextOptions options) { super(options); @@ -165,37 +181,297 @@ public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector return; } } - public static class HiveMap extends HiveFieldConverter { - public HiveMap(HiveOperatorContextOptions options) { + + public abstract static class BaseComplexConverter extends HiveFieldConverter { + private final OperatorContext context; + + protected BaseComplexConverter(OperatorContext context, HiveOperatorContextOptions options) { super(options); + this.context = context; + } + + protected void write(BaseWriter.ListWriter writer, TypeInfo typeInfo, ObjectInspector oi, Object value) { + if (value == null) { + return; + } + + switch (typeInfo.getCategory()) { + case PRIMITIVE: { + final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo; + switch (primitiveTypeInfo.getPrimitiveCategory()) { + case BOOLEAN: + writeBoolean(writer.bit(), 
((BooleanObjectInspector) oi).get(value)); + break; + case DOUBLE: + writeDouble(writer.float8(), ((DoubleObjectInspector) oi).get(value)); + break; + case FLOAT: + writeFloat(writer.float4(), ((FloatObjectInspector) oi).get(value)); + break; + case DECIMAL: + writeDecimal(writer.decimal(), getDecimalValue((DecimalTypeInfo) typeInfo, oi, value)); + break; + case BYTE: + writeInt(writer.integer(), ((ByteObjectInspector) oi).get(value)); + break; + case INT: + writeInt(writer.integer(), ((IntObjectInspector) oi).get(value)); + break; + case LONG: + writeLong(writer.bigInt(), ((LongObjectInspector) oi).get(value)); + break; + case SHORT: + writeInt(writer.integer(), ((ShortObjectInspector) oi).get(value)); + break; + case BINARY: + writeBinary(writer.varBinary(), ((BinaryObjectInspector) oi).getPrimitiveJavaObject(value)); + break; + case STRING: + writeText(writer.varChar(), ((StringObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case VARCHAR: + writeText(writer.varChar(), ((HiveVarcharObjectInspector) oi).getPrimitiveWritableObject(value).getTextValue()); + break; + case TIMESTAMP: + writeTimestamp(writer.timeStampMilli(), ((TimestampObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case DATE: + writeDate(writer.dateMilli(), ((DateObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case CHAR: + writeText(writer.varChar(), ((HiveCharObjectInspector) oi).getPrimitiveWritableObject(value).getStrippedValue()); + break; + default: + break; + } + } + break; + case LIST: + writeList(writer.list(), (ListTypeInfo) typeInfo, (ListObjectInspector) oi, value); + break; + case MAP: + writeMap(writer.map(false), (MapTypeInfo) typeInfo, (MapObjectInspector) oi, value); + break; + case STRUCT: + writeStruct(writer.struct(), (StructTypeInfo) typeInfo, (StructObjectInspector) oi, value); + break; + default: + break; + } + } + + protected void write(BaseWriter.StructWriter writer, java.lang.String name, TypeInfo typeInfo, ObjectInspector oi, Object value) { + if (value == null) { + return; + } + + switch (typeInfo.getCategory()) { + case PRIMITIVE: { + final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo; + switch (primitiveTypeInfo.getPrimitiveCategory()) { + case BOOLEAN: + writeBoolean(writer.bit(name), ((BooleanObjectInspector) oi).get(value)); + break; + case DOUBLE: + writeDouble(writer.float8(name), ((DoubleObjectInspector) oi).get(value)); + break; + case FLOAT: + writeFloat(writer.float4(name), ((FloatObjectInspector) oi).get(value)); + break; + case DECIMAL: { + DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; + writeDecimal(writer.decimal(name, decimalTypeInfo.scale(), decimalTypeInfo.precision()), + getDecimalValue(decimalTypeInfo, oi, value)); + } + break; + case BYTE: + writeInt(writer.integer(name), ((ByteObjectInspector) oi).get(value)); + break; + case INT: + writeInt(writer.integer(name), ((IntObjectInspector) oi).get(value)); + break; + case LONG: + writeLong(writer.bigInt(name), ((LongObjectInspector) oi).get(value)); + break; + case SHORT: + writeInt(writer.integer(name), ((ShortObjectInspector) oi).get(value)); + break; + case BINARY: + writeBinary(writer.varBinary(name), ((BinaryObjectInspector) oi).getPrimitiveJavaObject(value)); + break; + case STRING: + writeText(writer.varChar(name), ((StringObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case VARCHAR: + writeText(writer.varChar(name), ((HiveVarcharObjectInspector) oi).getPrimitiveWritableObject(value).getTextValue()); + break; 
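// [Reviewer annotation, not part of the change] Every case in this switch follows the
// same shape: unwrap the Hive value through its ObjectInspector, then hand the result
// to the named Arrow struct-field writer. The DECIMAL case above is the only one that
// needs extra care -- the writer is created as writer.decimal(name, scale, precision)
// and getDecimalValue() rescales the value with setScale(scale, RoundingMode.HALF_UP)
// so it matches the declared scale before being written.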
+ case TIMESTAMP: + writeTimestamp(writer.timeStampMilli(name), ((TimestampObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case DATE: + writeDate(writer.dateMilli(name), ((DateObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case CHAR: + writeText(writer.varChar(name), ((HiveCharObjectInspector) oi).getPrimitiveWritableObject(value).getStrippedValue()); + break; + default: + break; + } + } + break; + case LIST: + writeList(writer.list(name), (ListTypeInfo) typeInfo, (ListObjectInspector) oi, value); + break; + case MAP: + writeMap(writer.map(name, false), (MapTypeInfo) typeInfo, (MapObjectInspector) oi, value); + break; + case STRUCT: + writeStruct(writer.struct(name), (StructTypeInfo) typeInfo, (StructObjectInspector) oi, value); + break; + default: + break; + } + } + + private OperatorContext getContext() { + return context; + } + + private static BigDecimal getDecimalValue(DecimalTypeInfo typeInfo, ObjectInspector oi, Object value) { + BigDecimal decimal = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(value).bigDecimalValue(); + return decimal.setScale(typeInfo.scale(), RoundingMode.HALF_UP); + } + + private void writeBinary(VarBinaryWriter writer, byte[] value) { + checkSizeLimit(value.length); + try (ArrowBuf buf = getContext().getAllocator().buffer(value.length)) { + buf.setBytes(0, value); + writer.writeVarBinary(0, value.length, buf); + } + } + + private void writeBoolean(BitWriter writer, boolean value) { + writer.writeBit(value ? 1 : 0); } + + private void writeDouble(Float8Writer writer, double value) { + writer.writeFloat8(value); + } + + private void writeFloat(Float4Writer writer, float value) { + writer.writeFloat4(value); + } + + private void writeDecimal(DecimalWriter writer, BigDecimal value) { + writer.writeDecimal(value); + } + + private void writeInt(IntWriter writer, int value) { + writer.writeInt(value); + } + + private void writeLong(BigIntWriter writer, long value) { + writer.writeBigInt(value); + } + + private void writeText(VarCharWriter writer, Text value) { + checkSizeLimit(value.getLength()); + try (ArrowBuf buf = getContext().getAllocator().buffer(value.getLength())) { + buf.setBytes(0, value.getBytes()); + writer.writeVarChar(0, value.getLength(), buf); + } + } + + private void writeTimestamp(TimeStampMilliWriter writer, TimestampWritable value) { + long seconds = value.getSeconds(); + long nanos = value.getNanos(); + long millis = seconds * 1000 + nanos/1000/1000; + writer.writeTimeStampMilli(millis); + } + + private void writeDate(DateMilliWriter writer, DateWritable value) { + writer.writeDateMilli(value.get().toLocalDate().toEpochDay() * Date.MILLIS_PER_DAY); + } + + protected void writeMap(BaseWriter.MapWriter writer, MapTypeInfo typeInfo, MapObjectInspector oi, Object value) { + writer.startMap(); + for (Map.Entry e : oi.getMap(value).entrySet()) { + writer.startEntry(); + write(writer.key(), typeInfo.getMapKeyTypeInfo(), oi.getMapKeyObjectInspector(), e.getKey()); + write(writer.value(), typeInfo.getMapValueTypeInfo(), oi.getMapValueObjectInspector(), e.getValue()); + writer.endEntry(); + } + writer.endMap(); + } + + protected void writeList(BaseWriter.ListWriter writer, ListTypeInfo typeInfo, ListObjectInspector listOi, Object value) { + writer.startList(); + for (Object o : listOi.getList(value)) { + write(writer, typeInfo.getListElementTypeInfo(), listOi.getListElementObjectInspector(), o); + } + writer.endList(); + } + + protected void writeStruct(BaseWriter.StructWriter writer, StructTypeInfo 
typeInfo, StructObjectInspector oi, Object value) { + writer.start(); + for (StructField field : oi.getAllStructFieldRefs()) { + write(writer, field.getFieldName(), typeInfo.getStructFieldTypeInfo(field.getFieldName()), + field.getFieldObjectInspector(), oi.getStructFieldData(value, field)); + } + writer.end(); + } + } + + public static class HiveMap extends BaseComplexConverter { + private final MapTypeInfo typeInfo; + + public HiveMap(MapTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; + } + @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. - // Currently we support complex types in ORC format only - return; + UnionMapWriter mapWriter = ((MapVector) outputVV).getWriter(); + mapWriter.setPosition(outputIndex); + writeMap(mapWriter, typeInfo, (MapObjectInspector) oi, hiveFieldValue); } } - public static class List extends HiveFieldConverter { - public List(HiveOperatorContextOptions options) { - super(options); + + public static class HiveList extends BaseComplexConverter { + private final ListTypeInfo typeInfo; + + public HiveList(ListTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; } @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. - // Currently we support complex types in ORC format only - return; + UnionListWriter listWriter = ((ListVector) outputVV).getWriter(); + listWriter.setPosition(outputIndex); + writeList(listWriter, typeInfo, (ListObjectInspector) oi, hiveFieldValue); } } - public static class Struct extends HiveFieldConverter { - public Struct(HiveOperatorContextOptions options) { - super(options); + + public static class HiveStruct extends BaseComplexConverter { + private final StructTypeInfo typeInfo; + + public HiveStruct(StructTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; } @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. 
- // Currently we support complex types in ORC format only - return; + StructObjectInspector structOi = (StructObjectInspector) oi; + NullableStructWriter structWriter = ((StructVector) outputVV).getWriter(); + structWriter.setPosition(outputIndex); + structWriter.start(); + for (Field writerField : structWriter.getField().getChildren()) { + StructField field = structOi.getStructFieldRef(writerField.getName()); + write(structWriter, field.getFieldName(), typeInfo.getStructFieldTypeInfo(field.getFieldName()), + field.getFieldObjectInspector(), structOi.getStructFieldData(hiveFieldValue, field)); + } + structWriter.end(); } } public static class Binary extends HiveFieldConverter { @@ -236,7 +512,7 @@ public Decimal(int precision, int scale, OperatorContext context, HiveOperatorCo @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { DecimalUtility.writeBigDecimalToArrowBuf(((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue).bigDecimalValue() - .setScale(holder.scale, RoundingMode.HALF_UP), holder.buffer, LargeMemoryUtil.capAtMaxInt(holder.start), DecimalVector.TYPE_WIDTH); + .setScale(holder.scale, RoundingMode.HALF_UP), holder.buffer, LargeMemoryUtil.capAtMaxInt(holder.start), DecimalVector.TYPE_WIDTH); ((DecimalVector) outputVV).setSafe(outputIndex, 1, 0, holder.buffer); } } @@ -357,7 +633,7 @@ public static class Date extends HiveFieldConverter { public Date(HiveOperatorContextOptions options) { super(options); } - private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1L); + public static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1L); @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { @@ -379,5 +655,4 @@ public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector ((VarCharVector) outputVV).setSafe(outputIndex, valueBytes, 0, valueLen); } } - } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveORCCopiers.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveORCCopiers.java index bdca42ba74..9e954abe29 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveORCCopiers.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveORCCopiers.java @@ -110,7 +110,9 @@ public interface ORCCopier { private abstract static class ORCCopierBase implements ORCCopier { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HiveORCCopiers.class); + @Override public abstract void copy(int inputIdx, int count, int outputIdx); + @Override public abstract void ensureHasRequiredCapacity(int required); protected void ensureVectorHasRequiredCapacity(ValueVector vector, int required) { while (required > vector.getValueCapacity()) { @@ -189,8 +191,7 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof BigIntVector) { if (input instanceof LongColumnVector) { return new BigIntCopier((LongColumnVector) input, (BigIntVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof Float4Vector) { @@ -198,8 +199,7 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, return new DoubleToFloat4Copier((DoubleColumnVector) input, (Float4Vector) output); } else if (input instanceof LongColumnVector) { return new LongToFloat4Copier((LongColumnVector) input, (Float4Vector) output); - } - else { + } else { 
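// [Reviewer annotation] This else-branch is the deliberate fallback of createCopier:
// whenever the incoming ORC column vector type does not match the Arrow output vector,
// a NoOpCopier is returned, which copies nothing, so the mismatched column surfaces as
// nulls rather than failing the whole scan.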
return new NoOpCopier(null, null); } } else if (output instanceof Float8Vector) { @@ -215,15 +215,13 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof DateMilliVector) { if (input instanceof LongColumnVector) { return new DateMilliCopier((LongColumnVector) input, (DateMilliVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof TimeStampMilliVector) { if (input instanceof TimestampColumnVector) { return new TimeStampMilliCopier((TimestampColumnVector) input, (TimeStampMilliVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof DecimalVector) { @@ -241,32 +239,28 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof BitVector) { if (input instanceof LongColumnVector) { return new BitCopier((LongColumnVector) input, (BitVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof ListVector) { if (input instanceof MultiValuedColumnVector) { return new ListCopier(columnVectorData, ordinalId, (MultiValuedColumnVector) input, (ListVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof StructVector) { if (input instanceof StructColumnVector) { return new StructCopier(columnVectorData, ordinalId, (StructColumnVector) input, (StructVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof UnionVector) { if (input instanceof UnionColumnVector) { return new UnionCopier(columnVectorData, ordinalId, (UnionColumnVector) input, (UnionVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } @@ -367,18 +361,15 @@ private static class StructCopier extends ORCCopierBase { Preconditions.checkNotNull(vectorToNameMap.get(hiveElementVector),"The hiveElementVector is not present in the map that maps all the inputVectors with their corresponding names"); if(arrowElementVector == null){ fieldCopiers.add(new NoOpCopier(null, null)); - } - else if(vectorToNameMap.get(hiveElementVector).equals(arrowElementVector.getName())) { + } else if(vectorToNameMap.get(hiveElementVector).equals(arrowElementVector.getName())) { ORCCopier childCopier = createCopier(columnVectorData, childPos, arrowElementVector, hiveElementVector, operatorContextOptions, vectorToNameMap); fieldCopiers.add(childCopier); arrowIdx++; - } - else{ + } else { fieldCopiers.add(new NoOpCopier(null, null)); } - } - else { + } else { fieldCopiers.add(new NoOpCopier(null, null)); } childPos += columnVectorData.getTotalVectorCount(childPos); @@ -752,8 +743,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -771,8 +761,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -828,8 +817,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch 
(Exception e) { // ignoring exception creates null entry } } @@ -847,8 +835,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -909,8 +896,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { } } @@ -978,8 +964,7 @@ public void copy(int inputIdx, int count, int outputIdx) { try { final byte[] value = HiveDecimal.enforcePrecisionScale(input[inputIdx].getHiveDecimal(), outputPrecision, outputScale).bigDecimalValue().movePointRight(outputScale).unscaledValue().toByteArray(); outputVector.setBigEndian(outputIdx, value); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception sets null. // enforcePrecisionScale returns null when it cannot enforce } @@ -991,8 +976,7 @@ public void copy(int inputIdx, int count, int outputIdx) { try { byte[] v = HiveDecimal.enforcePrecisionScale(input[inputIdx].getHiveDecimal(), outputPrecision, outputScale).bigDecimalValue().movePointRight(outputScale).unscaledValue().toByteArray(); outputVector.setBigEndian(outputIdx, v); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception sets null. // enforcePrecisionScale returns null when it cannot enforce } @@ -1166,8 +1150,7 @@ public void copy(int inputIdx, int count, int outputIdx) { String strValue = new String(vector[inputIdx], start[inputIdx], length[inputIdx], StandardCharsets.UTF_8); double doubleValue = Double.parseDouble(strValue); outputVector.set(outputIdx, doubleValue); - } - catch (Exception e) { + } catch (Exception e) { } } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveSplitCreator.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveSplitCreator.java index 4158775d3b..d5b24e0a19 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveSplitCreator.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveSplitCreator.java @@ -58,6 +58,7 @@ public HiveSplitCreator(OperatorContext context, byte[] extendedProperty) { partitionXattrBytes = partitionXattr.toByteString(); } + @Override public SplitAndPartitionInfo createSplit(PartitionProtobuf.NormalizedPartitionInfo filePartitionInfo, SplitIdentity splitIdentity, String fileFormat, long fileSize, long currentModTime) throws InvalidProtocolBufferException { InputSplit inputSplit; switch (fileFormat.toUpperCase()) { diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveTextReader.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveTextReader.java index f9e2a414c5..f98baab5e1 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveTextReader.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/HiveTextReader.java @@ -76,8 +76,7 @@ public HiveTextReader(final HiveTableXattr tableAttr, final SplitAndPartitionInf public void internalInit(InputSplit inputSplit, JobConf jobConf, ValueVector[] vectors) throws IOException { try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) { reader = jobConf.getInputFormat().getRecordReader(inputSplit, jobConf, Reporter.NULL); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } @@ -123,8 
+122,7 @@ public int populateData() throws IOException, SerDeException { if (!hasNext) { break; } - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } if (skipRecordsInspector.doSkipHeader(recordCount++)) { @@ -293,8 +291,7 @@ public void close() throws IOException { if (reader != null) { try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) { reader.close(); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } reader = null; diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java index ded9be74d0..b046e9a5a7 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java @@ -261,7 +261,7 @@ private static RecordReader getRecordReader(HiveTableXattr tableXattr, partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr); } - jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName, context.getOptions())); + jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName)); partitionOI = getStructOI(partitionSerDe); updateFileFormatStat(context.getStats(), partitionInputFormat); @@ -273,7 +273,7 @@ private static RecordReader getRecordReader(HiveTableXattr tableXattr, } else { partitionSerDe = null; partitionOI = null; - jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr), context.getOptions())); + jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr))); updateFileFormatStat(context.getStats(), tableInputFormat); } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/dfs/DremioHadoopFileSystemWrapper.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/dfs/DremioHadoopFileSystemWrapper.java index 98b30215de..08251073e1 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/dfs/DremioHadoopFileSystemWrapper.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/exec/dfs/DremioHadoopFileSystemWrapper.java @@ -57,11 +57,15 @@ import org.apache.hadoop.security.AccessControlException; import com.dremio.common.VM; +import com.dremio.common.util.Closeable; +import com.dremio.common.util.concurrent.ContextClassLoaderSwapper; +import com.dremio.exec.hadoop.HadoopFileSystem; import com.dremio.exec.hadoop.MayProvideAsyncStream; import com.dremio.exec.store.LocalSyncableFileSystem; import com.dremio.exec.store.dfs.DremioFileSystemCache; import com.dremio.exec.store.dfs.OpenFileTracker; import com.dremio.exec.store.dfs.SimpleFileBlockLocation; +import com.dremio.exec.store.hive.ContextClassLoaderAware; import com.dremio.exec.store.hive.exec.DremioFileSystem; import com.dremio.io.AsyncByteReader; import com.dremio.io.FSInputStream; @@ -105,6 +109,7 @@ public class DremioHadoopFileSystemWrapper private final boolean isNAS; private final boolean isHDFS; private final boolean enableAsync; + private final boolean mustSwapContextClassLoader; public DremioHadoopFileSystemWrapper(org.apache.hadoop.fs.Path path, Configuration fsConf, OperatorStats operatorStats, boolean enableAsync, FileSystem 
fileSystem) throws IOException { this(fsConf, fileSystem, operatorStats, enableAsync); @@ -120,6 +125,7 @@ public DremioHadoopFileSystemWrapper(Configuration fsConf, FileSystem fs, Operat if(operatorStats != null) { operatorStats.createMetadataReadIOStats(); } + this.mustSwapContextClassLoader = !(fs instanceof ContextClassLoaderAware); } private static boolean isMapRfs(FileSystem fs) { @@ -150,10 +156,6 @@ private static boolean isHDFS(org.apache.hadoop.fs.FileSystem fs) { return false; } - protected FileSystem getUnderlyingFs() { - return underlyingFs; - } - // See DX-15492 private void openNonExistentFileInPath(org.apache.hadoop.fs.Path f) throws IOException { org.apache.hadoop.fs.Path nonExistentFile = f.suffix(NON_EXISTENT_FILE_SUFFIX + NON_EXISTENT_FILE_COUNTER++); @@ -201,7 +203,8 @@ private void checkAccessAllowed(org.apache.hadoop.fs.Path f, FsAction mode) thro */ @Override public FSInputStream open(Path f) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { return newFSDataInputStreamWrapper(f, openFile(f)); } catch(FSError e) { throw propagateFSError(e); @@ -215,7 +218,8 @@ public String getScheme() { @Override public FSOutputStream create(Path f) throws IOException { - try (WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { return newFSDataOutputStreamWrapper(underlyingFs.create(toHadoopPath(f)), f.toString()); } catch(FSError e) { throw propagateFSError(e); @@ -224,7 +228,8 @@ public FSOutputStream create(Path f) throws IOException { @Override public FSOutputStream create(Path f, boolean overwrite) throws IOException { - try (WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { return newFSDataOutputStreamWrapper(underlyingFs.create(toHadoopPath(f), overwrite), f.toString()); } catch(FSError e) { throw propagateFSError(e); @@ -235,7 +240,8 @@ public FSOutputStream create(Path f, boolean overwrite) throws IOException { @Override public FileAttributes getFileAttributes(Path f) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { final FileStatus result = underlyingFs.getFileStatus(toHadoopPath(f)); // safe-guarding against misbehaving filesystems if (result == null) { @@ -249,7 +255,8 @@ public FileAttributes getFileAttributes(Path f) throws IOException { @Override public void setPermission(Path p, Set permissions) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { underlyingFs.setPermission(toHadoopPath(p), toFsPermission(permissions)); } catch(FSError e) { throw propagateFSError(e); @@ -268,7 +275,8 @@ public T unwrap(Class clazz) { @Override public boolean mkdirs(Path f, Set permissions) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = 
swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { return underlyingFs.mkdirs(toHadoopPath(f), toFsPermission(permissions)); } catch(FSError e) { throw propagateFSError(e); @@ -297,7 +305,8 @@ public void close() throws IOException { @Override public boolean mkdirs(Path folderPath) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, folderPath)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, folderPath)) { org.apache.hadoop.fs.Path path = toHadoopPath(folderPath); if (!underlyingFs.exists(path)) { return underlyingFs.mkdirs(path); @@ -312,7 +321,8 @@ public boolean mkdirs(Path folderPath) throws IOException { @Override public DirectoryStream list(Path f) throws FileNotFoundException, IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { return new ArrayDirectoryStream(underlyingFs.listStatus(toHadoopPath(f))); } catch(FSError e) { throw propagateFSError(e); @@ -321,7 +331,8 @@ public DirectoryStream list(Path f) throws FileNotFoundException @Override public DirectoryStream list(Path f, Predicate filter) throws FileNotFoundException, IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { return new ArrayDirectoryStream(underlyingFs.listStatus(toHadoopPath(f), toPathFilter(filter))); } catch(FSError e) { throw propagateFSError(e); @@ -330,7 +341,8 @@ public DirectoryStream list(Path f, Predicate filter) thro @Override public DirectoryStream listFiles(Path f, boolean recursive) throws FileNotFoundException, IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { return new FetchOnDemandDirectoryStream(underlyingFs.listFiles(toHadoopPath(f), recursive), f, operatorStats); } catch (FSError e) { throw propagateFSError(e); @@ -340,7 +352,8 @@ public DirectoryStream listFiles(Path f, boolean recursive) thro @Override public DirectoryStream glob(Path pattern, Predicate filter) throws FileNotFoundException, IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, pattern)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, pattern)) { return new ArrayDirectoryStream(underlyingFs.globStatus(toHadoopPath(pattern), toPathFilter(filter))); } catch(FSError e) { throw propagateFSError(e); @@ -349,7 +362,8 @@ public DirectoryStream glob(Path pattern, Predicate filter @Override public boolean rename(Path src, Path dst) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, dst)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, dst)) { return underlyingFs.rename(toHadoopPath(src), toHadoopPath(dst)); } catch(FSError e) { throw propagateFSError(e); @@ -358,7 +372,8 @@ public boolean rename(Path src, Path dst) throws IOException { @Override 
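// [Reviewer annotation] Same pattern as rename()/glob() above: swapClassLoader() is
// listed first in the try-with-resources, so (resources closing in reverse order) the
// context classloader is restored only after the wait recorder has closed. Per the new
// mustSwapContextClassLoader field, the swap is a no-op when the wrapped FileSystem
// already implements ContextClassLoaderAware.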
public boolean delete(Path f, boolean recursive) throws IOException { - try (WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { return underlyingFs.delete(toHadoopPath(f), recursive); } catch(FSError e) { throw propagateFSError(e); @@ -369,7 +384,8 @@ public boolean delete(Path f, boolean recursive) throws IOException { public boolean exists(Path f) throws IOException { final org.apache.hadoop.fs.Path p = toHadoopPath(f); boolean exists = false; - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { exists = underlyingFs.exists(p); if (!exists && isNAS) { forceRefresh(f); @@ -385,7 +401,8 @@ public boolean exists(Path f) throws IOException { public boolean isDirectory(Path f) throws IOException { final org.apache.hadoop.fs.Path p = toHadoopPath(f); boolean exists = false; - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { exists = underlyingFs.isDirectory(p); if (!exists && isNAS) { forceRefresh(f); @@ -401,7 +418,8 @@ public boolean isDirectory(Path f) throws IOException { public boolean isFile(Path f) throws IOException { final org.apache.hadoop.fs.Path p = toHadoopPath(f); boolean exists = false; - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, f)) { exists = underlyingFs.isFile(p); if (!exists && isNAS) { forceRefresh(f); @@ -430,7 +448,8 @@ public Iterable getFileBlockLocations(FileAttributes file, lo } final FileStatus status = ((HadoopFileStatusWrapper) file).getFileStatus(); Path p = status == null ? 
null : file.getPath(); - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { return toFileBlockLocations(() -> underlyingFs.getFileBlockLocations(status, start, len)); } catch(FSError e) { throw propagateFSError(e); @@ -439,7 +458,8 @@ public Iterable<FileBlockLocation> getFileBlockLocations(FileAttributes file, lo @Override public Iterable<FileBlockLocation> getFileBlockLocations(Path p, long start, long len) throws IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, p)) { return toFileBlockLocations(() -> underlyingFs.getFileBlockLocations(toHadoopPath(p), start, len)); } catch(FSError e) { throw propagateFSError(e); @@ -448,7 +468,8 @@ public Iterable<FileBlockLocation> getFileBlockLocations(Path p, long start, lon @Override public void access(final Path path, final Set<AccessMode> mode) throws AccessControlException, FileNotFoundException, IOException { - try (WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, path)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder recorder = OperatorStats.getMetadataWaitRecorder(operatorStats, path)) { checkAccessAllowed(toHadoopPath(path), toFsAction(mode)); } catch(FSError e) { throw propagateFSError(e); @@ -591,7 +612,7 @@ public void close() throws IOException { } } - private static final class FetchOnDemandDirectoryStream implements DirectoryStream<FileAttributes> { + private final class FetchOnDemandDirectoryStream implements DirectoryStream<FileAttributes> { private final Iterator<FileAttributes> convertedIterator; public FetchOnDemandDirectoryStream(RemoteIterator<LocatedFileStatus> statuses, Path p, OperatorStats stats) { @@ -599,7 +620,8 @@ public FetchOnDemandDirectoryStream(RemoteIterator<LocatedFileStatus> statuses, convertedIterator = new Iterator<FileAttributes>() { @Override public boolean hasNext() { - try(WaitRecorder metaRecorder = OperatorStats.getMetadataWaitRecorder(stats, p)) { + try (Closeable closeable = swapClassLoader(); + WaitRecorder metaRecorder = OperatorStats.getMetadataWaitRecorder(stats, p)) { return statuses.hasNext(); } catch (IOException e) { logger.error("IO exception in FetchOnDemandDirectoryStream while performing hasNext on RemoteIterator", e); @@ -609,7 +631,8 @@ public boolean hasNext() { @Override public FileAttributes next() { - try (WaitRecorder metaRecorder = OperatorStats.getMetadataWaitRecorder(stats, p)){ + try (Closeable closeable = swapClassLoader(); + WaitRecorder metaRecorder = OperatorStats.getMetadataWaitRecorder(stats, p)){ return new HadoopFileStatusWrapper(statuses.next()); } catch (IOException e) { logger.error("IO exception in FetchOnDemandDirectoryStream in fetching next fileAttribute ", e); @@ -750,4 +773,15 @@ private static Iterable<FileBlockLocation> toFileBlockLocations(IOCallable<Bloc + + private Closeable swapClassLoader() { + if (mustSwapContextClassLoader) { + return ContextClassLoaderSwapper.swapClassLoader(HadoopFileSystem.class); + } + return () -> {}; + } } diff --git a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java index 6fd7f005a3..aff9a5f92d 100644 --- a/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java +++ b/plugins/hive/src/main/hive2/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java @@ -77,6 +77,7 @@ import org.apache.iceberg.BaseTable; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import
com.dremio.common.exceptions.UserException;
 import com.dremio.common.util.Closeable;
@@ -106,6 +107,7 @@
 import com.dremio.exec.store.hive.HiveClient;
 import com.dremio.exec.store.hive.HivePf4jPlugin;
 import com.dremio.exec.store.hive.HiveSchemaConverter;
+import com.dremio.exec.store.hive.HiveSchemaTypeOptions;
 import com.dremio.exec.store.hive.HiveStoragePlugin;
 import com.dremio.exec.store.hive.HiveUtilities;
 import com.dremio.exec.store.hive.deltalake.DeltaHiveInputFormat;
@@ -191,8 +193,11 @@ public static void injectOrcIncludeFileIdInSplitsConf(final HiveStorageCapabilit
     }
   }

-  public static SchemaComponents resolveSchemaComponents(final List<String> pathComponents, boolean throwIfInvalid) {
+  public static boolean isValidPathSchema(final List<String> pathComponents) {
+    return pathComponents != null && (pathComponents.size() == 2 || pathComponents.size() == 3);
+  }
+
+  public static SchemaComponents resolveSchemaComponents(final List<String> pathComponents) {
     // extract database and table names from dataset path
     switch (pathComponents.size()) {
       case 2:
@@ -200,10 +205,6 @@ public static SchemaComponents resolveSchemaComponents(final List pathCo
       case 3:
         return new SchemaComponents(pathComponents.get(1), pathComponents.get(2));
       default:
-        if (!throwIfInvalid) {
-          return null;
-        }
-
         // invalid. Guarded against at both entry points.
         throw UserException.connectionError()
           .message("Dataset path '%s' is invalid.", pathComponents)
@@ -258,7 +259,7 @@ public static String getIcebergTableLocation(HiveClient client, HiveMetadataUti
     if (isDeltaTable(table, options)) {
       return new DeltaHiveInputFormat();
     }
-    final Class<? extends InputFormat> inputFormatClazz = getInputFormatClass(job, table, partition, options);
+    final Class<? extends InputFormat> inputFormatClazz = getInputFormatClass(job, table, partition);
     job.setInputFormat(inputFormatClazz);
     return job.getInputFormat();
   }
@@ -281,14 +282,14 @@ public static boolean isIcebergTable(Table table) {
   }

   private static boolean isDeltaTable(Table table, OptionManager options) {
-    return DeltaHiveInputFormat.isDeltaTable(table.getParameters().get(META_TABLE_STORAGE), options);
+    return DeltaHiveInputFormat.isDeltaTable(table, options);
   }

-  public static BatchSchema getBatchSchema(Table table, final HiveConf hiveConf, boolean includeComplexParquetCols, boolean isMapTypeEnabled, HiveStoragePlugin plugin) {
+  public static BatchSchema getBatchSchema(Table table, final HiveConf hiveConf, HiveSchemaTypeOptions typeOptions, HiveStoragePlugin plugin) {
     InputFormat<?, ?> format = getInputFormat(table, hiveConf, plugin.getSabotContext().getOptionManager());
     final List<Field> fields = new ArrayList<>();
     final List<String> partitionColumns = new ArrayList<>();
-    HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, includeComplexParquetCols, isMapTypeEnabled);
+    HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, typeOptions);
     return BatchSchema.newBuilder().addFields(fields).build();
   }
@@ -336,18 +337,17 @@ private static void populateFieldsAndPartitionColumns(
       final List<Field> fields,
       final List<String> partitionColumns,
       InputFormat<?, ?> format,
-      final boolean includeComplexParquetCols,
-      final boolean isMapTypeEnabled) {
+      final HiveSchemaTypeOptions typeOptions) {
     for (FieldSchema hiveField : table.getSd().getCols()) {
       final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType());
-      Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, includeComplexParquetCols, isMapTypeEnabled);
+      Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, typeOptions);
       if (f != null) {
         fields.add(f);
       }
     }
     for (FieldSchema field : table.getPartitionKeys()) {
       Field f = HiveSchemaConverter.getArrowFieldFromHiveType(field.getName(),
-        TypeInfoUtils.getTypeInfoFromTypeString(field.getType()), format, includeComplexParquetCols, isMapTypeEnabled);
+        TypeInfoUtils.getTypeInfoFromTypeString(field.getType()), format, typeOptions);
       if (f != null) {
         fields.add(f);
         partitionColumns.add(field.getName());
@@ -355,11 +355,11 @@ private static void populateFieldsAndPartitionColumns(
     }
   }

-  private static List<ColumnInfo> buildColumnInfo(final Table table, final InputFormat<?, ?> format, final boolean includeComplexParquetCols, final boolean isMapTypeEnabled) {
+  private static List<ColumnInfo> buildColumnInfo(final Table table, final InputFormat<?, ?> format, final HiveSchemaTypeOptions typeOptions) {
     final List<ColumnInfo> columnInfos = new ArrayList<>();
     for (FieldSchema hiveField : table.getSd().getCols()) {
       final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType());
-      Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, includeComplexParquetCols, isMapTypeEnabled);
+      Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, typeOptions);
       if (f != null) {
         columnInfos.add(getColumnInfo(typeInfo));
       }
@@ -483,6 +483,9 @@ private static ColumnInfo getColumnInfo(final TypeInfo typeInfo) {
           .setScale(0)
           .setIsPrimitive(true)
           .build();
+
+      default:
+        break;
     }
   }
@@ -499,12 +502,12 @@ public static TableMetadata getTableMetadata(final HiveClient client,
                                                final int maxMetadataLeafColumns,
                                                final int maxNestedLevels,
                                                final TimeTravelOption timeTravelOption,
-                                               final boolean includeComplexParquetCols,
+                                               final HiveSchemaTypeOptions typeOptions,
                                                final HiveConf hiveConf,
                                                final HiveStoragePlugin plugin) throws ConnectorException {
     try {
-      final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents(), true);
+      final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents());

       // if the dataset path is not canonized we need to get it from the source
       final Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), ignoreAuthzErrors);
@@ -515,13 +518,12 @@ public static TableMetadata getTableMetadata(final HiveClient client,
       final Properties tableProperties = MetaStoreUtils.getSchema(table.getSd(), table.getSd(), table.getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys());
       TableMetadata tableMetadata;
       if (isIcebergTable(table)) {
-        tableMetadata = getTableMetadataFromIceberg(hiveConf, datasetPath, table, tableProperties, timeTravelOption, plugin);
+        tableMetadata = getTableMetadataFromIceberg(hiveConf, datasetPath, table, tableProperties, timeTravelOption, typeOptions, plugin);
       } else if (isDeltaTable(table, plugin.getSabotContext().getOptionManager())) {
-        tableMetadata = getTableMetadataFromDelta(table, tableProperties, maxMetadataLeafColumns, plugin);
+        tableMetadata = getTableMetadataFromDelta(table, tableProperties, maxMetadataLeafColumns, typeOptions, plugin);
       } else {
-        final boolean isMapTypeEnabled = plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE);
         tableMetadata = getTableMetadataFromHMS(table, tableProperties, datasetPath,
-          maxMetadataLeafColumns, maxNestedLevels, includeComplexParquetCols, hiveConf, isMapTypeEnabled, plugin);
+          maxMetadataLeafColumns, maxNestedLevels, typeOptions, hiveConf, plugin);
       }
       HiveMetadataUtils.injectOrcIncludeFileIdInSplitsConf(tableMetadata.getTableStorageCapabilities(), tableProperties);
       return tableMetadata;
@@ -532,14 +534,18 @@ public static TableMetadata getTableMetadata(final HiveClient client,
     }
   }

-  private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf, final EntityPath datasetPath, final Table table,
-                                                           final Properties tableProperties, final TimeTravelOption timeTravelOption,
+  private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf,
+                                                           final EntityPath datasetPath,
+                                                           final Table table,
+                                                           final Properties tableProperties,
+                                                           final TimeTravelOption timeTravelOption,
+                                                           final HiveSchemaTypeOptions typeOptions,
                                                            final HiveStoragePlugin plugin) throws IOException {
     JobConf jobConf = new JobConf(hiveConf);
     String metadataLocation = tableProperties.getProperty(METADATA_LOCATION, "");
     com.dremio.io.file.FileSystem fs = plugin.createFS(metadataLocation, SystemUser.SYSTEM_USERNAME, null);
-    DremioFileIO fileIO = new DremioFileIO(fs, (Iterable<Map.Entry<String, String>>) jobConf, plugin);
+    FileIO fileIO = plugin.createIcebergFileIO(fs, null, null, null, null);
     IcebergHiveTableOperations hiveTableOperations = new IcebergHiveTableOperations(fileIO, metadataLocation);
     BaseTable icebergTable = new BaseTable(hiveTableOperations, new Path(metadataLocation).getName());
     icebergTable.refresh();
@@ -554,8 +560,7 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf
       TimeTravelProcessors.getTableSchemaProvider(travelRequest);
       snapshot = tableSnapshotProvider.apply(icebergTable);
       schema = tableSchemaProvider.apply(icebergTable, snapshot);
-    }
-    else {
+    } else {
       snapshot = icebergTable.currentSnapshot();
       schema = icebergTable.schema();
     }
@@ -584,7 +589,7 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf
     }

     SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.getTableName())
-      .setMapTypeEnabled(plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).build();
+      .setMapTypeEnabled(typeOptions.isMapTypeEnabled()).build();
     BatchSchema batchSchema = schemaConverter.fromIceberg(schema);
     Map<Integer, PartitionSpec> specsMap = icebergTable.specs();
     specsMap = IcebergUtils.getPartitionSpecMapBySchema(specsMap, schema);
@@ -604,7 +609,11 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf
         .setDeleteStats(new ScanStats()
           .setScanFactor(ScanCostFactor.PARQUET.getFactor())
           .setType(ScanStatsType.EXACT_ROW_COUNT)
-          .setRecordCount(numPositionDeletes));
+          .setRecordCount(numPositionDeletes))
+        .setEqualityDeleteStats(new ScanStats()
+          .setScanFactor(ScanCostFactor.PARQUET.getFactor())
+          .setType(ScanStatsType.EXACT_ROW_COUNT)
+          .setRecordCount(numEqualityDeletes));

     return TableMetadata.newBuilder()
       .table(table)
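Editorially, the recurring change in this file replaces the separate includeComplexParquetCols/isMapTypeEnabled booleans with a single HiveSchemaTypeOptions value. A minimal sketch of the resulting call pattern, assuming plugin, table and hiveConf are in scope (the HiveSchemaTypeOptions(OptionManager) constructor is introduced later in this patch):

    // Illustrative only, not part of the patch: build the consolidated type options
    // once from the option manager, then hand them to the schema-facing APIs above.
    HiveSchemaTypeOptions typeOptions =
        new HiveSchemaTypeOptions(plugin.getSabotContext().getOptionManager());
    BatchSchema schema = HiveMetadataUtils.getBatchSchema(table, hiveConf, typeOptions, plugin);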
@@ -622,14 +631,14 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf

   private static TableMetadata getTableMetadataFromDelta(final Table table,
                                                          final Properties tableProperties,
                                                          final int maxMetadataLeafColumns,
+                                                         final HiveSchemaTypeOptions typeOptions,
                                                          final HiveStoragePlugin plugin) throws IOException {
-    final String tableLocation = table.getSd().getLocation();
+    final String tableLocation = DeltaHiveInputFormat.getLocation(table, plugin.getSabotContext().getOptionManager());
     final com.dremio.io.file.FileSystem fs = plugin.createFS(tableLocation, SystemUser.SYSTEM_USERNAME, null);
     final DeltaLakeTable deltaTable = new DeltaLakeTable(plugin.getSabotContext(), fs, tableLocation);
     final DeltaLogSnapshot snapshot = deltaTable.getConsolidatedSnapshot();
-    final boolean isMapTypeEnabled = plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE);
-    final BatchSchema batchSchema = DeltaLakeSchemaConverter.withMapEnabled(isMapTypeEnabled).fromSchemaString(snapshot.getSchema());
+    final BatchSchema batchSchema = DeltaLakeSchemaConverter.withMapEnabled(typeOptions.isMapTypeEnabled()).fromSchemaString(snapshot.getSchema());
     HiveMetadataUtils.checkLeafFieldCounter(batchSchema.getFields().size(), maxMetadataLeafColumns, "");

     return TableMetadata.newBuilder()
@@ -649,25 +658,24 @@ private static TableMetadata getTableMetadataFromHMS(final Table table,
                                                        final EntityPath datasetPath,
                                                        final int maxMetadataLeafColumns,
                                                        final int maxNestedLevels,
-                                                       final boolean includeComplexParquetCols,
+                                                       final HiveSchemaTypeOptions typeOptions,
                                                        final HiveConf hiveConf,
-                                                       final boolean isMapTypeEnabled,
                                                        final HiveStoragePlugin plugin) throws ConnectorException {
-    final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents(), true);
+    final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents());

     final InputFormat<?, ?> format = getInputFormat(table, hiveConf, plugin.getSabotContext().getOptionManager());

     final List<Field> fields = new ArrayList<>();
     final List<String> partitionColumns = new ArrayList<>();

-    HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, includeComplexParquetCols, isMapTypeEnabled);
+    HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, typeOptions);
     HiveMetadataUtils.checkLeafFieldCounter(fields.size(), maxMetadataLeafColumns, schemaComponents.getTableName());
-    HiveSchemaConverter.checkFieldNestedLevels(table, maxNestedLevels, isMapTypeEnabled);
+    HiveSchemaConverter.checkFieldNestedLevels(table, maxNestedLevels, typeOptions.isMapTypeEnabled());
     final BatchSchema batchSchema = BatchSchema.newBuilder().addFields(fields).build();

-    final List<ColumnInfo> columnInfos = buildColumnInfo(table, format, includeComplexParquetCols, isMapTypeEnabled);
+    final List<ColumnInfo> columnInfos = buildColumnInfo(table, format, typeOptions);

     return TableMetadata.newBuilder()
       .table(table)
@@ -880,9 +888,9 @@ public static List<DatasetSplit> getDatasetSplitsForIcebergTables(TableMetadata
    */
   private static class InputSplitSizeRunnable extends TimedRunnable<Long> {

-    final InputSplit split;
-    final Configuration conf;
-    final String tableName;
+    private final InputSplit split;
+    private final Configuration conf;
+    private final String tableName;

     public InputSplitSizeRunnable(final Configuration conf, final String tableName, final InputSplit split) {
       this.conf = conf;
@@ -1086,7 +1094,7 @@ public static PartitionMetadata getPartitionMetadata(final boolean storageImpers
     boolean trimStats = trimStats(splitType);
     HiveDatasetStats metastoreStats = null;
     InputFormat<?, ?> format = getInputFormat(table, job, partition, optionManager);
-    Class<? extends InputFormat> inputFormatClazz = getInputFormatClass(job, table, partition, optionManager);
+    Class<? extends InputFormat> inputFormatClazz = getInputFormatClass(job, table, partition);

     metadataAccumulator.setTableLocation(table.getSd().getLocation());

     if (null == partition) {
@@ -1443,15 +1451,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String
       case DOUBLE:
         try {
           return PartitionValue.of(name, Double.parseDouble(value));
-        }
-        catch (NumberFormatException ex) {
+        } catch (NumberFormatException ex) {
           return PartitionValue.of(name);
         }
       case FLOAT:
         try {
           return PartitionValue.of(name, Float.parseFloat(value));
-        }
-        catch (NumberFormatException ex) {
+        } catch (NumberFormatException ex) {
           return PartitionValue.of(name);
         }
       case BYTE:
@@ -1459,15 +1465,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String
       case INT:
         try {
           return PartitionValue.of(name, Integer.parseInt(value));
-        }
-        catch (NumberFormatException ex) {
+        } catch (NumberFormatException ex) {
           return PartitionValue.of(name);
         }
       case LONG:
         try {
           return PartitionValue.of(name, Long.parseLong(value));
-        }
-        catch (NumberFormatException ex) {
+        } catch (NumberFormatException ex) {
           return PartitionValue.of(name);
         }
       case STRING:
@@ -1504,10 +1508,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String
         final BigInteger unscaled = original.movePointRight(decimalTypeInfo.scale()).unscaledValue();
         return PartitionValue.of(name, ByteBuffer.wrap(DecimalTools.signExtend16(unscaled.toByteArray())));
       default:
-        HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
+        break;
       }
+      HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
+      break;
     default:
       HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
+      break;
     }

     return null; // unreachable
@@ -1548,7 +1555,7 @@ public static void addConfToJob(final JobConf job, final Properties properties)
     }
   }

-  public static Class<? extends InputFormat> getInputFormatClass(final JobConf job, final Table table, final Partition partition, OptionManager options) {
+  public static Class<? extends InputFormat> getInputFormatClass(final JobConf job, final Table table, final Partition partition) {
     try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) {
       if (partition != null) {
         if (partition.getSd().getInputFormat() != null) {
@@ -1566,9 +1573,6 @@ public static Class<? extends InputFormat> getInputFormatClass(final JobConf job
       }

       if (table.getParameters().get(META_TABLE_STORAGE) != null) {
-        if (isDeltaTable(table, options)) {
-          return DeltaHiveInputFormat.class;
-        }
         final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, table.getParameters().get(META_TABLE_STORAGE));
         return storageHandler.getInputFormatClass();
       }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveClientWithAuthz.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveClientWithAuthz.java
index 6dd82a7b0c..789f62b781 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveClientWithAuthz.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveClientWithAuthz.java
@@ -68,7 +68,9 @@ void connect() throws MetaException {
       final HiveConf hiveConfCopy = new HiveConf(hiveConf);
       hiveConfCopy.set("user.name", userName);
       hiveConfCopy.set("proxy.user.name", userName);
-      client = Hive.get(hiveConfCopy).getMSC();
+      // skip registering Hive functions as this could be expensive, especially on Glue, and we don't have any
+      // need for them
+      client = Hive.getWithFastCheck(hiveConfCopy, false).getMSC();
       return null;
     },
     ugiForRpc,
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveFileSystemConfigurationAdapter.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveFileSystemConfigurationAdapter.java
new file mode 100644
index 0000000000..7452fd870b
--- /dev/null
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveFileSystemConfigurationAdapter.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.hive;
+
+import org.apache.hadoop.conf.Configuration;
+
+import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter;
+
+/**
+ * A FileSystemConfigurationAdapter that exposes Hadoop configuration using the Hive plugin's version of Hadoop.
+ */
+public class HiveFileSystemConfigurationAdapter implements FileSystemConfigurationAdapter {
+
+  private final Configuration conf;
+
+  public HiveFileSystemConfigurationAdapter(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public String get(String name) {
+    return conf.get(name);
+  }
+
+  @Override
+  public String get(String name, String defaultValue) {
+    return conf.get(name, defaultValue);
+  }
+}
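The new adapter is a thin bridge so that code written against Dremio's FileSystemConfigurationAdapter can read Hadoop configuration loaded by the Hive plugin's own class loader. A usage sketch (key and values are illustrative):

    // Illustrative only: wrap a Hadoop Configuration and read values through the adapter.
    Configuration conf = new Configuration(false);
    conf.set("fs.s3a.connection.maximum", "100");
    FileSystemConfigurationAdapter adapter = new HiveFileSystemConfigurationAdapter(conf);
    adapter.get("fs.s3a.connection.maximum");       // returns "100"
    adapter.get("fs.s3a.missing.key", "fallback");  // returns the default, "fallback"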
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveRulesFactory.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveRulesFactory.java
index 4aeffd9ea1..11e2b593b9 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveRulesFactory.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveRulesFactory.java
@@ -63,6 +63,7 @@
 import com.dremio.exec.planner.logical.RelOptHelper;
 import com.dremio.exec.planner.logical.TableModifyRel;
 import com.dremio.exec.planner.logical.TableOptimizeRel;
+import com.dremio.exec.planner.logical.VacuumTableRel;
 import com.dremio.exec.planner.logical.partition.PruneFilterCondition;
 import com.dremio.exec.planner.logical.partition.PruneScanRuleBase;
 import com.dremio.exec.planner.logical.partition.PruneScanRuleBase.PruneScanRuleFilterOnProject;
@@ -74,6 +75,7 @@
 import com.dremio.exec.planner.physical.ScanPrelBase;
 import com.dremio.exec.planner.physical.TableModifyPruleBase;
 import com.dremio.exec.planner.physical.TableOptimizePruleBase;
+import com.dremio.exec.planner.physical.VacuumTablePruleBase;
 import com.dremio.exec.record.BatchSchema;
 import com.dremio.exec.store.RelOptNamespaceTable;
 import com.dremio.exec.store.ScanFilter;
@@ -190,6 +192,7 @@ public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadata
       return super.computeSelfCost(planner, mq);
     }

+    @Override
     public ScanFilter getFilter() {
       return filter;
     }
@@ -346,6 +349,7 @@ public boolean hasFilter() {
       return filter != null;
     }

+    @Override
     public ScanFilter getFilter() {
       return filter;
     }
@@ -587,6 +591,29 @@ public boolean matches(RelOptRuleCall call) {
     }
   }

+  public static class HiveVacuumTablePrule extends VacuumTablePruleBase {
+
+    public HiveVacuumTablePrule(StoragePluginId pluginId, OptimizerRulesContext context) {
+      super(RelOptHelper.some(VacuumTableRel.class, Rel.LOGICAL, RelOptHelper.any(RelNode.class)),
+        String.format("%sHiveVacuumTablePrule.%s.%s",
+          pluginId.getType().value(), SLUGIFY.slugify(pluginId.getName()), UUID.randomUUID()), context);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final VacuumTableRel vacuumRel = call.rel(0);
+      call.transformTo(getPhysicalPlan(
+        vacuumRel,
+        new HiveIcebergScanTableMetadata(((DremioPrepareTable) vacuumRel.getTable()).getTable().getDataset(),
+          (SupportsIcebergRootPointer) vacuumRel.getCreateTableEntry().getPlugin())));
+    }
+
+    @Override
+    public boolean matches(RelOptRuleCall call) {
+      return call.<VacuumTableRel>rel(0).getCreateTableEntry().getPlugin() instanceof BaseHiveStoragePlugin;
+    }
+  }
+
   @Override
   public Set<RelOptRule> getRules(OptimizerRulesContext optimizerContext, PlannerPhase phase, SourceType pluginType) {
     return ImmutableSet.of();
   }
@@ -623,7 +650,8 @@ public Set<RelOptRule> getRules(OptimizerRulesContext optimizerContext, PlannerP
         new HiveIcebergScanPrule(pluginId, optimizerContext),
         new HiveIcebergTableFileFunctionPrule(pluginId, optimizerContext),
         new HiveTableModifyPrule(pluginId, optimizerContext),
-        new HiveTableOptimzePrule(pluginId, optimizerContext)
+        new HiveTableOptimzePrule(pluginId, optimizerContext),
+        new HiveVacuumTablePrule(pluginId, optimizerContext)
       );

     default:
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaConverter.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaConverter.java
index c46ffd497a..67d30a542a 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaConverter.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaConverter.java
@@ -44,6 +44,7 @@
 import org.apache.arrow.vector.types.pojo.FieldType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -56,37 +57,54 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.TextInputFormat;

 import com.dremio.exec.catalog.ColumnNestedTooDeepException;
 import com.google.common.collect.Sets;

 public class HiveSchemaConverter {
-  private static Set<Category> ORC_SUPPORTED_TYPES = Sets.newHashSet(LIST, STRUCT, PRIMITIVE);
-  private static Set<Category> PARQUET_SUPPORTED_TYPES = Sets.newHashSet(LIST, STRUCT, PRIMITIVE, MAP);
-  private static boolean isTypeNotSupported(InputFormat format, Category category, boolean includeParquetComplexTypes, boolean isMapTypeEnabled) {
+  private static final Set<Category> HIVE_SUPPORTED_TYPES = Sets.newHashSet(LIST, STRUCT, PRIMITIVE);
+  private static final Set<Category> PARQUET_SUPPORTED_TYPES = Sets.newHashSet(LIST, STRUCT, PRIMITIVE, MAP);
+
+  private static boolean isTypeNotSupported(InputFormat format, Category category, HiveSchemaTypeOptions typeOptions) {
     // No restrictions on primitive types
     if (category.equals(PRIMITIVE)) {
       return false;
     }

     if (category.equals(MAP)) {
-      return !isMapTypeEnabled;
+      return !typeOptions.isMapTypeEnabled();
     }

-    // All complex types supported in Orc
-    if (format instanceof OrcInputFormat && ORC_SUPPORTED_TYPES.contains(category)) {
+    // All complex types supported in Orc, RCFile, Text, Sequence
+    if (isSupportedFormatForComplexTypes(format, typeOptions) && HIVE_SUPPORTED_TYPES.contains(category)) {
       return false;
     }

     // Support only list and struct in Parquet along with primitive types.
     // MapRedParquetInputFormat, VectorizedParquetInputformat
-    if (includeParquetComplexTypes && MapredParquetInputFormat.class.isAssignableFrom(format.getClass()) && PARQUET_SUPPORTED_TYPES.contains(category)) {
+    if (typeOptions.isParquetComplexTypesEnabled() && MapredParquetInputFormat.class.isAssignableFrom(format.getClass()) && PARQUET_SUPPORTED_TYPES.contains(category)) {
       return false;
     }
     return true;
   }

+  private static boolean isSupportedFormatForComplexTypes(InputFormat format, HiveSchemaTypeOptions typeOptions) {
+    if (format instanceof OrcInputFormat) {
+      return true;
+    }
+
+    if (typeOptions.isNativeComplexTypesEnabled()) {
+      return format instanceof RCFileInputFormat
+        || format instanceof TextInputFormat
+        || format instanceof SequenceFileInputFormat;
+    }
+
+    return false;
+  }
+
   private static boolean supportsDroppingSubFields(InputFormat format) {
     if (MapredParquetInputFormat.class.isAssignableFrom(format.getClass())) {
       return true;
@@ -94,8 +112,8 @@ private static boolean supportsDroppingSubFields(InputFormat format) {
     return false;
   }

-  public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, InputFormat format, boolean includeParquetComplexTypes, boolean isMapTypeEnabled) {
-    if (isTypeNotSupported(format, typeInfo.getCategory(), includeParquetComplexTypes, isMapTypeEnabled)) {
+  public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, InputFormat format, HiveSchemaTypeOptions typeOptions) {
+    if (isTypeNotSupported(format, typeInfo.getCategory(), typeOptions)) {
       return null;
     }
@@ -105,7 +123,7 @@ public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, In
       case LIST: {
         ListTypeInfo lti = (ListTypeInfo) typeInfo;
         TypeInfo elementTypeInfo = lti.getListElementTypeInfo();
-        Field inner = HiveSchemaConverter.getArrowFieldFromHiveType("$data$", elementTypeInfo, format, includeParquetComplexTypes, isMapTypeEnabled);
+        Field inner = HiveSchemaConverter.getArrowFieldFromHiveType("$data$", elementTypeInfo, format, typeOptions);
         if (inner == null) {
           return null;
         }
@@ -119,7 +137,7 @@ public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, In
         for (String fieldName : fieldNames) {
           TypeInfo fieldTypeInfo = sti.getStructFieldTypeInfo(fieldName);
           Field f = HiveSchemaConverter.getArrowFieldFromHiveType(fieldName,
-            fieldTypeInfo, format, includeParquetComplexTypes, isMapTypeEnabled);
+            fieldTypeInfo, format, typeOptions);
           if (f == null) {
             if (supportsDroppingSubFields(format)) {
               continue;
@@ -142,7 +160,7 @@ public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, In
         int[] typeIds = new int[objectTypeInfos.size()];
         for (int idx = 0; idx < objectTypeInfos.size(); ++idx) {
           TypeInfo fieldTypeInfo = objectTypeInfos.get(idx);
-          Field fieldToGetArrowType = HiveSchemaConverter.getArrowFieldFromHiveType("", fieldTypeInfo, format, includeParquetComplexTypes, isMapTypeEnabled);
+          Field fieldToGetArrowType = HiveSchemaConverter.getArrowFieldFromHiveType("", fieldTypeInfo, format, typeOptions);
           if (fieldToGetArrowType == null) {
             return null;
           }
@@ -152,7 +170,7 @@ public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, In
           }
           // In a union, Arrow expects the field name for each member to be the same as the "minor type" name.
           Types.MinorType minorType = Types.getMinorTypeForArrowType(arrowType);
-          Field f = HiveSchemaConverter.getArrowFieldFromHiveType(minorType.name().toLowerCase(), fieldTypeInfo, format, includeParquetComplexTypes, isMapTypeEnabled);
+          Field f = HiveSchemaConverter.getArrowFieldFromHiveType(minorType.name().toLowerCase(), fieldTypeInfo, format, typeOptions);
           if (f == null) {
             return null;
           }
@@ -169,7 +187,7 @@ public static Field getArrowFieldFromHiveType(String name, TypeInfo typeInfo, In
           return null;
         }
         TypeInfo valueTypeInfo = mti.getMapValueTypeInfo();
-        Field valueField = HiveSchemaConverter.getArrowFieldFromHiveType("value", valueTypeInfo, format, includeParquetComplexTypes, isMapTypeEnabled);
+        Field valueField = HiveSchemaConverter.getArrowFieldFromHiveType("value", valueTypeInfo, format, typeOptions);
         if (valueField == null) {
           return null;
         }
@@ -230,9 +248,11 @@ public static Field getArrowFieldFromHivePrimitiveType(String name, TypeInfo typ
       case UNKNOWN:
       case VOID:
       default:
-        // fall through.
+        break;
       }
+      break;
     default:
+      break;
     }

     return null;
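A small sketch of the observable behavior of the checks above, using only APIs changed in this patch: LIST/STRUCT/UNION columns now convert for RCFile, Text and SequenceFile tables when native complex types are enabled, while MAP columns are gated solely by the map-type option.

    // Illustrative only: with native complex types disabled, a list column converts
    // for ORC but is dropped (null) for a text table.
    HiveSchemaTypeOptions opts = new HiveSchemaTypeOptions(false, true, false);
    TypeInfo listType = TypeInfoUtils.getTypeInfoFromTypeString("array<int>");
    // ORC: complex types are always readable -> a Field is returned
    Field orcField = HiveSchemaConverter.getArrowFieldFromHiveType(
        "l", listType, new OrcInputFormat(), opts);
    // Text: isNativeComplexTypesEnabled() is false in 'opts' -> null, column is dropped
    Field textField = HiveSchemaConverter.getArrowFieldFromHiveType(
        "l", listType, new TextInputFormat(), opts);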
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaTypeOptions.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaTypeOptions.java
new file mode 100644
index 0000000000..f2c8c83751
--- /dev/null
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/HiveSchemaTypeOptions.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.hive;
+
+import com.dremio.exec.ExecConstants;
+import com.dremio.options.OptionManager;
+
+public class HiveSchemaTypeOptions {
+
+  private final boolean parquetComplexTypesEnabled;
+  private final boolean mapTypeEnabled;
+  private final boolean nativeComplexTypesEnabled;
+
+  public HiveSchemaTypeOptions(boolean parquetComplexTypesEnabled, boolean mapTypeEnabled, boolean nativeComplexTypesEnabled) {
+    this.parquetComplexTypesEnabled = parquetComplexTypesEnabled;
+    this.mapTypeEnabled = mapTypeEnabled;
+    this.nativeComplexTypesEnabled = nativeComplexTypesEnabled;
+  }
+
+  public HiveSchemaTypeOptions(final OptionManager optionManager) {
+    this(optionManager.getOption(ExecConstants.HIVE_COMPLEXTYPES_ENABLED),
+      optionManager.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE),
+      optionManager.getOption(ExecConstants.ENABLE_COMPLEX_HIVE_DATA_TYPE));
+  }
+
+  public boolean isParquetComplexTypesEnabled() {
+    return parquetComplexTypesEnabled;
+  }
+
+  public boolean isMapTypeEnabled() {
+    return mapTypeEnabled;
+  }
+
+  public boolean isNativeComplexTypesEnabled() {
+    return nativeComplexTypesEnabled;
+  }
+}
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/ORCScanFilter.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/ORCScanFilter.java
index 188d5bfd0a..319207df58 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/ORCScanFilter.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/ORCScanFilter.java
@@ -74,6 +74,7 @@ public SearchArgument getSarg() {
     return sarg;
   }

+  @Override
   @JsonIgnore
   public double getCostAdjustment() {
     return ScanRelBase.DEFAULT_COST_ADJUSTMENT;
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/deltalake/DeltaHiveInputFormat.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/deltalake/DeltaHiveInputFormat.java
index 290dfab99d..32e6eda610 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/deltalake/DeltaHiveInputFormat.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/deltalake/DeltaHiveInputFormat.java
@@ -16,8 +16,11 @@
 package com.dremio.exec.store.hive.deltalake;

+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
+
 import java.io.IOException;

+import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -31,9 +34,29 @@ public class DeltaHiveInputFormat extends FileInputFormat<NullWritable, ArrayWri

   static final String DELTA_STORAGE_HANDLER = "io.delta.hive.DeltaStorageHandler";
+  static final String SPARK_SQL_SOURCES_PROVIDER = "spark.sql.sources.provider";
+  static final String DELTA = "delta";
+  static final String PATH = "path";
+
+  public static boolean isDeltaTable(Table table, OptionManager options) {
+    return isDeltaByStorageHandler(table, options) || isDeltaBySparkFormat(table, options);
+  }
+
+  public static String getLocation(Table table, OptionManager options) {
+    if (isDeltaBySparkFormat(table, options)) {
+      return table.getSd().getSerdeInfo().getParameters().get(PATH);
+    }
+    return table.getSd().getLocation();
+  }
+
+  private static boolean isDeltaByStorageHandler(Table table, OptionManager options) {
+    return options.getOption(ExecConstants.ENABLE_DELTALAKE_HIVE_SUPPORT)
+      && DELTA_STORAGE_HANDLER.equalsIgnoreCase(table.getParameters().get(META_TABLE_STORAGE));
+  }

-  public static boolean isDeltaTable(String storageHandler, OptionManager options) {
-    return options.getOption(ExecConstants.ENABLE_DELTALAKE_HIVE_SUPPORT) && DELTA_STORAGE_HANDLER.equalsIgnoreCase(storageHandler);
+  private static boolean isDeltaBySparkFormat(Table table, OptionManager options) {
+    return options.getOption(ExecConstants.ENABLE_DELTALAKE_SPARK_SUPPORT)
+      && DELTA.equalsIgnoreCase(table.getParameters().get(SPARK_SQL_SOURCES_PROVIDER));
   }

   public DeltaHiveInputFormat() {
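Besides tables registered through the Delta storage handler, the detection above now also recognizes metastore entries written by Spark, and resolves their data location from the serde "path" parameter instead of the storage descriptor. The two entry points in isolation ('table' and 'options' assumed in scope):

    // true if either the Delta storage handler is set (with the Hive support option on)
    // or the table carries spark.sql.sources.provider=delta (with the Spark support option on)
    boolean isDelta = DeltaHiveInputFormat.isDeltaTable(table, options);
    // for Spark-created tables this reads the serde parameter "path";
    // otherwise it falls back to table.getSd().getLocation()
    String tableLocation = DeltaHiveInputFormat.getLocation(table, options);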
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/DremioFileSystem.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/DremioFileSystem.java
index 563537a8a5..5675f3fa71 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/DremioFileSystem.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/DremioFileSystem.java
@@ -55,6 +55,7 @@
 import com.dremio.common.util.Closeable;
 import com.dremio.common.util.concurrent.ContextClassLoaderSwapper;
 import com.dremio.exec.hadoop.HadoopFileSystem;
+import com.dremio.exec.store.hive.ContextClassLoaderAware;
 import com.dremio.io.AsyncByteReader;
 import com.dremio.io.FSInputStream;
 import com.dremio.io.FSOutputStream;
@@ -75,7 +76,7 @@
  * Replaces class loader before any action to delegate all class loading to default
  * class loaders. This is to avoid loading non-hive/hadoop related classes here.
  */
-public class DremioFileSystem extends FileSystem {
+public class DremioFileSystem extends FileSystem implements ContextClassLoaderAware {

   private static final String FS_S3A_BUCKET = "fs.s3a.bucket.";
   private static final String FS_S3A_AWS_CREDENTIALS_PROVIDER = "fs.s3a.aws.credentials.provider";
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/FileSplitParquetRecordReader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/FileSplitParquetRecordReader.java
index 75d49e54dd..48164c63ff 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/FileSplitParquetRecordReader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/FileSplitParquetRecordReader.java
@@ -241,7 +241,7 @@ public void createInputStreamProvider(InputStreamProvider lastInputStreamProvide
     boolean readColumnIndices = oContext.getOptions().getOption(READ_COLUMN_INDEXES);
     Preconditions.checkArgument(hiveStoragePlugin instanceof HadoopFsSupplierProviderPluginClassLoader, " plugin does not instance of HadoopFsSupplierProviderPluginClassLoader");
     final PrivilegedExceptionAction<FileSystem> getFsAction =
-      () -> hiveStoragePlugin.createFS(new DremioHadoopFileSystemWrapper(finalPath, jobConf, oContext.getStats(), cacheAndAsyncConf.isAsyncEnabled(), ((HadoopFsSupplierProviderPluginClassLoader) hiveStoragePlugin).getHadoopFsSupplierPluginClassLoader(finalPath.toString(), jobConf).get()),
+      () -> hiveStoragePlugin.createFS(new DremioHadoopFileSystemWrapper(finalPath, jobConf, oContext.getStats(), cacheAndAsyncConf.isAsyncEnabled(), ((HadoopFsSupplierProviderPluginClassLoader) hiveStoragePlugin).getHadoopFsSupplierPluginClassLoader(finalPath.toString(), jobConf, readerUgi.getUserName()).get()),
         oContext, cacheAndAsyncConf);

     fs = readerUgi.doAs(getFsAction);
@@ -285,7 +285,8 @@ public void createInputStreamProvider(InputStreamProvider lastInputStreamProvide
         dataset,
         fileLastModificationTime,
         false,
-        filters.hasPushdownFilters() && readColumnIndices);
+        filters.hasPushdownFilters() && readColumnIndices, filters,
+        readerFactory.newFilterCreator(oContext, ParquetReaderFactory.ManagedSchemaType.HIVE, managedSchema, oContext.getAllocator()));
     } catch (Exception e) {
       // Close input stream provider in case of errors
       if (inputStreamProviderOfFirstRowGroup != null) {
@@ -521,7 +522,7 @@ private void checkFieldTypesCompatibleWithHiveTable(OutputMutator readerOutputMu
         BatchSchemaField batchSchemaFieldInTable = BatchSchemaField.fromField(fieldInTable.get());
         BatchSchemaField batchSchemaFieldInFile = BatchSchemaField.fromField(fieldInFileSchema);
         throw UserException.unsupportedError().message("Field [%s] has incompatible types in file and table." +
-          " Type in fileschema: [%s], type in tableschema: [%s]", fieldInFileSchema.getName(), batchSchemaFieldInFile, batchSchemaFieldInTable).buildSilently();
+          " Type in fileschema: [%s], type in tableschema: [%s], file Path: %s", fieldInFileSchema.getName(), batchSchemaFieldInFile, batchSchemaFieldInTable, filePath).buildSilently();
       }
     }
   }
@@ -641,7 +642,8 @@ public void createInputStreamProvider(InputStreamProvider lastInputStreamProvide
         inputStreamProvider = inputStreamProviderFactory.create(fs, oContext, path, fileLength, splitXAttr.getLength(),
           ParquetScanProjectedColumns.fromSchemaPaths(columnsToRead),
           footer, lastInputStreamProvider, (f) -> splitXAttr.getRowGroupIndex(), readFullFile, dataset, splitXAttr.getLastModificationTime(), false,
-          filters.hasPushdownFilters() && readColumnIndices);
+          filters.hasPushdownFilters() && readColumnIndices, filters,
+          readerFactory.newFilterCreator(oContext, ParquetReaderFactory.ManagedSchemaType.HIVE, managedSchema, oContext.getAllocator()));
         return null;
       });
   }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheKeyPluginClassLoader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheKeyPluginClassLoader.java
index d36866b284..0a0d05733f 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheKeyPluginClassLoader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheKeyPluginClassLoader.java
@@ -27,6 +27,7 @@ public class HadoopFsCacheKeyPluginClassLoader {
   final String authority;
   final Configuration conf;
   final URI uri;
+  final String userName;

   /**
    * This key is used for the cache which loads FileSystem using the Plugin class loader to avoid class Cast exceptions
@@ -34,12 +35,14 @@ public class HadoopFsCacheKeyPluginClassLoader {
    * This key is used for cache which stores the fs at plugin level.
    * @param uri - uri for which fileSystem will be created or checked
    * @param conf - configuration for creating FileSystem
+   * @param userName - username tied to the FileSystem
    */
-  public HadoopFsCacheKeyPluginClassLoader(URI uri, Iterable<Map.Entry<String, String>> conf) {
+  public HadoopFsCacheKeyPluginClassLoader(URI uri, Iterable<Map.Entry<String, String>> conf, String userName) {
     this.conf = (Configuration) conf;
     this.uri = uri;
     scheme = uri.getScheme() == null ? "" : StringUtils.toLowerCase(uri.getScheme());
     authority = uri.getAuthority() == null ? "" : StringUtils.toLowerCase(uri.getAuthority());
+    this.userName = userName;
   }

   public URI getUri() {
@@ -51,9 +54,13 @@ public Configuration getConf() {
     return conf;
   }

+  public String getUserName() {
+    return userName;
+  }
+
   @Override
   public int hashCode() {
-    return Objects.hash(scheme, authority);
+    return Objects.hash(scheme, authority, userName);
   }

   @Override
@@ -67,11 +74,12 @@ public boolean equals(Object o) {
     HadoopFsCacheKeyPluginClassLoader key = (HadoopFsCacheKeyPluginClassLoader) o;
     return com.google.common.base.Objects.equal(scheme, key.scheme) &&
-      com.google.common.base.Objects.equal(authority, key.authority);
+      com.google.common.base.Objects.equal(authority, key.authority) &&
+      com.google.common.base.Objects.equal(userName, key.userName);
   }

   @Override
   public String toString() {
-    return "@" + scheme + "://" + authority;
+    return userName + "@" + scheme + "://" + authority;
   }
 }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheWrapperPluginClassLoader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheWrapperPluginClassLoader.java
index ae592c58ab..12108a8f17 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheWrapperPluginClassLoader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsCacheWrapperPluginClassLoader.java
@@ -15,7 +15,10 @@
  */
 package com.dremio.exec.store.hive.exec;

+import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME;
+
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
 import java.util.function.Supplier;
@@ -23,9 +26,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
@@ -54,23 +59,35 @@ public void onRemoval(RemovalNotification
       .build(new CacheLoader<HadoopFsCacheKeyPluginClassLoader, FileSystem>() {
         @Override
         public org.apache.hadoop.fs.FileSystem load(HadoopFsCacheKeyPluginClassLoader key) throws Exception {
-          try {
+          final UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
+          final UserGroupInformation ugi;
+          if (key.getUserName().equals(loginUser.getUserName()) || SYSTEM_USERNAME.equals(key.getUserName())) {
+            ugi = loginUser;
+          } else {
+            ugi = UserGroupInformation.createProxyUser(key.getUserName(), loginUser);
+          }
+
+          final PrivilegedExceptionAction<FileSystem> fsFactory = () -> {
             final String disableCacheName = String.format("fs.%s.impl.disable.cache", key.getUri().getScheme());
             // Clone the conf and set cache to disable, so that a new instance is created rather than returning an existing
             final Configuration cloneConf = new Configuration(key.getConf());
             cloneConf.set(disableCacheName, "true");
            return org.apache.hadoop.fs.FileSystem.get(key.getUri(), cloneConf);
-          } catch (IOException e) {
+          };
+
+          try {
+            return ugi.doAs(fsFactory);
+          } catch (IOException | InterruptedException e) {
             throw new RuntimeException(e);
           }
-        };
+        }
       });

   @Override
-  public Supplier<FileSystem> getHadoopFsSupplierPluginClassLoader(String path, Iterable<Map.Entry<String, String>> conf) {
+  public Supplier<FileSystem> getHadoopFsSupplierPluginClassLoader(String path, Iterable<Map.Entry<String, String>> conf, String userName) {
     return () -> {
       try {
-        return cache.get(new HadoopFsCacheKeyPluginClassLoader(new Path(path).toUri(), conf));
+        return cache.get(new HadoopFsCacheKeyPluginClassLoader(new Path(path).toUri(), conf, userName));
       } catch (ExecutionException e) {
         throw new RuntimeException(e);
       }
@@ -83,4 +100,9 @@ public void close() throws Exception {
     cache.invalidateAll();
     cache.cleanUp();
   }
+
+  @VisibleForTesting
+  protected LoadingCache<HadoopFsCacheKeyPluginClassLoader, FileSystem> getCache() {
+    return cache;
+  }
 }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsSupplierProviderPluginClassLoader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsSupplierProviderPluginClassLoader.java
index 9fc949dca5..7901dfa517 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsSupplierProviderPluginClassLoader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HadoopFsSupplierProviderPluginClassLoader.java
@@ -25,5 +25,5 @@
  * Interface for getting FileSystem objects created using plugin class loader
  */
 public interface HadoopFsSupplierProviderPluginClassLoader extends AutoCloseable {
-  Supplier<FileSystem> getHadoopFsSupplierPluginClassLoader(String path, Iterable<Map.Entry<String, String>> conf);
+  Supplier<FileSystem> getHadoopFsSupplierPluginClassLoader(String path, Iterable<Map.Entry<String, String>> conf, String userName);
 }
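With the user name now part of the cache key and of the supplier interface, each distinct user gets its own cached FileSystem, created under the matching Hadoop identity. The proxy-user pattern used by the cache loader above, reduced to its essentials (userName, uri and conf assumed in scope):

    // Minimal sketch of the same pattern: reuse the login user when possible,
    // otherwise impersonate via a proxy user, and create the FileSystem as that identity.
    UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
    UserGroupInformation ugi =
        userName.equals(loginUser.getUserName()) || SYSTEM_USERNAME.equals(userName)
            ? loginUser
            : UserGroupInformation.createProxyUser(userName, loginUser);
    FileSystem fs = ugi.doAs(
        (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(uri, conf));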
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveDirListingRecordReader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveDirListingRecordReader.java
index f6d1c67527..004992130a 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveDirListingRecordReader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveDirListingRecordReader.java
@@ -48,8 +48,7 @@ protected int iterateDirectory() throws IOException {
   protected void initDirIterator(boolean isFile) throws IOException {
     try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) {
       super.initDirIterator(false);
-    }
-    catch (FileNotFoundException e) {
+    } catch (FileNotFoundException e) {
       logger.debug("FNF error while listing directory " + operatingPath, e);
     }
   }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveSubScan.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveSubScan.java
index 890149903e..410d689537 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveSubScan.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/HiveSubScan.java
@@ -78,24 +78,29 @@ public HiveSubScan(
     this(props, null, fullSchema, tablePath, filter, pluginId, columns, partitionColumns, extendedProperty);
   }

+  @Override
   @JsonProperty("pluginId")
   public StoragePluginId getPluginId(){
     return pluginId;
   }

+  @Override
   @JsonProperty("filter")
   public ScanFilter getFilter(){
     return filter;
   }

+  @Override
   @JsonIgnore
   public List<SplitAndPartitionInfo> getSplits() {
     return splits;
   }

+  @Override
   @JsonProperty("extendedProperty")
   public byte[] getExtendedProperty() {
     return this.extendedProperty;
   }

+  @Override
   @JsonProperty("partitionColumns")
   public List<String> getPartitionColumns() {
     return partitionColumns;
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/dfs/HadoopFsWrapperWithCachePluginClassLoader.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/dfs/HadoopFsWrapperWithCachePluginClassLoader.java
index 1a8aef416e..7671b24c41 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/dfs/HadoopFsWrapperWithCachePluginClassLoader.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/dfs/HadoopFsWrapperWithCachePluginClassLoader.java
@@ -51,6 +51,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;

@@ -131,7 +132,7 @@ private static FileSystem getFs(URI name, Configuration conf) throws Exception {
     try {
       String pluginConfIdentifier = conf.get(HiveFsUtils.UNIQUE_CONF_IDENTIFIER_PROPERTY_NAME);
       return cache.get(pluginConfIdentifier)
-        .getHadoopFsSupplierPluginClassLoader(name.toString(), conf).get();
+        .getHadoopFsSupplierPluginClassLoader(name.toString(), conf, UserGroupInformation.getCurrentUser().getUserName()).get();
     } catch (Exception e) {
       logger.error("FileSystem can not be created", e);
       throw new Exception("FileSystem can not be created");
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveFullRefreshReadSignatureProvider.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveFullRefreshReadSignatureProvider.java
index 607de02f5a..ba332146f1 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveFullRefreshReadSignatureProvider.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveFullRefreshReadSignatureProvider.java
@@ -44,12 +44,12 @@ public HiveFullRefreshReadSignatureProvider(String tableRoot, long queryStartTim
     pathsInReadSignature = new HashSet<>();
   }

+  @Override
   public ByteString compute(Set<IcebergPartitionData> addedPartitions, Set<IcebergPartitionData> deletedPartitions) {
     // Add table root dir for non-partitioned table and if not already present
     if (partitionPaths.size() == 0 && (pathsInReadSignature.size() == 0 || !pathsInReadSignature.contains(tableRoot))) {
       pathsInReadSignature.add(tableRoot);
-    }
-    else {
+    } else {
       assertPartitionsCount(addedPartitions, deletedPartitions);
       pathsInReadSignature.addAll(partitionPaths);
     }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveIncrementalRefreshReadSignatureProvider.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveIncrementalRefreshReadSignatureProvider.java
index ae33f5598b..d54046722d 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveIncrementalRefreshReadSignatureProvider.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/metadatarefresh/HiveIncrementalRefreshReadSignatureProvider.java
@@ -41,6 +41,7 @@ public HiveIncrementalRefreshReadSignatureProvider(ByteString existingReadSignat
     this.existingReadSignature = decodeHiveReadSignatureByteString(existingReadSignature);
   }

+  @Override
   protected void assertPartitionsCount(Set<IcebergPartitionData> addedPartitions, Set<IcebergPartitionData> deletedPartitions) {
   }
 }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/planner/sql/handlers/refresh/HiveIncrementalRefreshDatasetPlanBuilder.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/planner/sql/handlers/refresh/HiveIncrementalRefreshDatasetPlanBuilder.java
index 29601331ab..3001d81ba2 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/planner/sql/handlers/refresh/HiveIncrementalRefreshDatasetPlanBuilder.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/exec/planner/sql/handlers/refresh/HiveIncrementalRefreshDatasetPlanBuilder.java
@@ -48,6 +48,8 @@
 import com.google.common.collect.ImmutableList;
 import com.google.protobuf.InvalidProtocolBufferException;

+import io.opentelemetry.instrumentation.annotations.WithSpan;
+
 public class HiveIncrementalRefreshDatasetPlanBuilder extends HiveFullRefreshDatasetPlanBuilder implements SupportPartialRefresh {

   private static final Logger logger = LoggerFactory.getLogger(HiveIncrementalRefreshDatasetPlanBuilder.class);
@@ -110,8 +112,7 @@ private static boolean validatePartitionSpecEvolution(BatchSchema schema, List
       try {
         DirListInputSplitProto.DirListInputSplit dirListInputSplit = DirListInputSplitProto.DirListInputSplit.parseFrom(datasetSplit.getSplitExtendedProperty().toByteArray());
         partitionPaths.add(dirListInputSplit.getOperatingPath());
-      }
-      catch (InvalidProtocolBufferException e) {
+      } catch (InvalidProtocolBufferException e) {
         throw UserException.parseError(e).buildSilently();
       }
     }
@@ -144,6 +144,7 @@ public List<String> generatePathsForPartialRefresh() {
     return partitionPaths;
   }

+  @WithSpan
   @Override
   public boolean updateDatasetConfigWithIcebergMetadataIfNecessary() {
     return repairAndSaveDatasetConfigIfNecessary();
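For context, @WithSpan (from io.opentelemetry.instrumentation.annotations) causes the annotated method to run inside an OpenTelemetry trace span when instrumentation is active, and is inert otherwise. A minimal illustration of the annotation as applied above (by default the span is named after the declaring class and method):

    @WithSpan
    public boolean updateDatasetConfigWithIcebergMetadataIfNecessary() {
      // the body executes inside a span when OpenTelemetry instrumentation is enabled
      return repairAndSaveDatasetConfigIfNecessary();
    }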
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/iceberg/NoOpHiveTableOperations.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/iceberg/NoOpHiveTableOperations.java
index f3ad28e762..967777df57 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/iceberg/NoOpHiveTableOperations.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/iceberg/NoOpHiveTableOperations.java
@@ -33,11 +33,13 @@ public NoOpHiveTableOperations(Configuration conf, HiveClient client, FileIO fil
     super(conf, client, fileIO, catalogName, database, table);
   }

+  @Override
   protected long acquireLock() throws UnknownHostException, TException, InterruptedException {
     // no-op
     return 0L;
   }

+  @Override
   protected void doUnlock(long lockId) throws TException {
     //no-op
   }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/HivePartitionChunkListing.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/HivePartitionChunkListing.java
index b5ce1b713a..ba63c0f427 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/HivePartitionChunkListing.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/HivePartitionChunkListing.java
@@ -34,6 +34,7 @@
 import com.dremio.connector.metadata.PartitionChunkListing;
 import com.dremio.exec.catalog.DatasetSaverImpl;
 import com.dremio.exec.store.hive.HivePf4jPlugin;
+import com.dremio.exec.store.hive.deltalake.DeltaHiveInputFormat;
 import com.dremio.hive.proto.HiveReaderProto;
 import com.dremio.options.OptionManager;
 import com.google.common.base.Preconditions;
@@ -143,6 +144,29 @@ private HivePartitionChunkListing(final boolean storageImpersonationEnabled, fin
         .build();
       metadataAccumulator.setRootPointer(rootPointer);
       return;
+    } else if (SplitType.DELTA_COMMIT_LOGS.equals(splitType)) {
+      if (logger.isDebugEnabled()) {
+        logger.debug("Table '{}', data read from delta root pointer.",
+          tableMetadata.getTable().getTableName());
+      }
+      // If it's a deltalake table only the partition xattr is needed.
+      currentPartitionMetadata = PartitionMetadata.newBuilder().partition(null)
+        .partitionXattr(HiveMetadataUtils.getPartitionXattr(tableMetadata.getTable(),
+          HiveMetadataUtils.fromProperties(tableMetadata.getTableProperties())))
+        .partitionValues(Collections.EMPTY_LIST)
+        .inputSplitBatchIterator(
+          InputSplitBatchIterator.newBuilder()
+            .partition(null)
+            .tableMetadata(tableMetadata)
+            .inputSplits(Collections.EMPTY_LIST)
+            .maxInputSplitsPerPartition(maxInputSplitsPerPartition)
+            .build())
+        .build();
+
+      metadataAccumulator.accumulateReaderType(DeltaHiveInputFormat.class);
+      metadataAccumulator.setTableLocation(DeltaHiveInputFormat.getLocation(tableMetadata.getTable(), optionManager));
+      metadataAccumulator.setNotAllFSBasedPartitions();
+      return;
     } else if (null == partitions) {
       if (logger.isDebugEnabled()) {
         logger.debug("Table '{}', 1 partition exists.",
@@ -179,7 +203,7 @@ private HivePartitionChunkListing(final boolean storageImpersonationEnabled, fin
       metadataAccumulator.setTableLocation(tableMetadata.getTable().getSd().getLocation());

       final JobConf job = new JobConf(hiveConf);
-      final Class<? extends InputFormat> inputFormatClazz = HiveMetadataUtils.getInputFormatClass(job, tableMetadata.getTable(), null, optionsManager);
+      final Class<? extends InputFormat> inputFormatClazz = HiveMetadataUtils.getInputFormatClass(job, tableMetadata.getTable(), null);
       metadataAccumulator.accumulateReaderType(inputFormatClazz);
       return;
     }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ManagedHiveSchema.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ManagedHiveSchema.java
index 8d31a3edcc..8c4ea23115 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ManagedHiveSchema.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ManagedHiveSchema.java
@@ -106,6 +106,7 @@ public Optional<ManagedSchemaField> getField(final String fieldName) {
     return Optional.ofNullable(fieldInfo.get(fieldName));
   }

+  @Override
   public Map<String, ManagedSchemaField> getAllFields() {
     return this.fieldInfo;
   }
diff --git a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ParquetInputFormat.java b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ParquetInputFormat.java
index d42b0a0a22..5cc2d5029b 100644
--- a/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ParquetInputFormat.java
+++ b/plugins/hive/src/main/java/com/dremio/exec/store/hive/metadata/ParquetInputFormat.java
@@ -98,6 +98,7 @@ private FileSplit makeParquetSplit(Path file, long start, long length,
     return new ParquetSplit(file, start, length, hosts, inMemoryHosts, fileSize, modificationTime);
   }

+  @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     StopWatch sw = new StopWatch().start();
@@ -240,11 +241,9 @@ private String[][] getSplitHostsAndCachedHosts(BlockLocation[] blkLocations,
       // Establish the bytes in this block
       if (index == startIndex) {
         bytesInThisBlock = bytesInFirstBlock;
-      }
-      else if (index == endIndex) {
+      } else if (index == endIndex) {
         bytesInThisBlock = bytesInLastBlock;
-      }
-      else {
+      } else {
         bytesInThisBlock = blkLocations[index].getLength();
       }
@@ -285,8 +284,7 @@ else if (index == endIndex) {
           racksMap.put(parentNode, parentNodeInfo);
         }
         parentNodeInfo.addLeaf(nodeInfo);
-      }
-      else {
+      } else {
         nodeInfo = hostsMap.get(node);
         parentNode = node.getParent();
         parentNodeInfo = racksMap.get(parentNode);
@@ -387,6 +385,7 @@ void addLeaf(ParquetInputFormat.NodeInfo nodeInfo) {
   private void sortInDescendingOrder(List<ParquetInputFormat.NodeInfo> mylist) {
     Collections.sort(mylist, new Comparator<ParquetInputFormat.NodeInfo>() {
+      @Override
       public int compare(ParquetInputFormat.NodeInfo obj1, ParquetInputFormat.NodeInfo obj2) {
         if (obj1 == null || obj2 == null) {
@@ -395,8 +394,7 @@ public int compare(ParquetInputFormat.NodeInfo obj1, ParquetInputFormat.NodeInfo
         if (obj1.getValue() == obj2.getValue()) {
           return 0;
-        }
-        else {
+        } else {
           return ((obj1.getValue() < obj2.getValue()) ? 1 : -1);
         }
       }
diff --git a/plugins/hive/src/test/hive2/com/dremio/exec/hive/TestHiveConfFactory.java b/plugins/hive/src/test/hive2/com/dremio/exec/hive/TestHiveConfFactory.java
index 5bcbe914b1..20d0f900a7 100644
--- a/plugins/hive/src/test/hive2/com/dremio/exec/hive/TestHiveConfFactory.java
+++ b/plugins/hive/src/test/hive2/com/dremio/exec/hive/TestHiveConfFactory.java
@@ -16,11 +16,13 @@
 package com.dremio.exec.hive;

+import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
 import static org.junit.Assert.assertEquals;

 import java.util.ArrayList;

 import org.apache.hadoop.hive.conf.HiveConf;
+import org.assertj.core.api.Assertions;
 import org.junit.Test;

 import com.dremio.exec.catalog.conf.Property;
@@ -49,6 +51,17 @@ public void testS3ImplDefaults() {
     assertEquals("com.dremio.test.CustomS3NImpl", confWithOverrides.get("fs.s3n.impl"));
   }

+  @Test
+  public void testUnsupportedHiveConfigs() {
+    HiveConfFactory hiveConfFactory = new HiveConfFactory();
+    HiveStoragePluginConfig conf = getTestConfig();
+    conf.propertyList = new ArrayList<>();
+    conf.propertyList.add(new Property("parquet.column.index.access", "true"));
+    assertThatIllegalArgumentException()
+      .isThrownBy(() -> hiveConfFactory.createHiveConf(conf))
+      .withMessageContaining("parquet.column.index.access");
+  }
+
   private HiveStoragePluginConfig getTestConfig() {
     Hive2StoragePluginConfig hive2StoragePluginConfig = new Hive2StoragePluginConfig();
     hive2StoragePluginConfig.hostname = "localhost";
diff --git a/plugins/hive/src/test/hive2/com/dremio/exec/store/hive/HiveTestDataGenerator.java b/plugins/hive/src/test/hive2/com/dremio/exec/store/hive/HiveTestDataGenerator.java
index 60ff1b3c2e..a81f88596c 100644
--- a/plugins/hive/src/test/hive2/com/dremio/exec/store/hive/HiveTestDataGenerator.java
+++ b/plugins/hive/src/test/hive2/com/dremio/exec/store/hive/HiveTestDataGenerator.java
@@ -30,6 +30,7 @@
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -229,6 +230,10 @@ public void generateTestData(java.util.function.Function generator
     }
   }

+  public static List<String> listStoreAsFormatsForTests() {
+    return Arrays.asList("orc", "rcfile", "textfile", "sequencefile");
+  }
+
   private void generateTestData() throws Exception {
     try (DriverState driverState = new DriverState(newHiveConf())) {
       Driver hiveDriver = driverState.driver;
@@ -496,7 +501,9 @@ private void generateTestData() throws Exception {
       // create a Hive table that has columns with data types which are supported for reading in Dremio.
       createAllTypesTextTable(hiveDriver, "readtest");
       createAllTypesTable(hiveDriver, "parquet", "readtest");
-      createAllTypesTable(hiveDriver, "orc", "readtest");
+      for (String tableFormat : listStoreAsFormatsForTests()) {
+        createAllTypesTable(hiveDriver, tableFormat, "readtest");
+      }

       createTimestampToStringTable(hiveDriver, "timestamptostring");
       createDoubleToStringTable(hiveDriver, "doubletostring");
@@ -518,21 +525,21 @@ private void generateTestData() throws Exception {
       createNestedListWithNullsHiveTables(hiveDriver);
       createNestedStructWithNullsHiveTables(hiveDriver);
       createParuqetComplexFilterTestTable(hiveDriver);
-      createComplexTypesTextTable(hiveDriver, "orccomplex");
-      createComplexTypesTable(hiveDriver, "orc", "orccomplex");
       createComplexVarcharHiveTables(hiveDriver);

+      createComplexTypesTextTable(hiveDriver, "orccomplex");
       createListTypesTextTable(hiveDriver, "orclist");
-      createListTypesTable(hiveDriver, "orc", "orclist");
-
       createStructTypesTextTable(hiveDriver, "orcstruct");
-      createStructTypesTable(hiveDriver, "orc", "orcstruct");
-
       createUnionTypesTextTable(hiveDriver, "orcunion");
-      createUnionTypesTable(hiveDriver, "orc", "orcunion");
-
       createMapTypesTextTable(hiveDriver, "orcmap");
-      createMapTypesTable(hiveDriver, "orc", "orcmap");
+
+      for (String tableFormat : listStoreAsFormatsForTests()) {
+        createComplexTypesTable(hiveDriver, tableFormat, "orccomplex");
+        createListTypesTable(hiveDriver, tableFormat, "orclist");
+        createStructTypesTable(hiveDriver, tableFormat, "orcstruct");
+        createUnionTypesTable(hiveDriver, tableFormat, "orcunion");
+        createMapTypesTable(hiveDriver, tableFormat, "orcmap");
+      }

       createORCDecimalCompareTestTable(hiveDriver, "orcdecimalcompare");
       createMixedPartitionTypeTable(hiveDriver, "parquet_mixed_partition_type");
@@ -559,11 +566,141 @@ private void generateTestData() throws Exception {
       createTableWithMapColumn(hiveDriver, "parquet_with_map_column");
       createFlattenOrcHiveTable(hiveDriver);

+      for (String tableFormat : listStoreAsFormatsForTests()) {
+        createTableWithMapOfIntKey(hiveDriver, tableFormat);
+        createTableWithMapOfBigIntKey(hiveDriver, tableFormat);
+        createTableWithMapOfBooleanKey(hiveDriver, tableFormat);
+        createTableWithMapOfDateKey(hiveDriver, tableFormat);
+        createTableWithMapOfDecimalKey(hiveDriver, tableFormat);
+        createTableWithMapOfDoubleKey(hiveDriver, tableFormat);
+        createTableWithMapOfFloatKey(hiveDriver, tableFormat);
+        createTableWithMapOfStringKey(hiveDriver, tableFormat);
+        createTableWithMapOfTimestampKey(hiveDriver, tableFormat);
+        createTableWithMapOfVarbinaryKey(hiveDriver, tableFormat);
+        createTableWithMapOfNullValues(hiveDriver, tableFormat);
+        createTableWithMapOfList(hiveDriver, tableFormat);
+        createTableWithMapOfStruct(hiveDriver, tableFormat);
+        createTableWithMapOfMap(hiveDriver, tableFormat);
+      }
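The loop above registers fourteen map-typed test tables per storage format using the helpers defined further below. For the int-key case with format "orc", the executed statement pair is, as reconstructed here from the helper's strings (map key/value types per the restored column definitions):

    create table map_of_int_orc (col1 map<int,string>) stored as orc
    insert into map_of_int_orc select map(1, 'value1',2,'value2')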
+
+    // This test requires a system option alteration. Refreshing metadata on Hive seems to time out the test, preventing re-use of an existing table. Hence, creating a new table.
     createParquetDecimalSchemaChangeFilterTestTable(hiveDriver, "test_nonvc_parqdecimalschemachange_table");
   }
 }
 
+  private void createTableWithMapOfIntKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_int_" + format + " (col1 map<int,string>) stored as " + format;
+    String insert = "insert into map_of_int_" + format + " select map(1, 'value1',2,'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfBigIntKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_bigint_" + format + " (col1 map<bigint,string>) stored as " + format;
+    String insert = "insert into map_of_bigint_" + format + " select map(9223372036854775800, 'value1',9223372036854775801,'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfBooleanKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_boolean_" + format + " (col1 map<boolean,string>) stored as " + format;
+    String insert = "insert into map_of_boolean_" + format + " select map(true, 'value1',false,'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfDateKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_date_" + format + " (col1 map<date,string>) stored as " + format;
+    String insert = "insert into map_of_date_" + format + " select map(cast('1993-05-26' as date), 'value1',cast('1993-05-27' as date),'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfDecimalKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_decimal_" + format + " (col1 map<decimal(3,2),string>) stored as " + format;
+    String insert = "insert into map_of_decimal_" + format + " select map(cast(1.1 as decimal(3,2)), 'value1', cast(1.2 as decimal(3,2)),'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfDoubleKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_double_" + format + " (col1 map<double,string>) stored as " + format;
+    String insert = "insert into map_of_double_" + format + " select map(cast(1.10 as double), 'value1',cast(1.20 as double),'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfFloatKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_float_" + format + " (col1 map<float,string>) stored as " + format;
+    String insert = "insert into map_of_float_" + format + " select map(cast(1.1 as float), 'value1',cast(1.2 as float),'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfStringKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_string_" + format + " (col1 map<string,string>) stored as " + format;
+    String insert = "insert into map_of_string_" + format + " select map('key1', 'value1','key2','value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfTimestampKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_timestamp_" + format + " (col1 map<timestamp,string>) stored as " + format;
+    String insert = "insert into map_of_timestamp_" + format + " select map(cast('1993-05-26 11:12:33' as timestamp), 'value1',cast('1993-05-26 11:12:35' as timestamp),'value2') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfVarbinaryKey(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_varbinary_" + format + " (col1 map<binary,string>) stored as " + format;
+    String insert = "insert into map_of_varbinary_" + format + " select map(cast('1234' as binary), 'value1') ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfNullValues(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_null_values_" + format + " (col1 map<string,int>) stored as " + format;
+    String insert = "insert into map_of_null_values_" + format + " select map('key1',null) ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfList(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_list_values_" + format + " (col1 map<string,array<int>>) stored as " + format;
+    String insert = "insert into map_of_list_values_" + format + " select map('key1',array(1,2)) ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfStruct(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_struct_values_" + format + " (col1 map<string,struct<f1:int>>) stored as " + format;
+    String insert = "insert into map_of_struct_values_" + format + " select map('key1',named_struct('f1',1)) ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
+  private void createTableWithMapOfMap(Driver hiveDriver, String format) {
+    String createTable = "create table map_of_map_values_" + format + " (col1 map<string,map<string,int>>) stored as " + format;
+    String insert = "insert into map_of_map_values_" + format + " select map('key1',map('innerKey1',1)) ";
+
+    executeQuery(hiveDriver, createTable);
+    executeQuery(hiveDriver, insert);
+  }
+
   private File getTempFile() throws Exception {
     return getTempFile("dremio-hive-test");
   }
@@ -1143,6 +1280,7 @@ private void createStructTypesTextTable(final Driver hiveDriver, final String ta
         " bigint_field: bigint, " +
         " float_field: float, " +
         " double_field: double, " +
+        " decimal_field: decimal(6,2), " +
         " string_field: string> " +
       ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','");
     executeQuery(hiveDriver,
@@ -1160,6 +1298,7 @@ private void createStructTypesTable(final Driver hiveDriver, final String format
         " bigint_field: bigint, " +
         " float_field: float, " +
         " double_field: double, " +
+        " decimal_field: decimal(6,2), " +
        " string_field: string> " +
       ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ',' STORED AS " + format);
     executeQuery(hiveDriver, "INSERT OVERWRITE TABLE " + table + format + " SELECT * FROM " + table);
@@ -1730,11 +1869,13 @@ private String generateStructTypesDataFile() throws Exception {
       String bigint_field = Long.toString(90000000000L);
       String float_field = Float.toString(row);
       String double_field = Double.toString(row);
+      String decimal_field = Double.toString(row);
       String string_field = Integer.toString(row);
       printWriter.println(rownum + "\t" + tinyint_field + "," + smallint_field + "," +
         int_field + "," + bigint_field + "," +
-        float_field + "," + double_field + "," + string_field);
+        float_field + "," + double_field + "," + decimal_field + "," +
+        string_field);
     }
     printWriter.close();
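The two test diffs that follow replace `errorMsgTestHelper` with an explicit regex assertion so the check can also pin the file path that now appears in the error text. A minimal sketch of just that matching step, using an invented sample message:

```java
import java.util.regex.Pattern;

public final class ErrorRegexSketch {
  public static void main(String[] args) {
    // Pattern mirrors the one in the diff; the sample message below is made up.
    String expected = ".*Field \\[col1] has incompatible types in file and table\\..*file Path: .*";
    String sample = "DATA_READ ERROR: Field [col1] has incompatible types in file and table.\n"
        + "file Path: /tmp/hive/warehouse/part-00000.parquet";
    // DOTALL lets '.' cross the newline between the message and the path line,
    // which matches() needs because it must consume the entire string.
    boolean matched = Pattern.compile(expected, Pattern.DOTALL).matcher(sample).matches();
    System.out.println("matched = " + matched);  // prints: matched = true
  }
}
```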
diff --git a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveParquetCoercions.java b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveParquetCoercions.java
index 37fe92cc25..5b1e664874 100644
--- a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveParquetCoercions.java
+++ b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveParquetCoercions.java
@@ -17,15 +17,20 @@
 import static com.dremio.exec.hive.HiveTestUtilities.executeQuery;
 import static org.joda.time.DateTimeZone.UTC;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
 import java.sql.Timestamp;
+import java.util.regex.Pattern;
 
 import org.apache.hadoop.hive.ql.Driver;
 import org.joda.time.DateTime;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import com.dremio.common.exceptions.UserRemoteException;
+
 public class ITHiveParquetCoercions extends LazyDataGeneratingHiveTestBase {
   @BeforeClass
   public static void setup() throws Exception {
@@ -217,7 +222,17 @@ private void hiveTestTypeConversions(Object[][] testcases) throws Exception {
   private void hiveTestIncompatibleTypeConversions(Object[][] testcases) {
     for (Object[] testcase : testcases) {
       String query = "SELECT * FROM hive." + testcase[0] + "_to_" + testcase[1] + "_parquet_ext";
-      errorMsgTestHelper(query, "Field [col1] has incompatible types in file and table.");
+      String expectedErrorMsg = ".*Field \\[col1] has incompatible types in file and table\\..*file Path: .*";
+      try {
+        test(query);
+        fail("Query was expected to fail with an incompatible-type error");
+      } catch (Exception e) {
+        if (!(e instanceof UserRemoteException)) {
+          fail("Unexpected error: " + e.getMessage());
+        }
+        boolean errorMsgMatched = Pattern.compile(expectedErrorMsg, Pattern.DOTALL).matcher(e.getMessage()).matches();
+        assertTrue("Error message didn't match", errorMsgMatched);
+      }
     }
   }
 }
diff --git a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStorage.java b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStorage.java
index 80a2f00100..219a32e395 100644
--- a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStorage.java
+++ b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStorage.java
@@ -18,26 +18,22 @@
 import static com.dremio.common.TestProfileHelper.assumeNonMaprProfile;
 import static com.dremio.common.utils.PathUtils.parseFullPath;
 import static com.dremio.exec.store.hive.exec.HiveDatasetOptions.HIVE_PARQUET_ENFORCE_VARCHAR_WIDTH;
-import static java.util.Arrays.asList;
-import static java.util.Collections.singletonList;
-import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.joda.time.DateTimeZone.UTC;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
-import org.apache.arrow.vector.util.JsonStringArrayList;
 import org.apache.arrow.vector.util.JsonStringHashMap;
-import org.apache.arrow.vector.util.Text;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -48,9 +44,9 @@
 import org.junit.AfterClass;
 import org.junit.Assume;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
+import com.dremio.common.exceptions.UserRemoteException;
 import com.dremio.connector.metadata.BytesOutput;
 import com.dremio.connector.metadata.DatasetHandle;
 import com.dremio.connector.metadata.DatasetMetadata;
@@ -60,7 +56,6 @@
 import com.dremio.exec.catalog.DatasetMetadataAdapter;
 import com.dremio.exec.planner.physical.PlannerSettings;
 import com.dremio.exec.proto.UserBitShared;
-import com.dremio.exec.record.BatchSchema;
 import com.dremio.exec.store.CatalogService;
 import com.dremio.exec.store.dfs.ImpersonationUtil;
 import com.dremio.hive.proto.HiveReaderProto.FileSystemCachedEntity;
@@ -168,11 +163,6 @@ public void readAllSupportedHiveDataTypesText() throws Exception {
     readAllSupportedHiveDataTypes("readtest");
   }
 
-  @Test
-  public void readAllSupportedHiveDataTypesORC() throws Exception {
-    readAllSupportedHiveDataTypes("readtest_orc");
-  }
-
   @Test
   public void orcTestMoreColumnsInExtTable() throws Exception {
     Assume.assumeFalse(runWithUnlimitedSplitSupport);
@@ -294,8 +284,17 @@ public void parquetTestDecimalConversion() throws Exception {
     // convert int,string,double to decimals
     // string to decimal(col2) is an overflow
     query = "SELECT * FROM hive.decimal_conversion_test_parquet_rev_ext";
-    errorMsgTestHelper(query, "Field [col2] has incompatible types in file and table.");
-
+    String expectedErrorMsg = "(.*)Field \\[col2] has incompatible types in file and table\\.(.*)file Path: (.*)";
+    try {
+      test(query);
+      fail("Query was expected to fail with an incompatible-type error");
+    } catch (Exception e) {
+      if (!(e instanceof UserRemoteException)) {
+        fail("Unexpected error: " + e.getMessage());
+      }
+      boolean errorMsgMatched = Pattern.compile(expectedErrorMsg, Pattern.DOTALL).matcher(e.getMessage()).matches();
+      assertTrue("Error message didn't match", errorMsgMatched);
+    }
     // all conversions are valid
     query = "SELECT * FROM hive.decimal_conversion_test_parquet_decimal";
     testBuilder()
@@ -495,64 +494,6 @@ public void readDoubleToStringORC() throws Exception {
       .go();
   }
 
-  @Test
-  public void readComplexHiveDataTypesORC() throws Exception {
-    readComplexHiveDataTypes("orccomplexorc");
-  }
-
-  @Test
-  public void readListOfStructORC() throws Exception {
-    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
-    for (int index : testrows) {
-      testBuilder().sqlQuery("SELECT list_struct_field[0].name as name FROM hive.orccomplexorc" +
-        " order by rownum limit 1 offset " + index)
-        .ordered()
-        .baselineColumns("name")
-        .baselineValues("name" + index)
-        .go();
-      testBuilder().sqlQuery("SELECT list_struct_field[1].name as name FROM hive.orccomplexorc" +
-        " order by rownum limit 1 offset " + index)
-        .ordered()
-        .baselineColumns("name")
-        .baselineValues("name" + (index + 1))
-        .go();
-      testBuilder().sqlQuery("SELECT list_struct_field[0].age as age FROM hive.orccomplexorc" +
-        " order by rownum limit 1 offset " + index)
-        .ordered()
-        .baselineColumns("age")
-        .baselineValues(index)
-        .go();
-      testBuilder().sqlQuery("SELECT list_struct_field[1].age as age FROM hive.orccomplexorc" +
-        " order by rownum limit 1 offset " + index)
-        .ordered()
-        .baselineColumns("age")
-        .baselineValues(index + 1)
-        .go();
-    }
-  }
-
-  // DX-16748: dropping support for map data type in ORC
-  @Ignore
-  @Test
-  public void readMapValuesTest() throws Exception {
-    readMapValues("orcmaporc");
-  }
-
-  @Test
-  public void readListHiveDataTypesORC() throws Exception {
-    readListHiveDataTypes("orclistorc");
-  }
-
-  @Test
-  public void readStructHiveDataTypesORC() throws Exception {
-    readStructHiveDataTypes("orcstructorc");
-  }
-
-  @Test
-  public void testDropOfUnionHiveDataTypesORC() throws Exception {
-    readTableWithUnionHiveDataTypes("orcunionorc");
-  }
-
   @Test
   public void
readAllSupportedHiveDataTypesParquet() throws Exception { readAllSupportedHiveDataTypes("readtest_parquet"); @@ -906,8 +846,8 @@ public void testIgnoreSkipHeaderFooterForSequencefile() throws Exception { @Test public void testQueryNonExistingTable() { errorMsgTestHelper("SELECT * FROM hive.nonExistedTable", "'nonExistedTable' not found within 'hive'"); - errorMsgTestHelper("SELECT * FROM hive.\"default\".nonExistedTable", "'nonExistedTable' not found within 'hive.hive.\"default\"'"); - errorMsgTestHelper("SELECT * FROM hive.db1.nonExistedTable", "'nonExistedTable' not found within 'hive.hive.db1'"); + errorMsgTestHelper("SELECT * FROM hive.\"default\".nonExistedTable", "'nonExistedTable' not found within 'hive.default'"); + errorMsgTestHelper("SELECT * FROM hive.db1.nonExistedTable", "'nonExistedTable' not found within 'hive.db1'"); } @Test @@ -1547,173 +1487,6 @@ private void readFieldSizeLimit(String table, String column) throws Exception { .hasMessageContaining(exceptionMessage); } - /** - * Test to ensure Dremio fails to read union data from hive - */ - private void readTableWithUnionHiveDataTypes(String table) { - int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999}; - for (int row : testrows) { - try { - testBuilder().sqlQuery("SELECT * FROM hive." + table + " order by rownum limit 1 offset " + row) - .ordered() - .baselineColumns("rownum") - .baselineValues(row) - .go(); - } catch (Exception e) { - e.printStackTrace(); - assertThat(e.getMessage()).contains(BatchSchema.MIXED_TYPES_ERROR); - } - } - } - - /** - * Test to ensure Dremio reads list of primitive data types - * @throws Exception - */ - private void readStructHiveDataTypes(String table) throws Exception { - int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999}; - for (int index : testrows) { - JsonStringHashMap structrow1 = new JsonStringHashMap<>(); - structrow1.put("tinyint_field", 1); - structrow1.put("smallint_field", 1024); - structrow1.put("int_field", index); - structrow1.put("bigint_field", 90000000000L); - structrow1.put("float_field", (float) index); - structrow1.put("double_field", (double) index); - structrow1.put("string_field", new Text(Integer.toString(index))); - - testBuilder().sqlQuery("SELECT * FROM hive." + table + - " order by rownum limit 1 offset " + index) - .ordered() - .baselineColumns("rownum", "struct_field") - .baselineValues(index, structrow1) - .go(); - - testBuilder().sqlQuery("SELECT rownum, struct_field['string_field'] AS string_field, struct_field['int_field'] AS int_field FROM hive." + table + - " order by rownum limit 1 offset " + index) - .ordered() - .baselineColumns("rownum", "string_field", "int_field") - .baselineValues(index, Integer.toString(index), index) - .go(); - } - } - - /** - * Test to ensure Dremio reads list of primitive data types - * @throws Exception - */ - private void readListHiveDataTypes(String table) throws Exception { - int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999}; - for (int testrow : testrows) { - if (testrow % 7 == 0) { - Integer index = testrow; - testBuilder() - .sqlQuery("SELECT rownum,double_field,string_field FROM hive." 
+ table + " order by rownum limit 1 offset " + index.toString()) - .ordered() - .baselineColumns("rownum", "double_field", "string_field") - .baselineValues(index, null, null) - .go(); - } else { - JsonStringArrayList string_field = new JsonStringArrayList<>(); - string_field.add(new Text(Integer.toString(testrow))); - string_field.add(new Text(Integer.toString(testrow + 1))); - string_field.add(new Text(Integer.toString(testrow + 2))); - string_field.add(new Text(Integer.toString(testrow + 3))); - string_field.add(new Text(Integer.toString(testrow + 4))); - - testBuilder() - .sqlQuery("SELECT rownum,double_field,string_field FROM hive." + table + " order by rownum limit 1 offset " + testrow) - .ordered() - .baselineColumns("rownum", "double_field", "string_field") - .baselineValues( - testrow, - asList((double) testrow, (double) (testrow + 1), (double) (testrow + 2), (double) (testrow + 3), (double) (testrow + 4)), - string_field) - .go(); - } - } - } - - /** - * Test to ensure Dremio reads the all ORC complex types correctly - * @throws Exception - */ - private void readComplexHiveDataTypes(String table) throws Exception { - int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999}; - for (int index : testrows) { - JsonStringHashMap structrow1 = new JsonStringHashMap<>(); - structrow1.put("name", new Text("name" + index)); - structrow1.put("age", index); - - JsonStringHashMap structlistrow1 = new JsonStringHashMap<>(); - structlistrow1.put("type", new Text("type" + index)); - structlistrow1.put("value", new ArrayList<>(singletonList(new Text("elem" + index)))); - - JsonStringHashMap liststruct1 = new JsonStringHashMap<>(); - liststruct1.put("name", new Text("name" + index)); - liststruct1.put("age", index); - JsonStringHashMap liststruct2 = new JsonStringHashMap<>(); - liststruct2.put("name", new Text("name" + (index + 1))); - liststruct2.put("age", index + 1); - - JsonStringHashMap mapstruct1 = new JsonStringHashMap<>(); - mapstruct1.put("key", new Text("name" + index)); - mapstruct1.put("value", index); - JsonStringHashMap mapstruct2 = new JsonStringHashMap<>(); - mapstruct2.put("key", new Text("name" + (index + 1))); - mapstruct2.put("value", index + 1); - JsonStringHashMap mapstruct3 = null; - if (index % 2 == 0) { - mapstruct3 = new JsonStringHashMap<>(); - mapstruct3.put("key", new Text("name" + (index + 2))); - mapstruct3.put("value", index + 2); - } - - JsonStringHashMap mapstructValue = new JsonStringHashMap<>(); - mapstructValue.put("key", new Text("key" + index)); - JsonStringHashMap mapstructValue2 = new JsonStringHashMap<>(); - mapstructValue2.put("type", new Text("struct" + index)); - mapstructValue.put("value", mapstructValue2); - - testBuilder() - .sqlQuery("SELECT * FROM hive." + table + " order by rownum limit 1 offset " + index) - .ordered() - .baselineColumns("rownum", "list_field", "struct_field", "struct_list_field", "list_struct_field", "map_field", "map_struct_field") - .baselineValues( - index, - asList(index, index + 1, index + 2, index + 3, index + 4), - structrow1, - structlistrow1, - asList(liststruct1, liststruct2), - index % 2 == 0 ? asList(mapstruct1, mapstruct2, mapstruct3) : asList(mapstruct1, mapstruct2), - asList(mapstructValue)) - .go(); - } - } - - private void readMapValues(String table) throws Exception { - int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999}; - for (Integer index : testrows) { - String mapquery = "WITH flatten_" + table + " AS (SELECT flatten(map_field) AS flatten_map_field from hive." 
+ table + " ) " +
-        " select flatten_map_field['key'] as key_field from flatten_" + table + " order by flatten_map_field['key'] limit 1 offset " + index.toString();
-
-      testBuilder().sqlQuery(mapquery)
-        .ordered()
-        .baselineColumns("key_field")
-        .baselineValues(index)
-        .go();
-
-      mapquery = "WITH flatten" + table + " AS (SELECT flatten(map_field) AS flatten_map_field from hive." + table + " ) " +
-        " select flatten_map_field['value'] as value_field from flatten" + table + " order by flatten_map_field['value'] limit 1 offset " + index.toString();
-
-      testBuilder().sqlQuery(mapquery)
-        .ordered()
-        .baselineColumns("value_field")
-        .baselineValues(index)
-        .go();
-    }
-  }
-
   /**
    * Test to ensure Dremio reads the all supported types correctly both normal fields (converted to Nullable types) and
    * partition fields (converted to Required types).
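The ORC-only complex-type tests removed from ITHiveStorage above reappear in the format-parameterized suite added below. A minimal sketch of the JUnit 4 wiring that suite uses, with an assumed format list standing in for `HiveTestDataGenerator.listStoreAsFormatsForTests()`:

```java
import static org.junit.Assert.assertTrue;

import java.util.Arrays;
import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

// One fixture instance is created per storage format; each test method then
// runs once per format. The format list here is illustrative only.
@RunWith(Parameterized.class)
public class FormatSuiteSketch {
  private final String tableFormat;

  @Parameterized.Parameters(name = "Table Format {0}")
  public static List<String> listTableFormats() {
    return Arrays.asList("orc", "parquet");  // assumed formats
  }

  public FormatSuiteSketch(String tableFormat) {
    this.tableFormat = tableFormat;
  }

  @Test
  public void tableNameCarriesFormatSuffix() {
    // Generated table names are suffixed with the format, e.g. "orccomplexorc".
    assertTrue(("orccomplex" + tableFormat).endsWith(tableFormat));
  }
}
```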
diff --git a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormat.java b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormat.java
new file mode 100644
index 0000000000..7d1eedb40b
--- /dev/null
+++ b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormat.java
@@ -0,0 +1,414 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.hive;
+
+import static java.util.Arrays.asList;
+import static java.util.Collections.singletonList;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.joda.time.DateTimeZone.UTC;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.arrow.vector.util.JsonStringArrayList;
+import org.apache.arrow.vector.util.JsonStringHashMap;
+import org.apache.arrow.vector.util.Text;
+import org.joda.time.LocalDateTime;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import com.dremio.exec.planner.physical.PlannerSettings;
+import com.dremio.exec.record.BatchSchema;
+import com.dremio.exec.store.hive.HiveTestDataGenerator;
+
+@RunWith(Parameterized.class)
+public class ITHiveStoreAsFormat extends HiveTestBase {
+  private final String tableFormat;
+
+  protected static Boolean runWithUnlimitedSplitSupport = false;
+  private static AutoCloseable mapEnabled;
+  private static AutoCloseable complexTypeEnabled;
+  private static AutoCloseable icebergDisabled;
+
+  @BeforeClass
+  public static void disableUnlimitedSplitFeature() {
+    icebergDisabled = disableUnlimitedSplitsAndIcebergSupportFlags();
+  }
+
+  @AfterClass
+  public static void resetUnlimitedSplitFeature() throws Exception {
+    icebergDisabled.close();
+  }
+
+  @BeforeClass
+  public static void enableMapFeature() {
+    mapEnabled = enableMapDataType();
+  }
+
+  @AfterClass
+  public static void resetMapFeature() throws Exception {
+    mapEnabled.close();
+  }
+
+  @BeforeClass
+  public static void enableComplexTypeFeature() {
+    complexTypeEnabled = enableComplexHiveType();
+  }
+
+  @AfterClass
+  public static void resetComplexTypeFeature() throws Exception {
+    complexTypeEnabled.close();
+  }
+
+  @BeforeClass
+  public static void setupOptions() throws Exception {
+    test(String.format("alter session set \"%s\" = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
+  }
+
+  @AfterClass
+  public static void shutdownOptions() throws Exception {
+    test(String.format("alter session set \"%s\" = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
+  }
+
+  @Parameterized.Parameters(name = "Table Format {0}")
+  public static List<String> listTableFormats() {
+    return HiveTestDataGenerator.listStoreAsFormatsForTests();
+  }
+
+  public ITHiveStoreAsFormat(String tableFormat) {
+    this.tableFormat = tableFormat;
+  }
+
+  @Test
+  public void readAllSupportedHiveDataTypes() throws Exception {
+    readAllSupportedHiveDataTypes("readtest_" + this.tableFormat);
+  }
+
+  @Test
+  public void readComplexHiveDataTypes() throws Exception {
+    readComplexHiveDataTypes("orccomplex" + this.tableFormat);
+  }
+
+  @Test
+  public void readListOfStruct() throws Exception {
+    readListOfStruct("orccomplex" + this.tableFormat);
+  }
+
+  @Test
+  public void readListHiveDataTypes() throws Exception {
+    readListHiveDataTypes("orclist" + this.tableFormat);
+  }
+
+  @Test
+  public void readStructHiveDataTypes() throws Exception {
+    readStructHiveDataTypes("orcstruct" + this.tableFormat);
+  }
+
+  @Test
+  public void testDropOfUnionHiveDataTypes() throws Exception {
+    readTableWithUnionHiveDataTypes("orcunion" + this.tableFormat);
+  }
+
+  /**
+   * Test to ensure Dremio fails to read union data from hive
+   */
+  private void readTableWithUnionHiveDataTypes(String table) {
+    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
+    for (int row : testrows) {
+      try {
+        testBuilder().sqlQuery("SELECT * FROM hive." + table + " order by rownum limit 1 offset " + row)
+          .ordered()
+          .baselineColumns("rownum")
+          .baselineValues(row)
+          .go();
+      } catch (Exception e) {
+        e.printStackTrace();
+        assertThat(e.getMessage()).contains(BatchSchema.MIXED_TYPES_ERROR);
+      }
+    }
+  }
+
+  /**
+   * Test to ensure Dremio reads list of primitive data types
+   * @throws Exception
+   */
+  private void readStructHiveDataTypes(String table) throws Exception {
+    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
+    for (int index : testrows) {
+      JsonStringHashMap<String, Object> structrow1 = new JsonStringHashMap<>();
+      structrow1.put("tinyint_field", 1);
+      structrow1.put("smallint_field", 1024);
+      structrow1.put("int_field", index);
+      structrow1.put("bigint_field", 90000000000L);
+      structrow1.put("float_field", (float) index);
+      structrow1.put("double_field", (double) index);
+      structrow1.put("decimal_field", new BigDecimal(Double.toString(index)).setScale(2, RoundingMode.HALF_UP));
+      structrow1.put("string_field", new Text(Integer.toString(index)));
+
+      testBuilder().sqlQuery("SELECT * FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("rownum", "struct_field")
+        .baselineValues(index, structrow1)
+        .go();
+
+      testBuilder().sqlQuery("SELECT rownum, struct_field['string_field'] AS string_field, struct_field['int_field'] AS int_field FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("rownum", "string_field", "int_field")
+        .baselineValues(index, Integer.toString(index), index)
+        .go();
+    }
+  }
+
+  /**
+   * Test to ensure Dremio reads list of primitive data types
+   * @throws Exception
+   */
+  private void readListHiveDataTypes(String table) throws Exception {
+    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
+    for (int testrow : testrows) {
+      if (testrow % 7 == 0) {
+        Integer index = testrow;
+        testBuilder()
+          .sqlQuery("SELECT rownum,double_field,string_field FROM hive." + table + " order by rownum limit 1 offset " + index.toString())
+          .ordered()
+          .baselineColumns("rownum", "double_field", "string_field")
+          .baselineValues(index, null, null)
+          .go();
+      } else {
+        JsonStringArrayList<Text> string_field = new JsonStringArrayList<>();
+        string_field.add(new Text(Integer.toString(testrow)));
+        string_field.add(new Text(Integer.toString(testrow + 1)));
+        string_field.add(new Text(Integer.toString(testrow + 2)));
+        string_field.add(new Text(Integer.toString(testrow + 3)));
+        string_field.add(new Text(Integer.toString(testrow + 4)));
+
+        testBuilder()
+          .sqlQuery("SELECT rownum,double_field,string_field FROM hive." + table + " order by rownum limit 1 offset " + testrow)
+          .ordered()
+          .baselineColumns("rownum", "double_field", "string_field")
+          .baselineValues(
+            testrow,
+            asList((double) testrow, (double) (testrow + 1), (double) (testrow + 2), (double) (testrow + 3), (double) (testrow + 4)),
+            string_field)
+          .go();
+      }
+    }
+  }
+
+  /**
+   * Test to ensure Dremio reads the all ORC complex types correctly
+   * @throws Exception
+   */
+  private void readComplexHiveDataTypes(String table) throws Exception {
+    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
+    for (int index : testrows) {
+      JsonStringHashMap<String, Object> structrow1 = new JsonStringHashMap<>();
+      structrow1.put("name", new Text("name" + index));
+      structrow1.put("age", index);
+
+      JsonStringHashMap<String, Object> structlistrow1 = new JsonStringHashMap<>();
+      structlistrow1.put("type", new Text("type" + index));
+      structlistrow1.put("value", new ArrayList<>(singletonList(new Text("elem" + index))));
+
+      JsonStringHashMap<String, Object> liststruct1 = new JsonStringHashMap<>();
+      liststruct1.put("name", new Text("name" + index));
+      liststruct1.put("age", index);
+      JsonStringHashMap<String, Object> liststruct2 = new JsonStringHashMap<>();
+      liststruct2.put("name", new Text("name" + (index + 1)));
+      liststruct2.put("age", index + 1);
+
+      JsonStringHashMap<String, Object> mapstruct1 = new JsonStringHashMap<>();
+      mapstruct1.put("key", new Text("name" + index));
+      mapstruct1.put("value", index);
+      JsonStringHashMap<String, Object> mapstruct2 = new JsonStringHashMap<>();
+      mapstruct2.put("key", new Text("name" + (index + 1)));
+      mapstruct2.put("value", index + 1);
+      JsonStringHashMap<String, Object> mapstruct3 = null;
+      if (index % 2 == 0) {
+        mapstruct3 = new JsonStringHashMap<>();
+        mapstruct3.put("key", new Text("name" + (index + 2)));
+        mapstruct3.put("value", index + 2);
+      }
+
+      JsonStringHashMap<String, Object> mapstructValue = new JsonStringHashMap<>();
+      mapstructValue.put("key", new Text("key" + index));
+      JsonStringHashMap<String, Object> mapstructValue2 = new JsonStringHashMap<>();
+      mapstructValue2.put("type", new Text("struct" + index));
+      mapstructValue.put("value", mapstructValue2);
+
+      testBuilder()
+        .sqlQuery("SELECT * FROM hive." + table + " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("rownum", "list_field", "struct_field", "struct_list_field", "list_struct_field", "map_field", "map_struct_field")
+        .baselineValues(
+          index,
+          asList(index, index + 1, index + 2, index + 3, index + 4),
+          structrow1,
+          structlistrow1,
+          asList(liststruct1, liststruct2),
+          index % 2 == 0 ? asList(mapstruct1, mapstruct2, mapstruct3) : asList(mapstruct1, mapstruct2),
+          asList(mapstructValue))
+        .go();
+    }
+  }
+
+  private void readListOfStruct(String table) throws Exception {
+    int[] testrows = {0, 500, 1022, 1023, 1024, 4094, 4095, 4096, 4999};
+    for (int index : testrows) {
+      testBuilder().sqlQuery("SELECT list_struct_field[0].name as name FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("name")
+        .baselineValues("name" + index)
+        .go();
+      testBuilder().sqlQuery("SELECT list_struct_field[1].name as name FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("name")
+        .baselineValues("name" + (index + 1))
+        .go();
+      testBuilder().sqlQuery("SELECT list_struct_field[0].age as age FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("age")
+        .baselineValues(index)
+        .go();
+      testBuilder().sqlQuery("SELECT list_struct_field[1].age as age FROM hive." + table +
+        " order by rownum limit 1 offset " + index)
+        .ordered()
+        .baselineColumns("age")
+        .baselineValues(index + 1)
+        .go();
+    }
+  }
+
+  /**
+   * Test to ensure Dremio reads the all supported types correctly both normal fields (converted to Nullable types) and
+   * partition fields (converted to Required types).
+   * @throws Exception
+   */
+  private void readAllSupportedHiveDataTypes(String table) throws Exception {
+    testBuilder().sqlQuery("SELECT * FROM hive." + table)
+      .ordered()
+      .baselineColumns(
+        "binary_field",
+        "boolean_field",
+        "tinyint_field",
+        "decimal0_field",
+        "decimal9_field",
+        "decimal18_field",
+        "decimal28_field",
+        "decimal38_field",
+        "double_field",
+        "float_field",
+        "int_field",
+        "bigint_field",
+        "smallint_field",
+        "string_field",
+        "varchar_field",
+        "timestamp_field",
+        "date_field",
+        "char_field",
+        // There is a regression in Hive 1.2.1 in binary type partition columns. Disable for now.
+        //"binary_part",
+        "boolean_part",
+        "tinyint_part",
+        "decimal0_part",
+        "decimal9_part",
+        "decimal18_part",
+        "decimal28_part",
+        "decimal38_part",
+        "double_part",
+        "float_part",
+        "int_part",
+        "bigint_part",
+        "smallint_part",
+        "string_part",
+        "varchar_part",
+        "timestamp_part",
+        "date_part",
+        "char_part")
+      .baselineValues(
+        "binaryfield".getBytes(),
+        false,
+        34,
+        new BigDecimal("66"),
+        new BigDecimal("2347.92"),
+        new BigDecimal("2758725827.99990"),
+        new BigDecimal("29375892739852.8"),
+        new BigDecimal("89853749534593985.783"),
+        8.345d,
+        4.67f,
+        123456,
+        234235L,
+        3455,
+        "stringfield",
+        "varcharfield",
+        new LocalDateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime(), UTC),
+        new LocalDateTime(Date.valueOf("2013-07-05").getTime()),
+        "charfield",
+        // There is a regression in Hive 1.2.1 in binary type partition columns. Disable for now.
+        //"binary",
+        true,
+        64,
+        new BigDecimal("37"),
+        new BigDecimal("36.90"),
+        new BigDecimal("3289379872.94565"),
+        new BigDecimal("39579334534534.4"),
+        new BigDecimal("363945093845093890.900"),
+        8.345d,
+        4.67f,
+        123456,
+        234235L,
+        3455,
+        "string",
+        "varchar",
+        new LocalDateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
+        new LocalDateTime(Date.valueOf("2013-07-05").getTime()),
+        "char")
+      .baselineValues( // All fields are null, but partition fields have non-null values
+        null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
+        // There is a regression in Hive 1.2.1 in binary type partition columns. Disable for now.
+        //"binary",
+        true,
+        64,
+        new BigDecimal("37"),
+        new BigDecimal("36.90"),
+        new BigDecimal("3289379872.94565"),
+        new BigDecimal("39579334534534.4"),
+        new BigDecimal("363945093845093890.900"),
+        8.345d,
+        4.67f,
+        123456,
+        234235L,
+        3455,
+        "string",
+        "varchar",
+        new LocalDateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
+        new LocalDateTime(Date.valueOf("2013-07-05").getTime()),
+        "char")
+      .go();
+  }
+}
diff --git a/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormatV2.java b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormatV2.java
new file mode 100644
index 0000000000..0e70d3c5e6
--- /dev/null
+++ b/plugins/hive/src/test/java/com/dremio/exec/hive/ITHiveStoreAsFormatV2.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.dremio.exec.hive;
+
+import java.util.List;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import com.dremio.exec.store.hive.HiveTestDataGenerator;
+
+@RunWith(Parameterized.class)
+public class ITHiveStoreAsFormatV2 extends ITHiveStoreAsFormat {
+
+  private static AutoCloseable icebergEnabled;
+
+  @BeforeClass
+  public static void enableUnlimitedSplitSupport() {
+    runWithUnlimitedSplitSupport = true;
+    icebergEnabled = enableUnlimitedSplitsSupportFlags();
+  }
+
+  @AfterClass
+  public static void disableUnlimitedSplitSupport() throws Exception {
+    icebergEnabled.close();
+    runWithUnlimitedSplitSupport = false;
+  }
+
+  @Parameterized.Parameters(name = "Table Format {0}")
+  public static List<String> listTableFormats() {
+    return HiveTestDataGenerator.listStoreAsFormatsForTests();
+  }
+
+  public ITHiveStoreAsFormatV2(String tableFormat) {
+    super(tableFormat);
+  }
+}
diff --git a/plugins/hive/src/test/java/com/dremio/exec/hive/ITInfoSchemaOnHiveStorage.java b/plugins/hive/src/test/java/com/dremio/exec/hive/ITInfoSchemaOnHiveStorage.java
index c781106be6..254afb8eae 100644
--- a/plugins/hive/src/test/java/com/dremio/exec/hive/ITInfoSchemaOnHiveStorage.java
+++ b/plugins/hive/src/test/java/com/dremio/exec/hive/ITInfoSchemaOnHiveStorage.java
@@ -21,6 +21,7 @@
 import com.dremio.TestBuilder;
 import com.dremio.exec.catalog.CatalogServiceImpl;
 import com.dremio.exec.store.CatalogService;
+import com.dremio.exec.store.hive.HiveTestDataGenerator;
 import com.dremio.service.namespace.NamespaceException;
 import com.dremio.service.namespace.NamespaceKey;
 import com.google.common.base.Strings;
@@ -38,15 +39,17 @@ public void ensureFullMetadataRead() throws NamespaceException{
 
   @Test
   public void showTablesFromDb() throws Exception{
-    testBuilder()
+    TestBuilder tb = testBuilder()
       .sqlQuery("SHOW TABLES FROM hive.\"default\"")
       .unOrdered()
       .baselineColumns("TABLE_SCHEMA", "TABLE_NAME")
       .baselineValues("hive.default", "partition_pruning_test")
       .baselineValues("hive.default", "readtest")
-      .baselineValues("hive.default", "readtest_parquet")
-      .baselineValues("hive.default", "readtest_orc")
-      .baselineValues("hive.default", "empty_table")
+      .baselineValues("hive.default", "readtest_parquet");
+    for (String format : HiveTestDataGenerator.listStoreAsFormatsForTests()) {
+      tb.baselineValues("hive.default", "readtest_" + format);
+    }
+    tb.baselineValues("hive.default", "empty_table")
       .baselineValues("hive.default", "partitioned_empty_table")
       .baselineValues("hive.default", "infoschematest")
       .baselineValues("hive.default", "kv")
@@ -64,14 +67,18 @@ public void showTablesFromDb() throws Exception{
       .baselineValues("hive.default", "orc_with_two_files")
       .baselineValues("hive.default", "parquet_mult_rowgroups")
       .baselineValues("hive.default", "orccomplex")
-      .baselineValues("hive.default", "orccomplexorc")
+      .baselineValues("hive.default", "orcmap")
       .baselineValues("hive.default", "orclist")
-      .baselineValues("hive.default", "orclistorc")
       .baselineValues("hive.default", "orcstruct")
-
.baselineValues("hive.default", "orcstructorc") - .baselineValues("hive.default", "orcunion") - .baselineValues("hive.default", "orcunionorc") - .baselineValues("hive.default", "orcunion_int_input") + .baselineValues("hive.default", "orcunion"); + for (String format : HiveTestDataGenerator.listStoreAsFormatsForTests()) { + tb.baselineValues("hive.default", "orccomplex" + format) + .baselineValues("hive.default", "orcmap" + format) + .baselineValues("hive.default", "orclist" + format) + .baselineValues("hive.default", "orcstruct" + format) + .baselineValues("hive.default", "orcunion" + format); + } + tb.baselineValues("hive.default", "orcunion_int_input") .baselineValues("hive.default", "orcunion_double_input") .baselineValues("hive.default", "orcunion_string_input") .baselineValues("hive.default", "parquetschemalearntest") @@ -93,8 +100,6 @@ public void showTablesFromDb() throws Exception{ .baselineValues("hive.default", "decimal_conversion_test_parquet_decimal_ext") .baselineValues("hive.default", "parquet_varchar_to_decimal_with_filter") .baselineValues("hive.default", "parquet_varchar_to_decimal_with_filter_ext") - .baselineValues("hive.default", "orcmap") - .baselineValues("hive.default", "orcmaporc") .baselineValues("hive.default", "orc_more_columns") .baselineValues("hive.default", "orc_more_columns_ext") .baselineValues("hive.default", "field_size_limit_test") @@ -156,8 +161,24 @@ public void showTablesFromDb() throws Exception{ .baselineValues("hive.default", "empty_float_field") .baselineValues("hive.default", "parquet_with_map_column") .baselineValues("hive.default", "flatten_orc") - .baselineValues("hive.default", "flatten_parquet") - .go(); + .baselineValues("hive.default", "flatten_parquet"); + for (String format : HiveTestDataGenerator.listStoreAsFormatsForTests()) { + tb.baselineValues("hive.default", "map_of_int_" + format) + .baselineValues("hive.default", "map_of_bigint_" + format) + .baselineValues("hive.default", "map_of_boolean_" + format) + .baselineValues("hive.default", "map_of_date_" + format) + .baselineValues("hive.default", "map_of_decimal_" + format) + .baselineValues("hive.default", "map_of_double_" + format) + .baselineValues("hive.default", "map_of_float_" + format) + .baselineValues("hive.default", "map_of_string_" + format) + .baselineValues("hive.default", "map_of_timestamp_" + format) + .baselineValues("hive.default", "map_of_varbinary_" + format) + .baselineValues("hive.default", "map_of_null_values_" + format) + .baselineValues("hive.default", "map_of_list_values_" + format) + .baselineValues("hive.default", "map_of_struct_values_" + format) + .baselineValues("hive.default", "map_of_map_values_" + format); + } + tb.go(); testBuilder() .sqlQuery("SHOW TABLES IN hive.db1") @@ -194,6 +215,7 @@ public void showDatabases() throws Exception{ .baselineValues("dfs_partition_inference") .baselineValues("dfs_test") .baselineValues("dfs_hadoop") + .baselineValues("dfs_hadoop_mutable") .baselineValues("dfs_test_hadoop") .baselineValues("dfs_static_test_hadoop") .baselineValues("dfs_root") diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/DmlQueryOnHiveTestBase.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/DmlQueryOnHiveTestBase.java index a8b625d02c..3f461a9108 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/DmlQueryOnHiveTestBase.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/DmlQueryOnHiveTestBase.java @@ -32,7 +32,6 @@ public class DmlQueryOnHiveTestBase extends LazyDataGeneratingHiveTestBase { 
@BeforeClass public static void beforeClass() throws Exception { setSystemOption(ExecConstants.ENABLE_ICEBERG_ADVANCED_DML, "true"); - setSystemOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE, "true"); setSystemOption(ExecConstants.ENABLE_ICEBERG_VACUUM, "true"); dataGenerator.updatePluginConfig((getSabotContext().getCatalogService()), @@ -44,8 +43,6 @@ public static void beforeClass() throws Exception { public static void afterClass() { setSystemOption(ExecConstants.ENABLE_ICEBERG_ADVANCED_DML, ExecConstants.ENABLE_ICEBERG_ADVANCED_DML.getDefault().getBoolVal().toString()); - setSystemOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE, - ExecConstants.ENABLE_ICEBERG_OPTIMIZE.getDefault().getBoolVal().toString()); setSystemOption(ExecConstants.ENABLE_ICEBERG_VACUUM, ExecConstants.ENABLE_ICEBERG_VACUUM.getDefault().getBoolVal().toString()); } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITAlterTableOnHiveTables.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITAlterTableOnHiveTables.java index 1c49aaae01..fe2fb89a34 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITAlterTableOnHiveTables.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITAlterTableOnHiveTables.java @@ -68,8 +68,7 @@ public void testAlterTableAddColumn() throws Exception { test("alter table " + HIVE_TEST_PLUGIN_NAME + "." + tableName + " add columns (col4 ARRAY(int))"); assertThat(runDescribeQuery(tableName)).contains("col4|ARRAY"); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -85,8 +84,7 @@ public void testAlterTableDropColumn() throws Exception { assertThat(runDescribeQuery(tableName)).contains("col3"); test("alter table " + HIVE_TEST_PLUGIN_NAME + "." + tableName + " DROP COLUMN col3"); assertThat(runDescribeQuery(tableName)).doesNotContain("col3"); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -103,8 +101,7 @@ public void testAlterTableChangeColumn() throws Exception { assertThat(runDescribeQuery(tableName)).doesNotContain("col4"); test("alter table " + HIVE_TEST_PLUGIN_NAME + "." 
+ tableName + " CHANGE COLUMN col1 col4 bigint"); assertThat(runDescribeQuery(tableName)).contains("col4|BIGINT"); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITBasicDMLSupportOnHiveTables.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITBasicDMLSupportOnHiveTables.java index 9f5a370cfa..9dd8beb11e 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITBasicDMLSupportOnHiveTables.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITBasicDMLSupportOnHiveTables.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -80,8 +81,7 @@ public void testCreateEmptyIcebergTable() throws Exception { .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -111,8 +111,7 @@ public void testCTASCreateNewIcebergTable() throws Exception { .baselineColumns("n") .baselineValues(2) .go(); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); dataGenerator.executeDDL(getDropTableQuery(newTableName)); } @@ -132,8 +131,7 @@ public void testCreateEmptyIcebergTableOnLocation() throws Exception { File tableFolder = new File(queryTableLocation); assertTrue("Error in checking if the " + tableFolder.toString() + " exists", tableFolder.exists()); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -151,8 +149,7 @@ public void testCTASCreateNewIcebergTableWithLocation() throws Exception { runSQL(getCTASQueryWithLocation("(values (1), (2), (3))", tableNameWithCatalog, tableLocationFolder)); File tableFolder = new File(tableLocationFolder); assertTrue("Error in checking if the " + tableFolder.toString() + " exists", tableFolder.exists()); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); } } @@ -168,8 +165,7 @@ public void testCreateEmptyIcebergTableWithIdentityTransform() throws Exception .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -185,8 +181,7 @@ public void testCreateEmptyIcebergTableWithBucketTransform() throws Exception { .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -202,8 +197,7 @@ public void testCreateEmptyIcebergTableWithTruncateTransform() throws Exception .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -219,8 +213,7 @@ public void testCreateEmptyIcebergTableWithYearTransformOnDateColumn() throws Ex .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -236,8 +229,7 @@ public void testCreateEmptyIcebergTableWithMonthTransformOnDateColumn() throws E .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } 
@@ -253,8 +245,7 @@ public void testCreateEmptyIcebergTableWithDayTransformOnDateColumn() throws Exc .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -270,8 +261,7 @@ public void testCreateEmptyIcebergTableWithYearTransformOnTimestampColumn() thro .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -287,8 +277,7 @@ public void testCreateEmptyIcebergTableWithMonthTransformOnTimestampColumn() thr .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -304,8 +293,7 @@ public void testCreateEmptyIcebergTableWithDayTransformOnTimestampColumn() throw .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -321,8 +309,7 @@ public void testCreateEmptyIcebergTableWithHourTransformOnTimestampColumn() thro .baselineColumns("ok", "summary") .baselineValues(true, "Table created") .go(); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -355,6 +342,7 @@ public void truncateEmptyTable() throws Exception { } + @Ignore("DX-61689") @Test public void truncateOnCreateAtLocation() throws Exception { diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcIncrementalRefresh.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcIncrementalRefresh.java index 608cab4c68..9d2096add9 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcIncrementalRefresh.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcIncrementalRefresh.java @@ -34,6 +34,7 @@ public String getFileFormat() { return "ORC"; } + @Override @Test public void testIncrementalRefreshSchemaEvolution() throws Exception { final String tableName = "incrrefresh_v2_test_schema_" + getFileFormatLowerCase(); @@ -67,8 +68,7 @@ public void testIncrementalRefreshSchemaEvolution() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcMetadataRefresh.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcMetadataRefresh.java index 73789e9fef..376036f559 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcMetadataRefresh.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveOrcMetadataRefresh.java @@ -53,6 +53,7 @@ public void testOrcTableWithMoreColExt() throws Exception { .go(); } + @Override @Test @Ignore // Test is not valid for ORC format public void testFailTableOptionQuery() throws Exception { diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetIncrementalMetadataRefresh.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetIncrementalMetadataRefresh.java index 4d3bb86502..ac635a3316 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetIncrementalMetadataRefresh.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetIncrementalMetadataRefresh.java @@ -18,7 +18,6 @@ import 
static com.dremio.exec.sql.hive.ITHiveRefreshDatasetMetadataRefresh.EXPLAIN_PLAN; import static com.dremio.exec.sql.hive.ITHiveRefreshDatasetMetadataRefresh.verifyIcebergExecution; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.verifyIcebergMetadata; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertEquals; @@ -117,8 +116,7 @@ public void testIncrementalRefreshWithNoChanges() throws Exception { //No change in snapshot Id assertEquals(oldSnapShotId, icebergTable.currentSnapshot().snapshotId()); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -160,8 +158,7 @@ public void testIncrementalRefreshWithChangesThenNoChanges() throws Exception { //No change in snapshot Id assertEquals(nextSnapshotId, icebergTable.currentSnapshot().snapshotId()); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -215,8 +212,7 @@ public void testMultipleIncrementalRefreshWithAndWithoutChanges() throws Excepti //No change in snapshot Id assertEquals(nextSnapshotId, icebergTable.currentSnapshot().snapshotId()); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -261,8 +257,7 @@ public void testIncrementalRefreshFileAddition() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -307,8 +302,7 @@ public void testIncrementalRefreshPartitionAddition() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -375,8 +369,7 @@ public void testIncrementalRefreshPartitionAdditionFailAfterIcebergCommit() thro .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -415,8 +408,7 @@ public void testIncrementalRefreshPartitionDeletion() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -460,8 +452,7 @@ public void testIncrementalRefreshSchemaEvolution() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery);*/ - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -501,8 +492,7 @@ public void testIncrementalRefreshSchemaEvolWithoutDataUpdate() throws Exception .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -543,8 +533,7 @@ public void testIncrementalRefreshSchemaEvolOnPartitionTable() throws Exception .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -566,8 +555,7 @@ public void testIncrementalRefreshFailPartitionEvolution() throws Exception { assertThatThrownBy(() -> runFullRefresh(tableName)) .isInstanceOf(Exception.class) .hasMessageContaining("Change in Hive partition definition detected for table"); - } - finally { + } finally { forgetMetadata(tableName); 
dropTable(tableName); } @@ -615,8 +603,7 @@ public void testPartialRefreshExistingPartition() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -659,8 +646,7 @@ public void testPartialRefreshNewPartition() throws Exception { Types.NestedField.optional(1, "id", new Types.IntegerType()), Types.NestedField.optional(2, "year", new Types.IntegerType()), Types.NestedField.optional(3, "month", new Types.StringType()))), Sets.newHashSet("year", "month"), 2); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -730,8 +716,7 @@ public void testPartialRefreshNewPartitionFailAfterIcebergCommit() throws Except .baselineValues(2, "Feb", 2021) .baselineValues(3, "Feb", 2022) .go(); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -764,8 +749,7 @@ public void testCheckPartitionHasPermissionInIncremental() throws Exception { } finally { fs.setPermission(partitionDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); } - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -798,8 +782,7 @@ public void testCheckPartitionHasPermissionInPartial() throws Exception { } finally { fs.setPermission(partitionDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); } - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -834,8 +817,7 @@ public void testPartialRefreshWithDifferentTypeColumns() throws Exception { .baselineValues(1) .baselineValues(1) .go(); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -856,8 +838,7 @@ public void testPartialRefreshFailWithNoValue() throws Exception { assertThatThrownBy(() -> runPartialRefresh(tableName, "(\"year\" = '20', \"month\" = 'FEB')")) .isInstanceOf(Exception.class) .hasMessageContaining("VALIDATION ERROR: Partition 'year=20/month=FEB' does not exist in default." 
+ tableName); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -884,8 +865,7 @@ public void testFixedWidthVarcharCol() throws Exception { .go(); verifyIcebergExecution(EXPLAIN_PLAN + selectQuery, finalIcebergMetadataLocation); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } @@ -920,8 +900,7 @@ public void testPartialRefreshWithRandomOrdering() throws Exception { Types.NestedField.optional(1, "id", new Types.IntegerType()), Types.NestedField.optional(2, "year", new Types.IntegerType()), Types.NestedField.optional(3, "month", new Types.StringType()))), Sets.newHashSet("year", "month"), 2); - } - finally { + } finally { forgetMetadata(tableName); dropTable(tableName); } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetMetadataRefresh.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetMetadataRefresh.java index 24d6631cb8..6754b93c10 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetMetadataRefresh.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveRefreshDatasetMetadataRefresh.java @@ -17,7 +17,6 @@ import static com.dremio.exec.store.hive.exec.HiveDatasetOptions.HIVE_PARQUET_ENFORCE_VARCHAR_WIDTH; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.verifyIcebergMetadata; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -210,8 +209,7 @@ public void testFailTableOptionQuery() throws Exception { HIVE_PARQUET_ENFORCE_VARCHAR_WIDTH, "true"))) .isInstanceOf(UserRemoteException.class) .hasMessageContaining("ALTER unsupported on table 'hive.\"default\".refresh_v2_test_table_option_" + formatType + "'"); - } - finally { + } finally { dataGenerator.executeDDL("DROP TABLE IF EXISTS " + tableName); } } @@ -254,8 +252,7 @@ public void testCheckTableHasPermission() throws Exception { .asInstanceOf(InstanceOfAssertFactories.type(UserRemoteException.class)) .extracting(UserRemoteException::getErrorType) .isEqualTo(UserBitShared.DremioPBError.ErrorType.PERMISSION); - } - finally { + } finally { fs.setPermission(tableDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); } } @@ -280,8 +277,7 @@ public void testCheckPartitionHasPermission() throws Exception { .asInstanceOf(InstanceOfAssertFactories.type(UserRemoteException.class)) .extracting(UserRemoteException::getErrorType) .isEqualTo(UserBitShared.DremioPBError.ErrorType.PERMISSION); - } - finally { + } finally { fs.setPermission(partitionDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveSchemaEvolution.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveSchemaEvolution.java index 3aad3c2a01..a263d950fe 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveSchemaEvolution.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITHiveSchemaEvolution.java @@ -69,8 +69,7 @@ public void testInsertAfterDropColumn() throws Exception { .baselineValues(2) .build() .run(); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); } } @@ -95,8 +94,7 @@ public void testInsertAfterRenameColumn() throws Exception { .baselineValues(2, 4) .build() 
.run(); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); } } @@ -124,8 +122,7 @@ public void testCTASAfterAddColumn() throws Exception { .build() .run(); testCTAS(tableNameWithCatalog, newTableNameWithCatalog); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); dataGenerator.executeDDL(getDropTableQuery(newTableName)); } @@ -162,8 +159,7 @@ public void testDropAddSameColumn() throws Exception { .baselineValues(3, 4) .build() .run(); - } - finally { + } finally { dataGenerator.executeDDL(getDropTableQuery(tableName)); } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITOptimizeOnHive.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITOptimizeOnHive.java index 66f0f75ded..97039057ee 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITOptimizeOnHive.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITOptimizeOnHive.java @@ -20,17 +20,83 @@ import org.junit.Test; import com.dremio.exec.planner.sql.OptimizeTests; - public class ITOptimizeOnHive extends DmlQueryOnHiveTestBase { private static final String SOURCE = HIVE_TEST_PLUGIN_NAME; - @Test public void testOnUnPartitioned() throws Exception { OptimizeTests.testOnUnPartitioned(SOURCE, allocator); } - @Test public void testOnPartitioned() throws Exception { OptimizeTests.testOnPartitioned(SOURCE, allocator); } + @Test + public void testOnUnPartitionedMinInputFilesCriteria() throws Exception { + OptimizeTests.testOnUnpartitionedMinInputFilesCriteria(SOURCE, allocator); + } + + @Test + public void testOnPartitionedMinInputFilesCriteria() throws Exception { + OptimizeTests.testOnPartitionedMinInputFilesCriteria(SOURCE, allocator); + } + + @Test + public void testOnUnpartitionedMinFileSizeCriteria() throws Exception { + OptimizeTests.testOnUnpartitionedMinFileSizeCriteria(SOURCE, allocator); + } + + @Test + public void testWithSingleFilePartitions() throws Exception { + OptimizeTests.testWithSingleFilePartitions(SOURCE, allocator); + } + + @Test + public void testWithSingleFilePartitionsAndEvolvedSpec() throws Exception { + OptimizeTests.testWithSingleFilePartitionsAndEvolvedSpec(SOURCE, allocator); + } + + @Test + public void testUnsupportedScenarios() throws Exception { + OptimizeTests.testUnsupportedScenarios(SOURCE, allocator); + } + + @Test + public void testEvolvedPartitions() throws Exception { + OptimizeTests.testEvolvedPartitions(SOURCE, allocator); + } + + @Test + public void testOptimizeDataOnlyUnPartitioned() throws Exception { + OptimizeTests.testOptimizeDataFilesUnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeDataOnlyPartitioned() throws Exception { + OptimizeTests.testOptimizeDataOnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsOnlyUnPartitioned() throws Exception { + OptimizeTests.testOptimizeManifestsOnlyUnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsOnlyPartitioned() throws Exception { + OptimizeTests.testOptimizeManifestsOnlyPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsModesIsolations() throws Exception { + OptimizeTests.testOptimizeManifestsModesIsolations(SOURCE, allocator); + } + + @Test + public void testOptimizeOnEmptyTableHollowSnapshot() throws Exception { + OptimizeTests.testOptimizeOnEmptyTableHollowSnapshot(SOURCE, allocator); + } + + @Test + public void testRewriteManifestsForEvolvedPartitionSpec() throws Exception { + 
OptimizeTests.testRewriteManifestsForEvolvedPartitionSpec(SOURCE, allocator); + } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITVacuumOnHive.java b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITVacuumOnHive.java index 986d5314f6..b58b1fbdda 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITVacuumOnHive.java +++ b/plugins/hive/src/test/java/com/dremio/exec/sql/hive/ITVacuumOnHive.java @@ -30,8 +30,28 @@ public class ITVacuumOnHive extends DmlQueryOnHiveTestBase { // Defining SOURCE such that you can easily copy and paste the same test across other test variations private static final String SOURCE = HIVE_TEST_PLUGIN_NAME; + @Test + public void testSimpleExpireOlderThanRetainLastUsingEqual() throws Exception { + VacuumTests.testSimpleExpireOlderThanRetainLastUsingEqual(allocator, SOURCE); + } + @Test public void testSimpleExpireOlderThan() throws Exception { VacuumTests.testSimpleExpireOlderThan(allocator, SOURCE); } + + @Test + public void testExpireOnTableOneSnapshot() throws Exception { + VacuumTests.testExpireOnTableOneSnapshot(SOURCE); + } + + @Test + public void testRetainMoreSnapshots() throws Exception { + VacuumTests.testRetainMoreSnapshots(SOURCE); + } + + @Test + public void testRetainAllSnapshots() throws Exception { + VacuumTests.testRetainAllSnapshots(allocator, SOURCE); + } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java index b51ce8fe33..d3b1f57f5a 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java @@ -25,7 +25,7 @@ import com.dremio.exec.planner.serialization.kryo.serializers.SourceConfigAwareConnectionConfDeserializer; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.StoragePlugin; -import com.dremio.exec.store.hive.pf4j.NativeLibPluginManager; +import com.dremio.plugins.pf4j.NativeLibPluginManager; /** * Hive 2.x storage plugin configuration. @@ -35,7 +35,7 @@ public class Hive2StoragePluginConfig extends HiveStoragePluginConfig { @Override public StoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { - final PluginManager manager = new NativeLibPluginManager(); + final PluginManager manager = new NativeLibPluginManager(context.getOptionManager()); manager.loadPlugins(); manager.startPlugins(); diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheKeyPluginClassLoader.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheKeyPluginClassLoader.java new file mode 100644 index 0000000000..bd13d59b45 --- /dev/null +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheKeyPluginClassLoader.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store.hive.exec; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import java.net.URI; + +import org.apache.hadoop.mapred.JobConf; +import org.junit.Test; + +import com.dremio.io.file.UriSchemes; + +public class TestHadoopFsCacheKeyPluginClassLoader { + + @Test + public void testEquals() { + URI uri = URI.create(String.format("%s://%s%s", UriSchemes.HDFS_SCHEME, "localhost", "/sample/data")); + String userName = "dremio"; + HadoopFsCacheKeyPluginClassLoader key1 = new HadoopFsCacheKeyPluginClassLoader(uri, new JobConf(), userName); + HadoopFsCacheKeyPluginClassLoader key2 = new HadoopFsCacheKeyPluginClassLoader(uri, new JobConf(), userName); + assertEquals(key1, key2); + } + + @Test + public void testNotEquals() { + String host1 = "localhost"; + String path1 = "/sample/data"; + URI uri1 = URI.create(String.format("%s://%s%s", UriSchemes.HDFS_SCHEME, host1, path1)); + URI uri2 = URI.create(String.format("%s://%s%s", UriSchemes.FILE_SCHEME, host1, path1)); + String userName1 = "dremio1"; + HadoopFsCacheKeyPluginClassLoader key1 = new HadoopFsCacheKeyPluginClassLoader(uri1, new JobConf(), userName1); + HadoopFsCacheKeyPluginClassLoader key2 = new HadoopFsCacheKeyPluginClassLoader(uri2, new JobConf(), userName1); + assertNotEquals(key1, key2); + + String host2 = "175.23.2.71"; + URI uri3 = URI.create(String.format("%s://%s%s", UriSchemes.FILE_SCHEME, host2, path1)); + HadoopFsCacheKeyPluginClassLoader key3 = new HadoopFsCacheKeyPluginClassLoader(uri3, new JobConf(), userName1); + assertNotEquals(key2, key3); + + String path2 = "/sample/data2"; + URI uri4 = URI.create(String.format("%s://%s%s", UriSchemes.FILE_SCHEME, host2, path2)); + HadoopFsCacheKeyPluginClassLoader key4 = new HadoopFsCacheKeyPluginClassLoader(uri4, new JobConf(), userName1); + assertEquals(key3, key4); + + String userName2 = "dremio2"; + HadoopFsCacheKeyPluginClassLoader key5 = new HadoopFsCacheKeyPluginClassLoader(uri4, new JobConf(), userName2); + assertNotEquals(key4, key5); + } +} diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheWrapperPluginClassLoader.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheWrapperPluginClassLoader.java new file mode 100644 index 0000000000..43addbd816 --- /dev/null +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHadoopFsCacheWrapperPluginClassLoader.java @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.exec.store.hive.exec; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.net.URI; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.mapred.JobConf; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.dremio.io.file.UriSchemes; +import com.google.common.cache.LoadingCache; + +public class TestHadoopFsCacheWrapperPluginClassLoader { + + private HadoopFsCacheWrapperPluginClassLoader cacheWrapper; + + private static final String HOST = "localhost"; + private static final String PATH = "/sample/data"; + + @Before + public void before() throws Exception { + cacheWrapper = new HadoopFsCacheWrapperPluginClassLoader(); + for (int i = 0; i < 10; i++) { + URI uri = URI.create(String.format("%s://%s%s_%s", UriSchemes.HDFS_SCHEME, HOST, PATH, i)); + FileSystem unused = cacheWrapper.getHadoopFsSupplierPluginClassLoader(uri.toString(), new JobConf(), "dremio").get(); + } + } + + @After + public void after() throws Exception { + cacheWrapper.close(); + cacheWrapper = null; + } + + @Test + public void testExactMatch() { + LoadingCache cache = cacheWrapper.getCache(); + long initialCacheSize = cache.size(); + URI uri = URI.create(String.format("%s://%s%s", UriSchemes.HDFS_SCHEME, HOST, PATH)); + String userName = "dremio"; + FileSystem fs = cacheWrapper.getHadoopFsSupplierPluginClassLoader(uri.toString(), new JobConf(), userName).get(); + assertNotNull("File system instance must be not null", fs); + assertEquals(String.format("File system scheme must be %s", UriSchemes.HDFS_SCHEME), fs.getScheme(), UriSchemes.HDFS_SCHEME); + assertEquals("Cache must contain exactly one element", cache.size(), initialCacheSize); + assertTrue("All elements in the cache must have the same scheme, authority, and ugi", cache.asMap().keySet().stream().allMatch(k -> k.scheme.equals(uri.getScheme()) && k.authority.equals(uri.getAuthority()) && k.userName.equals(userName))); + } + + @Test + public void testSchemaMismatch() { + LoadingCache cache = cacheWrapper.getCache(); + long initialCacheSize = cache.size(); + URI uri = URI.create(String.format("%s://%s%s", UriSchemes.FILE_SCHEME, HOST, PATH)); + String userName = "dremio"; + FileSystem fs = cacheWrapper.getHadoopFsSupplierPluginClassLoader(uri.toString(), new JobConf(), userName).get(); + assertNotNull("File system instance must be not null", fs); + assertEquals(String.format("File system scheme must be %s", UriSchemes.FILE_SCHEME), fs.getScheme(), UriSchemes.FILE_SCHEME); + assertEquals("Cache must contain exactly two elements", cache.size(), initialCacheSize + 1); + assertTrue("All elements in the cache must have the same authority and ugi", cache.asMap().keySet().stream().allMatch(k -> k.authority.equals(uri.getAuthority()) && k.userName.equals(userName))); + assertTrue(String.format("One element in the cache must have \"%s\" schema", UriSchemes.FILE_SCHEME), cache.asMap().keySet().stream().anyMatch(k -> k.scheme.equals(uri.getScheme()))); + } + + @Test + public void testAuthorityMismatch() { + LoadingCache cache = cacheWrapper.getCache(); + long initialCacheSize = cache.size(); + String host = "175.23.2.71"; + URI uri = URI.create(String.format("%s://%s%s", UriSchemes.HDFS_SCHEME, host, PATH)); + String userName = "dremio"; + FileSystem fs = cacheWrapper.getHadoopFsSupplierPluginClassLoader(uri.toString(), new JobConf(), userName).get(); + assertNotNull("File system instance must be not null", 
fs); + assertEquals(String.format("File system scheme must be %s", UriSchemes.HDFS_SCHEME), fs.getScheme(), UriSchemes.HDFS_SCHEME); + assertEquals("Cache must contain exactly two elements", cache.size(), initialCacheSize + 1); + assertTrue("All elements in the cache must have the same scheme and ugi", cache.asMap().keySet().stream().allMatch(k -> k.scheme.equals(uri.getScheme()) && k.userName.equals(userName))); + assertTrue(String.format("One element in the cache must have \"%s\" authority", host), cache.asMap().keySet().stream().anyMatch(k -> k.authority.equals(uri.getAuthority()))); + } + + @Test + public void testUserMismatch() { + LoadingCache cache = cacheWrapper.getCache(); + long initialCacheSize = cache.size(); + URI uri = URI.create(String.format("%s://%s%s", UriSchemes.HDFS_SCHEME, HOST, PATH)); + String userName = "testUser"; + FileSystem fs = cacheWrapper.getHadoopFsSupplierPluginClassLoader(uri.toString(), new JobConf(), userName).get(); + assertNotNull("File system instance must be not null", fs); + assertEquals(String.format("File system scheme must be %s", UriSchemes.HDFS_SCHEME), fs.getScheme(), UriSchemes.HDFS_SCHEME); + assertEquals("Cache must contain exactly two elements", cache.size(), initialCacheSize + 1); + assertTrue("All elements in the cache must have the same scheme and authority", cache.asMap().keySet().stream().allMatch(k -> k.scheme.equals(uri.getScheme()) && k.authority.equals(uri.getAuthority()))); + assertTrue(String.format("One element in the cache must have \"%s\" username", userName), cache.asMap().keySet().stream().anyMatch(k -> k.userName.equals(userName))); + } + + @Test + public void testClose() throws Exception { + LoadingCache cache = cacheWrapper.getCache(); + cacheWrapper.close(); + assertEquals("Cache must be empty", 0, cache.size()); + } + +} diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveReaderProtoUtil.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveReaderProtoUtil.java index 25c0e85170..82427b4e71 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveReaderProtoUtil.java +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveReaderProtoUtil.java @@ -42,7 +42,7 @@ public class TestHiveReaderProtoUtil { @Rule - public final TestRule REPEAT_RULE = TestTools.getRepeatRule(false); + public final TestRule repeatRule = TestTools.getRepeatRule(false); private static final int stringLength = 10; diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveRecordReaderIterator.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveRecordReaderIterator.java index 92af189522..8a52e243ad 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveRecordReaderIterator.java +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/exec/TestHiveRecordReaderIterator.java @@ -19,7 +19,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -97,7 +96,7 @@ public void testIteratorWithoutFilter() { } @Test - public void testIteratorWithFilterAddedInBetween() { + public void testIteratorWithFilterAddedInBetween() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); 
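The two test classes added above, TestHadoopFsCacheKeyPluginClassLoader and TestHadoopFsCacheWrapperPluginClassLoader, pin down the contract of the per-plugin-classloader FileSystem cache: entries are keyed by URI scheme, authority, and user name, and never by path. The cache implementation itself is not part of this excerpt, so the following is only a minimal sketch of that keying scheme, assuming a Guava LoadingCache and a stand-in String value where the real code caches org.apache.hadoop.fs.FileSystem.

```java
import java.net.URI;
import java.util.Objects;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class FsCacheSketch {

  static final class Key {
    final String scheme;
    final String authority;
    final String userName;

    Key(URI uri, String userName) {
      this.scheme = uri.getScheme();
      this.authority = uri.getAuthority();
      this.userName = userName;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Key)) {
        return false;
      }
      Key that = (Key) o;
      // The path is intentionally not compared: hdfs://host/a and hdfs://host/b
      // share one cached FileSystem, exactly what testEquals/testNotEquals verify.
      return Objects.equals(scheme, that.scheme)
          && Objects.equals(authority, that.authority)
          && Objects.equals(userName, that.userName);
    }

    @Override
    public int hashCode() {
      return Objects.hash(scheme, authority, userName);
    }
  }

  private final LoadingCache<Key, String> cache = CacheBuilder.newBuilder()
      .build(new CacheLoader<Key, String>() {
        @Override
        public String load(Key key) {
          // The real loader would create a FileSystem for key.scheme/key.authority
          // as key.userName; a String stands in to keep the sketch self-contained.
          return key.scheme + "://" + key.authority + " as " + key.userName;
        }
      });

  public String get(URI uri, String userName) {
    return cache.getUnchecked(new Key(uri, userName));
  }

  public static void main(String[] args) {
    FsCacheSketch sketch = new FsCacheSketch();
    System.out.println(sketch.get(URI.create("hdfs://localhost/sample/data"), "dremio"));
  }
}
```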
when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getNonMatchingNameValuePairs()); @@ -119,14 +118,11 @@ public void testIteratorWithFilterAddedInBetween() { it.addRuntimeFilter(filter); assertFalse(it.hasNext()); assertEquals(5L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testIteratorWithFilterNothingSkipped() { + public void testIteratorWithFilterNothingSkipped() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getMatchingNameValuePairs()); @@ -145,14 +141,11 @@ public void testIteratorWithFilterNothingSkipped() { assertEquals(reader, it.next()); } assertEquals(0L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testIteratorWithFilterAllSkipped() { + public void testIteratorWithFilterAllSkipped() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getNonMatchingNameValuePairs()); @@ -167,14 +160,11 @@ public void testIteratorWithFilterAllSkipped() { it.addRuntimeFilter(filter); assertFalse(it.hasNext()); assertEquals(10L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testIteratorWithFilterSomeSkipped() { + public void testIteratorWithFilterSomeSkipped() throws Exception { Predicate isSelectedSplit = i -> i==1 || i==3 || i==9; CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); List> readers = getMockSplitReaders(10); @@ -205,15 +195,12 @@ public void testIteratorWithFilterSomeSkipped() { assertEquals(reader, it.next()); } assertEquals(7L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testMultipleFilters() { + public void testMultipleFilters() throws Exception { Predicate isSelectedSplit1 = i -> i==1 || i==2 || i==3 || i==9; Predicate isSelectedSplit2 = i -> i==3 || i==5 || i==9; @@ -261,14 +248,11 @@ public void testMultipleFilters() { assertEquals(recordReader, it.next()); } assertEquals(8L , ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testIteratorEmpty() { + public void testIteratorEmpty() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getMatchingNameValuePairs()); @@ -302,9 +286,6 @@ public SplitReaderCreator next() { it.addRuntimeFilter(filter); assertFalse(it.hasNext()); assertEquals(0L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/ParquetInputFormatTest.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/ParquetInputFormatTest.java index 
8b30cdd368..6f26019846 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/ParquetInputFormatTest.java +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/ParquetInputFormatTest.java @@ -44,8 +44,7 @@ public void testInvalidJobConf() throws Exception{ try { InputSplit[] inputSplits = new ParquetInputFormat().getSplits(jobConf, 1); Assert.fail(); - } - catch (IOException ioe) { + } catch (IOException ioe) { Assert.assertTrue(ioe.getMessage().contains("No input paths specified in job")); } } diff --git a/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/TestHiveMetadataUtils.java b/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/TestHiveMetadataUtils.java index 94e3e7a744..476ede2b6b 100644 --- a/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/TestHiveMetadataUtils.java +++ b/plugins/hive/src/test/java/com/dremio/exec/store/hive/metadata/TestHiveMetadataUtils.java @@ -36,6 +36,7 @@ /** * Tests for HiveMetadataUtils */ +@SuppressWarnings("checkstyle:MemberName") public class TestHiveMetadataUtils { FileInputFormat mock_fileInputFormat = mock(FileInputFormat.class); diff --git a/plugins/hive2/launcher/pom.xml b/plugins/hive2/launcher/pom.xml index 81f3d7610d..8c9a8d6505 100644 --- a/plugins/hive2/launcher/pom.xml +++ b/plugins/hive2/launcher/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-hive2-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive2-plugin-launcher @@ -39,6 +39,11 @@ dremio-sabot-kernel ${project.version} + + com.dremio.plugins + dremio-plugin-common + ${project.version} + com.dremio.plugins dremio-hive-plugin-common diff --git a/plugins/hive2/launcher/src/main/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java b/plugins/hive2/launcher/src/main/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java index ec7bc0a630..7362b7bd5a 100644 --- a/plugins/hive2/launcher/src/main/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java +++ b/plugins/hive2/launcher/src/main/java/com/dremio/exec/store/hive/Hive2StoragePluginConfig.java @@ -24,7 +24,7 @@ import com.dremio.exec.planner.serialization.kryo.serializers.SourceConfigAwareConnectionConfDeserializer; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.StoragePlugin; -import com.dremio.exec.store.hive.pf4j.NativeLibPluginManager; +import com.dremio.plugins.pf4j.NativeLibPluginManager; /** * Hive 2.x storage plugin configuration. 
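Three exception-handling styles meet in this diff: the TestHiveRecordReaderIterator hunks above drop their catch/printStackTrace/fail blocks in favor of a declared throws Exception, ParquetInputFormatTest keeps the classic try/fail/catch because the exception itself is under test, and the refresh tests earlier use AssertJ's assertThatThrownBy. A self-contained sketch of all three (the splits and count methods are hypothetical stand-ins, not from this codebase):

```java
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;

import org.junit.Test;

public class ExceptionStyleTest {

  // Hypothetical methods under test, standing in for the real readers/formats.
  private static int splits(String path) throws IOException {
    throw new IOException("No input paths specified in job");
  }

  private static int count() throws Exception {
    return 5;
  }

  // Style kept by ParquetInputFormatTest: fail() is reached only when the
  // expected exception was not thrown, and the catch block inspects it.
  @Test
  public void classicTryFailCatch() {
    try {
      splits("");
      fail();
    } catch (IOException ioe) {
      assertTrue(ioe.getMessage().contains("No input paths specified in job"));
    }
  }

  // Style used by the refresh tests: one expression asserts type and message.
  @Test
  public void assertJThrownBy() {
    assertThatThrownBy(() -> splits(""))
        .isInstanceOf(IOException.class)
        .hasMessageContaining("No input paths specified in job");
  }

  // Style adopted by TestHiveRecordReaderIterator: when an exception would be
  // a plain test failure, declare it and let JUnit report the full stack trace
  // instead of catching, printing, and calling fail(e.getMessage()).
  @Test
  public void declaredThrows() throws Exception {
    assertEquals(5, count());
  }
}
```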
@@ -34,7 +34,7 @@ public class Hive2StoragePluginConfig extends HiveStoragePluginConfig { @Override public StoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { - final PluginManager manager = new NativeLibPluginManager(); + final PluginManager manager = new NativeLibPluginManager(context.getOptionManager()); manager.loadPlugins(); manager.startPlugin(getPf4jPluginId()); diff --git a/plugins/hive2/plugin/pom.xml b/plugins/hive2/plugin/pom.xml index 24dfa53df7..5b5f52e1df 100644 --- a/plugins/hive2/plugin/pom.xml +++ b/plugins/hive2/plugin/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-hive2-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive2-plugin @@ -37,7 +37,6 @@ ${plugin.hive2.azure-datalake.version} ${plugin.hive2.hadoop-azure.version} ${plugin.hive2.hadoop-azure-datalake.version} - ${plugin.hive2.aws-sdk.version} ${plugin.hive2.hadoop.version} ${plugin.hive2.hive.version} ${plugin.hive2.hbase.version} @@ -871,6 +870,14 @@ test-jar ${hadoop.version} test + + + + javax.servlet.jsp + jsp-api + + @@ -931,6 +938,10 @@ org.apache.calcite.avatica * + + org.pentaho + pentaho-aggdesigner-algorithm + @@ -1502,7 +1513,6 @@ org.apache.hadoop hadoop-azure - compile com.dremio.contrib diff --git a/plugins/hive2/pom.xml b/plugins/hive2/pom.xml index 7b61d7d857..f9d83f43fb 100644 --- a/plugins/hive2/pom.xml +++ b/plugins/hive2/pom.xml @@ -24,7 +24,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.plugins diff --git a/plugins/hive3/launcher/pom.xml b/plugins/hive3/launcher/pom.xml index f05205e463..26a626f504 100644 --- a/plugins/hive3/launcher/pom.xml +++ b/plugins/hive3/launcher/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-hive3-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive3-plugin-launcher @@ -39,6 +39,11 @@ dremio-sabot-kernel ${project.version} + + com.dremio.plugins + dremio-plugin-common + ${project.version} + com.dremio.plugins dremio-hive-plugin-common diff --git a/plugins/hive3/launcher/src/main/java/com/dremio/exec/store/hive/Hive3StoragePluginConfig.java b/plugins/hive3/launcher/src/main/java/com/dremio/exec/store/hive/Hive3StoragePluginConfig.java index e8f8fce9e8..527c604af1 100644 --- a/plugins/hive3/launcher/src/main/java/com/dremio/exec/store/hive/Hive3StoragePluginConfig.java +++ b/plugins/hive3/launcher/src/main/java/com/dremio/exec/store/hive/Hive3StoragePluginConfig.java @@ -24,7 +24,7 @@ import com.dremio.exec.planner.serialization.kryo.serializers.SourceConfigAwareConnectionConfDeserializer; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.StoragePlugin; -import com.dremio.exec.store.hive.pf4j.NativeLibPluginManager; +import com.dremio.plugins.pf4j.NativeLibPluginManager; /** * Hive 3.x storage plugin configuration. 
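The launcher configs on both Hive versions now build NativeLibPluginManager with the OptionManager and then drive the standard PF4J lifecycle; on the plugin side, creators such as Hive3PluginCreator (see the Hive3 hunks just below) are contributed through PF4J's @Extension and discovered by the manager. A minimal sketch with plain PF4J, using DefaultPluginManager in place of Dremio's subclass and a hypothetical extension point and plugin id:

```java
import java.util.List;

import org.pf4j.DefaultPluginManager;
import org.pf4j.ExtensionPoint;
import org.pf4j.PluginManager;

public class Pf4jSketch {

  // Stand-in for the creator interface; concrete implementations inside a
  // plugin are annotated with @org.pf4j.Extension and indexed at build time
  // by PF4J's annotation processor.
  public interface CreatorPoint extends ExtensionPoint {
    String create();
  }

  public static void main(String[] args) {
    PluginManager manager = new DefaultPluginManager();
    manager.loadPlugins();                  // discover plugin archives on the plugins path
    manager.startPlugins();                 // the test-side config starts everything...
    // manager.startPlugin("hive3-plugin"); // ...the launcher starts a single id (id hypothetical)

    // The host asks PF4J for the implementations the started plugins contribute.
    List<CreatorPoint> creators = manager.getExtensions(CreatorPoint.class);
    creators.forEach(c -> System.out.println(c.create()));

    manager.stopPlugins();
  }
}
```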
@@ -34,7 +34,7 @@ public class Hive3StoragePluginConfig extends HiveStoragePluginConfig { @Override public StoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { - final PluginManager manager = new NativeLibPluginManager(); + final PluginManager manager = new NativeLibPluginManager(context.getOptionManager()); manager.loadPlugins(); manager.startPlugin(getPf4jPluginId()); diff --git a/plugins/hive3/plugin/pom.xml b/plugins/hive3/plugin/pom.xml index 2628b70df2..9e1e5244a5 100644 --- a/plugins/hive3/plugin/pom.xml +++ b/plugins/hive3/plugin/pom.xml @@ -22,7 +22,7 @@ com.dremio.plugins dremio-hive3-plugin-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-hive3-plugin diff --git a/plugins/hive3/plugin/src/main/codegen/templates/HiveRecordReaders.java b/plugins/hive3/plugin/src/main/codegen/templates/HiveRecordReaders.java index 0ce816d634..df42ea5c79 100644 --- a/plugins/hive3/plugin/src/main/codegen/templates/HiveRecordReaders.java +++ b/plugins/hive3/plugin/src/main/codegen/templates/HiveRecordReaders.java @@ -195,8 +195,7 @@ public int populateData() throws IOException, SerDeException { if (!hasNext) { break; } - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } Object deSerializedValue = partitionSerDe.deserialize((Writable) value); @@ -226,8 +225,7 @@ public void close() throws IOException { <#else> try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())){ reader.close(); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3PluginCreator.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3PluginCreator.java index 960e4bed3c..79fbffd6c5 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3PluginCreator.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3PluginCreator.java @@ -29,6 +29,7 @@ @Extension public class Hive3PluginCreator implements StoragePluginCreator { + @Override public Hive3StoragePlugin createStoragePlugin(PluginManager pf4jManager, HiveStoragePluginConfig config, SabotContext context, String name, Provider pluginIdProvider) { final HiveConfFactory confFactory = new HiveConfFactory(); diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3StoragePlugin.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3StoragePlugin.java index 07007acac6..bbc79d8e5c 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3StoragePlugin.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/Hive3StoragePlugin.java @@ -67,6 +67,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.TableOperations; +import org.apache.iceberg.io.FileIO; import org.apache.orc.OrcConf; import org.pf4j.PluginManager; @@ -105,10 +106,7 @@ import com.dremio.exec.catalog.RollbackOption; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.TableMutationOptions; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.dotfile.View; -import com.dremio.exec.hadoop.HadoopFsCacheWrapperDremioClassLoader; -import com.dremio.exec.hadoop.HadoopFsSupplierProviderDremioClassLoader; import com.dremio.exec.physical.base.OpProps; import 
com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.physical.base.ViewOptions; @@ -262,7 +260,6 @@ public class Hive3StoragePlugin extends BaseHiveStoragePlugin implements Storage private int signatureValidationParallelism = 16; private long signatureValidationTimeoutMS = 2_000L; - private final HadoopFsSupplierProviderDremioClassLoader hadoopFsSupplierProviderDremioClassLoader = new HadoopFsCacheWrapperDremioClassLoader(); private final HadoopFsSupplierProviderPluginClassLoader hadoopFsSupplierProviderPluginClassLoader = new HadoopFsCacheWrapperPluginClassLoader(); @VisibleForTesting @@ -303,7 +300,7 @@ public Hive3StoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotCon } } - private FileSystem createFileSystem(String filePath, OperatorContext operatorContext, + private FileSystem createFileSystem(String filePath, String userName, OperatorContext operatorContext, boolean injectAsyncOptions, boolean disableHDFSCache) throws IOException { try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { Path path = new Path(filePath); @@ -315,7 +312,7 @@ private FileSystem createFileSystem(String filePath, OperatorContext operatorCon if (disableHDFSCache) { jobConf.setBoolean("fs.hdfs.impl.disable.cache", true); } - return createFS(new DremioHadoopFileSystemWrapper(new Path(uri), jobConf, operatorContext != null ? operatorContext.getStats() : null, cacheAndAsyncConf.isAsyncEnabled(), this.getHadoopFsSupplierPluginClassLoader(uri.toString(), jobConf).get()), + return createFS(new DremioHadoopFileSystemWrapper(new Path(uri), jobConf, operatorContext != null ? operatorContext.getStats() : null, cacheAndAsyncConf.isAsyncEnabled(), this.getHadoopFsSupplierPluginClassLoader(uri.toString(), jobConf, userName).get()), operatorContext, cacheAndAsyncConf); } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -323,28 +320,23 @@ private FileSystem createFileSystem(String filePath, OperatorContext operatorCon } @Override - public Supplier getHadoopFsSupplier(String path, Iterable> conf, String queryUser) { - return hadoopFsSupplierProviderDremioClassLoader.getHadoopFsSupplierDremioClassLoader(path, conf); - } - - @Override - public Supplier getHadoopFsSupplierPluginClassLoader(String path, Iterable> conf) { - return hadoopFsSupplierProviderPluginClassLoader.getHadoopFsSupplierPluginClassLoader(path, conf); + public Supplier getHadoopFsSupplierPluginClassLoader(String path, Iterable> conf, String userName) { + return hadoopFsSupplierProviderPluginClassLoader.getHadoopFsSupplierPluginClassLoader(path, conf, isImpersonationEnabled() ? 
userName: SystemUser.SYSTEM_USERNAME); } @Override public FileSystem createFS(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, false, false); + return createFileSystem(filePath, userName, operatorContext, false, false); } @Override public FileSystem createFSWithAsyncOptions(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, true, false); + return createFileSystem(filePath, userName, operatorContext, true, false); } @Override public FileSystem createFSWithoutHDFSCache(String filePath, String userName, OperatorContext operatorContext) throws IOException { - return createFileSystem(filePath, operatorContext, false, true); + return createFileSystem(filePath, userName, operatorContext, false, true); } @Override @@ -354,11 +346,13 @@ public Iterable> getConfigProperties() { } } + @Override public String getDefaultCtasFormatProperty(){ return hiveConf.get(HIVE_DEFAULT_CTAS_FORMAT); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { final HiveDatasetMetadata hiveDatasetMetadata = metadata.unwrap(HiveDatasetMetadata.class); try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { @@ -397,7 +391,7 @@ public boolean allowUnlimitedSplits(DatasetHandle handle, DatasetConfig datasetC try (Closeable ccls = HivePf4jPlugin.swapClassLoader()) { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tablePathComponents, true); + HiveMetadataUtils.resolveSchemaComponents(tablePathComponents); final Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { throw new ConnectorException(String.format("Dataset path '%s', table not found.", tablePathComponents)); @@ -425,7 +419,7 @@ public boolean canGetDatasetMetadataInCoordinator() { @Override public List resolveTableNameToValidPath(List tableSchemaPath) { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath, true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath); return Arrays.asList(schemaComponents.getDbName(), schemaComponents.getTableName()); } @@ -451,8 +445,7 @@ public ScanTableFunction createScanTableFunction(FragmentExecutionContext fec, O public AbstractRefreshPlanBuilder createRefreshDatasetPlanBuilder(SqlHandlerConfig config, SqlRefreshDataset sqlRefreshDataset, UnlimitedSplitsMetadataProvider metadataProvider, boolean isFullRefresh) { if (isFullRefresh) { return new HiveFullRefreshDatasetPlanBuilder(config, sqlRefreshDataset, metadataProvider); - } - else { + } else { return new HiveIncrementalRefreshDatasetPlanBuilder(config, sqlRefreshDataset, metadataProvider); } } @@ -466,11 +459,9 @@ public ReadSignatureProvider createReadSignatureProvider(com.google.protobuf.Byt boolean isFullRefresh, boolean isPartialRefresh) { if (isFullRefresh) { return new HiveFullRefreshReadSignatureProvider(dataTableRoot, queryStartTime, partitionPaths, partitionExists); - } - else if (isPartialRefresh) { + } else if (isPartialRefresh) { return new HivePartialRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionPaths, partitionExists); - } - else { + } else { return new 
HiveIncrementalRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionPaths, partitionExists); } } @@ -488,7 +479,7 @@ public DirListingRecordReader createDirListRecordReader(OperatorContext context, @Override public TableOperations createIcebergTableOperations(FileSystem fs, String queryUserName, IcebergTableIdentifier tableIdentifier) { IcebergHiveTableIdentifier hiveTableIdentifier = (IcebergHiveTableIdentifier) tableIdentifier; - DremioFileIO fileIO = new DremioFileIO(fs, (Iterable>)hiveConf, this); + FileIO fileIO = createIcebergFileIO(fs, null, null, null, null); if (hiveConf.getBoolean(HiveConfFactory.ENABLE_DML_TESTS_WITHOUT_LOCKING, false)) { return new NoOpHiveTableOperations(hiveConf, getClient(SystemUser.SYSTEM_USERNAME), fileIO, IcebergHiveModel.HIVE, hiveTableIdentifier.getNamespace(), hiveTableIdentifier.getTableName()); @@ -497,6 +488,13 @@ public TableOperations createIcebergTableOperations(FileSystem fs, String queryU fileIO, IcebergHiveModel.HIVE, hiveTableIdentifier.getNamespace(), hiveTableIdentifier.getTableName()); } + @Override + public FileIO createIcebergFileIO(FileSystem fs, OperatorContext context, List dataset, + String datasourcePluginUID, Long fileLength) { + return new DremioFileIO(fs, context, dataset, datasourcePluginUID, fileLength, + new HiveFileSystemConfigurationAdapter(hiveConf)); + } + @Override public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, NamespaceService userNamespaceService) { if (config.getPhysicalDataset().getIcebergMetadata() == null || @@ -506,7 +504,7 @@ public boolean isIcebergMetadataValid(DatasetConfig config, NamespaceKey key, Na } String existingRootPointer = config.getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation(); try { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); Table table = getClient(SystemUser.SYSTEM_USERNAME).getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { @@ -552,7 +550,7 @@ public CreateTableEntry createNewTable(NamespaceKey tableSchemaPath, SchemaConfi Map storageOptions, boolean isResultsTable) { Preconditions.checkArgument(icebergTableProps != null, "Iceberg properties are not provided"); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); com.dremio.io.file.Path tableFolderPath = null; String tableFolderLocation = null; HiveClient client = getClient(schemaConfig.getUserName()); @@ -572,6 +570,7 @@ public CreateTableEntry createNewTable(NamespaceKey tableSchemaPath, SchemaConfi case MERGE: case UPDATE: case OPTIMIZE: + case VACUUM: client.checkDmlPrivileges( schemaComponents.getDbName(), schemaComponents.getTableName(), @@ -619,7 +618,7 @@ String resolveTableLocation(HiveMetadataUtils.SchemaComponents schemaComponents, @Override public void createEmptyTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, BatchSchema batchSchema, WriterOptions writerOptions) { final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); 
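createIcebergTableOperations above now obtains its FileIO from the new createIcebergFileIO factory instead of constructing DremioFileIO inline, so every caller picks up the HiveFileSystemConfigurationAdapter wiring consistently. FileIO is Iceberg's narrow file-access interface that TableOperations uses to read and write metadata files; a sketch with stock Iceberg classes (HadoopFileIO standing in for DremioFileIO, and the paths are hypothetical):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.hadoop.HadoopFileIO;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.io.OutputFile;

public class FileIOSketch {
  public static void main(String[] args) {
    // HadoopFileIO is Iceberg's reference implementation; DremioFileIO layers
    // Dremio's FileSystem wrapper and configuration adapter on top of the same
    // interface.
    try (FileIO io = new HadoopFileIO(new Configuration())) {
      // Metadata access in TableOperations reduces to three primitives:
      // newInputFile, newOutputFile, and deleteFile.
      InputFile in = io.newInputFile("/tmp/warehouse/db/t/metadata/v1.metadata.json");   // hypothetical path
      OutputFile out = io.newOutputFile("/tmp/warehouse/db/t/metadata/v2.metadata.json"); // hypothetical path
      System.out.println(in.location() + " exists: " + in.exists());
      System.out.println("would write to: " + out.location());
      // io.deleteFile(in.location()); // the third primitive, shown but not run
    }
  }
}
```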
String tableLocation = resolveTableLocation(schemaComponents, schemaConfig, writerOptions); tableLocation = HiveMetadataUtils.resolveCreateTableLocation(hiveConf, schemaComponents, tableLocation); @@ -642,7 +641,7 @@ public StoragePluginId getId() { public void dropTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { final HiveClient client = getClient(schemaConfig.getUserName()); - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); try { client.dropTable(schemaComponents.getDbName(), schemaComponents.getTableName(), false); } catch (NoSuchObjectException | UnknownTableException e) { @@ -664,7 +663,7 @@ public void alterTable(NamespaceKey tableSchemaPath, DatasetConfig datasetConfig SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -677,7 +676,7 @@ public void alterTable(NamespaceKey tableSchemaPath, DatasetConfig datasetConfig public void truncateTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, TableMutationOptions tableMutationOptions) { final HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); client.checkTruncateTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); DatasetConfig datasetConfig = null; @@ -701,7 +700,7 @@ public void rollbackTable(NamespaceKey tableSchemaPath, TableMutationOptions tableMutationOptions) { final HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents()); client.checkDmlPrivileges( schemaComponents.getDbName(), schemaComponents.getTableName(), @@ -713,26 +712,6 @@ public void rollbackTable(NamespaceKey tableSchemaPath, icebergModel.rollbackTable(icebergModel.getTableIdentifier(metadataLocation), rollbackOption); } - @Override - public void vacuumTable(NamespaceKey tableSchemaPath, - DatasetConfig datasetConfig, - SchemaConfig schemaConfig, - VacuumOption vacuumOption, - TableMutationOptions tableMutationOptions) { - HiveClient client = getClient(schemaConfig.getUserName()); - final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(tableSchemaPath.getPathComponents(), false); - client.checkDmlPrivileges( - schemaComponents.getDbName(), - schemaComponents.getTableName(), - getPrivilegeActionTypesForIcebergDml(IcebergCommandType.VACUUM)); - - SplitsPointer splits = 
DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); - String metadataLocation = IcebergUtils.getMetadataLocation(datasetConfig, splits.getPartitionChunks().iterator()); - IcebergModel icebergModel = getIcebergModel(metadataLocation, schemaComponents, schemaConfig.getUserName()); - icebergModel.vacuumTable(icebergModel.getTableIdentifier(metadataLocation), vacuumOption); - } - @Override public void addColumns(NamespaceKey key, DatasetConfig datasetConfig, @@ -741,7 +720,7 @@ public void addColumns(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -759,7 +738,7 @@ public void dropColumn(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -778,7 +757,7 @@ public void changeColumn(NamespaceKey key, TableMutationOptions tableMutationOptions) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -796,7 +775,7 @@ public void addPrimaryKey(NamespaceKey table, ResolvedVersionContext versionContext) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -813,7 +792,7 @@ public void dropPrimaryKey(NamespaceKey table, ResolvedVersionContext versionContext) { HiveClient client = getClient(schemaConfig.getUserName()); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); SplitsPointer splits = DatasetSplitsPointer.of(context.getNamespaceService(schemaConfig.getUserName()), datasetConfig); @@ -846,14 +825,14 @@ public List getPrimaryKeyFromMetadata(NamespaceKey table, final String 
userName = schemaConfig.getUserName(); HiveClient client = getClient(userName); final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents(), false); + HiveMetadataUtils.resolveSchemaComponents(table.getPathComponents()); client.checkAlterTablePrivileges(schemaComponents.getDbName(), schemaComponents.getTableName()); final IcebergModel icebergModel; final String path; if (DatasetHelper.isInternalIcebergTable(datasetConfig)) { final FileSystemPlugin metaStoragePlugin = context.getCatalogService().getSource(METADATA_STORAGE_PLUGIN_NAME); - icebergModel = metaStoragePlugin.getIcebergModel(metaStoragePlugin.getSystemUserFS()); + icebergModel = metaStoragePlugin.getIcebergModel(); String metadataTableName = datasetConfig.getPhysicalDataset().getIcebergMetadata().getTableUuid(); path = metaStoragePlugin.resolveTablePathToValidPath(metadataTableName).toString(); } else if (DatasetHelper.isIcebergDataset(datasetConfig)) { @@ -899,7 +878,7 @@ public boolean hasAccessPermission(String user, NamespaceKey key, DatasetConfig } try { - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(key.getPathComponents()); final Table table = clientsByUser .get(user).getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); if (table == null) { @@ -1130,8 +1109,7 @@ private boolean hasChanged() throws IOException { " cached last modification time = {}, actual modified time = {}", cachedEntityPath, cachedEntity.getLastModificationTime(), fileStatus.getModificationTime()); return true; - } - else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && optionManager.getOption(ExecConstants.HIVE_SIGNATURE_CHANGE_RECURSIVE_LISTING) + } else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && optionManager.getOption(ExecConstants.HIVE_SIGNATURE_CHANGE_RECURSIVE_LISTING) && (cachedEntity.getPath() == null || cachedEntity.getPath().isEmpty())) { final RemoteIterator statuses = fs.listFiles(cachedEntityPath, true); while (statuses.hasNext()) { @@ -1160,7 +1138,7 @@ else if (MetadataRefreshUtils.unlimitedSplitsSupportEnabled(optionManager) && op MetadataValidity checkHiveMetadata(HiveTableXattr tableXattr, EntityPath datasetPath, BatchSchema tableSchema, final HiveReadSignature readSignature) throws TException { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); - final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents(), true); + final HiveMetadataUtils.SchemaComponents schemaComponents = HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents()); Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), true); @@ -1193,11 +1171,9 @@ MetadataValidity checkHiveMetadata(HiveTableXattr tableXattr, EntityPath dataset return MetadataValidity.INVALID; } - boolean includeComplexTypes = optionManager.getOption(ExecConstants.HIVE_COMPLEXTYPES_ENABLED); - boolean isMapTypeEnabled = optionManager.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE); // cached schema may have $_dremio_update_$ column added, this should not be considered during schema comparisons BatchSchema tableSchemaWithoutInternalCols = tableSchema.dropField(IncrementalUpdateUtils.UPDATE_COLUMN); - BatchSchema hiveSchema = 
HiveMetadataUtils.getBatchSchema(table, hiveConf, includeComplexTypes, isMapTypeEnabled, this); + BatchSchema hiveSchema = HiveMetadataUtils.getBatchSchema(table, hiveConf, new HiveSchemaTypeOptions(optionManager), this); if (!hiveSchema.equalsTypesWithoutPositions(tableSchemaWithoutInternalCols)) { // refresh metadata if converted schema is not same as schema in kvstore logger.debug("{}: metadata INVALID - schema has changed, cached: {}, actual: {}", datasetPath, @@ -1353,12 +1329,13 @@ private StatsEstimationParameters getStatsParams() { @Override public Optional getDatasetHandle(EntityPath datasetPath, GetDatasetOption... options) throws ConnectorException { final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); - final HiveMetadataUtils.SchemaComponents schemaComponents = - HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents(), false); - if (schemaComponents == null) { + if (!HiveMetadataUtils.isValidPathSchema(datasetPath.getComponents())) { return Optional.empty(); } + final HiveMetadataUtils.SchemaComponents schemaComponents = + HiveMetadataUtils.resolveSchemaComponents(datasetPath.getComponents()); + final boolean tableExists; try { tableExists = client.tableExists(schemaComponents.getDbName(), schemaComponents.getTableName()); @@ -1429,7 +1406,7 @@ public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, Li HiveReaderProtoUtil.convertValuesToNonProtoAttributeValues(hiveTableXattrFromKVStore.getDatasetOptionMap())); } - final HivePartitionChunkListing.Builder builder = HivePartitionChunkListing + final HivePartitionChunkListing.Builder builder = HivePartitionChunkListing .newBuilder() .hiveConf(hiveConf) .storageImpersonationEnabled(storageImpersonationEnabled) @@ -1437,7 +1414,6 @@ public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, Li .enforceVarcharWidth(enforceVarcharWidth) .maxInputSplitsPerPartition(toIntExact(hiveSettings.getMaxInputSplitsPerPartition())) .optionManager(optionManager); - boolean includeComplexTypes = optionManager.getOption(ExecConstants.HIVE_COMPLEXTYPES_ENABLED); final HiveClient client = getClient(SystemUser.SYSTEM_USERNAME); final TableMetadata tableMetadata = HiveMetadataUtils.getTableMetadata( @@ -1447,7 +1423,7 @@ public PartitionChunkListing listPartitionChunks(DatasetHandle datasetHandle, Li HiveMetadataUtils.getMaxLeafFieldCount(options), HiveMetadataUtils.getMaxNestedFieldLevels(options), TimeTravelOption.getTimeTravelOption(options), - includeComplexTypes, + new HiveSchemaTypeOptions(optionManager), hiveConf, this); @@ -1484,7 +1460,7 @@ private HivePartitionChunkListing.Builder buildSplits(HivePartitionChunkListing. 
private List getDeltaSplits(TableMetadata tableMetadata) { try { - String tableLocation = tableMetadata.getTable().getSd().getLocation(); + String tableLocation = DeltaHiveInputFormat.getLocation(tableMetadata.getTable(), optionManager); FileSystem fs = createFS(tableLocation, SystemUser.SYSTEM_USERNAME, null); DeltaLakeTable deltaLakeTable = new DeltaLakeTable(getSabotContext(), fs, tableLocation); return deltaLakeTable.getAllSplits(); @@ -1498,7 +1474,7 @@ private HivePartitionChunkListing.SplitType getSplitType(TableMetadata tableMeta return ICEBERG_MANIFEST_SPLIT; } - if (DeltaHiveInputFormat.isDeltaTable(tableMetadata.getTable().getParameters().get(META_TABLE_STORAGE), optionManager)) { + if (DeltaHiveInputFormat.isDeltaTable(tableMetadata.getTable(), optionManager)) { return DELTA_COMMIT_LOGS; } @@ -1606,8 +1582,7 @@ public BytesOutput provideSignature(DatasetHandle datasetHandle, DatasetMetadata .setRootPointer(metadataAccumulator.getRootPointer()) .build() .toByteArray()); - } - else { + } else { return BytesOutput.NONE; } } @@ -1660,11 +1635,6 @@ public void close() { clientsByUser.cleanUp(); clientsByUser = null; } - try { - hadoopFsSupplierProviderDremioClassLoader.close(); - } catch (Exception e) { - logger.warn("Failed to close hadoopFsSupplierProviderDremioClassLoader", e); - } try { hadoopFsSupplierProviderPluginClassLoader.close(); @@ -1866,6 +1836,7 @@ private UserException buildAlreadyClosedException() { .buildSilently(); } + @Override public T getPF4JStoragePlugin() { return (T) this; } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/HiveUtilities.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/HiveUtilities.java index d6f5d4439b..fb9e3da93e 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/HiveUtilities.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/HiveUtilities.java @@ -55,7 +55,6 @@ import com.dremio.common.types.TypeProtos.MajorType; import com.dremio.common.util.Closeable; import com.dremio.exec.planner.physical.PlannerSettings; -import com.dremio.exec.store.hive.deltalake.DeltaHiveInputFormat; import com.dremio.exec.work.ExecErrorConstants; import com.dremio.hive.proto.HiveReaderProto.Prop; import com.dremio.hive.proto.HiveReaderProto.SerializedInputSplit; @@ -122,7 +121,7 @@ public static final AbstractSerDe createSerDe(final JobConf jobConf, final Strin * @throws Exception */ public static final Class> getInputFormatClass(final JobConf jobConf, Optional inputFormat, - Optional storageHandlerName, OptionManager options) throws Exception { + Optional storageHandlerName) throws Exception { if (inputFormat.isPresent()) { return (Class>) Class.forName(inputFormat.get()); } @@ -131,9 +130,6 @@ public static final AbstractSerDe createSerDe(final JobConf jobConf, final Strin try (final Closeable swapper = HivePf4jPlugin.swapClassLoader()) { // HiveUtils.getStorageHandler() depends on the current context classloader if you query and HBase table, // and don't have an HBase session open. 
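getInputFormatClass above resolves the Hive storage handler inside try (Closeable swapper = HivePf4jPlugin.swapClassLoader()), the same idiom Hive3StoragePlugin uses throughout, because HiveUtils.getStorageHandler loads classes via the thread's context classloader. The idiom itself is plain Java; a self-contained sketch (the ClassLoaderSwap name is hypothetical, not Dremio's class):

```java
// Swap the thread context classloader for the duration of a
// try-with-resources block, then restore the original on close().
public final class ClassLoaderSwap implements AutoCloseable {
  private final ClassLoader previous;

  private ClassLoaderSwap(ClassLoader target) {
    this.previous = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(target);
  }

  public static ClassLoaderSwap swapTo(ClassLoader target) {
    return new ClassLoaderSwap(target);
  }

  @Override
  public void close() {
    Thread.currentThread().setContextClassLoader(previous);
  }

  public static void main(String[] args) {
    // Stand-in for the plugin's classloader.
    ClassLoader pluginLoader = ClassLoaderSwap.class.getClassLoader();
    try (ClassLoaderSwap swap = ClassLoaderSwap.swapTo(pluginLoader)) {
      // Hive/Hadoop code that resolves classes through the context
      // classloader runs here against the plugin's loader.
      System.out.println(Thread.currentThread().getContextClassLoader());
    }
    // The original context classloader is restored at this point.
  }
}
```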
- if (DeltaHiveInputFormat.isDeltaTable(storageHandlerName.get(), options)) { - return DeltaHiveInputFormat.class; - } final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(jobConf, storageHandlerName.get()); return (Class>) storageHandler.getInputFormatClass(); } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java index b45e831e63..ab4e668f7d 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/DremioORCRecordUtils.java @@ -328,8 +328,7 @@ public DiskRangeList readFileData( if (zcr != null) { try { return readDiskRangesUsingZCR(fs, file, path, zcr, pool, baseOffset, range); - } - catch (UnsupportedOperationException ioe) { + } catch (UnsupportedOperationException ioe) { // zero copy read failed. Clear all buffers and unset zero copy read if (pool != null) { pool.clear(); @@ -658,10 +657,12 @@ private static final class ByteBufferWrapper { this.byteBuffer = byteBuffer; } + @Override public boolean equals(Object rhs) { return (rhs instanceof ByteBufferWrapper) && (this.byteBuffer == ((ByteBufferWrapper) rhs).byteBuffer); } + @Override public int hashCode() { return System.identityHashCode(byteBuffer); } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveFieldConverter.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveFieldConverter.java index 78a424a5ad..ef734ce246 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveFieldConverter.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveFieldConverter.java @@ -18,10 +18,12 @@ import static com.dremio.exec.store.hive.HiveUtilities.throwUnsupportedHiveDataTypeError; import java.lang.reflect.InvocationTargetException; +import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Map; import java.util.concurrent.TimeUnit; +import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.util.LargeMemoryUtil; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.BitVector; @@ -34,12 +36,34 @@ import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.MapVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.NullableStructWriter; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.complex.impl.UnionMapWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.complex.writer.BigIntWriter; +import org.apache.arrow.vector.complex.writer.BitWriter; +import org.apache.arrow.vector.complex.writer.DateMilliWriter; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.complex.writer.Float4Writer; +import org.apache.arrow.vector.complex.writer.Float8Writer; +import org.apache.arrow.vector.complex.writer.IntWriter; +import org.apache.arrow.vector.complex.writer.TimeStampMilliWriter; +import org.apache.arrow.vector.complex.writer.VarBinaryWriter; +import org.apache.arrow.vector.complex.writer.VarCharWriter; import org.apache.arrow.vector.holders.DecimalHolder; +import 
org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.util.DecimalUtility; import org.apache.hadoop.hive.serde2.io.DateWritableV2; import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector; @@ -55,7 +79,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.Text; @@ -64,7 +91,6 @@ import com.dremio.sabot.exec.context.OperatorContext; import com.google.common.collect.Maps; - public abstract class HiveFieldConverter { protected static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HiveFieldConverter.class); public abstract void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex); @@ -100,7 +126,6 @@ protected void checkSizeLimit(int size) { primMap.put(PrimitiveCategory.CHAR, Char.class); } - public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) throws IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException { switch (typeInfo.getCategory()) { @@ -123,38 +148,28 @@ public static HiveFieldConverter create(TypeInfo typeInfo, OperatorContext conte break; case LIST: { - Class clazz = List.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveList((ListTypeInfo) typeInfo, context, options); } - break; case STRUCT: { - Class clazz = Struct.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveStruct((StructTypeInfo) typeInfo, context, options); } - break; case MAP: { - Class clazz = HiveMap.class; - if (clazz != null) { - return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); - } + return new HiveMap((MapTypeInfo) typeInfo, context, options); } - break; case UNION: { Class clazz = Union.class; if (clazz != null) { return clazz.getConstructor(HiveOperatorContextOptions.class).newInstance(options); } } + break; default: throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString()); } return null; } + public static class Union extends HiveFieldConverter { public Union(HiveOperatorContextOptions options) { super(options); @@ -166,37 +181,297 @@ public void setSafeValue(ObjectInspector oi, Object 
hiveFieldValue, ValueVector return; } } - public static class HiveMap extends HiveFieldConverter { - public HiveMap(HiveOperatorContextOptions options) { + + public abstract static class BaseComplexConverter extends HiveFieldConverter { + private final OperatorContext context; + + protected BaseComplexConverter(OperatorContext context, HiveOperatorContextOptions options) { super(options); + this.context = context; + } + + protected void write(BaseWriter.ListWriter writer, TypeInfo typeInfo, ObjectInspector oi, Object value) { + if (value == null) { + return; + } + + switch (typeInfo.getCategory()) { + case PRIMITIVE: { + final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo; + switch (primitiveTypeInfo.getPrimitiveCategory()) { + case BOOLEAN: + writeBoolean(writer.bit(), ((BooleanObjectInspector) oi).get(value)); + break; + case DOUBLE: + writeDouble(writer.float8(), ((DoubleObjectInspector) oi).get(value)); + break; + case FLOAT: + writeFloat(writer.float4(), ((FloatObjectInspector) oi).get(value)); + break; + case DECIMAL: + writeDecimal(writer.decimal(), getDecimalValue((DecimalTypeInfo) typeInfo, oi, value)); + break; + case BYTE: + writeInt(writer.integer(), ((ByteObjectInspector) oi).get(value)); + break; + case INT: + writeInt(writer.integer(), ((IntObjectInspector) oi).get(value)); + break; + case LONG: + writeLong(writer.bigInt(), ((LongObjectInspector) oi).get(value)); + break; + case SHORT: + writeInt(writer.integer(), ((ShortObjectInspector) oi).get(value)); + break; + case BINARY: + writeBinary(writer.varBinary(), ((BinaryObjectInspector) oi).getPrimitiveJavaObject(value)); + break; + case STRING: + writeText(writer.varChar(), ((StringObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case VARCHAR: + writeText(writer.varChar(), ((HiveVarcharObjectInspector) oi).getPrimitiveWritableObject(value).getTextValue()); + break; + case TIMESTAMP: + writeTimestamp(writer.timeStampMilli(), ((TimestampObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case DATE: + writeDate(writer.dateMilli(), ((DateObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case CHAR: + writeText(writer.varChar(), ((HiveCharObjectInspector) oi).getPrimitiveWritableObject(value).getStrippedValue()); + break; + default: + break; + } + } + break; + case LIST: + writeList(writer.list(), (ListTypeInfo) typeInfo, (ListObjectInspector) oi, value); + break; + case MAP: + writeMap(writer.map(false), (MapTypeInfo) typeInfo, (MapObjectInspector) oi, value); + break; + case STRUCT: + writeStruct(writer.struct(), (StructTypeInfo) typeInfo, (StructObjectInspector) oi, value); + break; + default: + break; + } + } + + protected void write(BaseWriter.StructWriter writer, java.lang.String name, TypeInfo typeInfo, ObjectInspector oi, Object value) { + if (value == null) { + return; + } + + switch (typeInfo.getCategory()) { + case PRIMITIVE: { + final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo; + switch (primitiveTypeInfo.getPrimitiveCategory()) { + case BOOLEAN: + writeBoolean(writer.bit(name), ((BooleanObjectInspector) oi).get(value)); + break; + case DOUBLE: + writeDouble(writer.float8(name), ((DoubleObjectInspector) oi).get(value)); + break; + case FLOAT: + writeFloat(writer.float4(name), ((FloatObjectInspector) oi).get(value)); + break; + case DECIMAL: { + DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; + writeDecimal(writer.decimal(name, decimalTypeInfo.scale(), decimalTypeInfo.precision()), + 
getDecimalValue(decimalTypeInfo, oi, value)); + } + break; + case BYTE: + writeInt(writer.integer(name), ((ByteObjectInspector) oi).get(value)); + break; + case INT: + writeInt(writer.integer(name), ((IntObjectInspector) oi).get(value)); + break; + case LONG: + writeLong(writer.bigInt(name), ((LongObjectInspector) oi).get(value)); + break; + case SHORT: + writeInt(writer.integer(name), ((ShortObjectInspector) oi).get(value)); + break; + case BINARY: + writeBinary(writer.varBinary(name), ((BinaryObjectInspector) oi).getPrimitiveJavaObject(value)); + break; + case STRING: + writeText(writer.varChar(name), ((StringObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case VARCHAR: + writeText(writer.varChar(name), ((HiveVarcharObjectInspector) oi).getPrimitiveWritableObject(value).getTextValue()); + break; + case TIMESTAMP: + writeTimestamp(writer.timeStampMilli(name), ((TimestampObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case DATE: + writeDate(writer.dateMilli(name), ((DateObjectInspector) oi).getPrimitiveWritableObject(value)); + break; + case CHAR: + writeText(writer.varChar(name), ((HiveCharObjectInspector) oi).getPrimitiveWritableObject(value).getStrippedValue()); + break; + default: + break; + } + } + break; + case LIST: + writeList(writer.list(name), (ListTypeInfo) typeInfo, (ListObjectInspector) oi, value); + break; + case MAP: + writeMap(writer.map(name, false), (MapTypeInfo) typeInfo, (MapObjectInspector) oi, value); + break; + case STRUCT: + writeStruct(writer.struct(name), (StructTypeInfo) typeInfo, (StructObjectInspector) oi, value); + break; + default: + break; + } + } + + private OperatorContext getContext() { + return context; + } + + private static BigDecimal getDecimalValue(DecimalTypeInfo typeInfo, ObjectInspector oi, Object value) { + BigDecimal decimal = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(value).bigDecimalValue(); + return decimal.setScale(typeInfo.scale(), RoundingMode.HALF_UP); + } + + private void writeBinary(VarBinaryWriter writer, byte[] value) { + checkSizeLimit(value.length); + try (ArrowBuf buf = getContext().getAllocator().buffer(value.length)) { + buf.setBytes(0, value); + writer.writeVarBinary(0, value.length, buf); + } + } + + private void writeBoolean(BitWriter writer, boolean value) { + writer.writeBit(value ? 
1 : 0); + } + + private void writeDouble(Float8Writer writer, double value) { + writer.writeFloat8(value); + } + + private void writeFloat(Float4Writer writer, float value) { + writer.writeFloat4(value); } + + private void writeDecimal(DecimalWriter writer, BigDecimal value) { + writer.writeDecimal(value); + } + + private void writeInt(IntWriter writer, int value) { + writer.writeInt(value); + } + + private void writeLong(BigIntWriter writer, long value) { + writer.writeBigInt(value); + } + + private void writeText(VarCharWriter writer, Text value) { + checkSizeLimit(value.getLength()); + try (ArrowBuf buf = getContext().getAllocator().buffer(value.getLength())) { + buf.setBytes(0, value.getBytes()); + writer.writeVarChar(0, value.getLength(), buf); + } + } + + private void writeTimestamp(TimeStampMilliWriter writer, TimestampWritableV2 value) { + long seconds = value.getSeconds(); + long nanos = value.getNanos(); + long millis = seconds * 1000 + nanos/1000/1000; + writer.writeTimeStampMilli(millis); + } + + private void writeDate(DateMilliWriter writer, DateWritableV2 value) { + writer.writeDateMilli(value.getDays() * Date.MILLIS_PER_DAY); + } + + protected void writeMap(BaseWriter.MapWriter writer, MapTypeInfo typeInfo, MapObjectInspector oi, Object value) { + writer.startMap(); + for (Map.Entry e : oi.getMap(value).entrySet()) { + writer.startEntry(); + write(writer.key(), typeInfo.getMapKeyTypeInfo(), oi.getMapKeyObjectInspector(), e.getKey()); + write(writer.value(), typeInfo.getMapValueTypeInfo(), oi.getMapValueObjectInspector(), e.getValue()); + writer.endEntry(); + } + writer.endMap(); + } + + protected void writeList(BaseWriter.ListWriter writer, ListTypeInfo typeInfo, ListObjectInspector listOi, Object value) { + writer.startList(); + for (Object o : listOi.getList(value)) { + write(writer, typeInfo.getListElementTypeInfo(), listOi.getListElementObjectInspector(), o); + } + writer.endList(); + } + + protected void writeStruct(BaseWriter.StructWriter writer, StructTypeInfo typeInfo, StructObjectInspector oi, Object value) { + writer.start(); + for (StructField field : oi.getAllStructFieldRefs()) { + write(writer, field.getFieldName(), typeInfo.getStructFieldTypeInfo(field.getFieldName()), + field.getFieldObjectInspector(), oi.getStructFieldData(value, field)); + } + writer.end(); + } + } + + public static class HiveMap extends BaseComplexConverter { + private final MapTypeInfo typeInfo; + + public HiveMap(MapTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; + } + @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. 
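// Worked example for the timestamp/date conversions above (illustrative):
// TimestampWritableV2 exposes epoch seconds plus a nanosecond component, so
// 2021-01-01T00:00:00.123456789Z (seconds = 1609459200, nanos = 123456789)
// converts as
//   millis = 1609459200 * 1000 + 123456789 / 1000 / 1000
//          = 1609459200000 + 123 = 1609459200123
// with sub-millisecond precision deliberately truncated by integer division.
// DateWritableV2 stores days since the epoch, so writeDate scales by
// Date.MILLIS_PER_DAY (86,400,000 ms): day 18628 maps to 1609459200000,
// i.e. 2021-01-01 as a DateMilli value.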
- // Currently we support complex types in ORC format only - return; + UnionMapWriter mapWriter = ((MapVector) outputVV).getWriter(); + mapWriter.setPosition(outputIndex); + writeMap(mapWriter, typeInfo, (MapObjectInspector) oi, hiveFieldValue); } } - public static class List extends HiveFieldConverter { - public List(HiveOperatorContextOptions options) { - super(options); + + public static class HiveList extends BaseComplexConverter { + private final ListTypeInfo typeInfo; + + public HiveList(ListTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; } @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. - // Currently we support complex types in ORC format only - return; + UnionListWriter listWriter = ((ListVector) outputVV).getWriter(); + listWriter.setPosition(outputIndex); + writeList(listWriter, typeInfo, (ListObjectInspector) oi, hiveFieldValue); } } - public static class Struct extends HiveFieldConverter { - public Struct(HiveOperatorContextOptions options) { - super(options); + + public static class HiveStruct extends BaseComplexConverter { + private final StructTypeInfo typeInfo; + + public HiveStruct(StructTypeInfo typeInfo, OperatorContext context, HiveOperatorContextOptions options) { + super(context, options); + this.typeInfo = typeInfo; } @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { - // In ORC vectorized file reader path these functions are not called. - // Currently we support complex types in ORC format only - return; + StructObjectInspector structOi = (StructObjectInspector) oi; + NullableStructWriter structWriter = ((StructVector) outputVV).getWriter(); + structWriter.setPosition(outputIndex); + structWriter.start(); + for (Field writerField : structWriter.getField().getChildren()) { + StructField field = structOi.getStructFieldRef(writerField.getName()); + write(structWriter, field.getFieldName(), typeInfo.getStructFieldTypeInfo(field.getFieldName()), + field.getFieldObjectInspector(), structOi.getStructFieldData(hiveFieldValue, field)); + } + structWriter.end(); } } public static class Binary extends HiveFieldConverter { @@ -358,7 +633,7 @@ public static class Date extends HiveFieldConverter { public Date(HiveOperatorContextOptions options) { super(options); } - private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1L); + public static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1L); @Override public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) { @@ -380,5 +655,4 @@ public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector ((VarCharVector) outputVV).setSafe(outputIndex, valueBytes, 0, valueLen); } } - } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveORCCopiers.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveORCCopiers.java index bec958a096..dc71cd9dce 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveORCCopiers.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveORCCopiers.java @@ -108,7 +108,9 @@ public interface ORCCopier { private abstract static class ORCCopierBase implements ORCCopier { private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(HiveORCCopiers.class); + @Override public abstract void copy(int inputIdx, int count, int outputIdx); + @Override public abstract void ensureHasRequiredCapacity(int required); protected void ensureVectorHasRequiredCapacity(ValueVector vector, int required) { while (required > vector.getValueCapacity()) { @@ -187,8 +189,7 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof BigIntVector) { if (input instanceof LongColumnVector) { return new BigIntCopier((LongColumnVector) input, (BigIntVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof Float4Vector) { @@ -196,8 +197,7 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, return new DoubleToFloat4Copier((DoubleColumnVector) input, (Float4Vector) output); } else if (input instanceof LongColumnVector) { return new LongToFloat4Copier((LongColumnVector) input, (Float4Vector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof Float8Vector) { @@ -213,15 +213,13 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof DateMilliVector) { if (input instanceof LongColumnVector) { return new DateMilliCopier((LongColumnVector) input, (DateMilliVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof TimeStampMilliVector) { if (input instanceof TimestampColumnVector) { return new TimeStampMilliCopier((TimestampColumnVector) input, (TimeStampMilliVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof DecimalVector) { @@ -239,32 +237,28 @@ private static ORCCopier createCopier(HiveColumnVectorData columnVectorData, } else if (output instanceof BitVector) { if (input instanceof LongColumnVector) { return new BitCopier((LongColumnVector) input, (BitVector) output); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof ListVector) { if (input instanceof MultiValuedColumnVector) { return new ListCopier(columnVectorData, ordinalId, (MultiValuedColumnVector) input, (ListVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof StructVector) { if (input instanceof StructColumnVector) { return new StructCopier(columnVectorData, ordinalId, (StructColumnVector) input, (StructVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } else if (output instanceof UnionVector) { if (input instanceof UnionColumnVector) { return new UnionCopier(columnVectorData, ordinalId, (UnionColumnVector) input, (UnionVector) output, operatorContextOptions, vectorToNameMap); - } - else { + } else { return new NoOpCopier(null, null); } } @@ -365,18 +359,15 @@ private static class StructCopier extends ORCCopierBase { Preconditions.checkNotNull(vectorToNameMap.get(hiveElementVector),"The hiveElementVector is not present in the map that maps all the inputVectors with their corresponding names"); if(arrowElementVector == null){ fieldCopiers.add(new NoOpCopier(null, null)); - } - else if(vectorToNameMap.get(hiveElementVector).equals(arrowElementVector.getName())) { + } else if(vectorToNameMap.get(hiveElementVector).equals(arrowElementVector.getName())) { ORCCopier childCopier = createCopier(columnVectorData, childPos, arrowElementVector, 
hiveElementVector, operatorContextOptions, vectorToNameMap); fieldCopiers.add(childCopier); arrowIdx++; - } - else{ + } else { fieldCopiers.add(new NoOpCopier(null, null)); } - } - else { + } else { fieldCopiers.add(new NoOpCopier(null, null)); } childPos += columnVectorData.getTotalVectorCount(childPos); @@ -734,8 +725,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -753,8 +743,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -810,8 +799,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -829,8 +817,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception creates null entry } } @@ -891,8 +878,7 @@ public void copy(int inputIdx, int count, int outputIdx) { .unscaledValue() .toByteArray(); outputVector.setBigEndian(outputIdx, decimalValue); - } - catch (Exception e) { + } catch (Exception e) { } } @@ -960,8 +946,7 @@ public void copy(int inputIdx, int count, int outputIdx) { try { final byte[] value = HiveDecimal.enforcePrecisionScale(input[inputIdx].getHiveDecimal(), outputPrecision, outputScale).bigDecimalValue().movePointRight(outputScale).unscaledValue().toByteArray(); outputVector.setBigEndian(outputIdx, value); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception sets null. // enforcePrecisionScale returns null when it cannot enforce } @@ -973,8 +958,7 @@ public void copy(int inputIdx, int count, int outputIdx) { try { byte[] v = HiveDecimal.enforcePrecisionScale(input[inputIdx].getHiveDecimal(), outputPrecision, outputScale).bigDecimalValue().movePointRight(outputScale).unscaledValue().toByteArray(); outputVector.setBigEndian(outputIdx, v); - } - catch (Exception e) { + } catch (Exception e) { // ignoring exception sets null. 
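// Worked example for the decimal copy above (illustrative): enforcing
// HiveDecimal 12.5 to precision 6 / scale 2 yields 12.50;
// bigDecimalValue().movePointRight(2) gives 1250, whose
// unscaledValue().toByteArray() is the big-endian two's-complement byte
// sequence {0x04, 0xE2}. setBigEndian() sign-extends those bytes into the
// 16-byte Arrow decimal slot. If any step throws (or enforcement fails),
// the catch block leaves the slot unset, which reads back as null.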
// enforcePrecisionScale returns null when it cannot enforce } @@ -1148,8 +1132,7 @@ public void copy(int inputIdx, int count, int outputIdx) { String strValue = new String(vector[inputIdx], start[inputIdx], length[inputIdx], StandardCharsets.UTF_8); double doubleValue = Double.parseDouble(strValue); outputVector.set(outputIdx, doubleValue); - } - catch (Exception e) { + } catch (Exception e) { } } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveSplitCreator.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveSplitCreator.java index 8c0fc0d12b..f0525b293c 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveSplitCreator.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveSplitCreator.java @@ -60,6 +60,7 @@ public HiveSplitCreator(OperatorContext context, byte[] extendedProperty) { partitionXattrBytes = partitionXattr.toByteString(); } + @Override public SplitAndPartitionInfo createSplit(PartitionProtobuf.NormalizedPartitionInfo filePartitionInfo, SplitIdentity splitIdentity, String fileFormat, long fileSize, long currentModTime) throws InvalidProtocolBufferException { InputSplit inputSplit; switch (fileFormat) { diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveTextReader.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveTextReader.java index f9e2a414c5..f98baab5e1 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveTextReader.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/HiveTextReader.java @@ -76,8 +76,7 @@ public HiveTextReader(final HiveTableXattr tableAttr, final SplitAndPartitionInf public void internalInit(InputSplit inputSplit, JobConf jobConf, ValueVector[] vectors) throws IOException { try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) { reader = jobConf.getInputFormat().getRecordReader(inputSplit, jobConf, Reporter.NULL); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } @@ -123,8 +122,7 @@ public int populateData() throws IOException, SerDeException { if (!hasNext) { break; } - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } if (skipRecordsInspector.doSkipHeader(recordCount++)) { @@ -293,8 +291,7 @@ public void close() throws IOException { if (reader != null) { try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) { reader.close(); - } - catch(FSError e) { + } catch (FSError e) { throw HadoopFileSystemWrapper.propagateFSError(e); } reader = null; diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java index f3013087b8..43442d37e7 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/exec/ScanWithHiveReader.java @@ -251,7 +251,7 @@ private static RecordReader getRecordReader(HiveTableXattr tableXattr, partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr); } - jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName, context.getOptions())); + jobConf.setInputFormat(getInputFormatClass(jobConf, 
partitionInputFormat, partitionStorageHandlerName)); partitionOI = getStructOI(partitionSerDe); updateFileFormatStat(context.getStats(), partitionInputFormat); @@ -263,7 +263,7 @@ private static RecordReader getRecordReader(HiveTableXattr tableXattr, } else { partitionSerDe = null; partitionOI = null; - jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr), context.getOptions())); + jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr))); updateFileFormatStat(context.getStats(), tableInputFormat); } diff --git a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java index 6fd853d8f4..0c88f20675 100644 --- a/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java +++ b/plugins/hive3/plugin/src/main/java/com/dremio/exec/store/hive/metadata/HiveMetadataUtils.java @@ -78,6 +78,7 @@ import org.apache.iceberg.BaseTable; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import com.dremio.common.exceptions.UserException; import com.dremio.common.util.Closeable; @@ -108,6 +109,7 @@ import com.dremio.exec.store.hive.HiveClient; import com.dremio.exec.store.hive.HivePf4jPlugin; import com.dremio.exec.store.hive.HiveSchemaConverter; +import com.dremio.exec.store.hive.HiveSchemaTypeOptions; import com.dremio.exec.store.hive.HiveUtilities; import com.dremio.exec.store.hive.deltalake.DeltaHiveInputFormat; import com.dremio.exec.store.hive.exec.apache.HadoopFileSystemWrapper; @@ -115,7 +117,6 @@ import com.dremio.exec.store.hive.exec.metadata.SchemaConverter; import com.dremio.exec.store.hive.iceberg.IcebergHiveTableOperations; import com.dremio.exec.store.hive.iceberg.IcebergInputFormat; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergSerDe; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.TableSchemaProvider; @@ -193,8 +194,11 @@ public static void injectOrcIncludeFileIdInSplitsConf(final HiveStorageCapabilit } } - public static SchemaComponents resolveSchemaComponents(final List pathComponents, boolean throwIfInvalid) { + public static boolean isValidPathSchema(final List pathComponents) { + return pathComponents != null && (pathComponents.size() == 2 || pathComponents.size() == 3); + } + public static SchemaComponents resolveSchemaComponents(final List pathComponents) { // extract database and table names from dataset path switch (pathComponents.size()) { case 2: @@ -202,10 +206,6 @@ public static SchemaComponents resolveSchemaComponents(final List pathCo case 3: return new SchemaComponents(pathComponents.get(1), pathComponents.get(2)); default: - if (!throwIfInvalid) { - return null; - } - // invalid. Guarded against at both entry points. 
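// For reference, from the cases above: a dataset path is valid with two or
// three components; a three-part path such as ["hive", "db", "tbl"] resolves
// to dbName = "db", tableName = "tbl". The new isValidPathSchema(pathComponents)
// helper accepts exactly those two sizes, so callers can pre-check a path
// without reaching this UserException.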
throw UserException.connectionError() .message("Dataset path '%s' is invalid.", pathComponents) @@ -254,7 +254,7 @@ public static String resolveCreateTableLocation(HiveConf conf, SchemaComponents if (isDeltaTable(table, options)) { return new DeltaHiveInputFormat(); } - final Class inputFormatClazz = getInputFormatClass(job, table, partition, options); + final Class inputFormatClazz = getInputFormatClass(job, table, partition); job.setInputFormat(inputFormatClazz); return job.getInputFormat(); } @@ -277,14 +277,14 @@ public static boolean isIcebergTable(Table table) { } private static boolean isDeltaTable(Table table, OptionManager options) { - return DeltaHiveInputFormat.isDeltaTable(table.getParameters().get(META_TABLE_STORAGE), options); + return DeltaHiveInputFormat.isDeltaTable(table, options); } - public static BatchSchema getBatchSchema(Table table, final HiveConf hiveConf, boolean includeComplexParquetCols, boolean isMapTypeEnabled, Hive3StoragePlugin plugin) { + public static BatchSchema getBatchSchema(Table table, final HiveConf hiveConf, HiveSchemaTypeOptions typeOptions, Hive3StoragePlugin plugin) { InputFormat format = getInputFormat(table, hiveConf, plugin.getSabotContext().getOptionManager()); final List fields = new ArrayList<>(); final List partitionColumns = new ArrayList<>(); - HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, includeComplexParquetCols, isMapTypeEnabled); + HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, typeOptions); return BatchSchema.newBuilder().addFields(fields).build(); } @@ -332,18 +332,17 @@ private static void populateFieldsAndPartitionColumns( final List fields, final List partitionColumns, InputFormat format, - boolean includeComplexParquetCols, - boolean isMapTypeEnabled) { + final HiveSchemaTypeOptions typeOptions) { for (FieldSchema hiveField : table.getSd().getCols()) { final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType()); - Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, includeComplexParquetCols, isMapTypeEnabled); + Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, typeOptions); if (f != null) { fields.add(f); } } for (FieldSchema field : table.getPartitionKeys()) { Field f = HiveSchemaConverter.getArrowFieldFromHiveType(field.getName(), - TypeInfoUtils.getTypeInfoFromTypeString(field.getType()), format, includeComplexParquetCols, isMapTypeEnabled); + TypeInfoUtils.getTypeInfoFromTypeString(field.getType()), format, typeOptions); if (f != null) { fields.add(f); partitionColumns.add(field.getName()); @@ -351,12 +350,11 @@ private static void populateFieldsAndPartitionColumns( } } - private static List buildColumnInfo(final Table table, final InputFormat format, boolean - includeComplexParquetCols, boolean isMapTypeEnabled) { + private static List buildColumnInfo(final Table table, final InputFormat format, final HiveSchemaTypeOptions typeOptions) { final List columnInfos = new ArrayList<>(); for (FieldSchema hiveField : table.getSd().getCols()) { final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType()); - Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, includeComplexParquetCols, isMapTypeEnabled); + Field f = HiveSchemaConverter.getArrowFieldFromHiveType(hiveField.getName(), typeInfo, format, typeOptions); if (f != null) { 
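// HiveSchemaConverter.getArrowFieldFromHiveType returns null when the Hive
// type cannot be mapped to an Arrow field under the given
// HiveSchemaTypeOptions (one plausible case: map columns while the map type
// option is disabled), so unmapped columns are silently skipped here rather
// than added to the column info list.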
columnInfos.add(getColumnInfo(typeInfo)); } @@ -480,6 +478,9 @@ private static ColumnInfo getColumnInfo(final TypeInfo typeInfo) { .setScale(0) .setIsPrimitive(true) .build(); + + default: + break; } } @@ -496,12 +497,12 @@ public static TableMetadata getTableMetadata(final HiveClient client, final int maxMetadataLeafColumns, final int maxNestedLevels, final TimeTravelOption timeTravelOption, - final boolean includeComplexParquetCols, + final HiveSchemaTypeOptions typeOptions, final HiveConf hiveConf, final Hive3StoragePlugin plugin) throws ConnectorException { try { - final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents(), true); + final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents()); // if the dataset path is not canonized we need to get it from the source final Table table = client.getTable(schemaComponents.getDbName(), schemaComponents.getTableName(), ignoreAuthzErrors); @@ -512,14 +513,13 @@ public static TableMetadata getTableMetadata(final HiveClient client, final Properties tableProperties = MetaStoreUtils.getSchema(table.getSd(), table.getSd(), table.getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys()); TableMetadata tableMetadata; if (isIcebergTable(table)) { - tableMetadata = getTableMetadataFromIceberg(hiveConf, datasetPath, table, tableProperties, timeTravelOption, plugin); + tableMetadata = getTableMetadataFromIceberg(hiveConf, datasetPath, table, tableProperties, timeTravelOption, typeOptions, plugin); } else if (isDeltaTable(table, plugin.getSabotContext().getOptionManager())) { - tableMetadata = getTableMetadataFromDelta(table, tableProperties, maxMetadataLeafColumns, plugin); + tableMetadata = getTableMetadataFromDelta(table, tableProperties, maxMetadataLeafColumns, typeOptions, plugin); } else { - final boolean isMapTypeEnabled = plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE); tableMetadata = getTableMetadataFromHMS(table, tableProperties, datasetPath, ignoreAuthzErrors, maxMetadataLeafColumns, maxNestedLevels, - includeComplexParquetCols, hiveConf, isMapTypeEnabled, plugin); + typeOptions, hiveConf, plugin); } HiveMetadataUtils.injectOrcIncludeFileIdInSplitsConf(tableMetadata.getTableStorageCapabilities(), tableProperties); return tableMetadata; @@ -535,12 +535,13 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf final Table table, final Properties tableProperties, final TimeTravelOption timeTravelOption, + final HiveSchemaTypeOptions typeOptions, Hive3StoragePlugin plugin) throws IOException { JobConf jobConf = new JobConf(hiveConf); String metadataLocation = tableProperties.getProperty(METADATA_LOCATION, ""); com.dremio.io.file.FileSystem fs = plugin.createFS(metadataLocation, SystemUser.SYSTEM_USERNAME, null); - DremioFileIO fileIO = new DremioFileIO(fs, (Iterable>)jobConf, plugin); + FileIO fileIO = plugin.createIcebergFileIO(fs, null, null, null, null); IcebergHiveTableOperations hiveTableOperations = new IcebergHiveTableOperations(fileIO, metadataLocation); BaseTable icebergTable = new BaseTable(hiveTableOperations, new Path(metadataLocation).getName()); icebergTable.refresh(); @@ -555,8 +556,7 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf TimeTravelProcessors.getTableSchemaProvider(travelRequest); snapshot = tableSnapshotProvider.apply(icebergTable); schema = tableSchemaProvider.apply(icebergTable, snapshot); - } - else { + } else { 
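// No time-travel request was supplied: read the table at its current
// snapshot with its latest schema. When a TimeTravelRequest is present, the
// branch above instead derives both from the providers returned by
// TimeTravelProcessors for that request.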
snapshot = icebergTable.currentSnapshot(); schema = icebergTable.schema(); } @@ -584,7 +584,7 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf } SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.getTableName()) - .setMapTypeEnabled(plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).build(); + .setMapTypeEnabled(typeOptions.isMapTypeEnabled()).build(); BatchSchema batchSchema = schemaConverter.fromIceberg(schema); Map specsMap = icebergTable.specs(); specsMap = IcebergUtils.getPartitionSpecMapBySchema(specsMap, schema); @@ -604,7 +604,11 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf .setDeleteStats(new ScanStats() .setScanFactor(ScanCostFactor.PARQUET.getFactor()) .setType(ScanStatsType.EXACT_ROW_COUNT) - .setRecordCount(numPositionDeletes)); + .setRecordCount(numPositionDeletes)) + .setEqualityDeleteStats(new ScanStats() + .setScanFactor(ScanCostFactor.PARQUET.getFactor()) + .setType(ScanStatsType.EXACT_ROW_COUNT) + .setRecordCount(numEqualityDeletes)); return TableMetadata.newBuilder() .table(table) @@ -622,14 +626,14 @@ private static TableMetadata getTableMetadataFromIceberg(final HiveConf hiveConf private static TableMetadata getTableMetadataFromDelta(final Table table, final Properties tableProperties, final int maxMetadataLeafColumns, + final HiveSchemaTypeOptions typeOptions, final Hive3StoragePlugin plugin) throws IOException { - final String tableLocation = table.getSd().getLocation(); + final String tableLocation = DeltaHiveInputFormat.getLocation(table, plugin.getSabotContext().getOptionManager()); final com.dremio.io.file.FileSystem fs = plugin.createFS(tableLocation, SystemUser.SYSTEM_USERNAME, null); final DeltaLakeTable deltaTable = new DeltaLakeTable(plugin.getSabotContext(), fs, tableLocation); final DeltaLogSnapshot snapshot = deltaTable.getConsolidatedSnapshot(); - final boolean isMapTypeEnabled = plugin.getSabotContext().getOptionManager().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE); - final BatchSchema batchSchema = DeltaLakeSchemaConverter.withMapEnabled(isMapTypeEnabled).fromSchemaString(snapshot.getSchema()); + final BatchSchema batchSchema = DeltaLakeSchemaConverter.withMapEnabled(typeOptions.isMapTypeEnabled()).fromSchemaString(snapshot.getSchema()); HiveMetadataUtils.checkLeafFieldCounter(batchSchema.getFields().size(), maxMetadataLeafColumns, ""); return TableMetadata.newBuilder() @@ -650,25 +654,24 @@ private static TableMetadata getTableMetadataFromHMS(final Table table, final boolean ignoreAuthzErrors, final int maxMetadataLeafColumns, final int maxNestedLevels, - final boolean includeComplexParquetCols, + final HiveSchemaTypeOptions typeOptions, final HiveConf hiveConf, - final boolean isMapTypeEnabled, final Hive3StoragePlugin plugin) throws ConnectorException { - final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents(), true); + final SchemaComponents schemaComponents = resolveSchemaComponents(datasetPath.getComponents()); final InputFormat format = getInputFormat(table, hiveConf, plugin.getSabotContext().getOptionManager()); final List fields = new ArrayList<>(); final List partitionColumns = new ArrayList<>(); - HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, includeComplexParquetCols, isMapTypeEnabled); + HiveMetadataUtils.populateFieldsAndPartitionColumns(table, fields, partitionColumns, format, typeOptions); 
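// HiveSchemaTypeOptions (imported in this patch) bundles the type-mapping
// flags that were previously threaded through as loose booleans. A minimal
// sketch of its likely shape, an assumption based only on what this patch
// exercises (isMapTypeEnabled()); the real class may carry more flags, such
// as the former includeComplexParquetCols:
//
//   public class HiveSchemaTypeOptions {
//     private final boolean mapTypeEnabled;
//     public HiveSchemaTypeOptions(final OptionManager options) {
//       // ENABLE_MAP_DATA_TYPE is the option the removed code consulted.
//       this.mapTypeEnabled = options.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE);
//     }
//     public boolean isMapTypeEnabled() { return mapTypeEnabled; }
//   }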
HiveMetadataUtils.checkLeafFieldCounter(fields.size(), maxMetadataLeafColumns, schemaComponents.getTableName()); - HiveSchemaConverter.checkFieldNestedLevels(table, maxNestedLevels, isMapTypeEnabled); + HiveSchemaConverter.checkFieldNestedLevels(table, maxNestedLevels, typeOptions.isMapTypeEnabled()); final BatchSchema batchSchema = BatchSchema.newBuilder().addFields(fields).build(); - final List columnInfos = buildColumnInfo(table, format, includeComplexParquetCols, isMapTypeEnabled); + final List columnInfos = buildColumnInfo(table, format, typeOptions); return TableMetadata.newBuilder() .table(table) @@ -880,9 +883,9 @@ public static List getDatasetSplitsForIcebergTables(TableMetadata */ private static class InputSplitSizeRunnable extends TimedRunnable { - final InputSplit split; - final Configuration conf; - final String tableName; + private final InputSplit split; + private final Configuration conf; + private final String tableName; public InputSplitSizeRunnable(final Configuration conf, final String tableName, final InputSplit split) { this.conf = conf; @@ -1085,7 +1088,7 @@ public static PartitionMetadata getPartitionMetadata(final boolean storageImpers DirListInputSplitProto.DirListInputSplit dirListInputSplit = null; HiveDatasetStats metastoreStats = null; InputFormat format = getInputFormat(table, job, partition, optionManager); - Class inputFormatClazz = getInputFormatClass(job, table, partition, optionManager); + Class inputFormatClazz = getInputFormatClass(job, table, partition); metadataAccumulator.setTableLocation(table.getSd().getLocation()); if (null == partition) { @@ -1445,15 +1448,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String case DOUBLE: try { return PartitionValue.of(name, Double.parseDouble(value)); - } - catch (NumberFormatException ex) { + } catch (NumberFormatException ex) { return PartitionValue.of(name); } case FLOAT: try { return PartitionValue.of(name, Float.parseFloat(value)); - } - catch (NumberFormatException ex) { + } catch (NumberFormatException ex) { return PartitionValue.of(name); } case BYTE: @@ -1461,15 +1462,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String case INT: try { return PartitionValue.of(name, Integer.parseInt(value)); - } - catch (NumberFormatException ex) { + } catch (NumberFormatException ex) { return PartitionValue.of(name); } case LONG: try { return PartitionValue.of(name, Long.parseLong(value)); - } - catch (NumberFormatException ex) { + } catch (NumberFormatException ex) { return PartitionValue.of(name); } case STRING: @@ -1506,10 +1505,13 @@ private static PartitionValue getPartitionValue(FieldSchema partitionCol, String final BigInteger unscaled = original.movePointRight(decimalTypeInfo.scale()).unscaledValue(); return PartitionValue.of(name, ByteBuffer.wrap(DecimalTools.signExtend16(unscaled.toByteArray()))); default: - HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString()); + break; } + HiveUtilities.throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString()); + break; default: HiveUtilities.throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString()); + break; } return null; // unreachable @@ -1550,7 +1552,7 @@ public static void addConfToJob(final JobConf job, final Properties properties) } } - public static Class getInputFormatClass(final JobConf job, final Table table, final Partition partition, OptionManager options) { + public static Class getInputFormatClass(final 
JobConf job, final Table table, final Partition partition) { try (final Closeable cls = HivePf4jPlugin.swapClassLoader()) { if (partition != null) { if (partition.getSd().getInputFormat() != null) { @@ -1568,9 +1570,6 @@ public static Class getInputFormatClass(final JobConf job } if (table.getParameters().get(META_TABLE_STORAGE) != null) { - if (isDeltaTable(table, options)) { - return DeltaHiveInputFormat.class; - } final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, table.getParameters().get(META_TABLE_STORAGE)); return storageHandler.getInputFormatClass(); } diff --git a/plugins/hive3/plugin/src/test/java/com/dremio/exec/store/hive/HiveTestDataGenerator.java b/plugins/hive3/plugin/src/test/java/com/dremio/exec/store/hive/HiveTestDataGenerator.java index 8fade16505..fb0c64dafb 100644 --- a/plugins/hive3/plugin/src/test/java/com/dremio/exec/store/hive/HiveTestDataGenerator.java +++ b/plugins/hive3/plugin/src/test/java/com/dremio/exec/store/hive/HiveTestDataGenerator.java @@ -30,6 +30,7 @@ import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -242,6 +243,10 @@ public void generateTestData(java.util.function.Function generator } } + public static List listStoreAsFormatsForTests() { + return Arrays.asList("orc", "rcfile", "textfile", "sequencefile"); + } + private void generateTestData() throws Exception { try (DriverState driverState = new DriverState(newHiveConf())) { Driver hiveDriver = driverState.driver; @@ -509,7 +514,9 @@ private void generateTestData() throws Exception { // create a Hive table that has columns with data types which are supported for reading in Dremio. createAllTypesTextTable(hiveDriver, "readtest"); createAllTypesTable(hiveDriver, "parquet", "readtest"); - createAllTypesTable(hiveDriver, "orc", "readtest"); + for (String tableFormat : listStoreAsFormatsForTests()) { + createAllTypesTable(hiveDriver, tableFormat, "readtest"); + } createTimestampToStringTable(hiveDriver, "timestamptostring"); createDoubleToStringTable(hiveDriver, "doubletostring"); @@ -531,21 +538,21 @@ private void generateTestData() throws Exception { createNestedListWithNullsHiveTables(hiveDriver); createNestedStructWithNullsHiveTables(hiveDriver); createParuqetComplexFilterTestTable(hiveDriver); - createComplexTypesTextTable(hiveDriver, "orccomplex"); - createComplexTypesTable(hiveDriver, "orc", "orccomplex"); createComplexVarcharHiveTables(hiveDriver); + createComplexTypesTextTable(hiveDriver, "orccomplex"); createListTypesTextTable(hiveDriver, "orclist"); - createListTypesTable(hiveDriver, "orc", "orclist"); - createStructTypesTextTable(hiveDriver, "orcstruct"); - createStructTypesTable(hiveDriver, "orc", "orcstruct"); - createUnionTypesTextTable(hiveDriver, "orcunion"); - createUnionTypesTable(hiveDriver, "orc", "orcunion"); - createMapTypesTextTable(hiveDriver, "orcmap"); - createMapTypesTable(hiveDriver, "orc", "orcmap"); + + for (String tableFormat : listStoreAsFormatsForTests()) { + createComplexTypesTable(hiveDriver, tableFormat, "orccomplex"); + createListTypesTable(hiveDriver, tableFormat, "orclist"); + createStructTypesTable(hiveDriver, tableFormat, "orcstruct"); + createUnionTypesTable(hiveDriver, tableFormat, "orcunion"); + createMapTypesTable(hiveDriver, tableFormat, "orcmap"); + } createORCDecimalCompareTestTable(hiveDriver, "orcdecimalcompare"); createMixedPartitionTypeTable(hiveDriver, "parquet_mixed_partition_type"); @@ -571,12 +578,142 
@@ private void generateTestData() throws Exception { createORCPartitionSchemaTestTable(hiveDriver); createFlattenOrcHiveTable(hiveDriver); + for (String tableFormat : listStoreAsFormatsForTests()) { + createTableWithMapOfIntKey(hiveDriver, tableFormat); + createTableWithMapOfBigIntKey(hiveDriver, tableFormat); + createTableWithMapOfBooleanKey(hiveDriver, tableFormat); + createTableWithMapOfDateKey(hiveDriver, tableFormat); + createTableWithMapOfDecimalKey(hiveDriver, tableFormat); + createTableWithMapOfDoubleKey(hiveDriver, tableFormat); + createTableWithMapOfFloatKey(hiveDriver, tableFormat); + createTableWithMapOfStringKey(hiveDriver, tableFormat); + createTableWithMapOfTimestampKey(hiveDriver, tableFormat); + createTableWithMapOfVarbinaryKey(hiveDriver, tableFormat); + createTableWithMapOfNullValues(hiveDriver, tableFormat); + createTableWithMapOfList(hiveDriver, tableFormat); + createTableWithMapOfStruct(hiveDriver, tableFormat); + createTableWithMapOfMap(hiveDriver, tableFormat); + } + + // This test requires a systemop alteration. Refresh metadata on hive seems to timeout the test preventing re-use of an existing table. Hence, creating a new table. createParquetDecimalSchemaChangeFilterTestTable(hiveDriver, "test_nonvc_parqdecimalschemachange_table"); createTableWithMapColumn(hiveDriver, "parquet_with_map_column"); } } + private void createTableWithMapOfIntKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_int_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_int_" + format + " select map(1, 'value1',2,'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfBigIntKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_bigint_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_bigint_" + format + " select map(9223372036854775800, 'value1',9223372036854775801,'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfBooleanKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_boolean_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_boolean_" + format + " select map(true, 'value1',false,'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfDateKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_date_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_date_" + format + " select map(cast('1993-05-26' as date), 'value1',cast('1993-05-27' as date),'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfDecimalKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_decimal_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_decimal_" + format + " select map(cast(1.1 as decimal(3,2)), 'value1', cast(1.2 as decimal(3,2)),'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfDoubleKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_double_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_double_" + format + " select 
map(cast(1.10 as double), 'value1',cast(1.20 as double),'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfFloatKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_float_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_float_" + format + " select map(cast(1.1 as float), 'value1',cast(1.2 as float),'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfStringKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_string_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_string_" + format + " select map('key1', 'value1','key2','value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfTimestampKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_timestamp_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_timestamp_" + format + " select map(cast('1993-05-26 11:12:33' as timestamp), 'value1',cast('1993-05-26 11:12:35' as timestamp),'value2') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfVarbinaryKey(Driver hiveDriver, String format) { + String createTable = "create table map_of_varbinary_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_varbinary_" + format + " select map(cast('1234' as binary), 'value1') "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfNullValues(Driver hiveDriver, String format) { + String createTable = "create table map_of_null_values_" + format + " (col1 map) stored as " + format; + String insert = "insert into map_of_null_values_" + format + " select map('key1',null) "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfList(Driver hiveDriver, String format) { + String createTable = "create table map_of_list_values_" + format + " (col1 map>) stored as " + format; + String insert = "insert into map_of_list_values_" + format + " select map('key1',array(1,2)) "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfStruct(Driver hiveDriver, String format) { + String createTable = "create table map_of_struct_values_" + format + " (col1 map>) stored as " + format; + String insert = "insert into map_of_struct_values_" + format + " select map('key1',named_struct('f1',1)) "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + + private void createTableWithMapOfMap(Driver hiveDriver, String format) { + String createTable = "create table map_of_map_values_" + format + " (col1 map>) stored as " + format; + String insert = "insert into map_of_map_values_" + format + " select map('key1',map('innerKey1',1)) "; + + executeQuery(hiveDriver, createTable); + executeQuery(hiveDriver, insert); + } + private File getTempFile() throws Exception { return getTempFile("dremio-hive-test"); } @@ -1151,6 +1288,7 @@ private void createStructTypesTextTable(final Driver hiveDriver, final String ta " bigint_field: bigint, " + " float_field: float, " + " double_field: double, " + + " decimal_field: decimal(6,2), " + " string_field: 
string> " + ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ','"); executeQuery(hiveDriver, @@ -1168,6 +1306,7 @@ private void createStructTypesTable(final Driver hiveDriver, final String format " bigint_field: bigint, " + " float_field: float, " + " double_field: double, " + + " decimal_field: decimal(6,2), " + " string_field: string> " + ") ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' COLLECTION ITEMS TERMINATED BY ',' STORED AS " + format); executeQuery(hiveDriver, "INSERT OVERWRITE TABLE " + table + format + " SELECT * FROM " + table); @@ -1738,11 +1877,13 @@ private String generateStructTypesDataFile() throws Exception { String bigint_field = Long.toString(90000000000L); String float_field = Float.toString(row); String double_field = Double.toString(row); + String decimal_field = Double.toString(row); String string_field = Integer.toString(row); printWriter.println(rownum + "\t" + tinyint_field + "," + smallint_field + "," + int_field + "," + bigint_field + "," + - float_field + "," + double_field + "," + string_field); + float_field + "," + double_field + "," + decimal_field + "," + + string_field); } printWriter.close(); diff --git a/plugins/hive3/pom.xml b/plugins/hive3/pom.xml index 0bc1f114c3..f1374ef921 100644 --- a/plugins/hive3/pom.xml +++ b/plugins/hive3/pom.xml @@ -24,7 +24,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.plugins diff --git a/plugins/maprfs/pom.xml b/plugins/maprfs/pom.xml index 0e08554aeb..07c4986b6e 100644 --- a/plugins/maprfs/pom.xml +++ b/plugins/maprfs/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-maprfs-plugin diff --git a/plugins/mongo/pom.xml b/plugins/mongo/pom.xml index ec572501c8..e551dec3c6 100644 --- a/plugins/mongo/pom.xml +++ b/plugins/mongo/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-mongo-plugin diff --git a/plugins/nas/pom.xml b/plugins/nas/pom.xml index 611fef966c..8aa0388ca0 100644 --- a/plugins/nas/pom.xml +++ b/plugins/nas/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-nas-plugin diff --git a/plugins/nas/src/main/java/com/dremio/exec/store/dfs/NASConf.java b/plugins/nas/src/main/java/com/dremio/exec/store/dfs/NASConf.java index f075cbfd7a..f7dc2152df 100644 --- a/plugins/nas/src/main/java/com/dremio/exec/store/dfs/NASConf.java +++ b/plugins/nas/src/main/java/com/dremio/exec/store/dfs/NASConf.java @@ -98,6 +98,7 @@ public FileSystemPlugin newPlugin(SabotContext context, String name, Pr return new NASFileSystem(this, context, name, pluginIdProvider); } + @Override public String getDefaultCtasFormat() { return defaultCtasFormat.getDefaultCtasFormat(); } diff --git a/plugins/pdfs/pom.xml b/plugins/pdfs/pom.xml index ad3330fb24..cdd00c14ab 100644 --- a/plugins/pdfs/pom.xml +++ b/plugins/pdfs/pom.xml @@ -25,7 +25,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-pdfs-plugin diff --git a/plugins/pom.xml b/plugins/pom.xml index 6bba5aa68b..ae520ce1aa 100644 --- a/plugins/pom.xml +++ b/plugins/pom.xml @@ -24,7 +24,7 @@ dremio-parent com.dremio - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.plugins @@ -62,6 +62,7 @@ hive3 
awsglue gcs + dataplane diff --git a/plugins/s3/pom.xml b/plugins/s3/pom.xml index 589e2ed4d1..fbaee142dd 100644 --- a/plugins/s3/pom.xml +++ b/plugins/s3/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-s3-plugin @@ -42,12 +42,6 @@ software.amazon.awssdk apache-client - - - commons-logging - commons-logging - - com.dremio @@ -130,41 +124,4 @@ - - - - org.apache.maven.plugins - maven-surefire-plugin - - - ${s3mock.reserved.port} - - - - - org.codehaus.mojo - build-helper-maven-plugin - - - reserve-network-port - - reserve-network-port - - process-resources - - - - 10000 - 10999 - - s3mock.reserved.port - - - - - - - diff --git a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3AsyncByteReader.java b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3AsyncByteReader.java index 5ced96da6c..b9b23abdaa 100644 --- a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3AsyncByteReader.java +++ b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3AsyncByteReader.java @@ -83,7 +83,7 @@ public CompletableFuture readFully(long offset, ByteBuf dst, int dstOffset if(len == 0) { throw new IllegalArgumentException("Empty reads not allowed."); } - logger.debug("[{}] Submitted request to queue for bucket {}, path {} for {}", threadName, bucket, path, range(offset, len)); + logger.debug("[{}] Submitted request to read from s3 bucket {}, path {} at starting offset {} for length {} ", threadName, bucket, path, offset, len); return asyncReadWithRetry(offset, dst, dstOffset, len, 1); } @@ -162,6 +162,7 @@ private CompletableFuture asyncReadWithRetry(long offset, ByteBuf dst, int }).thenCompose(Function.identity()); } + /** * Used to read a single byte range. */ diff --git a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3FileSystem.java b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3FileSystem.java index f4dfaa1a66..a21342aa70 100644 --- a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3FileSystem.java +++ b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3FileSystem.java @@ -122,7 +122,7 @@ public class S3FileSystem extends ContainerFileSystem implements MayProvideAsync private static final String S3_CN_ENDPOINT_END = S3_ENDPOINT_END + ".cn"; private static final ExecutorService threadPool = Executors.newCachedThreadPool(new NamedThreadFactory("s3-async-read-")); - private final Retryer retryer = new Retryer.Builder() + private final Retryer retryer = Retryer.newBuilder() .retryIfExceptionOfType(SdkClientException.class) .retryIfExceptionOfType(software.amazon.awssdk.core.exception.SdkClientException.class) .setWaitStrategy(Retryer.WaitStrategy.EXPONENTIAL, 250, 2500) @@ -321,7 +321,7 @@ protected ContainerHolder getUnknownContainer(String containerName) throws IOExc } logger.debug("Unknown container '{}' found ? 
{}", containerName, containerFound); if (!containerFound) { - throw new ContainerNotFoundException("Bucket " + containerName + " not found"); + throw new ContainerNotFoundException("Bucket [" + containerName + "] not found."); } return new BucketCreator(getConf(), containerName).toContainerHolder(); } diff --git a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3StoragePlugin.java b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3StoragePlugin.java index feb10376f7..26cb7f8f34 100644 --- a/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3StoragePlugin.java +++ b/plugins/s3/src/main/java/com/dremio/plugins/s3/store/S3StoragePlugin.java @@ -226,6 +226,7 @@ protected boolean isAsyncEnabledForQuery(OperatorContext context) { return context != null && context.getOptions().getOption(S3Options.ASYNC); } + @Override public boolean supportReadSignature(DatasetMetadata metadata, boolean isFileDataset) { return false; } diff --git a/plugins/s3/src/main/java/com/dremio/plugins/util/CloseableRef.java b/plugins/s3/src/main/java/com/dremio/plugins/util/CloseableRef.java index 14e44fb50f..0ee2090da9 100644 --- a/plugins/s3/src/main/java/com/dremio/plugins/util/CloseableRef.java +++ b/plugins/s3/src/main/java/com/dremio/plugins/util/CloseableRef.java @@ -49,6 +49,7 @@ public T acquireRef() { return this.obj; } + @Override public void close() throws Exception { if (logger.isDebugEnabled()) { logger.debug("Class {} released the ref for {}:{}", getCallingClass(), obj.getClass().getSimpleName(), System.identityHashCode(obj)); diff --git a/plugins/s3/src/main/java/com/dremio/plugins/util/ContainerFileSystem.java b/plugins/s3/src/main/java/com/dremio/plugins/util/ContainerFileSystem.java index 23836a975a..ad8bbd4273 100644 --- a/plugins/s3/src/main/java/com/dremio/plugins/util/ContainerFileSystem.java +++ b/plugins/s3/src/main/java/com/dremio/plugins/util/ContainerFileSystem.java @@ -249,6 +249,7 @@ public final FileSystem get() throws IOException{ public abstract FileSystem create() throws IOException; + @Override public void close() throws IOException { if (fs != null) { fs.close(); diff --git a/plugins/s3/src/main/resources/s3-layout.json b/plugins/s3/src/main/resources/s3-layout.json index f2ae3008ed..8059a9b87b 100644 --- a/plugins/s3/src/main/resources/s3-layout.json +++ b/plugins/s3/src/main/resources/s3-layout.json @@ -4,7 +4,7 @@ "beta" ], "metadataRefresh": { - "datasetDiscovery": true, + "datasetDiscovery": false, "isFileSystemSource": true }, "form": { diff --git a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3Compat.java b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3Compat.java index c4d36d8e45..7eeec74c5d 100644 --- a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3Compat.java +++ b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3Compat.java @@ -36,6 +36,7 @@ import com.dremio.exec.catalog.conf.AWSAuthenticationType; import com.dremio.exec.catalog.conf.Property; import com.dremio.exec.server.SabotContext; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import io.findify.s3mock.S3Mock; @@ -53,19 +54,16 @@ public class TestS3Compat extends BaseTestQuery { @Before public void setup() { - Integer port = Integer.getInteger("s3mock.reserved.port"); - if(port == null) { - throw new RuntimeException("Can't start test since s3.reserved.port property is not available."); - } - this.port = port; - this.api = new S3Mock.Builder().withPort(port).withFileBackend(TestTools.getWorkingPath() + 
"/src/test/resources/s3compat").build(); - this.api.start(); + Preconditions.checkState(api == null); + api = new S3Mock.Builder().withPort(0).withFileBackend(TestTools.getWorkingPath() + "/src/test/resources/s3compat").build(); + port = api.start().localAddress().getPort(); } @After public void teardown() { - if(api != null) { - api.stop(); + if (api != null) { + api.shutdown(); + api = null; } } diff --git a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FSHealthChecker.java b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FSHealthChecker.java index a088c6143a..c02ce3c5cd 100644 --- a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FSHealthChecker.java +++ b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FSHealthChecker.java @@ -16,7 +16,6 @@ package com.dremio.plugins.s3.store; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -41,7 +40,7 @@ public class TestS3FSHealthChecker { @Test - public void testGoodHealthCheck() { + public void testGoodHealthCheck() throws Exception { TestExtendedS3FSHealthChecker fs = new TestExtendedS3FSHealthChecker(new Configuration()); AmazonS3 mockedS3Client = mock(AmazonS3.class); ListObjectsV2Result result = new ListObjectsV2Result(); @@ -49,12 +48,8 @@ public void testGoodHealthCheck() { when(mockedS3Client.listObjectsV2(any(ListObjectsV2Request.class))).thenReturn(result); fs.setCustomClient(mockedS3Client); - try { - Path p = Path.of("/bucket/prefix"); - fs.healthCheck(p, ImmutableSet.of()); - } catch (IOException e) { - fail(e.getMessage()); - } + Path p = Path.of("/bucket/prefix"); + fs.healthCheck(p, ImmutableSet.of()); } @Test (expected = IOException.class) diff --git a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FileSystem.java b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FileSystem.java index 7a8cfa62ea..904e001ed4 100644 --- a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FileSystem.java +++ b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestS3FileSystem.java @@ -17,7 +17,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; @@ -108,7 +107,7 @@ public void testInvalidRegionFromEndpoint() { } @Test - public void testUnknownContainerExists() { + public void testUnknownContainerExists() throws Exception { TestExtendedS3FileSystem fs = new TestExtendedS3FileSystem(); AmazonS3 mockedS3Client = mock(AmazonS3.class); when(mockedS3Client.doesBucketExistV2(any(String.class))).thenReturn(true); @@ -117,11 +116,7 @@ public void testUnknownContainerExists() { when(mockedS3Client.listObjectsV2(any(ListObjectsV2Request.class))).thenReturn(result); fs.setCustomClient(mockedS3Client); - try { - assertNotNull(fs.getUnknownContainer("testunknown")); - } catch (IOException e) { - fail(e.getMessage()); - } + assertNotNull(fs.getUnknownContainer("testunknown")); } @Test (expected = ContainerNotFoundException.class) diff --git a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestWhiteListedBuckets.java b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestWhiteListedBuckets.java index 80e105b53b..9b278b50e5 100644 --- a/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestWhiteListedBuckets.java +++ 
b/plugins/s3/src/test/java/com/dremio/plugins/s3/store/TestWhiteListedBuckets.java @@ -38,9 +38,9 @@ import com.dremio.exec.catalog.conf.AWSAuthenticationType; import com.dremio.exec.catalog.conf.Property; import com.dremio.exec.server.SabotContext; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import akka.http.scaladsl.Http; import io.findify.s3mock.S3Mock; import software.amazon.awssdk.regions.Region; @@ -55,9 +55,9 @@ public class TestWhiteListedBuckets extends BaseTestQuery { @BeforeClass public static void setup() { + Preconditions.checkState(s3Mock == null); s3Mock = new S3Mock.Builder().withPort(0).withInMemoryBackend().build(); - Http.ServerBinding binding = s3Mock.start(); - port = binding.localAddress().getPort(); + port = s3Mock.start().localAddress().getPort(); EndpointConfiguration endpoint = new EndpointConfiguration(String.format("http://localhost:%d", port), Region.US_EAST_1.toString()); AmazonS3 client = AmazonS3ClientBuilder @@ -75,7 +75,8 @@ public static void setup() { @AfterClass public static void teardown() { if (s3Mock != null) { - s3Mock.stop(); + s3Mock.shutdown(); + s3Mock = null; } } diff --git a/plugins/sysflight/pom.xml b/plugins/sysflight/pom.xml index cc0c37275e..06c030009c 100644 --- a/plugins/sysflight/pom.xml +++ b/plugins/sysflight/pom.xml @@ -23,7 +23,7 @@ dremio-plugin-parent com.dremio.plugins - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-sysflight-plugin diff --git a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/ExpressionConverter.java b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/ExpressionConverter.java index c3d041da64..8a1dad475f 100644 --- a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/ExpressionConverter.java +++ b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/ExpressionConverter.java @@ -172,16 +172,15 @@ private SearchQuery handleLike(RexCall call) { switch(operands.size()) { case 3: - RexNode op3 = operands.get(2); if(op3 instanceof RexLiteral) { escape = ((RexLiteral) op3).getValue3().toString(); } else { return null; } + // fall through case 2: - RexNode op1 = operands.get(0); if(op1 instanceof RexInputRef) { RexInputRef input = ((RexInputRef) op1); @@ -196,7 +195,6 @@ private SearchQuery handleLike(RexCall call) { } else { return null; } - break; default: diff --git a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConf.java b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConf.java index a786163390..73fdfff5c5 100644 --- a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConf.java +++ b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConf.java @@ -22,25 +22,27 @@ import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.conf.ConnectionConf; import com.dremio.exec.catalog.conf.SourceType; -import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.sys.SystemTable; -import io.protostuff.Tag; - /** * Connection config for Sys-flight */ @SourceType(value = "SYSFLIGHT", configurable = false) public class SysFlightPluginConf extends ConnectionConf{ - @Tag(1) - public NodeEndpoint endpoint; + /** + * @Tag(1) + * public NodeEndpoint endpoint; + * + * Please, do not use protobuf here or in any other class that extends ConnectionConf. 
ConnectionConf uses protostuff + * and if you add protobuf this will result in a backward compatibility issue, in the case you have to change the + * protobuf file in the future. + */ @Override public SysFlightStoragePlugin newPlugin(SabotContext context, String name, Provider pluginIdProvider) { - return new SysFlightStoragePlugin(this, context, name, true, - Collections.singletonList(SystemTable.DEPENDENCIES)); + return new SysFlightStoragePlugin(context, name, true, Collections.singletonList(SystemTable.DEPENDENCIES)); } @Override diff --git a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConfigProvider.java b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConfigProvider.java index 2bf6b0a6ba..002e1497ab 100644 --- a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConfigProvider.java +++ b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightPluginConfigProvider.java @@ -18,17 +18,14 @@ import javax.inject.Provider; import com.dremio.exec.catalog.conf.ConnectionConf; -import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; import com.dremio.service.Service; /** * Connection config provider for Sys-flight */ public class SysFlightPluginConfigProvider implements Service, Provider> { - private final Provider endPoint; - public SysFlightPluginConfigProvider(Provider endPoint) { - this.endPoint = endPoint; + public SysFlightPluginConfigProvider() { } @Override @@ -42,7 +39,6 @@ public void close() throws Exception { @Override public ConnectionConf get() { SysFlightPluginConf conf = new SysFlightPluginConf(); - conf.endpoint = endPoint.get(); return conf; } } diff --git a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightRecordReader.java b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightRecordReader.java index 513056a07c..ddd77774b7 100644 --- a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightRecordReader.java +++ b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightRecordReader.java @@ -130,6 +130,7 @@ public int next() { if (batchHolderIsEmpty) { if (stream.next()) { batchHolder = stream.getRoot(); + LOGGER.debug("Received recordBatch in SysFlight plugin stream {}", batchHolder.getRowCount()); batchHolderIsEmpty = false; ptrToNextRecord = 0; } else { diff --git a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightStoragePlugin.java b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightStoragePlugin.java index efb1b875b4..a58832dcad 100644 --- a/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightStoragePlugin.java +++ b/plugins/sysflight/src/main/java/com/dremio/plugins/sysflight/SysFlightStoragePlugin.java @@ -26,7 +26,6 @@ import java.util.Optional; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -73,49 +72,33 @@ * Plugin for System tables using Flight protocol, also aware of tables in {@link SystemTable} */ public class SysFlightStoragePlugin implements StoragePlugin, SupportsListingDatasets { - static final org.slf4j.Logger LOGGER = org.slf4j.LoggerFactory.getLogger(SysFlightStoragePlugin.class); - private final Map legacyTableMap = Stream.of(SystemTable.values()) .collect(Collectors.toMap(systemTable -> canonicalize(systemTable.getDatasetPath()), Function.identity())); private volatile Set flightTableList = new HashSet<>(); 
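// A minimal sketch (hypothetical plugin name; generic parameters elided) of the pattern the
// SysFlightPluginConf comment above recommends: keep ConnectionConf fields as plain
// protostuff-tagged scalars instead of embedding generated protobuf messages such as
// NodeEndpoint.
//
//   import io.protostuff.Tag;
//
//   @SourceType(value = "EXAMPLE", configurable = false)
//   public class ExamplePluginConf extends ConnectionConf {
//     @Tag(1) public String host; // scalar fields round-trip safely through protostuff
//     @Tag(2) public int port;    // and stay backward compatible as the conf evolves
//   }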
- private final SysFlightPluginConf conf; private final SabotContext context; private final String name; - private final Boolean useConduit; + private final boolean useConduit; private final BufferAllocator allocator; - private final Predicate userPredicate; private final JobResultInfoProvider jobResultInfoProvider; private volatile FlightClient flightClient; private volatile ManagedChannel prevChannel; - public SysFlightStoragePlugin(SysFlightPluginConf conf, - SabotContext context, + public SysFlightStoragePlugin(SabotContext context, String name, - Boolean useConduit, - Predicate userPredicate, + boolean useConduit, List excludeLegacyTablesList) { excludeLegacyTables(legacyTableMap, excludeLegacyTablesList); - this.conf = conf; this.context = context; this.jobResultInfoProvider = context.getJobResultInfoProvider(); this.name = name; this.useConduit = useConduit; - this.userPredicate = userPredicate; allocator = context.getAllocator().newChildAllocator(SysFlightStoragePlugin.class.getName(), 0, Long.MAX_VALUE); } - SysFlightStoragePlugin(SysFlightPluginConf conf, - SabotContext context, - String name, - Boolean useConduit, - List excludeLegacyTablesList) { - this(conf, context, name, useConduit, s -> true, excludeLegacyTablesList); - } - Map getLegacyTableMap() { return legacyTableMap; } @@ -125,7 +108,7 @@ public FlightClient getFlightClient() { if (useConduit) { curChannel = context.getConduitProvider().getOrCreateChannelToMaster(); } else { - curChannel = context.getConduitProvider().getOrCreateChannel(conf.endpoint); + curChannel = context.getConduitProvider().getOrCreateChannel(context.getEndpoint()); } if (prevChannel != curChannel) { @@ -152,7 +135,7 @@ public SabotContext getSabotContext() { @Override public boolean hasAccessPermission(String user, NamespaceKey key, DatasetConfig datasetConfig) { - return this.userPredicate.test(user); + return true; } @Override diff --git a/pom.xml b/pom.xml index a8d7d0435a..fd915575f2 100755 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 pom Dremio - Parent @@ -38,12 +38,11 @@ Submodules should rely on dependenciesManagement as much as possible --> 9.0.0-20221123064031-c39b8a6253-dremio - 1.22.0 - 1.16.0-202212291946520071-31c33937 + 1.23.0 + 1.17.0-202305081555330806-ba52e3e7 10.14.2.0 - 1.81 + 1.82 3.1.0 - 0.7.0 0.3 0.9.16 2.3.31 @@ -52,26 +51,25 @@ 6.2.0.Final - 3.3.2-dremio-202207041927090255-61c2bd1 + 3.3.2-dremio-202305122016210436-942ab1c 1.10.2 - 1.0.0-e2bb9ad-20221027154339-e865afe + 1.2.0-e340ad5-20230511162417-de84403 8.3.0 2.3.10-202208021035330109-f5bda9e - 2.8.5-dremio-r2-202106241733540604-acdda22 + 2.8.5-dremio-r2-202305092225000902-c107c46 2.8.5 - 1.11.761 2.8.5 - 2.3.9-dremio-202209290028370685-7b6189f + 2.3.9-dremio-202305101625150255-0beca91 4.2.1 4.1.6 4.1.7 3.2.0-m3 3.0.1 2.7.6.200-eep-810 - 2.3.9.0-eep-810-dremio-202209290029560475-99ee21d + 2.3.9.0-eep-810-dremio-202305101622080327-3ed1ada 6.2.0.20-mapr - 1.10.0-202211080432360347-3b40cc3 + 1.10.0-202302080426290085-edca6ae 1.1.13 - 3.3.2-dremio-202207041927090255-61c2bd1 + 3.3.2-dremio-202305122016210436-942ab1c 8.3.0 2.3.10-202208021035330109-f5bda9e ${plugin.hive3.hadoop.version} ${plugin.hive3.hadoop.version} ${plugin.hive3.hadoop.version} - ${aws-sdk.version} - 3.1.1-dremio-202108260418420981-22681bb - 2.2.0 - 1.5.1-dremio-202007271350010286-8131f30 + 3.1.1-dremio-202305102143550145-5934df5 + 2.5.3 + 1.5.1-dremio-202305092131550668-2f32481 4.2.4 4.1.17 4.1.19 3.2.0-m3 
3.0.1 2.6.1 - 2.2.2-dremio-202208181741250538-215a1b7 + 2.2.2-dremio-202302142306550801-5be8d7e 2.10.0 jar 9+181-r4173-1 - 0.1.16 + 0.1.18 - -Xplugin:ErrorProne -Xep:LockNotBeforeTry:ERROR -Xep:OrphanedFormatString:ERROR -Xep:NonAtomicVolatileUpdate:ERROR -Xep:ComplexBooleanConstant:ERROR -Xep:AssertionFailureIgnored:ERROR -Xep:UnnecessaryAssignment:ERROR -Xep:UseCorrectAssertInTests:ERROR -Xep:StreamResourceLeak:ERROR -Xep:DoubleCheckedLocking:ERROR -Xep:InconsistentHashCode:ERROR -Xep:ArgumentSelectionDefectChecker:ERROR -Xep:ModifyCollectionInEnhancedForLoop:ERROR -Xep:EqualsIncompatibleType:ERROR -Xep:OptionalNotPresent:ERROR -Xep:JUnitAmbiguousTestClass:ERROR -Xep:ShortCircuitBoolean:ERROR -Xep:PreconditionsCheckNotNullRepeated:ERROR -Xep:PreconditionsInvalidPlaceholder:ERROR ${errorprone.args.jdk11} -XepExcludedPaths:${project.build.directory}/.*generated-(test-)?sources/.* + -Xplugin:ErrorProne -Xep:MissingOverride:ERROR -Xep:LockNotBeforeTry:ERROR -Xep:OrphanedFormatString:ERROR -Xep:NonAtomicVolatileUpdate:ERROR -Xep:ComplexBooleanConstant:ERROR -Xep:AssertionFailureIgnored:ERROR -Xep:UnnecessaryAssignment:ERROR -Xep:UseCorrectAssertInTests:ERROR -Xep:StreamResourceLeak:ERROR -Xep:DoubleCheckedLocking:ERROR -Xep:InconsistentHashCode:ERROR -Xep:ArgumentSelectionDefectChecker:ERROR -Xep:ModifyCollectionInEnhancedForLoop:ERROR -Xep:EqualsIncompatibleType:ERROR -Xep:OptionalNotPresent:ERROR -Xep:JUnitAmbiguousTestClass:ERROR -Xep:ShortCircuitBoolean:ERROR -Xep:PreconditionsCheckNotNullRepeated:ERROR -Xep:PreconditionsInvalidPlaceholder:ERROR ${errorprone.args.jdk11} -XepExcludedPaths:${project.build.directory}/.*generated-(test-)?sources/.* 5.6.14 + 2.14.2 + 2.39.1 + 9.4.51.v20230217 2.1.1 4.13.2 - 5.9.1 - 4.8.1 - 3.23.1 + 5.9.2 + 4.11.0 + 3.24.1 8.3.0 1.2.11 4.1.19 3.0.8 - 0.2.4 - 0.21.4 - 0.44.0 - 4.1.68.Final - 2.0.46.Final + 0.3.12 + 0.28.1 + + 0.59.0 + + 4.1.89.Final + 2.0.56.Final + 3.10.6.Final-nohttp - 2.0.43.Final - 1.12.0-202210150148350243-15cbcc2 + 1.12.0-202302141732150599-28d943b 3.6.0 42.4.1 3.21.9 1.4.4 0.1 1.7.36 - 3.13.24-dremio-0 + 3.13.30 0.14.0-202111020547020344-41637331 0.10 3.4.14 4.2.0 2.12.0 - 1.12.261 - 2.16.104 + 1.12.400 + 2.17.295 3.1.7 1.12.0 5.14.2 0.31.1 - 1.45.0 - 1.31 + 1.54.1 31.1-jre - 4.2.2 + 5.1.0 4.2.3 - 1.18.0 + 1.25.0 2.3.5 - 5.4.0 + 6.3.0 1.6.4 1.7.0 1.64 @@ -183,6 +183,7 @@ 6.22 7.7.3 + 2.4.10 @@ -196,7 +197,7 @@ build install --frozen-lockfile --prefer-offline --reporter=silent pnpm@${pnpm.version} ${pnpm.installscript} - 7.2.1 + 8.1.0 nonmapr false false @@ -340,6 +341,25 @@ + + + + kr.motd.maven + os-maven-plugin + + + initialize + + detect + + + + false + + @@ -374,9 +394,7 @@ - + org.jsonschema2pojo jsonschema2pojo-jdk-annotation @@ -471,8 +489,8 @@ [11,) - 2.16 - -Xep:AlreadyChecked:ERROR -Xep:DoubleBraceInitialization:WARN -Xep:Slf4jLoggerShouldBeFinal:ERROR -Xep:Slf4jIllegalPassedClass:ERROR -Xep:Slf4jSignOnlyFormat:WARN -Xep:Slf4jFormatShouldBeConst:WARN -Xep:Slf4jDoNotLogMessageOfExceptionExplicitly:WARN -Xep:Slf4jLoggerShouldBeNonStatic:OFF ${errorprone.module.args} + 2.18.0 + -Xep:FallThrough:ERROR -Xep:AlreadyChecked:ERROR -Xep:DoubleBraceInitialization:WARN -Xep:Slf4jLoggerShouldBeFinal:ERROR -Xep:Slf4jIllegalPassedClass:ERROR -Xep:Slf4jSignOnlyFormat:WARN -Xep:Slf4jFormatShouldBeConst:WARN -Xep:Slf4jDoNotLogMessageOfExceptionExplicitly:WARN -Xep:Slf4jLoggerShouldBeNonStatic:OFF ${errorprone.module.args} @@ -534,7 +552,7 @@ - 2.3.9.0-eep-810-dremio-202209290029560475-99ee21d + 2.3.9.0-eep-810-dremio-202305101622080327-3ed1ada 
2.7.6.200-eep-810 2.7.4.0-mapr-700 6.2.0.20-mapr @@ -610,6 +628,9 @@ dremio-free https://maven.dremio.com/free/ + + false + @@ -745,18 +766,9 @@ - kr.motd.maven os-maven-plugin - 1.7.0 - - - initialize - - detect - - - + true com.diffplug.spotless @@ -797,7 +809,6 @@ ${project.basedir}/src/test/java - UTF-8 true true true @@ -846,9 +857,9 @@ false -Xmaxerrs - 5000 + 9000 -Xmaxwarns - 5000 + 9000 false @@ -906,6 +917,7 @@ org.mortbay.jetty:servlet-api-2.5 org.apache.logging.log4j:log4j-slf4j-impl log4j:* + org.slf4j:slf4j-log4j12 io.netty:*:* com.amazonaws:aws-java-sdk @@ -914,8 +926,8 @@ io.netty:*:${netty.version} io.netty:netty:${netty3.version} - io.netty:netty-tcnative-boringssl-static:${netty.boringssl.version} - io.netty:netty-tcnative-classes:${netty.tcnative.version} + io.netty:netty-tcnative-boringssl-static:${netty-tcnative.version} + io.netty:netty-tcnative-classes:${netty-tcnative.version} @@ -1281,7 +1293,7 @@ limitations under the License. org.apache.maven.plugins maven-wrapper-plugin - 3.1.1 + 3.2.0 maven-clean-plugin @@ -1289,19 +1301,19 @@ limitations under the License. maven-dependency-plugin - 3.3.0 + 3.5.0 maven-resources-plugin - 3.3.0 + 3.3.1 maven-compiler-plugin - 3.10.1 + 3.11.0 maven-enforcer-plugin - 3.1.0 + 3.2.1 maven-jar-plugin @@ -1309,7 +1321,7 @@ limitations under the License. maven-surefire-plugin - 3.0.0-M7 + 3.0.0 ${surefire.argLine} @@ -1342,7 +1354,7 @@ limitations under the License. maven-failsafe-plugin - 3.0.0-M7 + 3.0.0 false @@ -1368,15 +1380,15 @@ limitations under the License. maven-assembly-plugin - 3.4.2 + 3.5.0 maven-checkstyle-plugin - 3.2.0 + 3.2.1 maven-shade-plugin - 3.2.4 + 3.4.1 maven-plugin-plugin @@ -1389,7 +1401,7 @@ limitations under the License. maven-install-plugin - 3.0.1 + 3.1.1 @@ -1433,7 +1445,7 @@ limitations under the License. org.owasp dependency-check-maven - 7.2.1 + 8.1.2 com.dremio.tools @@ -1466,7 +1478,7 @@ limitations under the License. v16.15.0 7.7.5 - 7.2.1 + 8.1.0 ${project.build.directory}/frontend ${project.build.directory} @@ -1599,7 +1611,17 @@ limitations under the License. com.diffplug.spotless spotless-maven-plugin - 2.22.8 + 2.29.0 + + + org.jboss.jandex + jandex-maven-plugin + 1.2.1 + + + kr.motd.maven + os-maven-plugin + 1.7.1 @@ -1630,6 +1652,28 @@ limitations under the License. junit test + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-params + test + + + + org.junit.jupiter + junit-jupiter-engine + test + + + + org.junit.vintage + junit-vintage-engine + test + @@ -1637,6 +1681,11 @@ limitations under the License. mockito-core test + + org.mockito + mockito-junit-jupiter + test + org.assertj assertj-core @@ -1969,16 +2018,34 @@ limitations under the License. ${mariadb.connector.version} - org.apache.commons commons-compress - 1.21 + 1.22 org.apache.commons commons-text ${commons-text.version} + + + commons-collections + commons-collections + 3.2.2 + + + org.apache.commons + commons-collections4 + 4.4 + + + org.yaml + snakeyaml + ${snakeyaml.version} + org.apache.curator curator-x-discovery @@ -2038,23 +2105,10 @@ limitations under the License. io.prometheus - simpleclient - ${simpleclient.version} - - - io.prometheus - simpleclient_servlet - ${simpleclient.version} - - - io.prometheus - simpleclient_dropwizard - ${simpleclient.version} - - - io.prometheus - simpleclient_hotspot - ${simpleclient.version} + simpleclient_bom + 0.16.0 + pom + import io.dropwizard.metrics @@ -2081,12 +2135,12 @@ limitations under the License. 
com.google.code.findbugs annotations - 3.0.1 + 3.0.1u2 com.google.code.findbugs jsr305 - 3.0.1 + 3.0.2 com.google.guava @@ -2094,14 +2148,16 @@ limitations under the License. ${guava.version} - com.google.protobuf - protobuf-java - ${protobuf.version} + com.github.ben-manes.caffeine + caffeine + 2.9.3 com.google.protobuf - protobuf-java-util + protobuf-bom ${protobuf.version} + pom + import com.fasterxml.jackson @@ -2110,10 +2166,24 @@ limitations under the License. import pom + + + + org.projectnessie.cel + cel-bom + ${nessie-cel.version} + import + pom + + + com.google.code.gson + gson + 2.10.1 + com.fasterxml.woodstox woodstox-core - 5.2.1 + 5.4.0 com.github.slugify @@ -2128,7 +2198,7 @@ limitations under the License. com.typesafe config - 1.4.1 + 1.4.2 com.google.flatbuffers @@ -2138,7 +2208,7 @@ limitations under the License. commons-codec commons-codec - 1.4 + 1.15 org.objenesis @@ -2157,6 +2227,27 @@ limitations under the License. pom import + + io.opentelemetry + opentelemetry-bom-alpha + ${opentelemetry.version}-alpha + pom + import + + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-bom + ${opentelemetry.version} + pom + import + + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-bom-alpha + ${opentelemetry.version}-alpha + pom + import + io.opentracing opentracing-api @@ -2231,34 +2322,29 @@ limitations under the License. jaeger-client 1.5.0 - - io.grpc - grpc-bom - ${grpc.version} - pom - import - - - org.yaml - snakeyaml - ${snakeyaml.version} - io.netty netty ${netty3.version} + io.netty netty-bom ${netty.version} pom import + - io.netty - netty-tcnative-boringssl-static - ${netty.boringssl.version} + io.grpc + grpc-bom + ${grpc.version} + pom + import io.protostuff @@ -2304,13 +2390,22 @@ limitations under the License. org.mockito - mockito-junit-jupiter + mockito-bom ${mockito.version} + pom + import + + + org.assertj + assertj-bom + ${assertj.version} + pom + import joda-time joda-time - 2.9 + 2.12.1 net.hydromatic @@ -2340,17 +2435,18 @@ limitations under the License. org.apache.httpcomponents httpclient - 4.5.13 + 4.5.14 + + + commons-logging + commons-logging + + org.apache.httpcomponents httpcore - 4.4.9 - - - org.projectnessie.cel - cel-tools - ${nessie-cel.version} + 4.4.16 org.apache.arrow @@ -2504,6 +2600,11 @@ limitations under the License. commons-io ${commons-io.version} + + org.apache.commons + commons-math3 + 3.6.1 + org.asynchttpclient async-http-client @@ -2579,10 +2680,6 @@ limitations under the License. org.mortbay.jetty servlet-api-2.5 - - org.mortbay.jetty - servlet-api-2.5 - javax.servlet servlet-api @@ -2779,10 +2876,6 @@ limitations under the License. org.mortbay.jetty servlet-api-2.5 - - org.mortbay.jetty - servlet-api-2.5 - javax.servlet servlet-api @@ -2953,7 +3046,7 @@ limitations under the License. net.minidev json-smart - 2.4.8 + ${json-smart.version} org.apache.hadoop @@ -2998,10 +3091,6 @@ limitations under the License. org.mortbay.jetty servlet-api-2.5 - - org.mortbay.jetty - servlet-api-2.5 - javax.servlet servlet-api @@ -3046,10 +3135,6 @@ limitations under the License. com.sun.jersey jersey-json - - com.sun.jersey - jersey-core - core org.eclipse.jdt @@ -3105,10 +3190,6 @@ limitations under the License. slf4j-log4j12 org.slf4j - - slf4j-log4j12 - org.slf4j - log4j log4j @@ -3228,7 +3309,6 @@ limitations under the License. org.apache.hive hive-exec - compile org.mortbay.jetty @@ -3242,6 +3322,10 @@ limitations under the License. 
commons-logging commons-logging-api + + org.apache.ivy + ivy + slf4j-log4j12 org.slf4j @@ -3262,10 +3346,6 @@ limitations under the License. com.google.guava guava - - jline - jline - com.google.protobuf protobuf-java @@ -3438,6 +3518,16 @@ limitations under the License. org.apache.zookeeper zookeeper ${zookeeper.version} + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + @@ -3475,26 +3565,11 @@ limitations under the License. bcpkix-fips 1.0.6 - - org.assertj - assertj-core - ${assertj.version} - org.hsqldb hsqldb ${hsqldb.version} - - org.mockito - mockito-core - ${mockito.version} - - - org.mockito - mockito-inline - ${mockito.version} - com.github.tomakehurst wiremock-jre8 @@ -3589,12 +3664,12 @@ limitations under the License. net.java.dev.jna jna - 4.5.0 + 5.12.1 net.java.dev.jna jna-platform - 4.5.0 + 5.12.1 software.amazon.awssdk @@ -3618,7 +3693,7 @@ limitations under the License. org.apache.hadoop hadoop-azure - 2.8.5-dremio-r2-202106241733540604-acdda22 + 2.8.5-dremio-r2-202305092225000902-c107c46 org.apache.hadoop @@ -3695,7 +3770,7 @@ limitations under the License. com.hubspot.jackson jackson-datatype-protobuf - 0.9.12 + 0.9.13 org.eclipse.jetty @@ -3758,6 +3833,11 @@ limitations under the License. nimbus-jose-jwt 8.8 + + com.nimbusds + oauth2-oidc-sdk + 9.3 + org.codehaus.groovy groovy-all @@ -3770,7 +3850,7 @@ limitations under the License. runtime - org.projectnessie + org.projectnessie.nessie nessie-bom ${nessie.version} pom @@ -3848,6 +3928,9 @@ limitations under the License. dremio-public https://maven.dremio.com/public/ + + false + diff --git a/protocol/pom.xml b/protocol/pom.xml index 867a7b5896..533eed4274 100644 --- a/protocol/pom.xml +++ b/protocol/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio diff --git a/protocol/src/main/protobuf/UserBitShared.proto b/protocol/src/main/protobuf/UserBitShared.proto index 186bb89ac0..ef9c1317d5 100644 --- a/protocol/src/main/protobuf/UserBitShared.proto +++ b/protocol/src/main/protobuf/UserBitShared.proto @@ -95,6 +95,7 @@ enum WorkloadType { FLIGHT = 12; METADATA_REFRESH = 13; INTERNAL_ICEBERG = 14; + D2D = 15; } message DremioPBError{ @@ -366,6 +367,8 @@ message QueryProfile { repeated AttemptEvent state_list = 38; optional int32 num_plan_cache_used = 39; map rel_info_map = 40; + optional int32 num_joins_in_user_query = 41; + optional int32 num_joins_in_final_prel = 42; } message RelNodeInfo{ @@ -513,6 +516,7 @@ message PlanPhaseProfile { optional string plan = 3; optional string planner_dump = 4; optional FragmentRpcSizeStats size_stats = 5; + map time_breakdown_per_rule = 6; } message MajorFragmentProfile { @@ -627,6 +631,8 @@ message OperatorProfile { optional int32 operator_subtype = 11; optional int64 outputRecords = 12; optional int64 outputBytes = 13; + optional int64 addedFiles = 14; // DML + optional int64 removedFiles = 15; // DML } message StreamProfile { @@ -731,6 +737,7 @@ enum CoreOperatorType { BRIDGE_FILE_READER = 62; ICEBERG_MANIFEST_WRITER = 63; ICEBERG_METADATA_FUNCTIONS_READER = 64; + ICEBERG_SNAPSHOTS_SUB_SCAN = 65; } message MetricDef { diff --git a/protocol/src/main/protobuf/UserCoordRPC.proto b/protocol/src/main/protobuf/UserCoordRPC.proto index f54d546946..0e61835792 100644 --- a/protocol/src/main/protobuf/UserCoordRPC.proto +++ b/protocol/src/main/protobuf/UserCoordRPC.proto @@ -734,6 +734,7 @@ enum SubmissionSource { JDBC = 3; ODBC = 4; FLIGHT = 5; + D2D = 6; } /* diff --git a/provision/common/pom.xml 
b/provision/common/pom.xml index db76374b88..866a540d86 100644 --- a/provision/common/pom.xml +++ b/provision/common/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-provision-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.provision @@ -76,16 +76,6 @@ org.apache.zookeeper zookeeper - - - org.slf4j - slf4j-log4j12 - - - log4j - log4j - - com.fasterxml.jackson.core diff --git a/provision/common/src/main/java/com/dremio/provision/ClusterCreateRequest.java b/provision/common/src/main/java/com/dremio/provision/ClusterCreateRequest.java index 8b69afa8b0..4948dc27dc 100644 --- a/provision/common/src/main/java/com/dremio/provision/ClusterCreateRequest.java +++ b/provision/common/src/main/java/com/dremio/provision/ClusterCreateRequest.java @@ -31,11 +31,14 @@ public interface ClusterCreateRequest extends ConsistentProps { String getName(); + @Override @NotNull ClusterType getClusterType(); @NotNull DynamicConfig getDynamicConfig(); + @Override YarnPropsApi getYarnProps(); + @Override AwsPropsApi getAwsProps(); boolean isAllowAutoStart(); boolean isAllowAutoStop(); diff --git a/provision/common/src/main/java/com/dremio/provision/ClusterModifyRequest.java b/provision/common/src/main/java/com/dremio/provision/ClusterModifyRequest.java index f60cae5ebd..dd26332757 100644 --- a/provision/common/src/main/java/com/dremio/provision/ClusterModifyRequest.java +++ b/provision/common/src/main/java/com/dremio/provision/ClusterModifyRequest.java @@ -33,6 +33,7 @@ public interface ClusterModifyRequest extends ConsistentProps { @NotNull String getId(); + @Override @NotNull ClusterType getClusterType(); @NotNull DynamicConfig getDynamicConfig(); @@ -41,8 +42,10 @@ public interface ClusterModifyRequest extends ConsistentProps { @NotNull ClusterDesiredState getDesiredState(); + @Override YarnPropsApi getYarnProps(); + @Override AwsPropsApi getAwsProps(); boolean isAllowAutoStart(); diff --git a/provision/common/src/main/java/com/dremio/provision/aws/util/EC2MetadataUtils.java b/provision/common/src/main/java/com/dremio/provision/aws/util/EC2MetadataUtils.java index 0ce5aec94d..201310cb3a 100644 --- a/provision/common/src/main/java/com/dremio/provision/aws/util/EC2MetadataUtils.java +++ b/provision/common/src/main/java/com/dremio/provision/aws/util/EC2MetadataUtils.java @@ -38,7 +38,7 @@ import software.amazon.awssdk.core.SdkSystemSetting; import software.amazon.awssdk.core.exception.SdkClientException; import software.amazon.awssdk.core.exception.SdkServiceException; -import software.amazon.awssdk.core.internal.util.UserAgentUtils; +import software.amazon.awssdk.core.util.SdkUserAgent; import software.amazon.awssdk.regions.util.HttpResourcesUtils; import software.amazon.awssdk.regions.util.ResourcesEndpointProvider; @@ -679,7 +679,7 @@ public URI endpoint() { @Override public Map headers() { Map requestHeaders = new HashMap<>(); - requestHeaders.put("User-Agent", UserAgentUtils.getUserAgent()); + requestHeaders.put("User-Agent", SdkUserAgent.create().userAgent()); requestHeaders.put("Accept", "*/*"); requestHeaders.put("Connection", "keep-alive"); @@ -711,7 +711,7 @@ public URI endpoint() { @Override public Map headers() { Map requestHeaders = new HashMap<>(); - requestHeaders.put("User-Agent", UserAgentUtils.getUserAgent()); + requestHeaders.put("User-Agent", SdkUserAgent.create().userAgent()); requestHeaders.put("Accept", "*/*"); requestHeaders.put("Connection", "keep-alive"); requestHeaders.put(EC2_METADATA_TOKEN_TTL_HEADER, DEFAULT_TOKEN_TTL); diff --git 
a/provision/common/src/main/java/com/dremio/provision/service/ProvisioningServiceImpl.java b/provision/common/src/main/java/com/dremio/provision/service/ProvisioningServiceImpl.java index c258ef63fe..3300970f4d 100644 --- a/provision/common/src/main/java/com/dremio/provision/service/ProvisioningServiceImpl.java +++ b/provision/common/src/main/java/com/dremio/provision/service/ProvisioningServiceImpl.java @@ -304,8 +304,6 @@ public synchronized ClusterEnriched modifyCluster(ClusterId clusterId, ClusterSt logger.debug("Action:{}", action); switch (action) { - case NONE: - return getClusterInfo(clusterId); case START: return startCluster(clusterId); case STOP: @@ -327,7 +325,6 @@ public synchronized ClusterEnriched modifyCluster(ClusterId clusterId, ClusterSt cluster.setDesiredState(modifiedCluster.getDesiredState()); store.put(clusterId, cluster); stopCluster(clusterId); - return getClusterInfo(clusterId); } if (ClusterState.STOPPED == cluster.getState() || ClusterState.FAILED == cluster.getState()) { // just modify, no need to start @@ -338,8 +335,9 @@ public synchronized ClusterEnriched modifyCluster(ClusterId clusterId, ClusterSt // start the cluster startCluster(clusterId); } - return getClusterInfo(clusterId); } + return getClusterInfo(clusterId); + case NONE: default: return getClusterInfo(clusterId); } diff --git a/provision/pom.xml b/provision/pom.xml index 464599b906..b72473b9ac 100644 --- a/provision/pom.xml +++ b/provision/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-provision-parent diff --git a/provision/shimloader/pom.xml b/provision/shimloader/pom.xml index f4e6c47082..75c0613188 100644 --- a/provision/shimloader/pom.xml +++ b/provision/shimloader/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-provision-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.provision diff --git a/provision/yarn/pom.xml b/provision/yarn/pom.xml index 9d9253b700..9516070230 100644 --- a/provision/yarn/pom.xml +++ b/provision/yarn/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-provision-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.provision diff --git a/provision/yarn/twill/pom.xml b/provision/yarn/twill/pom.xml index faad7ca1d8..95b3988f92 100644 --- a/provision/yarn/twill/pom.xml +++ b/provision/yarn/twill/pom.xml @@ -22,7 +22,7 @@ com.dremio.provision dremio-yarn-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.provision diff --git a/provision/yarn/yarntwill/pom.xml b/provision/yarn/yarntwill/pom.xml index dfd2620c44..693f46d82a 100644 --- a/provision/yarn/yarntwill/pom.xml +++ b/provision/yarn/yarntwill/pom.xml @@ -22,7 +22,7 @@ com.dremio.provision dremio-yarn-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.provision diff --git a/sabot/grammar/pom.xml b/sabot/grammar/pom.xml index 936ae2621f..4e3634b8bb 100644 --- a/sabot/grammar/pom.xml +++ b/sabot/grammar/pom.xml @@ -22,7 +22,7 @@ dremio-sabot-parent com.dremio.sabot - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/sabot/grammar/src/main/codegen/data/Parser.tdd b/sabot/grammar/src/main/codegen/data/Parser.tdd index 2d1486d046..8c28092a77 100644 --- a/sabot/grammar/src/main/codegen/data/Parser.tdd +++ b/sabot/grammar/src/main/codegen/data/Parser.tdd @@ -161,7 +161,8 @@ "EXPIRE", "SNAPSHOTS", "OLDER_THAN", - "RETAIN_LAST" + "RETAIN_LAST", + 
"TBLPROPERTIES" ] nonReservedKeywords: [ @@ -580,6 +581,7 @@ "SqlLoadMaterialization()", "SqlCompactMaterialization()", "SqlExplainJson()", + "SqlExplainQueryDML()", "SqlAlterClearPlanCache()", "SqlGrant()", "SqlRevoke()", @@ -599,7 +601,8 @@ "SqlDescribeFunction()", "SqlShowFunctions()", "SqlOptimize()", - "SqlVacuum()" + "SqlVacuum()", + "SqlCreateFolder()" ] # List of methods for parsing custom literals. diff --git a/sabot/grammar/src/main/codegen/includes/alter.ftl b/sabot/grammar/src/main/codegen/includes/alter.ftl index a0adbb4b2d..4f19fd48aa 100644 --- a/sabot/grammar/src/main/codegen/includes/alter.ftl +++ b/sabot/grammar/src/main/codegen/includes/alter.ftl @@ -38,9 +38,16 @@ SqlNodeList partitionList = SqlNodeList.EMPTY; SqlPartitionTransform partitionTransform; SqlPolicy sqlPolicy; + SqlTableVersionSpec sqlTableVersionSpec = SqlTableVersionSpec.NOT_SPECIFIED ; + SqlNodeList tablePropertyNameList; + SqlNodeList tablePropertyValueList; } { + { + tablePropertyNameList = SqlNodeList.EMPTY; + tablePropertyValueList = SqlNodeList.EMPTY; + } { pos = getPos(); } ( @@ -73,7 +80,8 @@ | (
        | | | | ) tblName = CompoundIdentifier() - ( + [ sqlTableVersionSpec = ATVersionSpec() ] + ( sqlPolicy = Policy() { return new SqlAlterTableAddRowAccessPolicy(pos, tblName, sqlPolicy); } | @@ -88,6 +96,39 @@ ( | ) { return SqlAlterDatasetReflectionRouting(pos, tblName, SqlLiteral.createSymbol(SqlAlterDatasetReflectionRouting.RoutingType.TABLE, pos)); } ) | + ( + + + { + tablePropertyNameList = new SqlNodeList(getPos()); + tablePropertyValueList = new SqlNodeList(getPos()); + } + ParseTableProperty(tablePropertyNameList, tablePropertyValueList) + ( + + ParseTableProperty(tablePropertyNameList, tablePropertyValueList) + )* + + { + return new SqlAlterTableProperties(pos, tblName, + SqlLiteral.createSymbol(SqlAlterTableProperties.Mode.SET, pos), tablePropertyNameList, tablePropertyValueList); + } + ) + | + ( + + { + tablePropertyNameList = new SqlNodeList(getPos()); + } + + StringLiteralCommaList(tablePropertyNameList.getList()) + + { + return new SqlAlterTableProperties(pos, tblName, + SqlLiteral.createSymbol(SqlAlterTableProperties.Mode.UNSET, pos), tablePropertyNameList, tablePropertyValueList); + } + ) + | ( { return new SqlAlterTableAddColumns(pos, tblName, TableElementList()); } | @@ -121,7 +162,7 @@ ) | ( - {return SqlDropReflection(pos, tblName);} + {return SqlDropReflection(pos, tblName, sqlTableVersionSpec);} | partitionTransform = ParsePartitionTransform() { @@ -137,9 +178,9 @@ ) | ( - name = SimpleIdentifier() {return SqlCreateAggReflection(pos, tblName, name);} + name = SimpleIdentifier() {return SqlCreateAggReflection(pos, tblName, name, sqlTableVersionSpec);} | - name = SimpleIdentifier() {return SqlCreateRawReflection(pos, tblName, name);} + name = SimpleIdentifier() {return SqlCreateRawReflection(pos, tblName, name, sqlTableVersionSpec);} | name = SimpleIdentifier() { return SqlAddExternalReflection(pos, tblName, name);} ) @@ -196,7 +237,7 @@ ) { - return new SqlAccelToggle(pos, tblName, raw, SqlLiteral.createBoolean(true, SqlParserPos.ZERO)); + return new SqlAccelToggle(pos, tblName, raw, SqlLiteral.createBoolean(true, SqlParserPos.ZERO), sqlTableVersionSpec); } ) ) @@ -214,7 +255,7 @@ ) { - return new SqlAccelToggle(pos, tblName, raw, SqlLiteral.createBoolean(false, SqlParserPos.ZERO)); + return new SqlAccelToggle(pos, tblName, raw, SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlTableVersionSpec); } ) ) @@ -310,7 +351,7 @@ SqlNodeList KeyValuePair() : [ (STRIPED, CONSOLIDATED) PARTITION BY (field1, field2, ..) ] [ LOCALSORT BY (field1, field2, ..) ] */ -SqlNode SqlCreateAggReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier name) : +SqlNode SqlCreateAggReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier name, SqlTableVersionSpec sqlTableVersionSpec) : { SqlNodeList dimensionList; SqlNodeList measureList; @@ -361,7 +402,7 @@ SqlNode SqlCreateAggReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdent )? { return SqlCreateReflection.createAggregation(pos, tblName, dimensionList, measureList, distributionList, - partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name); + partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name, sqlTableVersionSpec); } } @@ -462,6 +503,7 @@ void MeasureList(List measures) : /** ALTER TABLE tblname + [AT (BRANCH | TAG | COMMIT | SNAPSHOT | TIMESTAMP (versionSpec)] ADD RAW REFLECTION name USING DISPLAY (field1, field2) @@ -469,7 +511,7 @@ void MeasureList(List measures) : [ (STRIPED, CONSOLIDATED) PARTITION BY (field1, field2, ..) 
] [ LOCALSORT BY (field1, field2, ..) ] */ -SqlNode SqlCreateRawReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier name) : +SqlNode SqlCreateRawReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier name, SqlTableVersionSpec sqlTableVersionSpec) : { SqlNodeList displayList; SqlNodeList distributionList; @@ -516,21 +558,21 @@ SqlNode SqlCreateRawReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdent )? { return SqlCreateReflection.createRaw(pos, tblName, displayList, distributionList, partitionList, sortList, - arrowCachingEnabled, partitionDistributionStrategy, name); + arrowCachingEnabled, partitionDistributionStrategy, name, sqlTableVersionSpec); } } /** * ALTER TABLE tblname DROP REFLECTION [string reflection id] */ - SqlNode SqlDropReflection(SqlParserPos pos, SqlIdentifier tblName) : + SqlNode SqlDropReflection(SqlParserPos pos, SqlIdentifier tblName, SqlTableVersionSpec sqlTableVersionSpec) : { SqlIdentifier reflectionId; } { { reflectionId = SimpleIdentifier(); } { - return new SqlDropReflection(pos, tblName, reflectionId); + return new SqlDropReflection(pos, tblName, reflectionId, sqlTableVersionSpec); } } diff --git a/sabot/grammar/src/main/codegen/includes/grant.ftl b/sabot/grammar/src/main/codegen/includes/grant.ftl index 298a13e986..02a3be9312 100644 --- a/sabot/grammar/src/main/codegen/includes/grant.ftl +++ b/sabot/grammar/src/main/codegen/includes/grant.ftl @@ -272,17 +272,11 @@ void Privilege(List list) : { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CREATE_PROJECT, getPos())); } | - { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CREATE_ARCTIC_CATALOG, getPos())); } + { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CREATE_CATALOG, getPos())); } | { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CONFIGURE_SECURITY, getPos())); } | - - { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CREATE_OAUTH_APPLICATION, getPos())); } - | - - { list.add(SqlLiteral.createSymbol(SqlGrantOnProjectEntities.Privilege.CREATE_EXTERNAL_TOKENS_PROVIDER, getPos())); } - | { list.add(SqlLiteral.createSymbol(SqlGrant.Privilege.INSERT, getPos())); } | diff --git a/sabot/grammar/src/main/codegen/includes/parserImpls.ftl b/sabot/grammar/src/main/codegen/includes/parserImpls.ftl index 7ccbbfcfeb..212459c65f 100644 --- a/sabot/grammar/src/main/codegen/includes/parserImpls.ftl +++ b/sabot/grammar/src/main/codegen/includes/parserImpls.ftl @@ -303,6 +303,7 @@ SqlNode SqlCreateOrReplace() : SqlPolicy policy = null; boolean nullable = true; SqlComplexDataTypeSpec scalarReturnTypeSpec = null; + SqlFunctionReturnType returnType; } { { pos = getPos(); } @@ -310,6 +311,10 @@ SqlNode SqlCreateOrReplace() : ( [ { ifNotExists = true; } ] + { + if (replace && ifNotExists) + throw new ParseException("'OR REPLACE' and 'IF NOT EXISTS' can not both be set."); + } name = CompoundIdentifier() fieldList = ParseFunctionFieldList() @@ -326,7 +331,15 @@ SqlNode SqlCreateOrReplace() : ) { expression = OrderedQueryOrExpr(ExprContext.ACCEPT_ALL); - return new SqlCreateFunction(pos, SqlLiteral.createBoolean(replace, SqlParserPos.ZERO), name, fieldList, scalarReturnTypeSpec, expression, SqlLiteral.createBoolean(ifNotExists, SqlParserPos.ZERO), tabularReturnType); + returnType = new SqlFunctionReturnType(pos, scalarReturnTypeSpec, tabularReturnType); + return new SqlCreateFunction( + pos, + SqlLiteral.createBoolean(replace, SqlParserPos.ZERO), + name, + fieldList, + expression, + 
SqlLiteral.createBoolean(ifNotExists, SqlParserPos.ZERO), + returnType); } | (|) @@ -417,20 +430,20 @@ SqlNodeList ParseFunctionReturnFieldList() : void FunctionReturnTypeCommaList(List list) : { - SqlNodeList pair; + SqlReturnField returnField; } { ( - pair = ReturnKeyValuePair() { list.add(pair); } + returnField = ReturnKeyValuePair() { list.add(returnField); } ( - pair = ReturnKeyValuePair() { - list.add(pair); + returnField = ReturnKeyValuePair() { + list.add(returnField); } )* )? } -SqlNodeList ReturnKeyValuePair() : +SqlReturnField ReturnKeyValuePair() : { SqlNodeList pair = new SqlNodeList(getPos()); SqlIdentifier name; @@ -438,13 +451,11 @@ SqlNodeList ReturnKeyValuePair() : boolean nullable; } { - name = SimpleIdentifier() { pair.add(name); } + name = SimpleIdentifier() type = DataType() - nullable = NullableOptDefaultTrue() - { - pair.add(new SqlComplexDataTypeSpec(type.withNullable(nullable))); - return pair; - } + nullable = NullableOptDefaultTrue() { + return new SqlReturnField(getPos(), name, new SqlComplexDataTypeSpec(type.withNullable(nullable))); + } } SqlNodeList ParseFunctionFieldList() : @@ -644,7 +655,7 @@ SqlPartitionTransform ParsePartitionTransform() : { SqlIdentifier id; SqlIdentifier columnName; - List argList = Lists.newArrayList(); + List argList = new ArrayList(); SqlNode arg; Span s; Token token; @@ -708,6 +719,17 @@ SqlNodeList ParsePartitionTransformList() : } } +/** Parses a table property */ +void ParseTableProperty(SqlNodeList tablePropertyNameList, SqlNodeList tablePropertyValueList) : +{ + SqlNode name; + SqlNode value; +} +{ + name = StringLiteral() { tablePropertyNameList.add(name); } + value = StringLiteral() { tablePropertyValueList.add(value); } +} + /** * COPY INTO .. * [( [, ... ])] @@ -724,7 +746,7 @@ SqlNode SqlCopyInto() : SqlParserPos pos; SqlIdentifier tblName; SqlNode location; - List fileList = Lists.newArrayList(); + List fileList = new ArrayList(); SqlNodeList files = SqlNodeList.EMPTY; SqlNode file; SqlNode regexPattern = null; @@ -840,6 +862,7 @@ SqlNode SqlCopyInto() : * [ (STRIPED, HASH, ROUNDROBIN) PARTITION BY (field1, field2, ..) ] * [ DISTRIBUTE BY (field1, field2, ..) ] * [ LOCALSORT BY (field1, field2, ..) ] + * [ TBLPROPERTIES ('property_name' = 'property_value', ...) ] * [ STORE AS (opt1 => val1, opt2 => val3, ...) ] * [ LOCATION location] * [ WITH SINGLE WRITER ] @@ -861,6 +884,8 @@ SqlNode SqlCreateTable() : SqlNode query; boolean ifNotExists = false; SqlPolicy policy = null; + SqlNodeList tablePropertyNameList; + SqlNodeList tablePropertyValueList; } { { @@ -873,6 +898,8 @@ SqlNode SqlCreateTable() : partitionDistributionStrategy = PartitionDistributionStrategy.UNSPECIFIED; fieldList = SqlNodeList.EMPTY; policy = null; + tablePropertyNameList = SqlNodeList.EMPTY; + tablePropertyValueList = SqlNodeList.EMPTY; } { pos = getPos(); }
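// Illustrative example (table and property names are placeholders, following the
// 'property_name' = 'property_value' shape in the javadoc above): the TBLPROPERTIES
// clause added to SqlCreateTable() in the next hunk accepts statements such as:
//
//   CREATE TABLE demo.t1 (id INT, val DOUBLE)
//     TBLPROPERTIES ('property_name' = 'property_value', 'other_name' = 'other_value')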
        @@ -902,6 +929,19 @@ SqlNode SqlCreateTable() : ( sortFieldList = ParseRequiredFieldList("Sort") )? + ( + + { + tablePropertyNameList = new SqlNodeList(getPos()); + tablePropertyValueList = new SqlNodeList(getPos()); + } + ParseTableProperty(tablePropertyNameList, tablePropertyValueList) + ( + + ParseTableProperty(tablePropertyNameList, tablePropertyValueList) + )* + + )? ( { location = StringLiteral(); } )? @@ -937,7 +977,7 @@ SqlNode SqlCreateTable() : { return new SqlCreateTable(pos, tblName, fieldList, ifNotExists, partitionDistributionStrategy, partitionTransformList, formatOptions, location, singleWriter, sortFieldList, - distributeFieldList, policy, query); + distributeFieldList, policy, query, tablePropertyNameList, tablePropertyValueList); } ) | @@ -945,7 +985,7 @@ SqlNode SqlCreateTable() : { return new SqlCreateEmptyTable(pos, tblName, fieldList, ifNotExists, partitionDistributionStrategy, partitionTransformList, formatOptions, location, singleWriter, sortFieldList, - distributeFieldList, policy); + distributeFieldList, policy, tablePropertyNameList, tablePropertyValueList); } ) ) @@ -1131,7 +1171,7 @@ SqlUpdate DremioWhenMatchedClause(SqlNode table, SqlIdentifier alias) : SqlInsert DremioWhenNotMatchedClause(SqlIdentifier table) : { final Span insertSpan, valuesSpan; - final List keywords = Lists.newArrayList(); + final List keywords = new ArrayList(); final SqlNodeList keywordList; SqlNodeList insertColumnList = null; SqlNode rowConstructor; @@ -1217,33 +1257,75 @@ SqlNode SqlRollbackTable() : } } +void VacuumExpireSnapshotOptions(SqlNodeList optionsList, SqlNodeList optionsValueList) : +{ + SqlNode exp; +} +{ + + [ + [] exp = StringLiteral() + { + optionsList.add(new SqlIdentifier("older_than", getPos())); + optionsValueList.add(exp); + } + ] + [ + [] exp = UnsignedNumericLiteral() + { + optionsList.add(new SqlIdentifier("retain_last", getPos())); + optionsValueList.add(exp); + } + ] +} + /** * Parses a VACUUM statement. * VACUUM TABLE - * EXPIRE SNAPSHOTS [older_than = ] [retain_last = ] + * EXPIRE SNAPSHOTS [older_than [=] ] [retain_last [=] ] */ SqlNode SqlVacuum() : { SqlParserPos pos; +} +{ + { pos = getPos(); } + ( + + { + return SqlVacuumCatalog(pos); + } + | +
        + { + return SqlVacuumTable(pos); + } + ) +} + +SqlNode SqlVacuumTable(SqlParserPos pos) : +{ SqlIdentifier tableName; SqlNodeList optionsList = new SqlNodeList(getPos()); SqlNodeList optionsValueList = new SqlNodeList(getPos()); SqlNode exp; } { - { pos = getPos(); } -
        { tableName = CompoundIdentifier(); } - - [ - { optionsList.add(new SqlIdentifier("older_than", getPos())); } - exp = StringLiteral() { optionsValueList.add(exp); } - ] - [ - { optionsList.add(new SqlIdentifier("retain_last", getPos())); } - exp = UnsignedNumericLiteral() { optionsValueList.add(exp); } - ] + tableName = CompoundIdentifier() + VacuumExpireSnapshotOptions(optionsList, optionsValueList) + { + return new SqlVacuumTable(pos, tableName, optionsList, optionsValueList); + } +} + +SqlNode SqlVacuumCatalog(SqlParserPos pos) : +{ + SqlIdentifier catalogSource; +} +{ + catalogSource = CompoundIdentifier() { - return new SqlVacuum(pos, tableName, optionsList, optionsValueList); + return new SqlVacuumCatalog(pos, catalogSource, new SqlNodeList(pos), new SqlNodeList(pos)); } } @@ -1417,13 +1499,14 @@ SqlNode SqlRefreshDataset() : [ REWRITE MANIFESTS ] |[ REWRITE DATA USING BIN_PACK| SORT [ ( option = [, ... ] ) ] - [ WHERE ] ] + [ FOR PARTITIONS ] ] */ SqlNode SqlOptimize() : { final SqlParserPos pos; final SqlIdentifier table; - SqlLiteral rewriteManifests = SqlLiteral.createBoolean(false, SqlParserPos.ZERO); + SqlLiteral rewriteManifests = SqlLiteral.createBoolean(true, SqlParserPos.ZERO); + SqlLiteral rewriteDataFiles = SqlLiteral.createBoolean(true, SqlParserPos.ZERO); CompactionType compactionType = CompactionType.BIN_PACK; SqlNode sortOrderId = null; SqlNode condition = null; @@ -1433,22 +1516,40 @@ SqlNode SqlOptimize() : { { pos = getPos(); }
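// Illustrative examples (identifiers are placeholders) of the two VACUUM forms split out
// above into SqlVacuumTable() and SqlVacuumCatalog(); note the '=' after each option
// keyword is optional per VacuumExpireSnapshotOptions():
//
//   VACUUM TABLE demo.t1 EXPIRE SNAPSHOTS older_than '2023-05-01 00:00:00.000' retain_last 2
//   VACUUM CATALOG myNessieSource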
        { table = CompoundIdentifier(); } - [ { rewriteManifests = SqlLiteral.createBoolean(false, pos); } ] - [ { compactionType = CompactionType.BIN_PACK; } ] [ - + { - optionsList = new SqlNodeList(getPos()); - optionsValueList = new SqlNodeList(getPos()); + rewriteManifests = SqlLiteral.createBoolean(true, pos); + rewriteDataFiles = SqlLiteral.createBoolean(false, pos); } - ParseOptimizeOptions(optionsList, optionsValueList) - ( - - ParseOptimizeOptions(optionsList, optionsValueList) - )* - + | + [ + + { + rewriteManifests = SqlLiteral.createBoolean(false, pos); + rewriteDataFiles = SqlLiteral.createBoolean(true, pos); + } + ] + [ + { compactionType = CompactionType.BIN_PACK; } + ] + [ { condition = Expression(ExprContext.ACCEPT_SUB_QUERY); } ] + [ + + { + optionsList = new SqlNodeList(getPos()); + optionsValueList = new SqlNodeList(getPos()); + } + ParseOptimizeOptions(optionsList, optionsValueList) + ( + + ParseOptimizeOptions(optionsList, optionsValueList) + )* + + ] + ] - { return new SqlOptimize(pos, table, rewriteManifests, compactionType, condition, optionsList, optionsValueList); } + { return new SqlOptimize(pos, table, rewriteManifests, rewriteDataFiles, compactionType, condition, optionsList, optionsValueList); } } /** @@ -1547,3 +1648,93 @@ SqlNode SqlOptimize() : return new SqlArrayTypeSpec(getPos(), new SqlComplexDataTypeSpec(fType.withNullable(nullable))); } } + + /** + * Parses an EXPLAIN PLAN statement. e.g. + * EXPLAIN PLAN FOR + * UPDATE targetTable + * SET = [, = ... ] + */ + SqlNode SqlExplainQueryDML() : + { + SqlNode stmt; + SqlExplainLevel detailLevel = SqlExplainLevel.EXPPLAN_ATTRIBUTES; + SqlExplain.Depth depth; + final SqlExplainFormat format; + } + { + + [ detailLevel = ExplainDetailLevel() ] + depth = ExplainDepth() + ( + { format = SqlExplainFormat.XML; } + | + { format = SqlExplainFormat.JSON; } + | + { format = SqlExplainFormat.TEXT; } + ) + stmt = SqlQueryOrTableDml() { + return new SqlExplain(getPos(), + stmt, + detailLevel.symbol(SqlParserPos.ZERO), + depth.symbol(SqlParserPos.ZERO), + format.symbol(SqlParserPos.ZERO), + nDynamicParams); + } + } + + /** Parses a query (SELECT or VALUES) + * or DML statement (INSERT, UPDATE, DELETE, MERGE). 
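 * For illustration (hypothetical table name), SqlExplainQueryDML() above accepts e.g.:
 *   EXPLAIN PLAN FOR UPDATE demo.t1 SET val = 0 WHERE id = 10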
*/ + SqlNode SqlQueryOrTableDml() : + { + SqlNode stmt; + } + { + ( + stmt = SqlInsertTable() + | + stmt = SqlDeleteFromTable() + | + stmt = SqlUpdateTable() + | + stmt = SqlMergeIntoTable() + | + stmt = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) + ) { return stmt; } + } + +/** + * CREATE FOLDER [ IF NOT EXISTS ] [source.]parentFolderName[.childFolder] + * [ AT ( REF[ERENCE) | BRANCH | TAG | COMMIT ) refValue ] + */ +SqlNode SqlCreateFolder() : +{ + SqlParserPos pos; + SqlLiteral ifNotExists = SqlLiteral.createBoolean(false, SqlParserPos.ZERO); + SqlIdentifier folderName; + ReferenceType refType = null; + SqlIdentifier refValue = null; +} +{ + { pos = getPos(); } + + [ { ifNotExists = SqlLiteral.createBoolean(true, SqlParserPos.ZERO); } ] + folderName = CompoundIdentifier() + [ + + ( + { refType = ReferenceType.REFERENCE; } + | + { refType = ReferenceType.REFERENCE; } + | + { refType = ReferenceType.BRANCH; } + | + { refType = ReferenceType.TAG; } + | + { refType = ReferenceType.COMMIT; } + ) + { refValue = SimpleIdentifier(); } + ] + { return new SqlCreateFolder(pos, ifNotExists, folderName, refType, refValue); } +} + diff --git a/sabot/grammar/src/main/codegen/includes/versionSupport.ftl b/sabot/grammar/src/main/codegen/includes/versionSupport.ftl index a13bfdbd30..701750943b 100644 --- a/sabot/grammar/src/main/codegen/includes/versionSupport.ftl +++ b/sabot/grammar/src/main/codegen/includes/versionSupport.ftl @@ -359,7 +359,7 @@ SqlNode TableWithVersionContext(SqlNode tableRef) : SqlCall call; SqlBasicCall collectionTableCall; SqlBasicCall functionCall; - List list = Lists.newArrayList(); + List list = new ArrayList(); List timeTravelFunctionName = TableMacroNames.TIME_TRAVEL; } { @@ -423,3 +423,48 @@ SqlNode TableWithVersionContext(SqlNode tableRef) : } } } +/** + [AT (BRANCH | TAG | COMMIT | SNAPSHOT | TIMESTAMP (versionSpec)] +*/ +SqlTableVersionSpec ATVersionSpec() : +{ + SqlParserPos pos; + SqlIdentifier simpleId; + TableVersionType tableVersionType = TableVersionType.NOT_SPECIFIED; + SqlNode specifier = SqlLiteral.createCharString("NOT_SPECIFIED",SqlParserPos.ZERO); +} +{ + { pos = getPos(); } + ( + specifier = StringLiteral() { tableVersionType = TableVersionType.SNAPSHOT_ID; } + | + simpleId = SimpleIdentifier() + { + tableVersionType = TableVersionType.BRANCH; + specifier = SqlLiteral.createCharString(simpleId.toString(), simpleId.getParserPosition()); + } + | + simpleId = SimpleIdentifier() + { + tableVersionType = TableVersionType.TAG; + specifier = SqlLiteral.createCharString(simpleId.toString(), simpleId.getParserPosition()); + } + | + simpleId = SimpleIdentifier() + { + tableVersionType = TableVersionType.COMMIT_HASH_ONLY; + specifier = SqlLiteral.createCharString(simpleId.toString(), simpleId.getParserPosition()); + } + | + ( | ) simpleId = SimpleIdentifier() + { + tableVersionType = TableVersionType.REFERENCE; + specifier = SqlLiteral.createCharString(simpleId.toString(), simpleId.getParserPosition()); + } + | + specifier = Expression(ExprContext.ACCEPT_NON_QUERY) { tableVersionType = TableVersionType.TIMESTAMP; } + ) + { + return new SqlTableVersionSpec(pos, tableVersionType, specifier); + } +} diff --git a/sabot/kernel/pom.xml b/sabot/kernel/pom.xml index 9312e20138..283a76263d 100644 --- a/sabot/kernel/pom.xml +++ b/sabot/kernel/pom.xml @@ -21,7 +21,7 @@ com.dremio.sabot dremio-sabot-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-sabot-kernel Sabot - Kernel @@ -114,6 +114,16 @@ com.hubspot.jackson jackson-datatype-protobuf + 
diff --git a/sabot/kernel/pom.xml b/sabot/kernel/pom.xml
index 9312e20138..283a76263d 100644
--- a/sabot/kernel/pom.xml
+++ b/sabot/kernel/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>com.dremio.sabot</groupId>
     <artifactId>dremio-sabot-parent</artifactId>
-    <version>24.0.0-202302100528110223-3a169b7c</version>
+    <version>24.1.0-202306130653310132-d30779f6</version>
   </parent>
   <artifactId>dremio-sabot-kernel</artifactId>
   <name>Sabot - Kernel</name>
@@ -114,6 +114,16 @@
     <dependency>
       <groupId>com.hubspot.jackson</groupId>
       <artifactId>jackson-datatype-protobuf</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.datatype</groupId>
+      <artifactId>jackson-datatype-guava</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.datatype</groupId>
+      <artifactId>jackson-datatype-jdk8</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>com.dremio.services</groupId>
@@ -121,9 +131,8 @@
     <dependency>
-      <groupId>io.opentelemetry</groupId>
-      <artifactId>opentelemetry-extension-annotations</artifactId>
-      <version>${opentelemetry.version}</version>
+      <groupId>io.opentelemetry.instrumentation</groupId>
+      <artifactId>opentelemetry-instrumentation-annotations</artifactId>
     </dependency>
@@ -177,12 +186,12 @@
     <dependency>
-      <groupId>org.projectnessie</groupId>
+      <groupId>org.projectnessie.nessie</groupId>
       <artifactId>nessie-client</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.projectnessie</groupId>
+      <groupId>org.projectnessie.nessie-integrations</groupId>
       <artifactId>iceberg-views</artifactId>
       <version>${nessie.version}</version>
     </dependency>
@@ -329,8 +338,8 @@
     <dependency>
       <groupId>org.apache.commons</groupId>
-      <artifactId>commons-math</artifactId>
-      <version>2.2</version>
+      <artifactId>commons-math3</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.thoughtworks.paranamer</groupId>
@@ -663,6 +672,10 @@
       <artifactId>commons-compiler</artifactId>
       <version>3.1.6</version>
     </dependency>
+    <dependency>
+      <groupId>com.github.ben-manes.caffeine</groupId>
+      <artifactId>caffeine</artifactId>
+    </dependency>
     <dependency>
       <groupId>joda-time</groupId>
       <artifactId>joda-time</artifactId>
@@ -757,18 +770,8 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.junit.jupiter</groupId>
-      <artifactId>junit-jupiter-api</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.junit.vintage</groupId>
-      <artifactId>junit-vintage-engine</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.projectnessie</groupId>
-      <artifactId>nessie-jaxrs-testextension</artifactId>
+      <groupId>org.projectnessie.nessie</groupId>
+      <artifactId>nessie-compatibility-common</artifactId>
       <scope>test</scope>
     </dependency>
@@ -794,7 +797,6 @@
     <dependency>
       <groupId>com.dremio.services</groupId>
       <artifactId>dremio-services-orphanage</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>com.dremio.services</groupId>
@@ -1184,6 +1186,17 @@
+      <plugin>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+              <goal>verify</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
@@ -1224,6 +1237,15 @@
       false
+    <testResources>
+      <testResource>
+        <directory>${project.basedir}/src/test/resources</directory>
+      </testResource>
+      <testResource>
+        <directory>${project.basedir}/src/test/resources-nessie</directory>
+        <filtering>true</filtering>
+      </testResource>
+    </testResources>
diff --git a/sabot/kernel/src/main/checkstyle/sabot-checkstyle-config.xml b/sabot/kernel/src/main/checkstyle/sabot-checkstyle-config.xml
index 31a9e58bdb..bc788f5a01 100644
--- a/sabot/kernel/src/main/checkstyle/sabot-checkstyle-config.xml
+++ b/sabot/kernel/src/main/checkstyle/sabot-checkstyle-config.xml
@@ -60,7 +60,9 @@
- + + +
@@ -95,9 +97,7 @@
- - - +
@@ -148,17 +148,14 @@
- - - + - - - + +
@@ -168,9 +165,7 @@
- - - +
@@ -181,6 +176,7 @@
+
@@ -237,9 +233,9 @@
- - - + + +
diff --git a/sabot/kernel/src/main/java/com/dremio/common/dialect/arp/transformer/NoOpTransformer.java b/sabot/kernel/src/main/java/com/dremio/common/dialect/arp/transformer/NoOpTransformer.java
index 93db3edf75..f9c736c1b6 100644
--- a/sabot/kernel/src/main/java/com/dremio/common/dialect/arp/transformer/NoOpTransformer.java
+++ b/sabot/kernel/src/main/java/com/dremio/common/dialect/arp/transformer/NoOpTransformer.java
@@ -30,6 +30,7 @@ private NoOpTransformer() {
   /**
    * The set of SqlOperators that match this CallTransformer.
*/ + @Override public Set getCompatibleOperators() { throw new UnsupportedOperationException("NoOpTransformer matches every operator"); } diff --git a/sabot/kernel/src/main/java/com/dremio/common/rel2sql/DremioRelToSqlConverter.java b/sabot/kernel/src/main/java/com/dremio/common/rel2sql/DremioRelToSqlConverter.java index c3319c20bf..86988454b0 100644 --- a/sabot/kernel/src/main/java/com/dremio/common/rel2sql/DremioRelToSqlConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/common/rel2sql/DremioRelToSqlConverter.java @@ -656,9 +656,8 @@ public Context selectListContext(SqlNodeList selectList) { return new DremioSelectListContext(selectList); } - public SqlNode createIntervalLiteral(final RexLiteral interval) { + public SqlNode createIntervalLiteral(final RexLiteral interval, final SqlTypeFamily family) { final BigDecimal intervalValue = interval.getValueAs(BigDecimal.class); - final SqlTypeFamily family = interval.getTypeName().getFamily(); final SqlIntervalQualifier qualifier = interval.getType().getIntervalQualifier(); if (intervalValue == null) { @@ -757,7 +756,7 @@ public SqlNode toSql(RexProgram program, RexNode rex) { case INTERVAL_YEAR_MONTH: case INTERVAL_DAY_TIME: - return createIntervalLiteral(literal); + return createIntervalLiteral(literal, literal.getTypeName().getFamily()); case BINARY: case ANY: @@ -765,13 +764,13 @@ public SqlNode toSql(RexProgram program, RexNode rex) { switch (literal.getTypeName()) { case NULL: return SqlLiteral.createNull(SqlImplementor.POS); - default: - // Fall through. + break; } + break; default: - // Fall through. + break; } break; @@ -888,6 +887,8 @@ public SqlNode toSql(RexProgram program, RexNode rex) { // would be added, but we only want them in the SELECT list so strip them if they show up in comparisons. this.stripNumericCastFromInputRef = true; break; + default: + break; } final CallTransformer transformer = getDialect().getCallTransformer(originalCall); @@ -1456,7 +1457,7 @@ private List getSelectNodes(String tableAlias, SqlSelect childNode, Set if (null == colAlias) { // Guard against possible null aliases being returned by generating a unique value. colAlias = SqlValidatorUtil.uniquify(colAlias, usedNames, SqlValidatorUtil.EXPR_SUGGESTER); - } else if (colAlias.equals("\"*\"")) { + } else if ("\"*\"".equals(colAlias)) { // If * is used as an alias, it ends up getting double quoted when it should not be. 
colAlias = "*"; } diff --git a/sabot/kernel/src/main/java/com/dremio/common/rel2sql/SqlImplementor.java b/sabot/kernel/src/main/java/com/dremio/common/rel2sql/SqlImplementor.java index 3af717fef8..db3191dd8b 100644 --- a/sabot/kernel/src/main/java/com/dremio/common/rel2sql/SqlImplementor.java +++ b/sabot/kernel/src/main/java/com/dremio/common/rel2sql/SqlImplementor.java @@ -290,6 +290,8 @@ private static RexNode stripCastFromString(RexNode node) { case CHAR: case VARCHAR: return call.clone(call.getType(), ImmutableList.of(o0b, o1)); + default: + break; } } } @@ -301,9 +303,14 @@ private static RexNode stripCastFromString(RexNode node) { case CHAR: case VARCHAR: return call.clone(call.getType(), ImmutableList.of(o0, o1b)); + default: + break; } } } + break; + default: + break; } return node; } @@ -523,11 +530,14 @@ public SqlNode toSql(RexProgram program, RexNode rex) { switch (literal.getTypeName()) { case NULL: return SqlLiteral.createNull(POS); - // fall through + default: + break; } + break; default: - throw new AssertionError(literal + ": " + literal.getTypeName()); + break; } + throw new AssertionError(literal + ": " + literal.getTypeName()); case CASE: final RexCall caseCall = (RexCall) rex; @@ -607,6 +617,9 @@ public SqlNode toSql(RexProgram program, RexNode rex) { switch (op.getKind()) { case SUM0: op = SqlStdOperatorTable.SUM; + break; + default: + break; } final List nodeList = toSql(program, call.getOperands()); switch (call.getKind()) { @@ -617,6 +630,9 @@ public SqlNode toSql(RexProgram program, RexNode rex) { } else { nodeList.add(dialect.getCastSpec(call.getType())); } + break; + default: + break; } if (op instanceof SqlBinaryOperator && nodeList.size() > 2) { // In RexNode trees, OR and AND have any number of children; @@ -755,6 +771,9 @@ protected SqlNode toSql(RexProgram program, RexFieldCollation rfc) { case DESCENDING: case STRICTLY_DESCENDING: node = SqlStdOperatorTable.DESC.createCall(POS, node); + break; + default: + break; } if (rfc.getNullDirection() != dialect.defaultNullDirection(rfc.getDirection())) { @@ -765,6 +784,8 @@ protected SqlNode toSql(RexProgram program, RexFieldCollation rfc) { case LAST: node = SqlStdOperatorTable.NULLS_LAST.createCall(POS, node); break; + default: + break; } } return node; @@ -853,6 +874,9 @@ public SqlNode toSql(RelFieldCollation collation) { case DESCENDING: case STRICTLY_DESCENDING: node = SqlStdOperatorTable.DESC.createCall(POS, node); + break; + default: + break; } if (collation.nullDirection != dialect.defaultNullDirection(collation.direction)) { switch (collation.nullDirection) { @@ -862,6 +886,8 @@ public SqlNode toSql(RelFieldCollation collation) { case LAST: node = SqlStdOperatorTable.NULLS_LAST.createCall(POS, node); break; + default: + break; } } return node; @@ -1059,8 +1085,9 @@ public SqlNode field(int ordinal) { switch (selectItem.getKind()) { case AS: return ((SqlCall) selectItem).operand(0); + default: + return selectItem; } - return selectItem; } }; } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/ExecConstants.java b/sabot/kernel/src/main/java/com/dremio/exec/ExecConstants.java index ac79324b36..63d0c096f2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/ExecConstants.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/ExecConstants.java @@ -193,7 +193,10 @@ public interface ExecConstants { LongValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new LongValidator(PARQUET_DICT_PAGE_SIZE, 1024*1024); String PARQUET_WRITER_COMPRESSION_TYPE = "store.parquet.compression"; EnumeratedStringValidator 
PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new EnumeratedStringValidator(
-      PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "snappy", "gzip", "none");
+      PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "snappy", "gzip", "zstd", "none");
+  String PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL = "store.parquet.compression.zstd.level";
+  RangeLongValidator PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL_VALIDATOR = new RangeLongValidator(
+      PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL, Integer.MIN_VALUE, Integer.MAX_VALUE, -3);
 
   String PARQUET_MAX_FOOTER_LEN = "store.parquet.max_footer_length";
   LongValidator PARQUET_MAX_FOOTER_LEN_VALIDATOR = new LongValidator(PARQUET_MAX_FOOTER_LEN, 16*1024*1024);
@@ -412,6 +415,13 @@
    * Enabling this support option enables the memory arbiter
    */
   BooleanValidator MEMORY_ARBITER_ENABLED = new BooleanValidator("exec.memory.arbiter.enabled", false);
+  // max memory that can be granted for a run - default: 40MB, max: 100MB
+  LongValidator MAX_MEMORY_GRANT_SIZE = new PositiveLongValidator("exec.memory.arbiter.max_memory_grant_bytes", 100 * (1 << 20), 40 * (1 << 20));
+  // percent memory to be set aside for other allocators like RPC
+  DoubleValidator PCT_MEMORY_SET_ASIDE = new RangeDoubleValidator("exec.memory.arbiter.pct_memory_set_aside", 0.0, 100.0, 10.0);
+
+  // if true, dynamically track and consider only the previous N allocations when sizing the next grant; otherwise consider all previous allocations
+  BooleanValidator DYNAMICALLY_TRACK_ALLOCATIONS = new BooleanValidator("exec.memory.arbiter.dynamically_track_allocations", true);
 
   /**
    * This factor determines how much larger the load for a given slice can be than the expected size in order to maintain locality
@@ -531,10 +541,12 @@
     new BooleanValidator("dremio.iceberg.merge_on_read_scan_with_equality_delete.enabled", false);
   BooleanValidator ENABLE_ICEBERG_DML_USE_HASH_DISTRIBUTION_FOR_WRITES = new BooleanValidator("dremio.iceberg.dml.use_hash_distribution_for_writes.enabled", true);
   BooleanValidator ENABLE_ICEBERG_DML_WITH_NATIVE_ROW_COLUMN_POLICIES = new BooleanValidator("dremio.iceberg.dml.native_row_column_policies.enabled", false);
+  BooleanValidator ENABLE_ICEBERG_TABLE_PROPERTIES = new BooleanValidator("dremio.iceberg.table.properties.enabled", false);
+
-  BooleanValidator ENABLE_ICEBERG_OPTIMIZE = new BooleanValidator("dremio.iceberg.optimize.enabled", true);
   BooleanValidator ENABLE_ICEBERG_ROLLBACK = new BooleanValidator("dremio.iceberg.rollback.enabled", true);
-  BooleanValidator ENABLE_ICEBERG_VACUUM = new BooleanValidator("dremio.iceberg.vacuum.enabled", false);
+  BooleanValidator ENABLE_ICEBERG_VACUUM = new BooleanValidator("dremio.iceberg.vacuum.enabled", true);
+  BooleanValidator ENABLE_ICEBERG_VACUUM_CATALOG = new BooleanValidator("dremio.iceberg.vacuum.catalog.enabled", false);
+
   BooleanValidator ENABLE_HIVE_DATABASE_LOCATION = new BooleanValidator("dremio.hive.database.location", true);
 
   BooleanValidator ENABLE_QUERY_LABEL = new BooleanValidator("dremio.query.label.enabled", true);
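The Parquet options above add zstd as a write codec with a tunable level. A minimal sketch of flipping them from a JDBC client, assuming a reachable coordinator; the URL, credentials, and level value are illustrative, not taken from this diff:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public final class ZstdOptionSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection(
             "jdbc:dremio:direct=localhost:31010", "admin", "secret");
         Statement stmt = conn.createStatement()) {
      // Select the newly allowed codec for Parquet writes.
      stmt.execute("ALTER SYSTEM SET \"store.parquet.compression\" = 'zstd'");
      // Tune the level; the validator above defaults it to -3.
      stmt.execute("ALTER SYSTEM SET \"store.parquet.compression.zstd.level\" = 3");
    }
  }
}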
@@ -544,12 +556,12 @@
   LongValidator OPTIMIZE_MINIMUM_INPUT_FILES = new LongValidator("dremio.iceberg.optimize.min_input_files", 5);
 
   BooleanValidator ENABLE_USE_VERSION_SYNTAX = new TypeValidators.BooleanValidator("dremio.sql.use_version.enabled", true);
-  BooleanValidator VERSIONED_VIEW_ENABLED = new TypeValidators.BooleanValidator("plugins.dataplane.view", false);
-  BooleanValidator VERSIONED_INFOSCHEMA_ENABLED = new TypeValidators.BooleanValidator("arctic.infoschema.enabled", false);
+  BooleanValidator VERSIONED_VIEW_ENABLED = new TypeValidators.BooleanValidator("plugins.dataplane.view", true);
+  BooleanValidator VERSIONED_INFOSCHEMA_ENABLED = new TypeValidators.BooleanValidator("arctic.infoschema.enabled", true);
   BooleanValidator ENABLE_AZURE_SOURCE = new TypeValidators.BooleanValidator("dremio.enable_azure_source", false);
 
   // warning threshold for running time of a task
-  PositiveLongValidator SLICING_WARN_MAX_RUNTIME_MS = new PositiveLongValidator("dremio.sliced.warn_max_runtime", Long.MAX_VALUE, 120000);
+  PositiveLongValidator SLICING_WARN_MAX_RUNTIME_MS = new PositiveLongValidator("dremio.sliced.warn_max_runtime", Long.MAX_VALUE, 20000);
   BooleanValidator SLICING_THREAD_MONITOR = new BooleanValidator("dremio.sliced.enable_monitor", true);
   BooleanValidator SLICING_OFFLOAD_ENQUEUE = new BooleanValidator("dremio.sliced.offload_enqueue", true);
   TypeValidators.EnumValidator SLICING_OBSERVER_TYPE =
@@ -612,6 +624,8 @@
   BooleanValidator ENABLE_DELTALAKE_HIVE_SUPPORT = new BooleanValidator("store.deltalake.hive_support.enabled", true);
 
+  BooleanValidator ENABLE_DELTALAKE_SPARK_SUPPORT = new BooleanValidator("store.deltalake.spark_support.enabled", true);
+
   /**
    * Controls the 'compression' factor for the TDigest algorithm.
    */
@@ -698,6 +712,28 @@
   DoubleValidator EXPR_COMPLEXITY_NO_CACHE_THRESHOLD = new DoubleValidator("exec.expression.complexity.no_cache.threshold", 100.00);
 
   BooleanValidator ENABLE_MAP_DATA_TYPE = new BooleanValidator("dremio.data_types.map.enabled", true);
+  BooleanValidator ENABLE_COMPLEX_HIVE_DATA_TYPE = new BooleanValidator("dremio.data_types.hive_complex.enabled", true);
 
   BooleanValidator EARLY_ACK_ENABLED = new BooleanValidator("dremio.jdbc_client.early_ack.enabled", true);
+
+  BooleanValidator RESULTS_CLEANUP_SERVICE_ENABLED = new BooleanValidator("dremio.results_cleanup.enabled", false);
+
+  BooleanValidator CATALOG_JOB_COUNT_ENABLED = new BooleanValidator("catalog.job_count.enabled", true);
+
+  // Option to specify the size to truncate large SQL queries; setting value to 0 disables truncation
+  LongValidator SQL_TEXT_TRUNCATE_LENGTH = new LongValidator("jobs.sql.truncate.length", 32000);
+
+  BooleanValidator NESSIE_SOURCE_API = new TypeValidators.BooleanValidator("nessie.source.api", true);
+
+  BooleanValidator PARQUET_READER_VECTORIZE_FOR_V2_ENCODINGS = new BooleanValidator("vectorized.read.parquet.v2.encodings", false);
+
+  /**
+   * Controls the WARN logging threshold for {@link com.dremio.exec.store.dfs.LoggedFileSystem} calls.
+   */
+  RangeLongValidator FS_LOGGER_WARN_THRESHOLD_MS = new RangeLongValidator("filesystem.logger.warn.io_threshold_ms", 0, Long.MAX_VALUE, 5000);
+
+  /**
+   * Controls the DEBUG logging threshold for {@link com.dremio.exec.store.dfs.LoggedFileSystem} calls.
+   */
+  RangeLongValidator FS_LOGGER_DEBUG_THRESHOLD_MS = new RangeLongValidator("filesystem.logger.debug.io_threshold_ms", 0, Long.MAX_VALUE, 50);
 }
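The two threshold options close out ExecConstants. A self-contained sketch of how such two-tier duration thresholds are typically applied; LoggedFileSystem itself is not part of this diff, so the wiring below is an assumption, not Dremio's actual implementation:

import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class TimedIoSketch {
  private static final Logger LOGGER = LoggerFactory.getLogger(TimedIoSketch.class);
  private static final long WARN_MS = 5000; // filesystem.logger.warn.io_threshold_ms default
  private static final long DEBUG_MS = 50;  // filesystem.logger.debug.io_threshold_ms default

  // Times a hypothetical I/O call and routes the record to WARN or DEBUG by duration.
  static long timed(String op, LongSupplier io) {
    long start = System.nanoTime();
    long result = io.getAsLong();
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
    if (elapsedMs >= WARN_MS) {
      LOGGER.warn("{} took {} ms", op, elapsedMs);
    } else if (elapsedMs >= DEBUG_MS) {
      LOGGER.debug("{} took {} ms", op, elapsedMs);
    }
    return result;
  }
}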
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/cache/VectorAccessibleSerializable.java b/sabot/kernel/src/main/java/com/dremio/exec/cache/VectorAccessibleSerializable.java
index ab43d1dec9..024ed0e9e8 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/cache/VectorAccessibleSerializable.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/cache/VectorAccessibleSerializable.java
@@ -327,8 +327,7 @@ public void writeToStream(OutputStream output) throws IOException {
       if (useCodec) {
         /* if we are serializing the spilled data, compress the ArrowBufs */
         writeCompressedBuf(buf, output);
-      }
-      else {
+      } else {
         writeBuf(buf, output);
       }
     }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/SqlNodes.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/SqlNodes.java
index 5810c2d8f1..d1deed05a1 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/calcite/SqlNodes.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/SqlNodes.java
@@ -108,7 +108,9 @@ private String label(SqlKind kind, int i) {
       case 9:
         return "fetch";
       default:
+        break;
       }
+      break;
     case ORDER_BY:
       switch(i) {
       case 0:
@@ -120,8 +122,11 @@
       case 3:
         return "fetch";
       default:
+        break;
       }
+      break;
     default:
+      break;
     }
     return String.valueOf(i);
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/FlattenCrel.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/FlattenCrel.java
index f1b2501f39..673fa54e33 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/FlattenCrel.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/FlattenCrel.java
@@ -42,6 +42,11 @@ public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
     return new FlattenCrel(getCluster(), traitSet, sole(inputs), toFlatten, numProjectsPushed);
   }
 
+  @Override
+  public FlattenRelBase copy(List<RelNode> inputs, List<RexInputRef> toFlatten) {
+    return new FlattenCrel(getCluster(), getTraitSet(), sole(inputs), toFlatten, numProjectsPushed);
+  }
+
   @Override
   public RelNode copyWith(CopyWithCluster copier) {
     final RelNode input = getInput().accept(copier);
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/JdbcCrel.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/JdbcCrel.java
index 515720e473..3b5b534fdf 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/JdbcCrel.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/JdbcCrel.java
@@ -82,6 +82,7 @@ public StoragePluginId getPluginId() {
     return this.pluginId;
   }
 
+  @Override
   public RelNode accept(RelShuttle shuttle) {
     return shuttle.visit(this);
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/SampleCrel.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/SampleCrel.java
index 840ff86310..3c01a3fcc4 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/SampleCrel.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/SampleCrel.java
@@ -59,6 +59,7 @@ public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
     return new SampleCrel(this.getCluster(), traitSet, sole(inputs));
   }
 
+  @Override
   public RelNode accept(RelShuttle shuttle) {
     return shuttle.visit(this);
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/ScanCrel.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/ScanCrel.java
index
8b50d579dc..0298223e07 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/ScanCrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/ScanCrel.java @@ -56,7 +56,7 @@ public ScanCrel( boolean isSubstitutable) { super(cluster, traitSet, new RelOptNamespaceTable(metadata, cluster), pluginId, metadata, projectedColumns, observedRowcountAdjustment, hints); this.isDirectNamespaceDescendent = isDirectNamespaceDescendent; - this.isSubstitutable = isSubstitutable; + this.isSubstitutable = isSubstitutable; // TODO: Support reflections on Iceberg time travel } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/VacuumTableCrel.java b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/VacuumTableCrel.java new file mode 100644 index 0000000000..579acd2f56 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/calcite/logical/VacuumTableCrel.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.calcite.logical; + +import java.util.List; + +import org.apache.calcite.plan.Convention; +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; + +import com.dremio.exec.catalog.VacuumOptions; +import com.dremio.exec.planner.common.VacuumTableRelBase; +import com.dremio.exec.planner.logical.CreateTableEntry; + +/** + * Crel for 'VACUUM' query. 
+ */
+public class VacuumTableCrel extends VacuumTableRelBase {
+
+  public VacuumTableCrel(RelOptCluster cluster,
+                         RelTraitSet traitSet,
+                         RelNode input,
+                         RelOptTable table,
+                         CreateTableEntry createTableEntry,
+                         VacuumOptions vacuumOptions) {
+    super(Convention.NONE, cluster, traitSet, input, table, createTableEntry, vacuumOptions);
+  }
+
+  @Override
+  public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
+    return new VacuumTableCrel(getCluster(), traitSet, sole(inputs), getTable(), getCreateTableEntry(), getVacuumOptions());
+  }
+
+  public VacuumTableCrel createWith(CreateTableEntry createTableEntry) {
+    return new VacuumTableCrel(getCluster(), getTraitSet(), getInput(), getTable(), createTableEntry, getVacuumOptions());
+  }
+
+  public RelNode createWith(VacuumOptions vacuumOptions) {
+    return new VacuumTableCrel(getCluster(), traitSet, getInput(), getTable(), getCreateTableEntry(), vacuumOptions);
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/AbstractSplitsPointer.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/AbstractSplitsPointer.java
index 55cf13b961..f0a562eb7d 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/AbstractSplitsPointer.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/AbstractSplitsPointer.java
@@ -42,6 +42,7 @@ protected AbstractSplitsPointer() {
     splitsCount = -1;
   }
 
+  @Override
   public abstract long getSplitVersion();
 
   @Override
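The CachingCatalog rework that follows scopes every cache entry by catalog identity, path, and version context: the same table name resolved for a different user or version is a distinct entry, while path-based invalidation drops every variant at once. A standalone sketch of that keying idea, using simplified stand-in types rather than the Dremio classes:

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

final class CacheKeySketch {
  static final class Key {
    final String identity;
    final List<String> path;
    final String version;

    Key(String identity, List<String> path, String version) {
      this.identity = identity;
      this.path = path;
      this.version = version;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Key)) {
        return false;
      }
      Key k = (Key) o;
      return Objects.equals(identity, k.identity)
          && Objects.equals(path, k.path)
          && Objects.equals(version, k.version);
    }

    @Override
    public int hashCode() {
      return Objects.hash(identity, path, version);
    }
  }

  static final Map<Key, String> CACHE = new ConcurrentHashMap<>();

  // Mirrors removeByNamespaceKey below: drop a path for all identities/versions.
  static void invalidate(List<String> path) {
    CACHE.entrySet().removeIf(e -> e.getKey().path.equals(path));
  }
}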
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CachingCatalog.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CachingCatalog.java
index 56f0793dc1..75aeeee49b 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CachingCatalog.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CachingCatalog.java
@@ -16,87 +16,118 @@ package com.dremio.exec.catalog;
 
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Comparator;
+import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Supplier;
+import java.util.stream.Collectors;
 
+import org.apache.calcite.schema.Function;
+import org.apache.calcite.util.Pair;
+
+import com.dremio.common.utils.PathUtils;
 import com.dremio.exec.dotfile.View;
 import com.dremio.exec.physical.base.ViewOptions;
+import com.dremio.exec.planner.logical.ViewTable;
+import com.dremio.exec.store.NamespaceTable;
+import com.dremio.exec.tablefunctions.TimeTravelTableMacro;
 import com.dremio.service.namespace.NamespaceAttribute;
 import com.dremio.service.namespace.NamespaceKey;
 import com.dremio.service.namespace.dataset.proto.DatasetConfig;
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 
+import io.opentelemetry.instrumentation.annotations.WithSpan;
+
 /**
  * {@link Catalog} implementation that caches table requests.
- * One case not handled yet is {@link SimpleCatalog#getFunctions(NamespaceKey, FunctionType)}.
  */
 public class CachingCatalog extends DelegatingCatalog {
 
+  private final Map<String, DremioTable> tablesByDatasetId;
+
   // The same table may appear under multiple keys when a context is set in the query or view definition
-  private final Map<NamespaceKey, DremioTable> tablesByNamespaceKey;
+  private final Map<TableCacheKey, DremioTable> tablesByNamespaceKey;
 
   // Time spent accessing a particular table or view by canonical key (in String form).
-  private final Map<String, Long> canonicalKeyAccessTime;
+  private final Map<TableCacheKey, Long> canonicalKeyAccessTime;
   // Number of resolutions for a particular canonical key
   // For example, a query may reference a table with canonical key source.table but if a context ctx is set in the query
   // or view definition then we need to be able to look up the table as both ctx.source.table and source.table.
-  private final Map<String, Long> canonicalKeyResolutionCount;
-  private final Map<String, DremioTable> canonicalKeyTables;
+  private final Map<TableCacheKey, Long> canonicalKeyResolutionCount;
+  private final Map<TableCacheKey, DremioTable> canonicalKeyTables;
+  private final Map<FunctionCacheKey, Collection<Function>> functionsCache;
 
-  CachingCatalog(Catalog delegate) {
-    this(delegate, new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>());
+  @VisibleForTesting
+  public CachingCatalog(Catalog delegate) {
+    this(delegate, new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>());
   }
 
-  private CachingCatalog(Catalog delegate, Map<NamespaceKey, DremioTable> tablesByNamespaceKey,
-                         Map<String, Long> canonicalKeyAccessTime,
-                         Map<String, Long> canonicalKeyResolutionCount, Map<String, DremioTable> canonicalKeyTables) {
+  private CachingCatalog(Catalog delegate, Map<TableCacheKey, DremioTable> tablesByNamespaceKey,
+                         Map<String, DremioTable> tablesByDatasetId,
+                         Map<TableCacheKey, Long> canonicalKeyAccessTime,
+                         Map<TableCacheKey, Long> canonicalKeyResolutionCount,
+                         Map<TableCacheKey, DremioTable> canonicalKeyTables,
+                         Map<FunctionCacheKey, Collection<Function>> functionsCache) {
     super(delegate);
     this.tablesByNamespaceKey = tablesByNamespaceKey;
+    this.tablesByDatasetId = tablesByDatasetId;
     this.canonicalKeyAccessTime = canonicalKeyAccessTime;
     this.canonicalKeyResolutionCount = canonicalKeyResolutionCount;
    this.canonicalKeyTables = canonicalKeyTables;
+    this.functionsCache = functionsCache;
+  }
+
+  private CachingCatalog(Catalog delegate, CachingCatalog cache) {
+    this(delegate, cache.tablesByNamespaceKey, cache.tablesByDatasetId, cache.canonicalKeyAccessTime, cache.canonicalKeyResolutionCount, cache.canonicalKeyTables, cache.functionsCache);
   }
 
-  private DremioTable putTable(NamespaceKey requestKey, DremioTable table) {
+  private DremioTable putTable(TableCacheKey requestKey, DremioTable table) {
     if (table == null) {
       return null;
     }
     final DatasetConfig dataset = table.getDatasetConfig();
     if (dataset != null) {
       final NamespaceKey canonicalizedKey = new NamespaceKey(dataset.getFullPathList());
-      tablesByNamespaceKey.put(canonicalizedKey, table);
+      tablesByNamespaceKey.put(TableCacheKey.of(requestKey.getCatalogIdentity(), canonicalizedKey.getPathComponents(), requestKey.getTableVersionContext()), table);
     }
     tablesByNamespaceKey.put(requestKey, table);
     return table;
   }
 
-  private boolean containsNamespaceKey(NamespaceKey key) {
+  private boolean containsNamespaceKey(TableCacheKey key) {
     return tablesByNamespaceKey.containsKey(key);
   }
 
-  private DremioTable getByNamespaceKey(NamespaceKey key) {
+  private DremioTable getByNamespaceKey(TableCacheKey key) {
     return tablesByNamespaceKey.get(key);
   }
 
-  private DremioTable removeByNamespaceKey(NamespaceKey key) {
-    return tablesByNamespaceKey.remove(key);
+  private void removeByNamespaceKey(final NamespaceKey key) {
+    // remove table from cache for all users
+    tablesByNamespaceKey.entrySet().removeIf(entry -> entry.getKey().getKeyComponents().equals(key.getPathComponents()));
+    invalidateNamespaceCache(key);
   }
 
   @Override
   public DremioTable getTableNoResolve(NamespaceKey key) {
-    if (!containsNamespaceKey(key)) {
-      return putTable(key, timedGet(() -> super.getTableNoResolve(key)));
+    final TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), key.getPathComponents(), null);
+    if
(!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTableNoResolve(key))); } - return getByNamespaceKey(key); + return getByNamespaceKey(tableKey); } @Override public DremioTable getTableNoColumnCount(NamespaceKey key) { - if (!containsNamespaceKey(key)) { - return putTable(key, timedGet(() -> super.getTableNoColumnCount(key))); + TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), key.getPathComponents(), null); + if (!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTableNoColumnCount(key))); } - return getByNamespaceKey(key); + return getByNamespaceKey(tableKey); } @Override @@ -112,15 +143,17 @@ public void dropView(final NamespaceKey key, ViewOptions viewOptions) throws IOE } @Override + @WithSpan public DremioTable getTable(NamespaceKey key) { NamespaceKey resolved = resolveToDefault(key); if (resolved == null) { resolved = key; } - if (!containsNamespaceKey(resolved)) { - return putTable(resolved, timedGet(() -> super.getTable(key))); + TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), resolved.getPathComponents(), null); + if (!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTable(key))); } - return getByNamespaceKey(resolved); + return getByNamespaceKey(tableKey); } @Override @@ -129,10 +162,11 @@ public DremioTable getTableForQuery(NamespaceKey key) { if (resolved == null) { resolved = key; } - if (!containsNamespaceKey(resolved)) { - return putTable(resolved, timedGet(() -> super.getTableForQuery(key))); + TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), resolved.getPathComponents(), null); + if (!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTableForQuery(key))); } - return getByNamespaceKey(resolved); + return getByNamespaceKey(tableKey); } @Override @@ -142,12 +176,12 @@ public Iterable getAllRequestedTables() { @Override public Catalog resolveCatalog(CatalogIdentity subject) { - return new CachingCatalog(delegate.resolveCatalog(subject), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(subject), this); } @Override public Catalog resolveCatalog(Map sourceVersionMapping) { - return new CachingCatalog(delegate.resolveCatalog(sourceVersionMapping), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(sourceVersionMapping), this); } @Override @@ -162,28 +196,28 @@ public Catalog resolveCatalogResetContext(String sourceName, VersionContext vers @Override public Catalog resolveCatalog(NamespaceKey newDefaultSchema) { - return new CachingCatalog(delegate.resolveCatalog(newDefaultSchema), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(newDefaultSchema), this); } @Override public Catalog resolveCatalog(CatalogIdentity subject, NamespaceKey newDefaultSchema) { - return new CachingCatalog(delegate.resolveCatalog(subject, newDefaultSchema), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(subject, newDefaultSchema), this); } @Override public Catalog resolveCatalog(CatalogIdentity subject, NamespaceKey newDefaultSchema, boolean checkValidity) { - return new CachingCatalog(delegate.resolveCatalog(subject, 
newDefaultSchema, checkValidity), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(subject, newDefaultSchema, checkValidity), this); } @Override public Catalog resolveCatalog(boolean checkValidity) { - return new CachingCatalog(delegate.resolveCatalog(checkValidity), tablesByNamespaceKey, canonicalKeyAccessTime, canonicalKeyResolutionCount, canonicalKeyTables); + return new CachingCatalog(delegate.resolveCatalog(checkValidity), this); } @Override public Catalog visit(java.util.function.Function catalogRewrite) { Catalog newDelegate = delegate.visit(catalogRewrite); - if(newDelegate == delegate) { + if (newDelegate == delegate) { return catalogRewrite.apply(this); } else { return catalogRewrite.apply(new CachingCatalog(newDelegate)); @@ -195,16 +229,16 @@ private DremioTable timedGet(Supplier s) { DremioTable table = s.get(); long end = System.currentTimeMillis(); if (table != null) { - String path = table.getPath().getSchemaPath(); - canonicalKeyResolutionCount.compute(path, (k, v) -> (v == null) ? 1 : v + 1); - canonicalKeyAccessTime.compute(path, (k, v) -> (v == null) ? end-start : v + (end-start)); - canonicalKeyTables.put(path, table); + TableCacheKey key = TableCacheKey.of(getCatalogIdentity(), table.getPath().getPathComponents(), getTableVersionContext(table)); + canonicalKeyResolutionCount.compute(key, (k, v) -> (v == null) ? 1 : v + 1); + canonicalKeyAccessTime.compute(key, (k, v) -> (v == null) ? end - start : v + (end - start)); + canonicalKeyTables.put(key, table); } return table; } /** - * Logs time spent accessing the catalog for any table or view. + * Logs time spent accessing the catalog for any table or view, and version context (if available). */ @Override public void addCatalogStats() { @@ -212,11 +246,174 @@ public void addCatalogStats() { long totalResolutions = canonicalKeyResolutionCount.values().stream().mapToLong(Long::longValue).sum(); getMetadataStatsCollector().addDatasetStat(String.format("Catalog Access for %d Total Dataset(s)", canonicalKeyResolutionCount.keySet().size()), String.format("using %d resolved key(s)", totalResolutions), totalTime); - canonicalKeyAccessTime.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getValue).reversed()). + canonicalKeyAccessTime.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getValue).reversed()). 
forEach(entry -> getMetadataStatsCollector().addDatasetStat( - String.format("Catalog Access for %s (%s)", entry.getKey(), canonicalKeyTables.get(entry.getKey()).getDatasetConfig().getType()), + String.format("Catalog Access for %s%s(%s)", PathUtils.constructFullPath(entry.getKey().getKeyComponents()), + getTableVersionContext(entry), + canonicalKeyTables.get(entry.getKey()).getDatasetConfig().getType()), String.format("using %d resolved key(s)", canonicalKeyResolutionCount.get(entry.getKey())), canonicalKeyAccessTime.get(entry.getKey()))); } + private Collection getAndMapFunctions(NamespaceKey path, FunctionType functionType) { + return super.getFunctions(path, functionType).stream() + .map(function -> { + if (function instanceof TimeTravelTableMacro) { + return new TimeTravelTableMacro( + (tablePath, versionContext) -> { + return this.getTableSnapshotForQuery(new NamespaceKey(tablePath), versionContext); + }); + } + return function; + }) + .collect(Collectors.toList()); + } + + @Override + public Collection getFunctions(NamespaceKey path, FunctionType functionType) { + // NamespaceKey is normalized by default + final FunctionCacheKey key = FunctionCacheKey.of(getCatalogIdentity(), path.getSchemaPath().toLowerCase(), functionType); + if (!functionsCache.containsKey(key)) { + Collection f = getAndMapFunctions(path, functionType); + functionsCache.put(key, f); + return f; + } + return functionsCache.get(key); + } + + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), key.getPathComponents(), context); + if (!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTableSnapshotForQuery(key, context))); + } + return getByNamespaceKey(tableKey); + } + + @Override + public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + TableCacheKey tableKey = TableCacheKey.of(getCatalogIdentity(), key.getPathComponents(), context); + if (!containsNamespaceKey(tableKey)) { + return putTable(tableKey, timedGet(() -> super.getTableSnapshot(key, context))); + } + return getByNamespaceKey(tableKey); + } + + @Override + public DremioTable getTable(String datasetId) { + DremioTable table; + if (!tablesByDatasetId.containsKey(datasetId)) { + table = timedGet(() -> super.getTable(datasetId)); + if (table != null) { + tablesByDatasetId.put(datasetId, table); + } + } + return tablesByDatasetId.get(datasetId); + } + + /** + * Searches tablesByNamespaceKey for a specific table and retrieves its version context. 
+ */ + private String getTableVersionContext(Map.Entry catalogEntry) { + if (catalogEntry.getKey().getTableVersionContext() != null) { + return String.format(" [%s] ", catalogEntry.getKey().getTableVersionContext()); + } + return " "; + } + + /** + * FunctionCacheKey is a combination of user/normalized namespace key/function type used for functions cache + */ + private static final class FunctionCacheKey { + private final CatalogIdentity subject; + private final String namespaceKey; + private final FunctionType functionType; + + private FunctionCacheKey(CatalogIdentity subject, String namespaceKey, FunctionType functionType) { + this.subject = subject; + this.namespaceKey = namespaceKey; + this.functionType = functionType; + } + + public static FunctionCacheKey of(CatalogIdentity subject, String namespaceKey, FunctionType functionType) { + return new FunctionCacheKey(subject, namespaceKey, functionType); + } + + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + FunctionCacheKey key = (FunctionCacheKey) o; + return functionType == key.functionType && + Objects.equals(subject, key.subject) && + Objects.equals(namespaceKey, key.namespaceKey); + } + + @Override + public int hashCode() { + return Objects.hash(subject, namespaceKey, functionType); + } + } + + /** + * TableCacheKey is a pair of user/CatalogEntityKey. + */ + private static final class TableCacheKey extends Pair { + /** + * Creates a Pair. + * + * @param subject left value + * @param entityKey right value + */ + private TableCacheKey(CatalogIdentity subject, CatalogEntityKey entityKey) { + super(subject, entityKey); + } + + public static TableCacheKey of(CatalogIdentity subject, List keyComponents, TableVersionContext versionContext) { + return new TableCacheKey(subject, CatalogEntityKey.newBuilder().keyComponents(keyComponents).tableVersionContext(versionContext).build()); + } + + CatalogIdentity getCatalogIdentity() { + return left; + } + + List getKeyComponents() { + return right.getKeyComponents(); + } + + TableVersionContext getTableVersionContext() { + return right.getTableVersionContext(); + } + } + + /** + * Utility function to get the table version context for a given DremioTable. + * TODO: Refactor DremioTable interface so TableVersionContext can be directly accessed. 
+ */ + private TableVersionContext getTableVersionContext(DremioTable table) { + try { + if (table instanceof NamespaceTable || table instanceof MaterializedDatasetTable) { + return table.getDataset().getVersionContext(); + } else if (table instanceof ViewTable) { + return TableVersionContext.of(((ViewTable) table).getVersionContext()); + } + } catch (Exception e) { + return null; + } + return null; + } + + private CatalogIdentity getCatalogIdentity() { + try { + return getMetadataRequestOptions().getSchemaConfig().getAuthContext().getSubject(); + } catch (Exception ignore) { + // Some tests with mocked Catalog throws exceptions here + } + return null; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/Catalog.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/Catalog.java index aa10b1b2f0..be3f6ab4ca 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/Catalog.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/Catalog.java @@ -64,7 +64,9 @@ public interface Catalog extends SimpleCatalog, EntityExplorer, Dataset */ Catalog resolveCatalog(CatalogIdentity subject); - MetadataStatsCollector getMetadataStatsCollector(); + default MetadataStatsCollector getMetadataStatsCollector() { + return getMetadataRequestOptions().getStatsCollector(); + } //TODO(DX-21034): Rework View Creator void createView(final NamespaceKey key, View view, ViewOptions viewOptions, NamespaceAttribute... attributes) throws IOException; @@ -92,6 +94,7 @@ public interface Catalog extends SimpleCatalog, EntityExplorer, Dataset * @param table the table to get the column extended properties for * @return the column extended properties grouped by column name */ + @Override Map> getColumnExtendedProperties(DremioTable table); /** @@ -101,5 +104,9 @@ public interface Catalog extends SimpleCatalog, EntityExplorer, Dataset */ Catalog visit(Function catalogRewrite); - public default void addCatalogStats() {} + default void addCatalogStats() {} + + default void invalidateNamespaceCache(final NamespaceKey key) {} + + MetadataRequestOptions getMetadataRequestOptions(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogEntityKey.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogEntityKey.java new file mode 100644 index 0000000000..cb508d6583 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogEntityKey.java @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog; + +import static com.dremio.common.utils.SqlUtils.quotedCompound; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.UserException; +import com.dremio.common.utils.PathUtils; +import com.dremio.common.utils.ReservedCharacters; +import com.dremio.service.namespace.NamespaceKey; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import com.google.common.base.Preconditions; + +/** + * Class representing a generic catalog entity key + */ +@JsonDeserialize(builder = CatalogEntityKey.Builder.class) +public class CatalogEntityKey { + public static final String KEY_DELIMITER = ReservedCharacters.getInformationSeparatorOne(); // : separated key and version + private static final Logger logger = LoggerFactory.getLogger(CatalogEntityKey.class); + private List keyComponents; + private TableVersionContext tableVersionContext; + private String joinedKeyWithVersion; // see toString() + + public CatalogEntityKey(String keyInStringFormat) { + this(newBuilder().deserialize(keyInStringFormat).build()); + } + + private CatalogEntityKey(Builder builder) { + this.keyComponents = builder.keyComponents; + this.tableVersionContext = builder.tableVersionContext; + this.joinedKeyWithVersion = builder.joinedKeyWithVersion; + } + + private CatalogEntityKey(CatalogEntityKey catalogEntityKey) { + this.keyComponents = catalogEntityKey.keyComponents; + this.tableVersionContext = catalogEntityKey.tableVersionContext; + this.joinedKeyWithVersion = catalogEntityKey.joinedKeyWithVersion; + } + + public static Builder newBuilder() { + return new CatalogEntityKey.Builder(); + } + + public int size() { + return keyComponents.size(); + } + + public List getKeyComponents() { + return keyComponents; + } + + public TableVersionContext getTableVersionContext() { + return tableVersionContext; + } + + public NamespaceKey toNamespaceKey() { + return new NamespaceKey(keyComponents); + } + + public static CatalogEntityKey fromNamespaceKey(NamespaceKey namespaceKey) { + return CatalogEntityKey.newBuilder() + .keyComponents(namespaceKey.getPathComponents()) + .tableVersionContext(null) + .build(); + } + + @JsonIgnore + public String getLeaf() { + return keyComponents.get(keyComponents.size() - 1); + } + + @Override + public int hashCode() { + return joinedKeyWithVersion.hashCode(); + } + + public CatalogEntityKey asLowerCase() { + return CatalogEntityKey.newBuilder() + .keyComponents(keyComponents.stream().map(String::toLowerCase).collect(Collectors.toList())) + .tableVersionContext(tableVersionContext) + .build(); + } + + @JsonIgnore + public boolean isKeyForImmutableEntity() { + switch(tableVersionContext.getType()) { + case SNAPSHOT_ID : + case COMMIT_HASH_ONLY : + case TIMESTAMP : + return true; + default : + return false; + } + } + + @Override + public String toString() { + return joinedKeyWithVersion; + } + + public String toUrlEncodedString() { + return PathUtils.encodeURIComponent(toString()); + } + + @Override + public boolean equals(Object obj) { + boolean pathComponentsComparison = false; + boolean tableVersionComparison = false; + if (obj != null && obj instanceof CatalogEntityKey) { + CatalogEntityKey o = (CatalogEntityKey) obj; + pathComponentsComparison = keyComponents.equals(o.keyComponents); + tableVersionComparison 
= tableVersionContext == null ? true : tableVersionContext.equals(o.tableVersionContext); + return pathComponentsComparison && tableVersionComparison; + } + return false; + } + + @JsonIgnore + public String getEntityName() { + return keyComponents.get(keyComponents.size() - 1); + } + + @JsonIgnore + public String getRootEntity() { + return keyComponents.get(0); + } + + @JsonIgnore + public List getPathWithoutRoot() { + return keyComponents.subList(1, keyComponents.size()); + } + + public String toUnescapedString() { + return quotedCompound(keyComponents); + } + + @JsonPOJOBuilder(withPrefix = "") + public static class Builder { + private List keyComponents; + private TableVersionContext tableVersionContext; + private String joinedKeyWithVersion; // see toString() + + public Builder() { + } + + public Builder keyComponents(List key) { + keyComponents = key; + return this; + } + + public Builder tableVersionContext(TableVersionContext versionContext) { + tableVersionContext = versionContext; + return this; + } + + public CatalogEntityKey build() { + Preconditions.checkNotNull(keyComponents); + if (keyComponents.stream().anyMatch((a) -> a.contains(ReservedCharacters.getInformationSeparatorOne()))) { + throw UserException.validationError().message("Invalid CatalogEntityKey format %s", keyComponents).build(logger); + } + this.joinedKeyWithVersion = serialize(); + return new CatalogEntityKey(this); + } + + private String serialize() { + StringBuilder keyWithVersion = new StringBuilder(); + String keyWithoutVersion = quotedCompound(keyComponents); + String versionString = tableVersionContext == null ? null : tableVersionContext.serialize(); + keyWithVersion.append(keyWithoutVersion); + if (tableVersionContext != null) { + keyWithVersion.append(KEY_DELIMITER); + keyWithVersion.append(versionString); + } + return keyWithVersion.toString(); + } + + @JsonIgnore + public Builder deserialize(String keyInStringFormat) { + List keyParts = Arrays.asList(keyInStringFormat.split(KEY_DELIMITER)); + TableVersionContext tableVersionContext = null; + List keyPaths = PathUtils.parseFullPath(keyParts.get(0)); + if (keyPaths.isEmpty() || keyParts.size() > 2) { + logger.debug("Invalid CatalogEntityKey format {}", keyInStringFormat); + throw UserException.validationError().message("Invalid CatalogEntityKey format %s", keyInStringFormat).build(logger); + } + if (keyParts.size() == 2) { + String versionString = keyParts.get(1); + tableVersionContext = versionString == null ? null : TableVersionContext.deserialize(versionString); + } + return CatalogEntityKey.newBuilder() + .keyComponents(keyPaths) + .tableVersionContext(tableVersionContext); + } + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogFeatures.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogFeatures.java new file mode 100644 index 0000000000..1a3ccb0b90 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogFeatures.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog; + +import java.util.HashSet; +import java.util.Set; + +import javax.annotation.concurrent.Immutable; + +import com.dremio.options.OptionManager; + +/** + * Immutable Catalog features class. The features are base on options. Call static {@link CatalogFeatures#get(OptionManager) get()} + * method to create an instance of this class. + */ +@Immutable +public final class CatalogFeatures { + + /** + * Catalog features + */ + public enum Feature { + ARS, + DATA_GRAPH, + HOME, + SEARCH, + SPACE, + STARRING, + } + + private final Set enabledFeatures; + + private CatalogFeatures(Feature... enabledFeatures) { + this.enabledFeatures = new HashSet(); + for (Feature feature : enabledFeatures) { + this.enabledFeatures.add(feature); + } + } + + public boolean isFeatureEnabled(Feature feature) { + return enabledFeatures.contains(feature); + } + + public static CatalogFeatures get(final OptionManager options) { + boolean arsEnabled = options.getOption(CatalogOptions.CATALOG_ARS_ENABLED); + + if (arsEnabled) { + return new CatalogFeatures(Feature.ARS); + } else { + return new CatalogFeatures(Feature.DATA_GRAPH, Feature.HOME, Feature.SEARCH, Feature.SPACE, Feature.STARRING); + } + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogImpl.java index 366216ad20..52e3f3c425 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogImpl.java @@ -17,7 +17,9 @@ import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_METADATA_FUNCTIONS; import static com.dremio.exec.catalog.CatalogUtil.getTimeTravelRequest; -import static com.dremio.exec.store.metadatarefresh.MetadataRefreshExecConstants.METADATA_STORAGE_PLUGIN_NAME; +import static com.dremio.exec.catalog.VersionedDatasetId.fromString; +import static com.dremio.exec.catalog.VersionedDatasetId.isTimeTravelDatasetId; +import static com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType.VALIDATION; import static com.dremio.exec.store.sys.udf.UserDefinedFunctionSerde.fromProto; import java.io.IOException; @@ -43,7 +45,7 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.calcite.schema.Function; import org.apache.calcite.schema.TranslatableTable; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.exceptions.UserException; @@ -58,6 +60,7 @@ import com.dremio.datastore.Serializer; import com.dremio.datastore.api.LegacyIndexedStore.LegacyFindByCondition; import com.dremio.exec.catalog.udf.DremioScalarUserDefinedFunction; +import com.dremio.exec.catalog.udf.DremioTabularUserDefinedFunction; import com.dremio.exec.dotfile.View; import com.dremio.exec.physical.base.ViewOptions; import com.dremio.exec.physical.base.WriterOptions; @@ -113,12 +116,15 @@ import com.dremio.service.namespace.proto.NameSpaceContainer.Type; import com.dremio.service.namespace.source.proto.SourceConfig; import com.dremio.service.orphanage.Orphanage; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import 
com.google.common.collect.Lists; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -213,6 +219,7 @@ public DremioTable getTableNoColumnCount(NamespaceKey key) { } @Override + @WithSpan public DremioTable getTable(NamespaceKey key) { final NamespaceKey resolvedKey = resolveToDefault(key); @@ -226,6 +233,64 @@ public DremioTable getTable(NamespaceKey key) { return getTableHelper(key); } + @Override + public String getDatasetId(NamespaceKey key) { + final NamespaceKey resolvedKey = resolveToDefault(key); + String datasetId = getDatasetIdForVersionedSource(resolvedKey); + + if (datasetId == null) { + datasetId = getDatasetIdForVersionedSource(key); + } + + if (datasetId != null) { + return datasetId; + } + + final DremioTable table = getTable(key); + if (table == null + || table.getDatasetConfig() == null + || table.getDatasetConfig().getId() == null) { + return null; + } + + return table.getDatasetConfig().getId().getId(); + } + + private String getDatasetIdForVersionedSource(NamespaceKey key) { + if (key == null) { + return null; + } + + final ManagedStoragePlugin plugin = pluginRetriever.getPlugin(key.getRoot(), false); + if (plugin == null || !(plugin.getPlugin() instanceof VersionedPlugin)) { + return null; + } + + final String sourceName = plugin.getName().getRoot(); + final VersionContext versionContext = options.getVersionForSource(sourceName, key); + final ResolvedVersionContext resolvedVersionContext = + versionContextResolverImpl.resolveVersionContext(sourceName, versionContext); + final List tableKey = key.getPathWithoutRoot(); + final String contentId = + ((VersionedPlugin) plugin.getPlugin()).getContentId(tableKey, resolvedVersionContext); + + if (contentId == null) { + logger.debug( + "Cannot find the content Id for table key: {} version: {}", key, resolvedVersionContext); + return null; + } + + final TableVersionContext tableVersionContext = TableVersionContext.of(resolvedVersionContext); + final VersionedDatasetId versionedDatasetId = + VersionedDatasetId.newBuilder() + .setTableKey(key.getPathComponents()) + .setContentId(contentId) + .setTableVersionContext(tableVersionContext) + .build(); + + return versionedDatasetId.asString(); + } + /** * This follows the similar definition of getTableSnapshot(). * It can be generalized for timetravel and metadata function query. 
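Condensing the id assembly done in getDatasetIdForVersionedSource above into one expression; the table key and content id below are invented placeholders, and the surrounding imports and resolved version context are assumed to be in scope:

TableVersionContext tableVersionContext = TableVersionContext.of(resolvedVersionContext);
String datasetId = VersionedDatasetId.newBuilder()
    .setTableKey(ImmutableList.of("mySource", "sales", "orders"))
    .setContentId("a1b2c3d4-e5f6-7890-abcd-ef1234567890") // from VersionedPlugin.getContentId(...)
    .setTableVersionContext(tableVersionContext)
    .build()
    .asString();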
@@ -312,11 +377,22 @@ private TranslatableTable mFunctionTableUtility(MetadataFunctionsMacro.MacroName case TABLE_HISTORY: case TABLE_MANIFESTS: case TABLE_SNAPSHOT: + case TABLE_PARTITIONS: if (mFunctionMetadata.getOptions().getTimeTravelRequest() != null) { throw UserException.validationError() .message("Time Travel is not supported on metadata function: '%s' ", mFunctionName) .buildSilently(); } + // For TABLE_PARTITIONS, throw error when the table is not partitioned + if (mFunctionName == MetadataFunctionsMacro.MacroName.TABLE_PARTITIONS) { + if (mFunctionMetadata.getCurrentConfig().getReadDefinition() == null + || mFunctionMetadata.getCurrentConfig().getReadDefinition().getPartitionColumnsList() == null + || mFunctionMetadata.getCurrentConfig().getReadDefinition().getPartitionColumnsList().size() < 1) { + throw UserException.validationError() + .message("Table %s is not partitioned.", canonicalKey.getSchemaPath()) + .buildSilently(); + } + } return new IcebergMFunctionTranslatableTableImpl(catalogMetadata, mFunctionMetadata.getSchemaConfig().getUserName(), mFunctionMetadata.getMetadataLocation(), complexTypeSupport); case TABLE_FILES: @@ -331,7 +407,7 @@ private TranslatableTable mFunctionTableUtility(MetadataFunctionsMacro.MacroName } @Override - public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { final NamespaceKey resolvedKey = resolveToDefault(key); if (resolvedKey != null) { @@ -351,6 +427,11 @@ public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext contex return table; } + @Override + public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + return getTableSnapshotForQuery(key, context); + } + private DremioTable getTableSnapshotHelper(NamespaceKey key, TableVersionContext context) { final ManagedStoragePlugin plugin = pluginRetriever.getPlugin(key.getRoot(), false); if (plugin == null) { @@ -387,6 +468,11 @@ private DremioTable getTableSnapshotForVersionedSource( } if (handle.get() instanceof ViewHandle) { final String accessUserName = options.getSchemaConfig().getUserName(); + if(context.getType() == TableVersionType.SNAPSHOT_ID || context.getType() == TableVersionType.TIMESTAMP) { + throw UserException.validationError() + .message("Time travel is not supported on views '%s'", canonicalKey) + .buildSilently(); + } final VersionedDatasetAdapter versionedDatasetAdapter = VersionedDatasetAdapter.newBuilder() .setVersionedTableKey(canonicalKey.getPathComponents()) .setVersionContext(versionContextResolverImpl.resolveVersionContext( @@ -459,27 +545,47 @@ private DatasetConfig getDatasetConfig(NamespaceKey key) { } - private VersionedDatasetAccessOptions getVersionedDatasetAccessOptions( - NamespaceKey key, - TableVersionContext context + NamespaceKey key, + TableVersionContext context ) { - final VersionContext tableContext = context.asVersionContext() - .orElse(options.getVersionForSource(key.getRoot())); return new VersionedDatasetAccessOptions.Builder() - .setVersionContext(resolveVersionContext(key.getRoot(), tableContext)) - .build(); + .setVersionContext(resolveVersionContext(key.getRoot(), getVersionContext(key, context))) + .build(); } private VersionContext getVersionContext( NamespaceKey key, TableVersionContext context ) { - return context.asVersionContext() - .orElse(options.getVersionForSource(key.getRoot())); + VersionContext versionContext; + if (context.isTimeTravelType()) { + // 
TableVersionContext is SNAPSHOT or TIMESTAMP, specified with AT syntax.
+        // Get the session version setting for this source from MetadataRequestOptions, if set.
+        // E.g., for this use case:
+        //   use branch dev;
+        //   select * from T AT SNAPSHOT '242536368'
+        //   select * from T AT TIMESTAMP '1234566768'
+        // In both cases we need to set the VersionContext to branch dev.
+
+        versionContext = options.getSourceVersionMapping().get(key.getRoot());
+        if (versionContext == null) {
+          versionContext = VersionContext.NOT_SPECIFIED;
+        }
+      } else {
+        // This must be of type BRANCH, TAG, or COMMIT
+        versionContext = context.asVersionContext();
+        if (!versionContext.isSpecified()) {
+          // Fall back to the session context setting for the source
+          versionContext = options.getVersionForSource(key.getRoot(), key);
+        }
+      }
+      return versionContext;
   }

+  @WithSpan
   private DremioTable getTableHelper(NamespaceKey key) {
+    Span.current().setAttribute("dremio.namespace.key.schemapath", key.getSchemaPath());
     final DremioTable table = datasets.getTable(key, options, false);
     if (table != null) {
       addUniqueSource(table);
@@ -533,6 +639,11 @@ public boolean supportsVersioning(NamespaceKey namespaceKey) {

   @Override
   public DremioTable getTable(String datasetId) {
+    final boolean isTimeTravelDataset = VersionedDatasetId.isTimeTravelDatasetId(datasetId);
+    Span.current().setAttribute("dremio.catalog.getTable.isTimeTravelDataset", isTimeTravelDataset);
+    if (isTimeTravelDataset) {
+      return getTableForTimeTravel(datasetId);
+    }
     final DremioTable t = datasets.getTable(datasetId, options);
     if (t != null) {
       addUniqueSource(t);
@@ -700,7 +811,7 @@ public Collection getFunctions(NamespaceKey path,
     final NamespaceKey resolvedPath = resolveSingle(path);
     switch (functionType) {
       case TABLE:
-        return getUserDefinedTableFunctions(path, resolvedPath);
+        return getUserDefinedTableFunctions(resolvedPath);
       case SCALAR:
         return getUserDefinedScalarFunctions(resolvedPath);
       default:
@@ -708,68 +819,123 @@
     }
   }

-  private Collection getUserDefinedScalarFunctions(NamespaceKey path) {
+  private Optional getUserDefinedFunction(NamespaceKey path) {
+    Optional optionalUserDefinedFunction = getUserDefinedFunctionImplementation(path);
+    if (optionalUserDefinedFunction.isPresent()) {
+      return optionalUserDefinedFunction;
+    }
+
+    if (path.size() == 1) {
+      return optionalUserDefinedFunction;
+    }
+
+    // Try again but from the root context
+    return getUserDefinedFunctionImplementation(new NamespaceKey(path.getLeaf()));
+  }
+
+  private Optional getUserDefinedFunctionImplementation(NamespaceKey path) {
     try {
-      if(userNamespaceService.exists(path, Type.FUNCTION)) {
-        FunctionConfig functionConfig = userNamespaceService.getFunction(path);
-        if (null != functionConfig) {
-          CatalogIdentity owner = identityResolver.getOwner(path.getPathComponents());
-          if (owner == null) {
-            // Owner is null in non-enterprise.
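(An aside on the getVersionContext() rewrite just above, before the removed scalar-function body continues.) The new fallback rules reduce to a small decision: a time-travel AT clause defers to the session's source-version mapping, while a reference AT clause wins over it. A self-contained sketch of that rule, using plain Strings in place of VersionContext:

    // Sketch: how the effective version is chosen for a versioned source.
    static String effectiveVersion(boolean atClauseIsTimeTravel,
                                   String atClauseRef,   // BRANCH/TAG/COMMIT from AT, or null
                                   String sessionRef) {  // from USE BRANCH/TAG, or null
      if (atClauseIsTimeTravel) {
        // AT SNAPSHOT / AT TIMESTAMP pins the data version, not the reference:
        // the session branch (if any) still decides where the table resolves.
        return sessionRef != null ? sessionRef : "NOT_SPECIFIED";
      }
      // An explicit AT BRANCH/TAG/COMMIT wins; otherwise fall back to the session setting.
      return atClauseRef != null ? atClauseRef : sessionRef;
    }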
- // In this case, use the current userName - owner = new CatalogUser(userName); - } - return ImmutableList.of(new DremioScalarUserDefinedFunction(owner, - fromProto(functionConfig))); - } + if (!userNamespaceService.exists(path, Type.FUNCTION)) { + return Optional.empty(); } - return ImmutableList.of(); + + FunctionConfig functionConfig = userNamespaceService.getFunction(path); + if (functionConfig == null) { + return Optional.empty(); + } + + UserDefinedFunction userDefinedFunction = fromProto(functionConfig); + return Optional.of(userDefinedFunction); } catch (NamespaceException e) { - //TODO what to do on ambigous results + //TODO what to do on ambiguous results throw new RuntimeException(e); } } - private Collection getUserDefinedTableFunctions(NamespaceKey path, NamespaceKey resolved) { - List functions = new ArrayList<>(); - /* - Check table function name first which is of type metadata functions. In this case getPathComponents will always return max one element - * */ - if (path.getPathComponents().size() == 1 && isMetadataFunctions(path)) { + private CatalogIdentity getOwner(NamespaceKey path) { + try { + CatalogIdentity owner = identityResolver.getOwner(path.getPathComponents()); + if (owner == null) { + // Owner is null in non-enterprise. + // In this case, use the current userName + owner = new CatalogUser(userName); + } + + return owner; + } catch (NamespaceException e) { + //TODO what to do on ambiguous results + throw new RuntimeException(e); + } + } + + private Collection getUserDefinedScalarFunctions(NamespaceKey path) { + Optional optionalUserDefinedFunction = getUserDefinedFunction(path); + if (!optionalUserDefinedFunction.isPresent()) { + return ImmutableList.of(); + } + + UserDefinedFunction userDefinedFunction = optionalUserDefinedFunction.get(); + if (userDefinedFunction.getReturnType().getType().isComplex()) { + return ImmutableList.of(); + } + + Function function = new DremioScalarUserDefinedFunction(getOwner(path),userDefinedFunction); + return ImmutableList.of(function); + } + + private Collection getUserDefinedTableFunctions(NamespaceKey path) { + if (isMetadataFunctions(path.getLeaf())) { if (!optionManager.getOption(ENABLE_ICEBERG_METADATA_FUNCTIONS)) { throw UserException.unsupportedError().message("Query on metadata functions are not supported on iceberg.").buildSilently(); } - functions.add(new MetadataFunctionsMacro( - (tablePath, versionContext) -> getMFunctionTable(new NamespaceKey(tablePath), versionContext, path.getPathComponents().get(0)))); - return functions; + + Function function = new MetadataFunctionsMacro( + (tablePath, versionContext) -> getMFunctionTable( + new NamespaceKey(tablePath), + versionContext, + path.getLeaf())); + + return ImmutableList.of(function); + } + + if (TableMacroNames.TIME_TRAVEL.equals(path.getPathComponents())) { + Function function = new TimeTravelTableMacro( + (tablePath, versionContext) -> getTableSnapshotForQuery( + new NamespaceKey(tablePath), + versionContext)); + return ImmutableList.of(function); } - if (resolved != null) { - if (containerExists(resolved.getParent(), systemNamespaceService)) { - Collection resolvedFunctions = getFunctionsInternal(resolved); - functions.addAll(resolvedFunctions); - return functions; + Optional optionalUserDefinedFunction = getUserDefinedFunction(path); + if (optionalUserDefinedFunction.isPresent()) { + UserDefinedFunction userDefinedFunction = optionalUserDefinedFunction.get(); + if (!userDefinedFunction.getReturnType().getType().isComplex()) { + return ImmutableList.of(); } + + 
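Note the symmetry with getUserDefinedScalarFunctions() above: one UDF store serves both function kinds, and the declared return type decides which surface a definition gets. A compact restatement of the dispatch (the udf variable stands in for the value computed in these methods):

    // Sketch: complex return type => table function; simple => scalar function.
    boolean complex = udf.getReturnType().getType().isComplex();
    Collection<Function> fns = complex
        ? ImmutableList.of(new DremioTabularUserDefinedFunction(getOwner(path), udf))
        : ImmutableList.of(new DremioScalarUserDefinedFunction(getOwner(path), udf));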
Function function = new DremioTabularUserDefinedFunction( + getOwner(path), + userDefinedFunction); + + return ImmutableList.of(function); } if (containerExists(path.getParent(), systemNamespaceService)) { - functions.addAll(getFunctionsInternal(path)); + return getFunctionsInternal(path); } - if (TableMacroNames.TIME_TRAVEL.equals(path.getPathComponents())) { - functions.add(new TimeTravelTableMacro( - (tablePath, versionContext) -> getTableSnapshot(new NamespaceKey(tablePath), versionContext))); - } - return functions; + return ImmutableList.of(); } /** * Validate If table function is of type metadata functions - * @param path + * @param functionName * @return */ - private boolean isMetadataFunctions(NamespaceKey path) { - return Arrays.stream(MetadataFunctionsMacro.MacroName.values()).anyMatch(e -> e.name().toUpperCase(Locale.ROOT).equals(path.getPathComponents().get(0).toUpperCase(Locale.ROOT))); + private boolean isMetadataFunctions(String functionName) { + return Arrays + .stream(MetadataFunctionsMacro.MacroName.values()) + .anyMatch(e -> e.name().equalsIgnoreCase(functionName)); } private Collection getFunctionsInternal( @@ -921,19 +1087,10 @@ public Catalog resolveCatalog(NamespaceKey newDefaultSchema) { identityResolver, versionContextResolverImpl); } - @Override - public MetadataStatsCollector getMetadataStatsCollector() { - return options.getStatsCollector(); - } - private FileSystemPlugin getHomeFilesPlugin() throws ExecutionSetupException { return pluginRetriever.getPlugin("__home", true).unwrap(FileSystemPlugin.class); } - private FileSystemPlugin getMetadataPlugin() throws ExecutionSetupException { - return pluginRetriever.getPlugin(METADATA_STORAGE_PLUGIN_NAME, true).unwrap(FileSystemPlugin.class); - } - @Override public void createEmptyTable(NamespaceKey key, BatchSchema batchSchema, final WriterOptions writerOptions) { asMutable(key, "does not support create table operations.").createEmptyTable(key, options.getSchemaConfig(), @@ -1102,7 +1259,18 @@ public void dropTable(NamespaceKey key, TableMutationOptions tableMutationOption isLayered = DatasetHelper.isIcebergDataset(dataset); } - MutablePlugin mutablePlugin = asMutable(key, "does not support dropping tables"); + MutablePlugin mutablePlugin; + // If we can't find the source, we can't find the table. + try { + mutablePlugin = asMutable(key, "does not support dropping tables"); + } catch (UserException e) { + if (e.getErrorType() == VALIDATION) { + throw UserException.validationError() + .message("Table [%s] not found.", key) + .build(logger); + } + throw e; + } TableMutationOptions localTableMutationOptions = tableMutationOptions != null ? 
ImmutableTableMutationOptions .copyOf(tableMutationOptions) @@ -1113,7 +1281,7 @@ public void dropTable(NamespaceKey key, TableMutationOptions tableMutationOption if (existsInNamespace) { try { - if (dataset != null && CatalogUtil.hasIcebergMetadata(dataset)) { + if (CatalogUtil.hasIcebergMetadata(dataset)) { CatalogUtil.addIcebergMetadataOrphan(dataset, orphanage); } systemNamespaceService.deleteEntity(key); @@ -1124,20 +1292,13 @@ public void dropTable(NamespaceKey key, TableMutationOptions tableMutationOption } private boolean isDroppable(DatasetConfig datasetConfig) { - if (isSystemTable(datasetConfig) || datasetConfig.getType() == DatasetType.VIRTUAL_DATASET) { - return false; - } - - return true; + return !isSystemTable(datasetConfig) && datasetConfig.getType() != DatasetType.VIRTUAL_DATASET; } private boolean isSystemTable(DatasetConfig config) { // check if system tables and information schema. final String root = config.getFullPathList().get(0); - if( ("sys").equals(root) || ("INFORMATION_SCHEMA").equals(root) ) { - return true; - } - return false; + return ("sys").equals(root) || ("INFORMATION_SCHEMA").equals(root); } private DatasetConfig getConfigFromNamespace(NamespaceKey key) { @@ -1261,12 +1422,6 @@ public void rollbackTable(NamespaceKey key, DatasetConfig datasetConfig, Rollbac mutablePlugin.rollbackTable(key, datasetConfig, options.getSchemaConfig(), rollbackOption, tableMutationOptions); } - @Override - public void vacuumTable(NamespaceKey key, DatasetConfig datasetConfig, VacuumOption vacuumOption, TableMutationOptions tableMutationOptions) { - MutablePlugin mutablePlugin = asMutable(key, "does not support vacuum table"); - mutablePlugin.vacuumTable(key, datasetConfig, options.getSchemaConfig(), vacuumOption, tableMutationOptions); - } - @Override public void addColumns(NamespaceKey key, DatasetConfig datasetConfig, List colsToAdd, TableMutationOptions tableMutationOptions) { MutablePlugin mutablePlugin = asMutable(key, "does not support dropping tables"); @@ -1323,10 +1478,50 @@ public boolean alterDataset(final NamespaceKey key, final Map properties = + attributes.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> String.valueOf(entry.getValue().getValueAsObject()))); + final ViewOptions viewOptions = + new ViewOptions.ViewOptionsBuilder() + .version(resolvedVersionContext) + .actionType(ViewOptions.ActionType.ALTER_VIEW) + .properties(properties) + .build(); + + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); + + try { + return asMutable(key, "does not support update view") + .createOrUpdateView(key, options.getSchemaConfig(), null, viewOptions); + } catch (IOException e) { + throw UserException.validationError(e) + .message("Failure while accessing view") + .buildSilently(); + } + } + try { // try resolving names with "default" namespace; for example, if the key is // hivestore.datatab then try to resolve it using hivestore."default".datatab final Optional handle = plugin.getDatasetHandle(key, null, plugin.getDefaultRetrievalOptions()); + // TODO: handle.get() is called without an isPresent() check. 
final NamespaceKey namespaceKey = MetadataObjectsUtils.toNamespaceKey(handle.get().getDatasetPath()); datasetConfig = systemNamespaceService.getDataset(namespaceKey); } catch (ConnectorException e) { @@ -1472,7 +1667,7 @@ public void addPrimaryKey(NamespaceKey table, List columns) { ManagedStoragePlugin managedStoragePlugin = pluginRetriever.getPlugin(table.getRoot(), false); versionContext = versionContextResolverImpl.resolveVersionContext( managedStoragePlugin.getName().getRoot(), - options.getVersionForSource(managedStoragePlugin.getName().getRoot())); + options.getVersionForSource(managedStoragePlugin.getName().getRoot(), table)); } mutablePlugin.addPrimaryKey(table, datasetConfig, options.getSchemaConfig(), columnFields, versionContext); @@ -1501,7 +1696,7 @@ public void dropPrimaryKey(NamespaceKey table) { ManagedStoragePlugin managedStoragePlugin = pluginRetriever.getPlugin(table.getRoot(), false); versionContext = versionContextResolverImpl.resolveVersionContext( managedStoragePlugin.getName().getRoot(), - options.getVersionForSource(managedStoragePlugin.getName().getRoot())); + options.getVersionForSource(managedStoragePlugin.getName().getRoot(), table)); } List primaryKey; @@ -1544,7 +1739,7 @@ public List getPrimaryKey(NamespaceKey table) { ManagedStoragePlugin managedStoragePlugin = pluginRetriever.getPlugin(table.getRoot(), false); versionContext = versionContextResolverImpl.resolveVersionContext( managedStoragePlugin.getName().getRoot(), - options.getVersionForSource(managedStoragePlugin.getName().getRoot())); + options.getVersionForSource(managedStoragePlugin.getName().getRoot(), table)); } List primaryKey; @@ -1566,16 +1761,14 @@ public boolean toggleSchemaLearning(NamespaceKey table, boolean enableSchemaLear private boolean updateOptions(VirtualDataset virtualDataset, Map attributes) { boolean changed = false; for (Entry attribute : attributes.entrySet()) { - switch (attribute.getKey().toLowerCase()) { - case "enable_default_reflection": + if (attribute.getKey().toLowerCase().equals("enable_default_reflection")) { AttributeValue.BooleanValue value = (AttributeValue.BooleanValue) attribute.getValue(); boolean oldValue = Optional.ofNullable(virtualDataset.getDefaultReflectionEnabled()).orElse(true); if (value.getValue() != oldValue) { changed = true; virtualDataset.setDefaultReflectionEnabled(value.getValue()); } - break; - default: + } else { throw UserException.validationError() .message("Unknown option [%s]", attribute.getKey()) .buildSilently(); @@ -1608,7 +1801,10 @@ public UpdateStatus refreshDataset(NamespaceKey key, DatasetRetrievalOptions ret .buildSilently(); } - if(CatalogUtil.requestedPluginSupportsVersionedTables(key, this)) { + final boolean requestedPluginSupportsVersionedTables = CatalogUtil.requestedPluginSupportsVersionedTables(key, this); + Span.current().setAttribute("dremio.catalog.refreshDataset.requestedPluginSupportsVersionedTables", requestedPluginSupportsVersionedTables); + + if(requestedPluginSupportsVersionedTables) { return UpdateStatus.UNCHANGED; } @@ -1655,6 +1851,7 @@ public Iterable getSubPartitions( } @Override + @WithSpan public boolean createOrUpdateDataset( NamespaceService userNamespaceService, NamespaceKey source, @@ -1738,6 +1935,7 @@ public T getSource(String name) { } @Override + @WithSpan public void createSource(SourceConfig config, NamespaceAttribute... attributes) { sourceModifier.createSource(config, attributes); } @@ -1748,6 +1946,7 @@ public void updateSource(SourceConfig config, NamespaceAttribute... 
attributes) } @Override + @WithSpan public void deleteSource(SourceConfig config) { NamespaceService.DeleteCallback deleteCallback = (DatasetConfig datasetConfig) -> { @@ -1816,4 +2015,25 @@ public interface IdentityResolver { CatalogIdentity getOwner(List path) throws NamespaceException; NamespaceIdentity toNamespaceIdentity(CatalogIdentity identity); } + + private DremioTable getTableForTimeTravel(String datasetId) { + VersionedDatasetId versionedDatasetId; + try { + versionedDatasetId = fromString(datasetId); + TableVersionContext tableVersionContext = versionedDatasetId.getVersionContext(); + Preconditions.checkState(isTimeTravelDatasetId(versionedDatasetId)); + return getTableSnapshotForQuery(new NamespaceKey(versionedDatasetId.getTableKey()), tableVersionContext); + } catch (JsonProcessingException j) { + logger.error("Expected datasetId with time travel specification {} ", datasetId); + return null; + } catch (UserException e) { + // getTableSnapshot returns a UserException when table is not found. + return null; + } + } + + @Override + public MetadataRequestOptions getMetadataRequestOptions() { + return options; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogOptions.java index b223d112ec..b8dfb491aa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogOptions.java @@ -63,7 +63,10 @@ public final class CatalogOptions { // Enable secrets field look up/resolution on sources public static final TypeValidators.BooleanValidator SOURCE_SECRETS_RESOLUTION_ENABLED = new TypeValidators.BooleanValidator("auth.source-secrets-resolution.enabled", false); - + // Enable reflection tab in NESSIE and ARCTIC source dialogs + public static final BooleanValidator REFLECTION_ARCTIC_ENABLED = new BooleanValidator("reflection.arctic.enabled", false); + // Enable Arctic Replacing Spaces + public static final BooleanValidator CATALOG_ARS_ENABLED = new TypeValidators.BooleanValidator("catalog_ars_enabled", false); // Do not instantiate private CatalogOptions() { } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogProtocol.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogProtocol.java index e6df4a5e38..dd3cfe1144 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogProtocol.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogProtocol.java @@ -101,6 +101,7 @@ public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException { } } + @Override public void close() { executor.shutdownNow(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogServiceImpl.java index eb27e65ffe..14353ab7a8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogServiceImpl.java @@ -102,6 +102,8 @@ import com.google.common.util.concurrent.Futures; import com.google.protobuf.ByteString; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.LinkedBuffer; import io.protostuff.ProtobufIOUtil; @@ -116,13 +118,13 @@ public class CatalogServiceImpl implements CatalogService { private static final Logger logger = LoggerFactory.getLogger(CatalogServiceImpl.class); public static final long CATALOG_SYNC = 
TimeUnit.MINUTES.toMillis(3); - private static final long CHANGE_COMMUNICATION_WAIT = TimeUnit.SECONDS.toMillis(5); + private static final long CHANGE_COMMUNICATION_WAIT = TimeUnit.SECONDS.toMillis(10); public static final String CATALOG_SOURCE_DATA_NAMESPACE = "catalog-source-data"; public static final String SYSTEM_TABLE_SOURCE_NAME = "sys"; - protected final Provider context; + protected final Provider sabotContext; protected final Provider scheduler; private final Provider>> sysTableConfProvider; private final Provider>> sysFlightTableConfProvider; @@ -148,7 +150,7 @@ public class CatalogServiceImpl implements CatalogService { protected Provider modifiableSchedulerService; public CatalogServiceImpl( - Provider context, + Provider sabotContext, Provider scheduler, Provider>> sysTableConfProvider, Provider>> sysFlightTableConfProvider, @@ -163,13 +165,13 @@ public CatalogServiceImpl( EnumSet roles, Provider modifiableSchedulerService ) { - this(context, scheduler, sysTableConfProvider, sysFlightTableConfProvider, fabric, connectionReaderProvider, bufferAllocator, + this(sabotContext, scheduler, sysTableConfProvider, sysFlightTableConfProvider, fabric, connectionReaderProvider, bufferAllocator, kvStoreProvider, datasetListingService, optionManager, broadcasterProvider, config, roles, CatalogServiceMonitor.DEFAULT, modifiableSchedulerService); } @VisibleForTesting CatalogServiceImpl( - Provider context, + Provider sabotContext, Provider scheduler, Provider>> sysTableConfProvider, Provider>> sysFlightTableConfProvider, @@ -185,7 +187,7 @@ public CatalogServiceImpl( final CatalogServiceMonitor monitor, Provider modifiableSchedulerService ) { - this.context = context; + this.sabotContext = sabotContext; this.scheduler = scheduler; this.sysTableConfProvider = sysTableConfProvider; this.sysFlightTableConfProvider = sysFlightTableConfProvider; @@ -206,7 +208,7 @@ public CatalogServiceImpl( @Override public void start() throws Exception { - SabotContext context = this.context.get(); + SabotContext context = this.sabotContext.get(); this.allocator = bufferAllocator.get().newChildAllocator("catalog-protocol", 0, Long.MAX_VALUE); this.systemNamespace = context.getNamespaceService(SystemUser.SYSTEM_USERNAME); this.sourceDataStore = kvStoreProvider.get().getStore(CatalogSourceDataCreator.class); @@ -283,25 +285,25 @@ public void start() throws Exception { } protected PluginsManager newPluginsManager() { - return new PluginsManager(context.get(), systemNamespace, context.get().getOrphanageFactory().get(), datasetListingService.get(), optionManager.get(), + return new PluginsManager(sabotContext.get(), systemNamespace, sabotContext.get().getOrphanageFactory().get(), datasetListingService.get(), optionManager.get(), config, sourceDataStore, scheduler.get(), connectionReaderProvider.get(), monitor, broadcasterProvider, isInfluxSource, modifiableSchedulerService.get()); } public void communicateChange(SourceConfig config, RpcType rpcType) { final Set endpoints = new HashSet<>(); - endpoints.add(context.get().getEndpoint()); + endpoints.add(sabotContext.get().getEndpoint()); List> futures = new ArrayList<>(); SourceWrapper wrapper = SourceWrapper.newBuilder().setBytes(ByteString.copyFrom(ProtobufIOUtil.toByteArray(config, SourceConfig.getSchema(), LinkedBuffer.allocate()))).build(); - for(NodeEndpoint e : Iterables.concat(this.context.get().getCoordinators(), this.context.get().getExecutors())) { + for(NodeEndpoint e : Iterables.concat(this.sabotContext.get().getCoordinators(), 
this.sabotContext.get().getExecutors())) { if(!endpoints.add(e)) { continue; } SendSource send = new SendSource(wrapper, rpcType); tunnelFactory.getCommandRunner(e.getAddress(), e.getFabricPort()).runCommand(send); - logger.trace("Sending [{}] to {}:{}", config.getName(), e.getAddress(), e.getUserPort()); + logger.info("Sending [{}] to {}:{}", config.getName(), e.getAddress(), e.getUserPort()); futures.add(send.getFuture()); } @@ -331,7 +333,7 @@ public void doRpcCall(RpcOutcomeListener outcomeListener, ProxyConnection c class CatalogChangeListener { void sourceUpdate(SourceConfig config) { try { - logger.debug("Received source update for [{}]", config.getName()); + logger.info("Received source update for [{}]", config.getName()); plugins.getSynchronized(config, isInfluxSource); } catch (Exception ex) { logger.warn("Failure while synchronizing source [{}].", config.getName(), ex); @@ -340,7 +342,7 @@ void sourceUpdate(SourceConfig config) { void sourceDelete(SourceConfig config) { try { - logger.debug("Received delete source for [{}]", config.getName()); + logger.info("Received delete source for [{}]", config.getName()); plugins.closeAndRemoveSource(config); } catch (Exception ex) { @@ -407,6 +409,7 @@ private boolean createSourceIfMissing(SourceConfig config, NamespaceAttribute... return false; } + @Override public boolean createSourceIfMissingWithThrow(SourceConfig config) { Preconditions.checkArgument(config.getTag() == null); if(!getPlugins().hasPlugin(config.getName())) { @@ -487,8 +490,9 @@ public void deleteSource(String name) { * @param config * @param subject */ + @WithSpan private void deleteSource(SourceConfig config, CatalogIdentity subject, NamespaceService.DeleteCallback callback) { - NamespaceService namespaceService = context.get().getNamespaceService(subject.getName()); + NamespaceService namespaceService = sabotContext.get().getNamespaceService(subject.getName()); boolean afterUnknownEx = false; try (AutoCloseable l = getDistributedLock(config.getName()) ) { @@ -516,7 +520,7 @@ private void deleteSource(SourceConfig config, CatalogIdentity subject, Namespac private AutoCloseable getDistributedLock(String sourceName) throws Exception { long millis = 15_000; - DistributedLease lease = context.get().getClusterCoordinator().getSemaphore("-source-" + sourceName.toLowerCase(), 1).acquire(millis, TimeUnit.MILLISECONDS); + DistributedLease lease = sabotContext.get().getClusterCoordinator().getSemaphore("-source-" + sourceName.toLowerCase(), 1).acquire(millis, TimeUnit.MILLISECONDS); if(lease == null) { throw UserException.resourceError().message("Unable to acquire source change lock for source [%s] within timeout.", sourceName).build(logger); } @@ -555,13 +559,20 @@ private ManagedStoragePlugin getPlugin(StoragePluginId id) { private ManagedStoragePlugin getPlugin(String name, boolean errorOnMissing) { ManagedStoragePlugin plugin = getPlugins().get(name); - if(plugin != null) { + + final boolean pluginFoundInPlugins = plugin != null; + Span.current().setAttribute("Catalog.CatalogServiceImpl.getPlugin.pluginFoundInPlugins", pluginFoundInPlugins); + + if(pluginFoundInPlugins) { return plugin; } try { logger.debug("Synchronizing source [{}] with namespace", name); - if (isInfluxSource(name)) { + final boolean isSourceAnInfluxSource = isInfluxSource(name); + Span.current().setAttribute("Catalog.CatalogServiceImpl.getPlugin.isInfluxSource", isSourceAnInfluxSource); + + if (isSourceAnInfluxSource) { if (!errorOnMissing) { return null; } @@ -582,6 +593,7 @@ private ManagedStoragePlugin 
getPlugin(String name, boolean errorOnMissing) { } } + @Override @VisibleForTesting public ManagedStoragePlugin getManagedSource(String name) { return getPlugins().get(name); @@ -594,19 +606,22 @@ public T getSource(StoragePluginId pluginId) { } @Override + @WithSpan public SourceState getSourceState(String name) { - // Preconditions.checkState(isCoordinator); - ManagedStoragePlugin plugin = getPlugin(name, false); - if(plugin == null) { - return null; + try { + ManagedStoragePlugin plugin = getPlugin(name, false); + if(plugin == null) { + return SourceState.badState(String.format("Source %s could not be found. Please verify the source name.", name), "Unable to find source."); + } + return plugin.getState(); + } catch (Exception e) { + return SourceState.badState("", e); } - return plugin.getState(); } @SuppressWarnings("unchecked") @Override public T getSource(String name) { - // Preconditions.checkState(isCoordinator); ?? return (T) getPlugin(name, true).unwrap(StoragePlugin.class); } @@ -639,14 +654,14 @@ public Catalog getCatalog(MetadataRequestOptions requestOptions) { } protected Catalog createCatalog(MetadataRequestOptions requestOptions) { - return createCatalog(requestOptions, new CatalogIdentityResolver(), context.get().getNamespaceServiceFactory()); + return createCatalog(requestOptions, new CatalogIdentityResolver(), sabotContext.get().getNamespaceServiceFactory()); } protected Catalog createCatalog(MetadataRequestOptions requestOptions, IdentityResolver identityProvider, NamespaceService.Factory namespaceServiceFactory) { OptionManager optionManager = requestOptions.getSchemaConfig().getOptions(); if (optionManager == null) { - optionManager = context.get().getOptionManager(); + optionManager = sabotContext.get().getOptionManager(); } PluginRetriever retriever = new Retriever(); @@ -656,16 +671,17 @@ protected Catalog createCatalog(MetadataRequestOptions requestOptions, IdentityR retriever, new SourceModifier(requestOptions.getSchemaConfig().getAuthContext().getSubject()), optionManager, - context.get().getNamespaceService(SystemUser.SYSTEM_USERNAME), + sabotContext.get().getNamespaceService(SystemUser.SYSTEM_USERNAME), namespaceServiceFactory, - context.get().getOrphanageFactory().get(), - context.get().getDatasetListing(), - context.get().getViewCreatorFactoryProvider().get(), + sabotContext.get().getOrphanageFactory().get(), + sabotContext.get().getDatasetListing(), + sabotContext.get().getViewCreatorFactoryProvider().get(), identityProvider, new VersionContextResolverImpl(retriever)); } @Override + @WithSpan public boolean isSourceConfigMetadataImpacting(SourceConfig config) { return getPlugins().get(config.getName()).isSourceConfigMetadataImpacting(config); } @@ -814,7 +830,7 @@ public NamespaceIdentity toNamespaceIdentity(CatalogIdentity identity) { } try { - final User user = context.get().getUserService().getUser(identity.getName()); + final User user = sabotContext.get().getUserService().getUser(identity.getName()); return new NamespaceUser(() -> user); } catch (UserNotFoundException ignored) { } @@ -824,6 +840,7 @@ public NamespaceIdentity toNamespaceIdentity(CatalogIdentity identity) { } } + @Override public void communicateChangeToExecutors(List nodeEndpointList, SourceConfig config, CatalogRPC.RpcType rpcType) { List> futures = new ArrayList<>(); CatalogRPC.SourceWrapper wrapper = CatalogRPC.SourceWrapper.newBuilder().setBytes(ByteString.copyFrom(ProtobufIOUtil.toByteArray(config, SourceConfig.getSchema(), LinkedBuffer.allocate()))).build(); @@ -836,7 +853,7 @@ 
public void communicateChangeToExecutors(List n
     }

     try {
-      Futures.successfulAsList(futures).get(10, TimeUnit.SECONDS);
+      Futures.successfulAsList(futures).get(CHANGE_COMMUNICATION_WAIT, TimeUnit.MILLISECONDS);
     } catch (InterruptedException | ExecutionException | TimeoutException e1) {
       // Error is ignored here as plugin propagation is best effort
       logger.warn("Failure while communicating source change [{}].", config.getName(), e1);
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogUtil.java
index c0b0df8ab9..97223d94a6 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogUtil.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/CatalogUtil.java
@@ -23,9 +23,11 @@
 import com.dremio.connector.metadata.PartitionChunk;
 import com.dremio.connector.metadata.PartitionChunkListing;
 import com.dremio.connector.metadata.options.TimeTravelOption;
+import com.dremio.exec.store.CatalogService;
 import com.dremio.exec.store.NoDefaultBranchException;
 import com.dremio.exec.store.ReferenceConflictException;
 import com.dremio.exec.store.ReferenceNotFoundException;
+import com.dremio.exec.store.SchemaConfig;
 import com.dremio.exec.store.StoragePlugin;
 import com.dremio.service.namespace.DatasetMetadataSaver;
 import com.dremio.service.namespace.NamespaceKey;
@@ -33,6 +35,7 @@
 import com.dremio.service.namespace.dataset.proto.DatasetConfig;
 import com.dremio.service.orphanage.Orphanage;
 import com.dremio.service.orphanage.proto.OrphanEntry;
+import com.dremio.service.users.SystemUser;

 public final class CatalogUtil {
@@ -137,11 +140,10 @@ public static NamespaceService.DeleteCallback getDeleteCallback(Orphanage orphan
     return deleteCallback;
   }

-  public static void validateResolvedVersionIsBranch(ResolvedVersionContext resolvedVersionContext, String tableName) {
+  public static void validateResolvedVersionIsBranch(ResolvedVersionContext resolvedVersionContext) {
     if ((resolvedVersionContext != null) && !resolvedVersionContext.isBranch()) {
       throw UserException.validationError()
-        .message("Unable to perform operation on %s - version %s is not a branch ",
-          tableName,
+        .message("DDL and DML operations are only supported for branches - not on tags or commits. %s is not a branch.",
           resolvedVersionContext.getRefName())
         .buildSilently();
     }
@@ -163,10 +165,11 @@ public static boolean isFSInternalIcebergTableOrJsonTableOrMongo(Catalog catalog
     return ((MutablePlugin) storagePlugin).isSupportUserDefinedSchema(dataset);
-}
+  }

   /**
    * Utility to return TimeTravelRequest for query : select * from iceberg_table AT SNAPSHOT/TIMESTAMP
+   *
    * @param key
    * @param context
    * @return
    */
@@ -194,4 +197,63 @@ public static TimeTravelOption.TimeTravelRequest getTimeTravelRequest(NamespaceK
       throw new AssertionError("Unsupported type " + context.getType());
     }
   }
+
+  /**
+   * Returns a Catalog that allows the caller to search for entries but will not promote entries
+   * that are missing in the Namespace KV store. It does not check metadata validity.
+   *
+   * @param catalogService
+   * @return
+   */
+  public static EntityExplorer getSystemCatalogForReflections(CatalogService catalogService) {
+    return catalogService.getCatalog(MetadataRequestOptions.newBuilder()
+      .setSchemaConfig(SchemaConfig.newBuilder(CatalogUser.from(SystemUser.SYSTEM_USERNAME)).build())
+      .setCheckValidity(false)
+      .setNeverPromote(true)
+      .build());
+  }
+
+  //TODO(DX-63224): Remove this CatalogUtil function and have callers use the main Catalog API directly.
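Back at the top of this hunk run, communicateChangeToExecutors() now waits on CHANGE_COMMUNICATION_WAIT (raised from 5 to 10 seconds in this patch) instead of a hardcoded 10-second literal, so both propagation paths share one bound. The pattern rests on Guava's successfulAsList, which substitutes null for failed futures rather than failing the aggregate; a sketch, with Ack assumed to be the RPC acknowledgment type:

    // Best effort: individual RPC failures do not fail the aggregate future,
    // and the bounded get() caps how long a source change can block.
    List<ListenableFuture<Ack>> futures = new ArrayList<>(); // populated per endpoint
    try {
      Futures.successfulAsList(futures).get(CHANGE_COMMUNICATION_WAIT, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      logger.warn("Failure while communicating source change [{}].", config.getName(), e);
    }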
+
+  /**
+   * Throws UserException if the source is temporarily unavailable.
+   * Throws AccessControlException if the catalog user does not have access.
+   * Returns null if the table is not found.
+   *
+   * @param catalogEntityKey
+   * @param catalog
+   * @return
+   */
+  public static DremioTable getTable(CatalogEntityKey catalogEntityKey, EntityExplorer catalog) {
+    NamespaceKey key = catalogEntityKey.toNamespaceKey();
+    if (catalogEntityKey.getTableVersionContext() != null) {
+      try {
+        return catalog.getTableSnapshot(key, catalogEntityKey.getTableVersionContext());
+      } catch (UserException e) {
+        // getTableSnapshot returns a UserException when table is not found.
+        return null;
+      }
+    } else {
+      return catalog.getTable(key);
+    }
+  }
+
+  public static DatasetConfig getDatasetConfig(EntityExplorer catalog, String datasetId) {
+    DremioTable dremioTable = catalog.getTable(datasetId);
+    DatasetConfig datasetConfig = null;
+    if (dremioTable != null) {
+      datasetConfig = dremioTable.getDatasetConfig();
+    }
+    return datasetConfig;
+  }
+
+  public static DatasetConfig getDatasetConfig(EntityExplorer catalog, NamespaceKey key) {
+    DremioTable dremioTable = catalog.getTable(key);
+    DatasetConfig datasetConfig = null;
+    if (dremioTable != null) {
+      datasetConfig = dremioTable.getDatasetConfig();
+    }
+    return datasetConfig;
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalog.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalog.java
index 87c06d8b2e..b25caa6cb9 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalog.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalog.java
@@ -102,8 +102,6 @@ CreateTableEntry createNewTable(NamespaceKey key, IcebergTableProps icebergTable
   void rollbackTable(NamespaceKey path, DatasetConfig datasetConfig, RollbackOption rollbackOption, TableMutationOptions tableMutationOptions);

-  void vacuumTable(NamespaceKey path, DatasetConfig datasetConfig, VacuumOption vacuumOption, TableMutationOptions tableMutationOptions);
-
   void addColumns(NamespaceKey datasetKey, DatasetConfig datasetConfig, List colsToAdd, TableMutationOptions tableMutationOptions);

   void dropColumn(NamespaceKey datasetKey, DatasetConfig datasetConfig, String columnToDrop, TableMutationOptions tableMutationOptions);
@@ -143,6 +141,8 @@ boolean alterColumnOption(final NamespaceKey key, String columnToChange,
   */
   DremioTable getTable(NamespaceKey key);

+  String getDatasetId(NamespaceKey key);
+
   enum UpdateStatus {
     /**
      * Metadata hasn't changed.
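Taken together, the new CatalogUtil helpers and the DatasetCatalog.getDatasetId() addition give callers a uniform null-for-missing lookup surface. A usage sketch, assuming catalog, catalogEntityKey, namespaceKey, and datasetId are in scope:

    DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog); // honors an AT version if present
    DatasetConfig byId = CatalogUtil.getDatasetConfig(catalog, datasetId);
    DatasetConfig byKey = CatalogUtil.getDatasetConfig(catalog, namespaceKey);
    String id = datasetCatalog.getDatasetId(namespaceKey); // VersionedDatasetId string for versioned
                                                           // sources, the DatasetConfig id otherwise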
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalogServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalogServiceImpl.java index a217afef11..fe219a5424 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalogServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetCatalogServiceImpl.java @@ -187,6 +187,9 @@ public void addOrUpdateDataset(AddOrUpdateDatasetRequest request, StreamObserver if (partitionStatsFile != null) { icebergMetadata.setPartitionStatsFile(partitionStatsFile); } + if (metadata.hasPartitionStatsFileSize()) { + icebergMetadata.setPartitionStatsFileSize(metadata.getPartitionStatsFileSize()); + } config.getPhysicalDataset().setIcebergMetadata(icebergMetadata); } saveDataset(namespaceService, catalog, request.getDatasetConfig(), name, config); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetManager.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetManager.java index aff67d307f..84cbc1d657 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetManager.java @@ -15,6 +15,7 @@ */ package com.dremio.exec.catalog; +import static com.dremio.exec.catalog.VersionedDatasetId.isVersionedDatasetId; import static com.dremio.exec.planner.physical.PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT; import java.security.AccessControlException; @@ -49,6 +50,7 @@ import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.DatasetRetrievalOptions; import com.dremio.exec.store.NamespaceTable; +import com.dremio.exec.store.ReferenceNotFoundException; import com.dremio.exec.store.SchemaConfig; import com.dremio.exec.store.StoragePlugin; import com.dremio.exec.store.TableMetadata; @@ -77,7 +79,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * The workhorse of Catalog, responsible for retrieving datasets and interacting with sources as @@ -201,6 +204,7 @@ private NamespaceKey getCanonicalKey(NamespaceKey key) { return new NamespaceKey(entries); } + @WithSpan public DremioTable getTable( NamespaceKey key, MetadataRequestOptions options, @@ -224,7 +228,7 @@ public DremioTable getTable( if (config == null) { logger.debug("Got a null config"); } else { - logger.debug("Got config {}", config); + logger.debug("Got config id {}", config.getId()); } if(plugin != null) { @@ -255,16 +259,23 @@ public DremioTable getTable( final DatasetConfig config = getConfig(datasetId); if (config == null) { - //try lookup in external catalog - return getTableFromExternalCatalog(datasetId, options); + if (isVersionedDatasetId(datasetId)) { + Span.current().setAttribute("dremio.catalog.getTable.isVersionedDatasetId", true); + //try lookup in external catalog + return getTableFromNessieCatalog(datasetId, options); + } else { + return null; + } } + Span.current().setAttribute("dremio.catalog.getTable.isVersionedDatasetId", false); NamespaceKey key = new NamespaceKey(config.getFullPathList()); return getTable(key, options, false); } - //TODO (DX-58588) Needs to be revisited ot support snapshot id and timestamp - private DremioTable getTableFromExternalCatalog(String datasetId, MetadataRequestOptions options) { + + @WithSpan + private DremioTable getTableFromNessieCatalog(String 
datasetId, MetadataRequestOptions options) { VersionedDatasetId versionedDatasetId = null; try { versionedDatasetId = VersionedDatasetId.fromString(datasetId); @@ -297,8 +308,10 @@ private DremioTable getTableFromExternalCatalog(String datasetId, MetadataReques return table; } + @WithSpan private NamespaceTable getTableFromNamespace(NamespaceKey key, DatasetConfig datasetConfig, ManagedStoragePlugin plugin, String accessUserName, MetadataRequestOptions options) { + Span.current().setAttribute("dremio.namespace.key.schemapath", key.getSchemaPath()); plugin.checkAccess(key, datasetConfig, accessUserName, options); final TableMetadata tableMetadata = new TableMetadataImpl(plugin.getId(), @@ -329,6 +342,7 @@ private List getPrimaryKey(StoragePlugin plugin, /** * Retrieves a source table, checking that things are up to date. */ + @WithSpan private DremioTable getTableFromPlugin( NamespaceKey key, DatasetConfig datasetConfig, @@ -338,21 +352,7 @@ private DremioTable getTableFromPlugin( ) { final StoragePlugin underlyingPlugin = plugin.getPlugin(); if (underlyingPlugin instanceof VersionedPlugin) { - final String accessUserName = options.getSchemaConfig().getUserName(); - - final VersionedDatasetAdapter versionedDatasetAdapter = VersionedDatasetAdapter.newBuilder() - .setVersionedTableKey(key.getPathComponents()) - .setVersionContext(versionContextResolver.resolveVersionContext( - plugin.getName().getRoot(), options.getVersionForSource(plugin.getName().getRoot()))) - .setStoragePlugin(underlyingPlugin) - .setStoragePluginId(plugin.getId()) - .setOptionManager(optionManager) - .build(); - if (versionedDatasetAdapter == null) { - return null; - } - - return versionedDatasetAdapter.getTable(accessUserName); + return getTableFromNessieCatalog(key, plugin, options); } // Figure out the user we want to access the source with. If the source supports impersonation we allow it to @@ -385,6 +385,10 @@ private DremioTable getTableFromPlugin( return null; } + // If only the cached version is needed, check and return when no entry is found + if (options.neverPromote()) { + return null; + } if (datasetConfig != null) { // canonicalize key if we can. 
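The neverPromote() early return above is the consumer side of CatalogUtil.getSystemCatalogForReflections() from earlier in this patch: with never-promote set, lookups read only what is already in the namespace KV store and skip inline metadata promotion. A hedged usage sketch (assuming EntityExplorer exposes getTable(NamespaceKey), as the DelegatingCatalog overrides suggest):

    EntityExplorer reflectionsCatalog = CatalogUtil.getSystemCatalogForReflections(catalogService);
    DremioTable table = reflectionsCatalog.getTable(key);
    if (table == null) {
      // Either the table does not exist, or its metadata was never promoted;
      // getTableFromPlugin() returns null here instead of running an inline refresh.
    }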
@@ -439,7 +443,7 @@ private DremioTable getTableFromPlugin( if (opportunisticSave) { datasetConfig = MetadataObjectsUtils.newShallowConfig(handle.get()); } - + logger.debug("Attempting inline refresh for key : {} , canonicalKey : {} ", key, canonicalKey); try { plugin.getSaver() .save(datasetConfig, handle.get(), plugin.unwrap(StoragePlugin.class), opportunisticSave, retrievalOptions, @@ -532,9 +536,35 @@ private ViewTable createTableFromVirtualDataset(DatasetConfig datasetConfig, Met identityProvider.getOwner(datasetConfig.getFullPathList()), datasetConfig, schema); } catch (Exception e) { - logger.warn("Failure parsing virtual dataset, not including in available schema.", e); + throw new RuntimeException(String.format("Failure while constructing the ViewTable from datasetConfig for key %s with datasetId %s", + String.join(".", datasetConfig.getFullPathList()), + datasetConfig.getId().getId()), + e); + } + } + + @WithSpan + public DremioTable getTableFromNessieCatalog(NamespaceKey key, ManagedStoragePlugin plugin, MetadataRequestOptions options) { + final String accessUserName = options.getSchemaConfig().getUserName(); + final StoragePlugin underlyingPlugin = plugin.getPlugin(); + VersionedDatasetAdapter versionedDatasetAdapter; + try { + versionedDatasetAdapter = VersionedDatasetAdapter.newBuilder() + .setVersionedTableKey(key.getPathComponents()) + .setVersionContext(versionContextResolver.resolveVersionContext( + plugin.getName().getRoot(), options.getVersionForSource(plugin.getName().getRoot(), key))) + .setStoragePlugin(underlyingPlugin) + .setStoragePluginId(plugin.getId()) + .setOptionManager(optionManager) + .build(); + } catch (ReferenceNotFoundException e) { + return null; + } + + if (versionedDatasetAdapter == null) { return null; } + return versionedDatasetAdapter.getTable(accessUserName); } private static boolean isFSBasedDataset(DatasetConfig datasetConfig) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetSaverImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetSaverImpl.java index 1dd66af963..5a4c8007aa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetSaverImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DatasetSaverImpl.java @@ -46,7 +46,7 @@ import com.dremio.service.users.SystemUser; import com.google.common.base.Preconditions; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -195,7 +195,8 @@ private boolean saveUsingInternalRefreshDatasetQuery(DatasetConfig datasetConfig if (uex.getMessage().contains(unsupportedPartitionListingError)) { logger.error("REFRESH DATASET query failed. 
Using old refresh mechanism", uex); return false; - } // else continue and throw error + } + // fall through and throw error default: throw UserException.refreshDatasetError(uex).message(firstLine(uex.getMessage())).build(logger); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DelegatingCatalog.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DelegatingCatalog.java index e008063326..a6f4f104ea 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DelegatingCatalog.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DelegatingCatalog.java @@ -110,6 +110,16 @@ public DremioTable getTableForQuery(NamespaceKey key) { return delegate.getTableForQuery(key); } + @Override + public String getDatasetId(NamespaceKey key) { + return delegate.getDatasetId(key); + } + + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + return delegate.getTableSnapshotForQuery(key, context); + } + @Override public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { return delegate.getTableSnapshot(key, context); @@ -196,11 +206,6 @@ public Catalog resolveCatalog(Map sourceVersionMapping) return delegate.resolveCatalog(sourceVersionMapping); } - @Override - public MetadataStatsCollector getMetadataStatsCollector() { - return delegate.getMetadataStatsCollector(); - } - @Override public CreateTableEntry createNewTable(NamespaceKey key, IcebergTableProps icebergTableProps, WriterOptions writerOptions, Map storageOptions) { @@ -259,11 +264,6 @@ public void rollbackTable(NamespaceKey path, DatasetConfig datasetConfig, Rollba delegate.rollbackTable(path, datasetConfig, rollbackOption, tableMutationOptions); } - @Override - public void vacuumTable(NamespaceKey path, DatasetConfig datasetConfig, VacuumOption vacuumOption, TableMutationOptions tableMutationOptions) { - delegate.vacuumTable(path, datasetConfig, vacuumOption, tableMutationOptions); - } - @Override public void addColumns(NamespaceKey table, DatasetConfig datasetConfig, List colsToAdd, TableMutationOptions tableMutationOptions) { delegate.addColumns(table, datasetConfig, colsToAdd, tableMutationOptions); @@ -423,4 +423,12 @@ public void validateOwnership(NamespaceKey key) { @Override public Iterable getAllFunctions() throws IOException { return delegate.getAllFunctions(); } + + @Override public void invalidateNamespaceCache(final NamespaceKey key) { + delegate.invalidateNamespaceCache(key); + } + + @Override public MetadataRequestOptions getMetadataRequestOptions() { + return delegate.getMetadataRequestOptions(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioCatalogReader.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioCatalogReader.java index f4deffdbee..12b4902ece 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioCatalogReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioCatalogReader.java @@ -73,7 +73,7 @@ import com.google.common.collect.Lists; import io.opentelemetry.api.trace.Span; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Dremio implementation of several interfaces that are typically provided by CalciteCatalogReader. 
@@ -114,6 +114,10 @@ public DremioPrepareTable getTable(List paramList) { return new DremioPrepareTable(this, typeFactory, table); } + public DremioTranslatableTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + return catalog.getTableSnapshotForQuery(key, context); + } + public DremioPrepareTable getTableUnchecked(List paramList) { final DremioTable table = catalog.getTable(new NamespaceKey(paramList)); if(table == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioTable.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioTable.java index 42df9952aa..3379a374a7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/DremioTable.java @@ -50,10 +50,12 @@ public interface DremioTable extends DremioTranslatableTable, ExtensibleTable { DatasetConfig getDatasetConfig(); + @Override default boolean isRolledUp(String column) { return false; } + @Override default boolean rolledUpColumnValidInsideAgg( String column, SqlCall call, @@ -72,10 +74,12 @@ default TableMetadata getDataset() { */ String UNSUPPORTED_EXTENDED_TABLE = "The extended table of type '%s' is unsupported."; + @Override default Table extend(List fields) { throw new UnsupportedOperationException(); } + @Override default int getExtendedColumnOffset() { throw new UnsupportedOperationException(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/EntityExplorer.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/EntityExplorer.java index 5083fe507c..6138970065 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/EntityExplorer.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/EntityExplorer.java @@ -99,5 +99,14 @@ public interface EntityExplorer { * @param context version context * @return translatable table */ + DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context); + + /** + * Retrieve a table snapshot when querying the table's data, without checking for privileges. 
+ * + * @param key path to table + * @param context version context + * @return translatable table + */ DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/InformationSchemaServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/InformationSchemaServiceImpl.java index 2b91ea5463..4b9f738f2b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/InformationSchemaServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/InformationSchemaServiceImpl.java @@ -230,7 +230,8 @@ public void listSysColumns(ListSysColumnsRequest request, StreamObserver newStartSupplier(SourceConfig config, final boolea } return state; - } catch(Throwable e) { + } catch (Throwable e) { if (config.getType() != MissingPluginConf.TYPE) { logger.warn("Error starting new source: {}", sourceConfig.getName(), e); } @@ -619,10 +623,12 @@ private Supplier newStartSupplier(SourceConfig config, final boolea } throw new CompletionException(e); - }} - ); + } + }); } catch (Exception ex) { - return () -> {throw new CompletionException(ex);}; + return () -> { + throw new CompletionException(ex); + }; } } @@ -771,8 +777,7 @@ public void run() { metadataPolicy.getNamesRefreshMs(), metadataPolicy.getDatasetDefinitionRefreshAfterMs()), Integer.MAX_VALUE); - final Retryer retryer = - new Retryer.Builder() + final Retryer retryer = Retryer.newBuilder() .retryIfExceptionOfType(BadSourceStateException.class) .setWaitStrategy( Retryer.WaitStrategy.EXPONENTIAL, @@ -901,6 +906,7 @@ public void checkAccess(NamespaceKey key, DatasetConfig datasetConfig, String us * @param requestOptions request options * @return true iff the metadata is complete and meets validity constraints */ + @WithSpan public boolean isCompleteAndValid(DatasetConfig datasetConfig, MetadataRequestOptions requestOptions, NamespaceService userNamespaceService) { try (AutoCloseableLock l = readLock()) { checkState(); @@ -947,7 +953,6 @@ public DatasetConfig getUpdatedDatasetConfig(DatasetConfig oldConfig, BatchSchem } } - @WithSpan("get-view") public ViewTable getView(NamespaceKey key, final MetadataRequestOptions options) { try(AutoCloseableLock l = readLock()) { checkState(); @@ -970,6 +975,9 @@ public Optional getDatasetHandle( } else { entityPath = MetadataObjectsUtils.toEntityPath(key); } + + // include the full path of the dataset + Span.current().setAttribute("dremio.dataset.path", PathUtils.constructFullPath(entityPath.getComponents())); return plugin.getDatasetHandle(entityPath, retrievalOptions.asGetDatasetOptions(datasetConfig)); } @@ -980,58 +988,82 @@ public Optional getDatasetHandle( * @param config */ private void setLocals(SourceConfig config) { - if (plugin == null) { + if (getPlugin() == null) { + return; + } + initPlugin(config); + this.pluginId = new StoragePluginId(sourceConfig, conf, getPlugin().getSourceCapabilities()); + } + + /** + * Reset the plugin locals to the state before the plugin was started. + * @param config the original source configuration, must be not-null + * @param pluginId the id of the plugin before startup + */ + private void resetLocals(SourceConfig config, StoragePluginId pluginId) { + if (getPlugin() == null) { return; } + initPlugin(config); + this.pluginId = pluginId; + } + + /** + * Helper function to set the plugin locals. 
+ * @param config source config, must be not null + */ + private void initPlugin(SourceConfig config) { this.sourceConfig = config; this.metadataPolicy = config.getMetadataPolicy() == null? CatalogService.NEVER_REFRESH_POLICY : config.getMetadataPolicy(); - this.state = plugin.getState(); + this.state = getPlugin().getState(); this.conf = config.getConnectionConf(reader); - this.resolvedConf = resolveConnectionConf(this.conf); - this.pluginId = new StoragePluginId(sourceConfig, conf, plugin.getSourceCapabilities()); + this.resolvedConf = resolveConnectionConf(getConnectionConf()); } /** * Update the cached state of the plugin. * - * Note that if this is */ public CompletableFuture refreshState() throws Exception { return CompletableFuture .supplyAsync(() -> { try { - while(true) { - if(plugin == null) { - Optional writeLock = AutoCloseableLock.of(this.writeLock, true).tryOpen(5, TimeUnit.SECONDS); - if(!writeLock.isPresent()) { - // we failed to get the write lock, return current state; - return state; - } + Optional refreshLock = AutoCloseableLock.of(this.refreshStateLock, true).tryOpen(0, TimeUnit.SECONDS); + if (!refreshLock.isPresent()) { + return state; + } + try (AutoCloseableLock rl = refreshLock.get()) { + while (true) { + if (plugin == null) { + Optional writeLock = AutoCloseableLock.of(this.writeLock, true).tryOpen(5, TimeUnit.SECONDS); + if (!writeLock.isPresent()) { + return state; + } - // we have the write lock. - try(AutoCloseableLock l = writeLock.get()) { - if(plugin != null) { - // while waiting for write lock, someone else started things, start this loop over. - continue; + try (AutoCloseableLock l = writeLock.get()) { + if (plugin != null) { + // while waiting for write lock, someone else started things, start this loop over. + continue; + } + plugin = resolvedConf.newPlugin(context, sourceConfig.getName(), this::getId); + return newStartSupplier(sourceConfig, false).get(); } - plugin = resolvedConf.newPlugin(context, sourceConfig.getName(), this::getId); - return newStartSupplier(sourceConfig, false).get(); } - } - // the plugin is not null. - Optional readLock = AutoCloseableLock.of(this.readLock, true).tryOpen(1, TimeUnit.SECONDS); - if(!readLock.isPresent()) { - return state; - } + // the plugin is not null. + Optional readLock = AutoCloseableLock.of(this.readLock, true).tryOpen(1, TimeUnit.SECONDS); + if (!readLock.isPresent()) { + return state; + } - try (Closeable a = readLock.get()) { - final SourceState state = plugin.getState(); - this.state = state; - return state; + try (Closeable a = readLock.get()) { + final SourceState state = plugin.getState(); + this.state = state; + return state; + } } } - } catch(Exception ex) { + } catch (Exception ex) { logger.debug("Failed to start plugin while trying to refresh state, error:", ex); this.state = SourceState.NOT_AVAILABLE; return SourceState.NOT_AVAILABLE; @@ -1080,6 +1112,7 @@ private boolean replacePlugin(SourceConfig config, final long waitMillis, boolea // hold the old plugin until we successfully replace it. final SourceConfig oldConfig = sourceConfig; final StoragePlugin oldPlugin = plugin; + final StoragePluginId oldPluginId = pluginId; final ConnectionConf resolvedNewConnectionConf = resolveConnectionConf(newConnectionConf); this.plugin = resolvedNewConnectionConf.newPlugin(context, sourceKey.getRoot(), this::getId, influxSourcePred); try { @@ -1099,7 +1132,7 @@ private boolean replacePlugin(SourceConfig config, final long waitMillis, boolea // the update failed, go back to previous state. 
this.plugin = oldPlugin; try { - setLocals(oldConfig); + resetLocals(oldConfig, oldPluginId); } catch (Exception e) { ex.addSuppressed(e); } @@ -1279,11 +1312,6 @@ public I next() { * the manager can't cause problems with plugin locking. */ class MetadataBridge { - - // since refreshes coming from the metadata manager could back up if the refresh takes a long time, create a lock so - // only one is actually pending at any point in time. - private final Lock refreshStateLock = new ReentrantLock(); - SourceMetadata getMetadata() { try(AutoCloseableLock read = tryReadLock()) { if(plugin == null) { @@ -1332,24 +1360,7 @@ int getMaxNestedLevels() { } public void refreshState() throws Exception { - Optional refreshLock = AutoCloseableLock.of(refreshStateLock, true).tryOpen(0, TimeUnit.SECONDS); - try { - CompletableFuture refreshState; - if (!refreshLock.isPresent()) { - // don't refresh the state multiple times through MetadataBridge. All calls that are secondary should be skipped. - refreshState = CompletableFuture.completedFuture(state); - } else { - try (AutoCloseableLock read = tryReadLock()) { - refreshState = ManagedStoragePlugin.this.refreshState(); - } - } - - refreshState.get(30, TimeUnit.SECONDS); - } finally { - if (refreshLock.isPresent()) { - refreshLock.get().close(); - } - } + ManagedStoragePlugin.this.refreshState().get(30, TimeUnit.SECONDS); } SourceState getState() { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTable.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTable.java index 9d2de15085..ce7126c221 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTable.java @@ -56,7 +56,7 @@ public class MaterializedDatasetTable implements DremioTable { private final StoragePluginId pluginId; private final String user; private final boolean complexTypeSupport; - private final boolean timeTravel; + private final TableVersionContext versionContext; public MaterializedDatasetTable( NamespaceKey canonicalPath, @@ -65,7 +65,7 @@ public MaterializedDatasetTable( Supplier datasetConfig, Supplier> partitionChunks, boolean complexTypeSupport, - boolean timeTravel + TableVersionContext versionContext ) { this.canonicalPath = canonicalPath; this.pluginId = pluginId; @@ -73,7 +73,7 @@ public MaterializedDatasetTable( this.partitionChunks = partitionChunks; this.user = user; this.complexTypeSupport = complexTypeSupport; - this.timeTravel = timeTravel; + this.versionContext = versionContext; } @Override @@ -87,12 +87,12 @@ public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable relOptTable) context.getCluster(), context.getCluster().traitSetOf(Convention.NONE), pluginId, - new MaterializedTableMetadata(pluginId, datasetConfig.get(), user, partitionChunks.get(), timeTravel), + new MaterializedTableMetadata(pluginId, datasetConfig.get(), user, partitionChunks.get(), versionContext), null, 1.0d, ImmutableList.of(), true, - !timeTravel); + true); } @Override @@ -146,14 +146,22 @@ public boolean rolledUpColumnValidInsideAgg(String column, SqlCall call, SqlNode return true; } + @Override + public TableMetadata getDataset() { + return new MaterializedTableMetadata(pluginId, datasetConfig.get(), user, partitionChunks.get(), versionContext); + } + private static class MaterializedTableMetadata extends TableMetadataImpl { + private final TableVersionContext versionContext; + public 
MaterializedTableMetadata(StoragePluginId plugin, DatasetConfig config, String user, List splits, - boolean timeTravel) { + TableVersionContext versionContext) { super(plugin, config, user, MaterializedSplitsPointer.oldObsoleteOf(getSplitVersion(config), splits, splits.size()), null); + this.versionContext = versionContext; } private static long getSplitVersion(DatasetConfig datasetConfig) { @@ -168,5 +176,10 @@ public TableMetadata prune(SearchTypes.SearchQuery partitionFilterQuery) { // Don't prune based on lucene query return this; } + + @Override + public TableVersionContext getVersionContext() { + return versionContext; + } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTableProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTableProvider.java index 23e5aacda5..00e4d9eb10 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTableProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MaterializedDatasetTableProvider.java @@ -86,10 +86,17 @@ public MaterializedDatasetTable get() { final Supplier datasetConfig = Suppliers.memoize( () -> createDatasetConfig(partitionChunkListing, partitionChunks)); - final boolean timeTravel = options.getTimeTravelRequest() != null; + TableVersionContext versionContext = + options.versionedDatasetAccessOptions() != null && this.options.versionedDatasetAccessOptions().getVersionContext() != null ? + TableVersionContext.of(this.options.versionedDatasetAccessOptions().getVersionContext()) : null; + if (versionContext == null && options.getTimeTravelRequest() != null) { + // Only applies to versioned tables outside of Nessie + versionContext = TableVersionContext.of(options.getTimeTravelRequest()); + } + return new MaterializedDatasetTable(MetadataObjectsUtils.toNamespaceKey(handle.getDatasetPath()), pluginId, schemaConfig.getUserName(), datasetConfig, partitionChunks, - optionManager.getOption(PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT), timeTravel); + optionManager.getOption(PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT), versionContext); } private PartitionChunkListing getPartitionChunkListing() { @@ -129,14 +136,26 @@ private DatasetConfig createDatasetConfig( final DatasetConfig toReturn = currentConfig != null ? currentConfig : MetadataObjectsUtils.newShallowConfig(handle); if (handle instanceof VersionedDatasetHandle) { + //AT BRANCH/TAG/COMMIT case VersionedDatasetHandle versionedDatasetHandle = handle.unwrap(VersionedDatasetHandle.class); - VersionedDatasetId versionedDatasetId = new VersionedDatasetId( - handle.getDatasetPath().getComponents(), - versionedDatasetHandle.getContentId(), - TableVersionContext.of(this.options.versionedDatasetAccessOptions().getVersionContext())); + VersionedDatasetId.Builder builder = new VersionedDatasetId.Builder() + .setTableKey(handle.getDatasetPath().getComponents()) + .setContentId(versionedDatasetHandle.getContentId()) + .setTableVersionContext(this.options.getTimeTravelRequest() != null ? 
+ TableVersionContext.of(this.options.getTimeTravelRequest()) : + TableVersionContext.of(this.options.versionedDatasetAccessOptions().getVersionContext())); + VersionedDatasetId versionedDatasetId = builder.build(); + toReturn.setId(new EntityId(versionedDatasetId.asString())); + } else if (this.options.getTimeTravelRequest() != null) { + //AT TIMESTAMP/SNAPSHOT case + VersionedDatasetId.Builder builder = new VersionedDatasetId.Builder() + .setTableKey(handle.getDatasetPath().getComponents()) + .setContentId(null) + .setTableVersionContext(TableVersionContext.of(this.options.getTimeTravelRequest())); + VersionedDatasetId versionedDatasetId = builder.build(); toReturn.setId(new EntityId(versionedDatasetId.asString())); - } + //Otherwise use the generated datasetId in other cases final DatasetMetadata datasetMetadata; try { datasetMetadata = plugin.getDatasetMetadata(handle, listingSupplier.get(), diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataObjectsUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataObjectsUtils.java index 572255d245..e98926c9bb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataObjectsUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataObjectsUtils.java @@ -163,6 +163,7 @@ public static void overrideExtended( final SupportsIcebergMetadata newMetadata = (SupportsIcebergMetadata) newExtended; final PhysicalDataset pds = getPhysicalDataset(datasetConfig); final DatasetStats deleteStats = newMetadata.getDeleteStats(); + final DatasetStats equalityDeleteStats = newMetadata.getEqualityDeleteStats(); final DatasetStats deleteManifestStats = newMetadata.getDeleteManifestStats(); final IcebergMetadata icebergMetadata = new IcebergMetadata(); @@ -179,6 +180,13 @@ public static void overrideExtended( ScanStatsType.NO_EXACT_ROW_COUNT) .setRecordCount(deleteStats.getRecordCount())); } + if (equalityDeleteStats != null) { + icebergMetadata.setEqualityDeleteStats(new ScanStats() + .setScanFactor(equalityDeleteStats.getScanFactor()) + .setType(equalityDeleteStats.isExactRecordCount() ? ScanStatsType.EXACT_ROW_COUNT : + ScanStatsType.NO_EXACT_ROW_COUNT) + .setRecordCount(equalityDeleteStats.getRecordCount())); + } if (deleteManifestStats != null) { icebergMetadata.setDeleteManifestStats(new ScanStats() .setScanFactor(deleteManifestStats.getScanFactor()) @@ -187,6 +195,7 @@ public static void overrideExtended( .setRecordCount(deleteManifestStats.getRecordCount())); } pds.setIcebergMetadata(icebergMetadata); + datasetConfig.setLastModified(newMetadata.getMtime()); } else { // TODO(DX-43317): try deprecated way of populating iceberg metadata, until DX-43317 is resolved. final byte[] icebergMetadataBytes = newExtended.getIcebergMetadata(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataRequestOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataRequestOptions.java index 9d6f5b7e34..46afa2c354 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataRequestOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MetadataRequestOptions.java @@ -20,6 +20,7 @@ import org.immutables.value.Value; import com.dremio.ValidatingGnarlyStyle; +import com.dremio.common.exceptions.UserException; import com.dremio.common.map.CaseInsensitiveMap; import com.dremio.exec.store.SchemaConfig; import com.dremio.service.namespace.NamespaceKey; @@ -53,6 +54,14 @@ public long newerThan() { /** * If metadata validity should be checked. 
+ * If set to false:
+ * - Completeness of the datasetConfig is still checked (see ManagedStoragePlugin#isComplete).
+ * - Validity, i.e. freshness of metadata (schema, splits, partition info based on the metadata policy), is *not* checked.
+ * - newerThan() has no effect if this is set to false.
+ * Change the default to false only if the following behavior is desired:
+ * - The cached version of a complete table or view from the Dremio KV store is returned.
+ * - An inline refresh will still occur if the DatasetConfig is incomplete.
+ * - A table from an external catalog (Nessie) will be returned, since its metadata is always up to date.
 *

        * By default, the validity is checked.
 */
@@ -61,6 +70,44 @@ public boolean checkValidity() {
 return true;
 }
+ /**
+ * If this flag is set to true:
+ * - A null or shallow entry will not cause a promotion/inline refresh to occur.
+ * Change the default to true only if the following behavior is desired:
+ * - Get only the cached version of a table from the Dremio KV store.
+ * - Get a table from an external catalog (Nessie); these are returned since they don't go through promotion.
+ * Note: By default, a null or shallow entry in the Namespace is considered "incomplete" and promotion will be
+ * attempted. Tables in Nessie are always considered valid and up to date, so promotion/refresh does not apply to them.
+ *
+ */
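Read together, checkValidity and neverPromote split one lookup decision into two independent switches: freshness checking and promotion. A condensed decision sketch under those documented semantics; Entry, kvStore, promoteAndRefresh, and inlineRefresh are hypothetical stand-ins for illustration, not Dremio's API.

    final class ValiditySketch {
      static final class Entry {
        boolean complete;
        boolean expired;
      }

      // Hypothetical KV store of dataset entries; the real catalog holds DatasetConfig objects.
      private final java.util.Map<String, Entry> kvStore = new java.util.HashMap<>();

      Entry lookup(String key, boolean checkValidity, boolean neverPromote) {
        Entry cached = kvStore.get(key);
        // Completeness is always enforced; a null (or shallow) entry counts as "incomplete".
        if (cached == null || !cached.complete) {
          return neverPromote ? null : promoteAndRefresh(key);
        }
        // Freshness only matters when validity checks are on.
        if (checkValidity && cached.expired) {
          return inlineRefresh(key);
        }
        // Nessie-backed tables take this path: their metadata is always current.
        return cached;
      }

      private Entry promoteAndRefresh(String key) { return new Entry(); }
      private Entry inlineRefresh(String key) { return new Entry(); }
    }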

+ @Value.Default
+ public boolean neverPromote() {
+ return false;
+ }
+
+ /**
+ * If set to true and any versioned dataset is resolved using the default source version, then a validation
+ * exception is thrown. This flag can be used to validate that the plan associated with a reflection refresh job
+ * or a view definition contains only table references that resolve using the AT SQL syntax or a source version
+ * mapping explicitly set by the caller.
+ *
+ * @return
+ */
+ @Value.Default
+ public boolean errorOnUnspecifiedSourceVersion() {
+ return false;
+ }
+
+ /**
+ * If set to true, request the Catalog to use a caching namespace to reduce duplicated calls to the KV store.
+ *
+ * @return
+ */
+ @Value.Default
+ public boolean useCachingNamespace() {
+ return false;
+ }
+
 MetadataRequestOptions cloneWith(CatalogIdentity subject, NamespaceKey newDefaultSchema, boolean checkValidity) {
 final SchemaConfig newSchemaConfig = SchemaConfig.newBuilder(subject)
 .defaultSchema(newDefaultSchema)
@@ -127,7 +174,16 @@ public static MetadataRequestOptions of(SchemaConfig schemaConfig, Map iterator = datasetListing.iterator();
+ long entityCount = 0L;
 do {
 try {
+ // DX-60601: the current theory is that the iterator exits early while some datasets have not been
+ // refreshed yet, which causes them to be deleted right after this method. For now, log specifically in
+ // handleExistingDataset when something bad happens, to see whether datasets were still left to refresh.
 if (!iterator.hasNext()) {
 break;
 }
+ ++entityCount;
 final DatasetHandle handle = iterator.next();
 final NamespaceKey datasetKey = MetadataObjectsUtils.toNamespaceKey(handle.getDatasetPath());
 final boolean existing = orphanedDatasets.remove(datasetKey);
@@ -203,7 +208,7 @@ private void synchronizeDatasets() throws NamespaceException, ConnectorException
 }
 if (existing) {
 addAncestors(datasetKey, ancestorsToKeep);
- handleExistingDataset(datasetKey, handle);
+ handleExistingDataset(datasetKey, handle, iterator);
 } else {
 handleNewDataset(datasetKey, handle);
 }
@@ -212,7 +217,10 @@ private void synchronizeDatasets() throws NamespaceException, ConnectorException
 logger.error("Dataset {} sync failed ({}) due to Metadata too large. Please check.", e.getMessage(), existing ? "existing" : "new");
 }
 } while (true);
+ logger.info("Source '{}' iterated through {} entities", sourceKey, entityCount);
 }
+ // Intentionally left without a catch block.
+ // TODO: Any unhandled exceptions will be handled by the caller if the theory above is confirmed.
 }
 /**
@@ -220,8 +228,9 @@ private void synchronizeDatasets() throws NamespaceException, ConnectorException
 *
 * @param datasetKey dataset key
 * @param handle dataset handle
+ * @param iterator dataset handle iterator
 */
- private void handleExistingDataset(NamespaceKey datasetKey, DatasetHandle handle) {
+ private void handleExistingDataset(NamespaceKey datasetKey, DatasetHandle handle, Iterator iterator) {
 int tryCount = 0;
 while (true) {
 if (tryCount++ > NUM_RETRIES) {
@@ -236,14 +245,16 @@ private void handleExistingDataset(NamespaceKey datasetKey, DatasetHandle handle
 } catch (ConcurrentModificationException ignored) {
 // retry
 // continue;
- } catch (DatasetNotFoundException e) {
+ } catch (DatasetNotFoundException | NamespaceNotFoundException e) {
 // race condition: metadata will be removed from catalog in next sync
- logger.debug("Dataset '{}' is no longer valid, skipping sync", datasetKey, e);
+ logger.debug("Dataset '{}' is no longer valid, skipping sync. Has next?
{}", datasetKey, iterator.hasNext(), e); + failedDatasets.add(Tuple.of(datasetKey.getSchemaPath(), e.getMessage())); + syncStatus.incrementExtendedUnreadable(); break; } catch (Exception e) { // TODO: this should not be an Exception. Once exception handling is defined, change this. This is unfortunately // the current behavior. - logger.debug("Dataset '{}' sync failed unexpectedly. Will retry next sync", datasetKey, e); + logger.debug("Dataset '{}' sync failed unexpectedly. Will retry next sync. Has next? {}", datasetKey, iterator.hasNext(), e); failedDatasets.add(Tuple.of(datasetKey.getSchemaPath(), e.getMessage())); syncStatus.incrementExtendedUnreadable(); break; @@ -268,14 +279,7 @@ private void tryHandleExistingDataset(NamespaceKey datasetKey, DatasetHandle dat // invariant: only metadata attributes of currentConfig are overwritten, and then the same currentConfig is saved, // so the rest of the attributes are as is; so CME is handled by retrying this entire block - final DatasetConfig currentConfig; - try { - currentConfig = systemNamespace.getDataset(datasetKey); - } catch (NamespaceNotFoundException ignored) { - // race condition - logger.debug("Dataset '{}' no longer in namespace, skipping", datasetKey); - return; - } + final DatasetConfig currentConfig = systemNamespace.getDataset(datasetKey); final boolean isExtended = currentConfig.getReadDefinition() != null; if (updateMode == UpdateMode.PREFETCH_QUERIED && !isExtended) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MutablePlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MutablePlugin.java index aea7eb1c6a..5b9f12bd41 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/MutablePlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/MutablePlugin.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.function.Supplier; import org.apache.arrow.vector.types.pojo.Field; import org.apache.hadoop.conf.Configuration; @@ -38,7 +37,6 @@ import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.DatasetConfig; -import com.dremio.service.users.SystemUser; public interface MutablePlugin extends StoragePlugin { @@ -69,12 +67,6 @@ void rollbackTable(NamespaceKey tableSchemaPath, RollbackOption rollbackOption, TableMutationOptions tableMutationOptions); - void vacuumTable(NamespaceKey tableSchemaPath, - DatasetConfig datasetConfig, - SchemaConfig schemaConfig, - VacuumOption vacuumOption, - TableMutationOptions tableMutationOptions); - boolean createOrUpdateView(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, View view, ViewOptions viewOptions) throws IOException; @@ -166,23 +158,4 @@ default FileSystem getSystemUserFS() { default Configuration getFsConfCopy() { throw new UnsupportedOperationException("getFsConfCopy is not Implemented"); } - - /** - * This provides the supplier of fs which is created in dremio class loader - * @param path Path for which hadoop file system is being created - * @param conf Configuration for creating hadoop file system - * @return Supplier of hadoopFs - */ - default Supplier getHadoopFsSupplier(String path, Iterable> conf) { - return getHadoopFsSupplier(path, conf, SystemUser.SYSTEM_USERNAME); - } - - /** - * This provides the supplier of fs which is created in dremio class loader - * @param path Path for which hadoop file system is being created - * @param conf Configuration for creating 
hadoop file system - * @param queryUser query user using which file System will be created - * @return Supplier of hadoopFs - */ - Supplier getHadoopFsSupplier(String path, Iterable> conf, String queryUser); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/PluginsManager.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/PluginsManager.java index 43b1e3f59b..60cb8fc33f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/PluginsManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/PluginsManager.java @@ -65,6 +65,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Manages the creation, deletion and retrieval of storage plugins. * @@ -359,6 +361,7 @@ public ConcurrentHashMap getPlugins() { return plugins; } + @WithSpan public ManagedStoragePlugin getSynchronized(SourceConfig pluginConfig, java.util.function.Predicate influxSourcePred) throws Exception { while (true) { ManagedStoragePlugin plugin = plugins.get(c(pluginConfig.getName())); @@ -367,14 +370,14 @@ public ManagedStoragePlugin getSynchronized(SourceConfig pluginConfig, java.util plugin.synchronizeSource(pluginConfig); return plugin; } - //Try to create the plugin to synchronize. + // Try to create the plugin to synchronize. plugin = newPlugin(pluginConfig); plugin.replacePluginWithLock(pluginConfig, createWaitMillis(), true); // If this is a coordinator and a plugin is missing, it's probably been deleted from the CHM by a - // concurrent thread or a create operation may be in progress(check if it's in flux) and has not + // concurrent thread or a create operation may be in progress (check if it's in flux) and has not // yet added it to the CHM. - // So lets skip it and allow this to be picked up int he next refresher run . + // So lets skip it and allow this to be picked up in the next refresher run. // For an executor, there should be no clashes with any mutation. 
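PluginsManager.getSynchronized above follows the usual get-or-create race pattern over a ConcurrentHashMap: look the entry up, otherwise create and publish one, and retry if a concurrent writer got there first. The production code additionally consults the in-flux predicate and signals ConcurrentModificationException; the sketch below shows only the core publish-or-retry step, with illustrative names (PluginHolder and synchronizeWith are stand-ins, not Dremio's API).

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    final class GetOrCreateSketch {
      static final class PluginHolder {
        void synchronizeWith(String config) {
          // reconcile the cached plugin with the incoming config
        }
      }

      private final ConcurrentMap<String, PluginHolder> plugins = new ConcurrentHashMap<>();

      PluginHolder getSynchronized(String name, String config) {
        while (true) {
          PluginHolder existing = plugins.get(name);
          if (existing != null) {
            // fast path: plugin already registered, just synchronize it
            existing.synchronizeWith(config);
            return existing;
          }
          PluginHolder created = new PluginHolder();
          // putIfAbsent publishes atomically; a non-null result means another thread
          // won the race, so loop and synchronize with the winner instead.
          if (plugins.putIfAbsent(name, created) == null) {
            return created;
          }
        }
      }
    }

putIfAbsent is what makes the publish atomic: a losing thread discards its orphaned instance and synchronizes against the winner's, which mirrors the "start this loop over" retries in the diff above.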
if (influxSourcePred.test(pluginConfig.getName()) || (context.isCoordinator() && !systemNamespace.exists(new NamespaceKey(pluginConfig.getName())))) { throw new ConcurrentModificationException(String.format( diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/ResolvedVersionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/ResolvedVersionContext.java index 5fee7bd869..a8e7eb1f3e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/ResolvedVersionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/ResolvedVersionContext.java @@ -16,6 +16,7 @@ package com.dremio.exec.catalog; import org.immutables.value.Value; +import org.projectnessie.model.Detached; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; @@ -48,7 +49,7 @@ public enum Type { BARE_COMMIT, } - public static final String DETACHED = "DETACHED"; + public static final String DETACHED_REF_NAME = Detached.REF_NAME; public abstract Type getType(); public abstract String getRefName(); @@ -73,7 +74,7 @@ public static ResolvedVersionContext ofTag(String tagName, String commitHash) { public static ResolvedVersionContext ofBareCommit(String commitHash) { return ImmutableResolvedVersionContext.builder() .type(Type.BARE_COMMIT) - .refName(DETACHED) + .refName(DETACHED_REF_NAME) .commitHash(commitHash) .build(); } @@ -86,7 +87,7 @@ protected void check() { Preconditions.checkNotNull(getRefName()); break; case BARE_COMMIT: - Preconditions.checkArgument(getRefName() == DETACHED); + Preconditions.checkArgument(DETACHED_REF_NAME.equals(getRefName())); break; default: throw new IllegalStateException("Unexpected value: " + getType()); @@ -102,6 +103,7 @@ public boolean isBranch() { @JsonIgnore public boolean isBareCommit() { return getType() == Type.BARE_COMMIT; } + @Override public String toString() { String out; switch (getType()) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SafeNamespaceService.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SafeNamespaceService.java index d8ff2876b5..24e15dddfe 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SafeNamespaceService.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SafeNamespaceService.java @@ -27,6 +27,7 @@ import com.dremio.datastore.api.LegacyKVStore.LegacyFindByRange; import com.dremio.exec.catalog.ManagedStoragePlugin.SafeRunner; import com.dremio.service.namespace.BoundedDatasetCount; +import com.dremio.service.namespace.DatasetConfigAndEntitiesOnPath; import com.dremio.service.namespace.DatasetMetadataSaver; import com.dremio.service.namespace.NamespaceAttribute; import com.dremio.service.namespace.NamespaceException; @@ -46,6 +47,8 @@ import com.dremio.service.namespace.space.proto.HomeConfig; import com.dremio.service.namespace.space.proto.SpaceConfig; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * A decorator for namespace service that only does operations underneath a safe runner to avoid making changes when * we shouldn't. @@ -222,10 +225,16 @@ public List getCounts(SearchQuery... 
arg0) throws NamespaceException { } @Override + @WithSpan public DatasetConfig getDataset(NamespaceKey arg0) throws NamespaceException { return runner.doSafe(() -> delegate.getDataset(arg0)); } + @Override + public DatasetConfigAndEntitiesOnPath getDatasetAndEntitiesOnPath(NamespaceKey arg0) throws NamespaceException { + return runner.doSafe(() -> delegate.getDatasetAndEntitiesOnPath(arg0)); + } + @Override public BoundedDatasetCount getDatasetCount(NamespaceKey arg0, long arg1, int arg2) throws NamespaceException { return runner.doSafe(() -> delegate.getDatasetCount(arg0, arg1, arg2)); @@ -258,6 +267,7 @@ public NameSpaceContainer getEntityByPath(NamespaceKey arg0) throws NamespaceExc } @Override + @WithSpan public FolderConfig getFolder(NamespaceKey arg0) throws NamespaceException { return runner.doSafe(() -> delegate.getFolder(arg0)); } @@ -278,6 +288,7 @@ public int getPartitionChunkCount(LegacyFindByCondition arg0) { } @Override + @WithSpan public SourceConfig getSource(NamespaceKey arg0) throws NamespaceException { return runner.doSafe(() -> delegate.getSource(arg0)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceAccessChecker.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceAccessChecker.java index 274c34c6f7..d941a1d531 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceAccessChecker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceAccessChecker.java @@ -133,6 +133,20 @@ public DremioTable getTableForQuery(NamespaceKey key) { return getIfVisible(key, () -> delegate.getTableForQuery(key)); } + @Override + public String getDatasetId(NamespaceKey key) { + if (isInvisible(key)) { + return null; + } + + return delegate.getDatasetId(key); + } + + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + return delegate.getTableSnapshotForQuery(key, context); + } + @Override public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { return delegate.getTableSnapshot(key, context); @@ -162,11 +176,6 @@ public NamespaceKey resolveToDefault(NamespaceKey key) { return delegate.resolveToDefault(key); } - @Override - public MetadataStatsCollector getMetadataStatsCollector() { - return delegate.getMetadataStatsCollector(); - } - @Override public void createEmptyTable(NamespaceKey key, BatchSchema batchSchema, WriterOptions writerOptions) { delegate.createEmptyTable(key, batchSchema, writerOptions); @@ -279,15 +288,6 @@ public void rollbackTable(NamespaceKey key, delegate.rollbackTable(key, datasetConfig, rollbackOption, tableMutationOptions); } - @Override - public void vacuumTable(NamespaceKey key, - DatasetConfig datasetConfig, - VacuumOption vacuumOption, - TableMutationOptions tableMutationOptions) { - throwIfInvisible(key); - delegate.vacuumTable(key, datasetConfig, vacuumOption, tableMutationOptions); - } - @Override public void addColumns(NamespaceKey table, DatasetConfig datasetConfig, List colsToAdd, TableMutationOptions tableMutationOptions) { throwIfInvisible(table); @@ -567,4 +567,14 @@ public boolean hasPrivilege(NamespaceKey key, SqlGrant.Privilege privilege) { public void validateOwnership(NamespaceKey key) { delegate.validateOwnership(key); } + + @Override + public void invalidateNamespaceCache(final NamespaceKey key) { + delegate.invalidateNamespaceCache(key); + } + + @Override + public MetadataRequestOptions getMetadataRequestOptions() { + return options; + } } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceMetadataManager.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceMetadataManager.java index 0d9ab61646..c97e3ef7ee 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceMetadataManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/SourceMetadataManager.java @@ -75,6 +75,7 @@ import com.google.common.cache.CacheBuilder; import com.google.common.collect.Sets; +import io.opentelemetry.api.trace.Span; import io.protostuff.ByteString; @@ -321,6 +322,7 @@ boolean isStillValid(MetadataRequestOptions options, DatasetConfig config, Sourc final Long updateTime = localUpdateTime.getIfPresent(key); final long currentTime = System.currentTimeMillis(); final long expiryTime = bridge.getMetadataPolicy().getDatasetDefinitionExpireAfterMs(); + Span.current().setAttribute("dremio.namespace.key.schemapath", key.getSchemaPath()); final boolean isDatasetExpired = options.newerThan() < currentTime || // request marks this expired ((updateTime == null || updateTime + expiryTime < currentTime) && // dataset was locally updated too long ago (or never) @@ -369,7 +371,7 @@ private abstract class RefreshRunner { boolean refreshDatasetNames() throws NamespaceException { - logger.debug("Name-only update for source '{}'", sourceKey); + logger.info("Name-only update for source '{}'", sourceKey); final Set existingDatasets = Sets.newHashSet(systemNamespace.getAllDatasets(sourceKey)); final SyncStatus syncStatus = new SyncStatus(false); @@ -417,7 +419,7 @@ boolean refreshDatasetNames() throws NamespaceException { } boolean refreshFull(MetadataPolicy metadataPolicy) throws NamespaceException { - logger.debug("Full update for source '{}'", sourceKey); + logger.info("Full update for source '{}'", sourceKey); final DatasetRetrievalOptions retrievalOptions; if (metadataPolicy == null) { metadataPolicy = bridge.getMetadataPolicy(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableMetadataImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableMetadataImpl.java index d2b777a776..62be4576b1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableMetadataImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableMetadataImpl.java @@ -106,7 +106,7 @@ public TableMetadata prune(List newPartitionChunks) thro @Override public String computeDigest(){ - return String.format("%s|%s|%s", splits.computeDigest(), pluginId.getName(), config.getId().getId()); + return String.format("%s|%s|%s|%s", splits.computeDigest(), pluginId.getName(), config.getId().getId(), getVersionContext()); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionContext.java index 96e4cd3543..62317fc276 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionContext.java @@ -15,11 +15,20 @@ */ package com.dremio.exec.catalog; +import java.sql.Timestamp; import java.util.Objects; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.connector.metadata.options.TimeTravelOption; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; +import 
com.google.common.base.Strings; /** * Version context for a table. Table version contexts support branch/tag/commit hash relative versioning similar @@ -31,6 +40,7 @@ * TIMESTAMP: long - for timestamp this is in milliseconds since epoch */ public class TableVersionContext { + private static final Logger logger = LoggerFactory.getLogger(TableVersionContext.class); private final TableVersionType type; private final Object value; @@ -67,6 +77,27 @@ public String toString() { return builder.toString(); } + /** + * Converts the TableVersionContext into a valid SQL expression + * @return + */ + public String toSql() { + switch (type) { + case BRANCH: + case TAG: + case COMMIT_HASH_ONLY: + case REFERENCE: + return String.format("%s \"%s\"", type.toSqlRepresentation(), value); + case TIMESTAMP: + Timestamp ts = new Timestamp(Long.valueOf(value.toString())); + return String.format("%s '%s'", type.toSqlRepresentation(), ts); + case SNAPSHOT_ID: + return String.format("%s '%s'", type.toSqlRepresentation(), value); + default: + throw new IllegalStateException(String.format("Unable to convert %s to sql", type)); + } + } + @Override public boolean equals(Object obj) { if (obj == null || this.getClass() != obj.getClass()) { @@ -89,21 +120,21 @@ public int hashCode() { private static Object validateTypeAndSpecifier(TableVersionType type, Object value) { switch (type) { - case LATEST_VERSION: - Preconditions.checkArgument(value == null); - break; - case BRANCH: - case TAG: - case COMMIT_HASH_ONLY: - case REFERENCE: - case SNAPSHOT_ID: - Preconditions.checkArgument(value instanceof String); - break; - case TIMESTAMP: - Preconditions.checkArgument(value instanceof Long); - break; - default: - throw new AssertionError("Unsupported type " + type); + case LATEST_VERSION: + Preconditions.checkArgument(value == null); + break; + case BRANCH: + case TAG: + case COMMIT_HASH_ONLY: + case REFERENCE: + case SNAPSHOT_ID: + Preconditions.checkArgument(value instanceof String); + break; + case TIMESTAMP: + Preconditions.checkArgument(value instanceof Long); + break; + default: + throw new AssertionError("Unsupported type " + type); } return value; @@ -111,20 +142,20 @@ private static Object validateTypeAndSpecifier(TableVersionType type, Object val public VersionContext asVersionContext() { switch (type) { - case BRANCH: - return VersionContext.ofBranch(getValueAs(String.class)); - case TAG: - return VersionContext.ofTag(getValueAs(String.class)); - case COMMIT_HASH_ONLY: - return VersionContext.ofBareCommit(getValueAs(String.class)); - case REFERENCE: - return VersionContext.ofRef(getValueAs(String.class)); - case LATEST_VERSION: - case SNAPSHOT_ID: - case TIMESTAMP: - return VersionContext.NOT_SPECIFIED; - default: - throw new AssertionError("Unsupported type " + type); + case BRANCH: + return VersionContext.ofBranch(getValueAs(String.class)); + case TAG: + return VersionContext.ofTag(getValueAs(String.class)); + case COMMIT_HASH_ONLY: + return VersionContext.ofBareCommit(getValueAs(String.class)); + case REFERENCE: + return VersionContext.ofRef(getValueAs(String.class)); + case LATEST_VERSION: + case SNAPSHOT_ID: + case TIMESTAMP: + return VersionContext.NOT_SPECIFIED; + default: + throw new AssertionError("Unsupported type " + type); } } @@ -134,13 +165,92 @@ public static TableVersionContext of(ResolvedVersionContext resolvedVersionConte case TAG: return new TableVersionContext(TableVersionType.TAG, resolvedVersionContext.getRefName()); case BRANCH: - return new 
TableVersionContext(TableVersionType.BRANCH,resolvedVersionContext.getRefName()); + return new TableVersionContext(TableVersionType.BRANCH, resolvedVersionContext.getRefName()); case BARE_COMMIT: - return new TableVersionContext(TableVersionType.COMMIT_HASH_ONLY,resolvedVersionContext.getCommitHash()); + return new TableVersionContext(TableVersionType.COMMIT_HASH_ONLY, resolvedVersionContext.getCommitHash()); default: throw new IllegalStateException("Unexpected value: " + resolvedVersionContext.getType()); } } + public static TableVersionContext of(VersionContext versionContext) { + Preconditions.checkNotNull(versionContext); + switch (versionContext.getType()) { + case TAG: + return new TableVersionContext(TableVersionType.TAG, versionContext.getValue()); + case BRANCH: + return new TableVersionContext(TableVersionType.BRANCH, versionContext.getValue()); + case BARE_COMMIT: + return new TableVersionContext(TableVersionType.COMMIT_HASH_ONLY, versionContext.getValue()); + case REF: + return new TableVersionContext(TableVersionType.REFERENCE, versionContext.getValue()); + case UNSPECIFIED: + return LATEST_VERSION; + default: + throw new IllegalStateException("Unexpected value: " + versionContext.getType()); + } + } + public static TableVersionContext of(TimeTravelOption.TimeTravelRequest timeTravelRequest) { + Preconditions.checkNotNull(timeTravelRequest); + if (timeTravelRequest instanceof TimeTravelOption.TimestampRequest) { + return new TableVersionContext(TableVersionType.TIMESTAMP, + (((TimeTravelOption.TimestampRequest) timeTravelRequest).getTimestampMillis())); + } else if (timeTravelRequest instanceof TimeTravelOption.SnapshotIdRequest) { + return new TableVersionContext((TableVersionType.SNAPSHOT_ID), + ((TimeTravelOption.SnapshotIdRequest) timeTravelRequest).getSnapshotId()); + } else { + throw new IllegalStateException("Unexpected value for TimeTravelRequest "); + } + } + + public String serialize() { + ObjectMapper om = new ObjectMapper(); + String versionString = null; + try { + versionString = om.writeValueAsString(this); + } catch (JsonProcessingException e) { + logger.debug("Could not process table version context for {} ", this); + } + return versionString; + } + + public static TableVersionContext deserialize(String versionString) { + TableVersionContext tableVersionContext = null; + ObjectMapper om = new ObjectMapper(); + try { + tableVersionContext = om.readValue(versionString, TableVersionContext.class); + } catch (JsonProcessingException e) { + logger.debug("Invalid TableVersionContext string {}", versionString); + } + return tableVersionContext; + } + + boolean isTimeTravelType() { + return type == TableVersionType.TIMESTAMP || type == TableVersionType.SNAPSHOT_ID; + } + + public static Optional tryParse(String type, String value) { + if (Strings.isNullOrEmpty(type) || Strings.isNullOrEmpty(value)) { + return Optional.empty(); + } + + final TableVersionType versionType = TableVersionType.getType(type.toUpperCase()); + + if (versionType == null) { + return Optional.empty(); + } + + switch (versionType) { + case BRANCH: + case TAG: + case COMMIT_HASH_ONLY: + case SNAPSHOT_ID: + return Optional.of(new TableVersionContext(versionType, value)); + case TIMESTAMP: + return Optional.of(new TableVersionContext(versionType, Long.valueOf(value))); + default: + return Optional.empty(); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionType.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionType.java index 
9b619185a5..9ad8fb307b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionType.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/TableVersionType.java @@ -15,13 +15,16 @@ */ package com.dremio.exec.catalog; +import java.util.stream.Stream; + public enum TableVersionType { LATEST_VERSION("LATEST_VERSION"), + NOT_SPECIFIED("NOT_SPECIFIED"), BRANCH("BRANCH"), TAG("TAG"), COMMIT_HASH_ONLY("COMMIT"), REFERENCE("REFERENCE"), - SNAPSHOT_ID("SNAPSHOT_ID"), + SNAPSHOT_ID("SNAPSHOT"), TIMESTAMP("TIMESTAMP"), ; @@ -34,4 +37,11 @@ public enum TableVersionType { public String toSqlRepresentation() { return sqlRepresentation; } + + public static TableVersionType getType(String type) { + return Stream.of(values()) + .filter(tableVersionType -> tableVersionType.sqlRepresentation.equals(type)) + .findFirst() + .orElse(null); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOption.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOptions.java similarity index 80% rename from sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOption.java rename to sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOptions.java index 95fb69bcea..fcea1d7995 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOption.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VacuumOptions.java @@ -15,26 +15,26 @@ */ package com.dremio.exec.catalog; -public class VacuumOption { +public class VacuumOptions { public enum Type { TABLE // Vacuum table } - private final VacuumOption.Type type; + private final VacuumOptions.Type type; private final Long olderThanInMillis; private final int retainLast; - public VacuumOption(VacuumOption.Type type, Long olderThanInMillis, int retainLast) { + public VacuumOptions(VacuumOptions.Type type, long olderThanInMillis, int retainLast) { this.type = type; this.olderThanInMillis = olderThanInMillis; this.retainLast = retainLast; } - public VacuumOption.Type getType() { + public VacuumOptions.Type getType() { return type; } - public Long getOlderThanInMillis() { + public long getOlderThanInMillis() { return olderThanInMillis; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionContext.java index 8e19d4bb22..4ccef49f7e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionContext.java @@ -17,6 +17,7 @@ import javax.annotation.Nullable; +import org.apache.commons.lang3.StringUtils; import org.immutables.value.Value; import org.immutables.value.Value.Style.ImplementationVisibility; @@ -127,10 +128,15 @@ public VersionContext orElse(VersionContext other) { return (getType() == Type.UNSPECIFIED) ? 
other : this; } + @SuppressWarnings("immutables") public static VersionContext NOT_SPECIFIED = ImmutableVersionContext.builder() .type(Type.UNSPECIFIED) .build(); + public String toStringFirstLetterCapitalized() { + return StringUtils.capitalize(toString()); + } + @Override public String toString() { String out; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetAdapter.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetAdapter.java index b3ae7dda23..2c658a185a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetAdapter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetAdapter.java @@ -73,6 +73,7 @@ import com.google.common.collect.FluentIterable; import com.google.common.collect.Iterables; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -108,6 +109,7 @@ public static VersionedDatasetAdapter.Builder newBuilder() { return new VersionedDatasetAdapter.Builder(); } + @WithSpan public DremioTable getTable(final String accessUserName) { return datasetHandle.unwrap(VersionedDatasetHandle.class).translateToDremioTable(this, accessUserName); } @@ -137,6 +139,7 @@ public DremioTable translateIcebergView(String accessUserName) { TableVersionContext.of(versionContext)); viewConfig.setId(new EntityId(versionedDatasetId.asString())); viewConfig.setRecordSchema(batchSchema.toByteString()); + viewConfig.setLastModified(viewVersionMetadata.currentVersion().timestampMillis()); final View view = Views.fieldTypesToView(Iterables.getLast(viewKeyPath), viewVersionMetadata.definition().sql(), @@ -149,7 +152,7 @@ public DremioTable translateIcebergView(String accessUserName) { return new ViewTable(new NamespaceKey(viewKeyPath), view, null, - viewConfig, batchSchema); + viewConfig, batchSchema, TableVersionContext.of(versionContext).asVersionContext()); } private DatasetConfig createShallowVirtualDatasetConfig(List viewKeyPath, @@ -163,6 +166,12 @@ private DatasetConfig createShallowVirtualDatasetConfig(List viewKeyPath virtualDataset.setCalciteFieldsList(viewFieldTypesList); virtualDataset.setSqlFieldsList(viewFieldTypesList); + if (viewVersionMetadata.properties().containsKey("enable_default_reflection")) { + final boolean enableDefaultReflection = + Boolean.parseBoolean(viewVersionMetadata.properties().get("enable_default_reflection")); + virtualDataset.setDefaultReflectionEnabled(enableDefaultReflection); + } + versionedDatasetConfig.setName(Iterables.getLast(viewKeyPath)); //TODO: DX-48432 View ownership should set the view owner to the view/dataset creator versionedDatasetConfig.setOwner("dremio"); @@ -171,6 +180,7 @@ private DatasetConfig createShallowVirtualDatasetConfig(List viewKeyPath return versionedDatasetConfig; } + @WithSpan public DremioTable translateIcebergTable(final String accessUserName) { // Figure out the user we want to access the dataplane with. // *TBD* Use the Filesystem(Iceberg) plugin to tell us the configuration/username @@ -195,6 +205,7 @@ public DremioTable translateIcebergTable(final String accessUserName) { versionedDatasetConfig.setId(new EntityId(versionedDatasetId.asString())); setIcebergTableUUID(versionedDatasetConfig, versionedDatasetHandle.getUniqueInstanceId()); + // TODO: DX-62735 Table ownership should be set, i.e. 
versionedDatasetConfig.setOwner() // Construct the TableMetadata @@ -202,7 +213,13 @@ public DremioTable translateIcebergTable(final String accessUserName) { versionedDatasetConfig, accessUserName, splitsPointer, - getPrimaryKey(storagePlugin, versionedDatasetConfig, new NamespaceKey(versionedDatasetConfig.getFullPathList()), accessUserName, versionContext)); + getPrimaryKey(storagePlugin, versionedDatasetConfig, new NamespaceKey(versionedDatasetConfig.getFullPathList()), + accessUserName, versionContext)) { + @Override + public TableVersionContext getVersionContext() { + return TableVersionContext.of(versionContext); + } + }; return new NamespaceTable(tableMetadata, optionManager.getOption(FULL_NESTED_SCHEMA_SUPPORT)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetId.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetId.java index d402c765d1..9525436b66 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetId.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedDatasetId.java @@ -37,9 +37,9 @@ public class VersionedDatasetId { private TableVersionContext versionContext; @JsonCreator - VersionedDatasetId (@JsonProperty("tableKey") List tableKey, - @JsonProperty("contentId") String contentId, - @JsonProperty("versionContext") TableVersionContext versionContext) { + VersionedDatasetId(@JsonProperty("tableKey") List tableKey, + @JsonProperty("contentId") String contentId, + @JsonProperty("versionContext") TableVersionContext versionContext) { this.tableKey = tableKey; this.contentId = contentId; this.versionContext = versionContext; @@ -55,18 +55,50 @@ public String asString() { try { return om.writeValueAsString(this); } catch (JsonProcessingException e) { - logger.debug("Could not map VersinedDatasetId to String", e); + logger.debug("Could not map VersionedDatasetId to String", e); return null; } } public static VersionedDatasetId fromString(String idAsString) throws JsonProcessingException { - //try lookup in external catalog - //parser the dataset id + //parse the dataset id ObjectMapper objectMapper = new ObjectMapper(); return objectMapper.readValue(idAsString, VersionedDatasetId.class); } + + public static VersionedDatasetId tryParse(String idAsString) { + try { + return idAsString == null ? 
null : fromString(idAsString); + } catch (JsonProcessingException e) { + return null; + } + } + + + public static boolean isVersionedDatasetId(String idAsString) { + try { + VersionedDatasetId versionedDatasetId = fromString(idAsString); + return true; + } catch (JsonProcessingException j) { + return false; + } + } + + public static boolean isTimeTravelDatasetId(String idAsString) { + try { + VersionedDatasetId versionedDatasetId = fromString(idAsString); + return isTimeTravelDatasetId(versionedDatasetId); + } catch (JsonProcessingException j) { + return false; + } + } + + public static boolean isTimeTravelDatasetId(VersionedDatasetId versionedDatasetId) { + return versionedDatasetId.getVersionContext().getType() == TableVersionType.TIMESTAMP || + versionedDatasetId.getVersionContext().getType() == TableVersionType.SNAPSHOT_ID; + } + @Override public boolean equals(Object obj) { if (obj == null || this.getClass() != obj.getClass()) { @@ -86,6 +118,10 @@ public int hashCode() { return Objects.hash(tableKey, contentId, versionContext); } + public static boolean isVersioned(String datasetId) { + return datasetId.indexOf("versionContext") >= 0; + } + public static VersionedDatasetId.Builder newBuilder() { return new VersionedDatasetId.Builder(); } @@ -114,14 +150,12 @@ public Builder setTableVersionContext(TableVersionContext tableVersionContext) { public VersionedDatasetId build() { Preconditions.checkNotNull(tableKey); Preconditions.checkState(tableKey.size() > 0); - Preconditions.checkNotNull(contentId); Preconditions.checkNotNull(versionContext); - if (!(versionContext instanceof TableVersionContext)) { - throw new IllegalArgumentException("Illegal TableVersionContext"); + if (versionContext.getType() != TableVersionType.TIMESTAMP && versionContext.getType() != TableVersionType.SNAPSHOT_ID) { + Preconditions.checkNotNull(contentId); } - if ((versionContext.getType() == TableVersionType.SNAPSHOT_ID) || (versionContext.getType() == TableVersionType.TIMESTAMP)) { - //TODO (DX-58588) Needs to be revisited to support snapshot id and timestamp - throw new UnsupportedOperationException("No support yet for Snapshot and Timestamp"); + if (!(versionContext instanceof TableVersionContext)) { + throw new IllegalArgumentException("versionContext must be of type TableVersionContext"); } return new VersionedDatasetId(tableKey, contentId, versionContext); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedPlugin.java index c5691847c2..13c40d8800 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedPlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/VersionedPlugin.java @@ -18,7 +18,13 @@ import java.util.List; import java.util.stream.Stream; +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.store.ChangeInfo; +import com.dremio.exec.store.NessieNamespaceAlreadyExistsException; import com.dremio.exec.store.NoDefaultBranchException; +import com.dremio.exec.store.ReferenceAlreadyExistsException; +import com.dremio.exec.store.ReferenceConflictException; +import com.dremio.exec.store.ReferenceInfo; import com.dremio.exec.store.ReferenceNotFoundException; import com.dremio.exec.store.ReferenceTypeConflictException; import com.dremio.plugins.ExternalNamespaceEntry; @@ -27,6 +33,7 @@ import com.dremio.service.catalog.Table; import com.dremio.service.catalog.TableSchema; import com.dremio.service.catalog.View; +import 
com.dremio.service.namespace.NamespaceKey; /** * Versioning-specific methods for the Catalog interface. @@ -110,4 +117,167 @@ public enum EntityType { */ Stream getAllInformationSchemaColumnInfo(SearchQuery searchQuery); + /** + * Gets contentId for the given key and version + */ + String getContentId(List key, ResolvedVersionContext version); + + /** + * Checks that a commit hash exists in the server. + */ + boolean commitExists(String commitHash); + + /** + * List all branches. + */ + Stream listBranches(); + + /** + * List all tags. + */ + Stream listTags(); + + /** + * List all references (both branches and tags). + */ + Stream listReferences(); + + /** + * List all changes for the given version. + * + * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists) + * + * @throws ReferenceNotFoundException If the given reference cannot be found + * @throws NoDefaultBranchException If the Nessie server does not have a default branch set + * @throws ReferenceTypeConflictException If the requested version type does not match the server + */ + Stream listChanges(VersionContext version); + + /** + * List only entries under the given path for the given version. + * + * @param catalogPath Acts as the namespace filter. It will scope entries to this namespace. + * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists). + * + * @throws ReferenceNotFoundException If the given reference cannot be found + * @throws NoDefaultBranchException If the Nessie server does not have a default branch set + * @throws ReferenceTypeConflictException If the requested version type does not match the server + */ + Stream listEntries(List catalogPath, VersionContext version) + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException; + + /** + * List all entries under the given path and subpaths for the given version. + * + * @param catalogPath Acts as the namespace filter. It will act as the root namespace. + * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists). + * + * @throws ReferenceNotFoundException If the given reference cannot be found. + * @throws NoDefaultBranchException If the Nessie server does not have a default branch set. + * @throws ReferenceConflictException If the requested version does not match the server. + */ + Stream listEntriesIncludeNested(List catalogPath, VersionContext version) + throws ReferenceNotFoundException, NoDefaultBranchException, ReferenceConflictException; + + /** + * Create a namespace by the given path for the given version + * @param namespaceKey The namespacekey that is used to create a folder in Nessie. + * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists). + * + * @throws NessieNamespaceAlreadyExistsException If the namespace already exists. + * @throws ReferenceNotFoundException If the given reference cannot be found. + * @throws NoDefaultBranchException If the Nessie server does not have a default branch set. + * @throws ReferenceConflictException If the requested version does not match the server. + */ + void createNamespace(NamespaceKey namespaceKey, VersionContext version); + + /** + * Create a branch from the given source reference. + * + * @param sourceVersion If the version is NOT_SPECIFIED, the default branch is used (if it exists) + * + * @throws ReferenceAlreadyExistsException If the reference already exists. 
+ * @throws ReferenceNotFoundException If the given source reference cannot be found
+ * @throws NoDefaultBranchException If the Nessie server does not have a default branch set
+ * @throws ReferenceTypeConflictException If the requested version type does not match the server
+ */
+ void createBranch(String branchName, VersionContext sourceVersion);
+
+ /**
+ * Create a tag from the given source reference.
+ *
+ * @param sourceVersion If the version is NOT_SPECIFIED, the default branch is used (if it exists)
+ *
+ * @throws ReferenceAlreadyExistsException If the reference already exists
+ * @throws ReferenceNotFoundException If the given source reference cannot be found
+ * @throws NoDefaultBranchException If the Nessie server does not have a default branch set
+ * @throws ReferenceTypeConflictException If the requested version type does not match the server
+ */
+ void createTag(String tagName, VersionContext sourceVersion);
+
+ /**
+ * Drop the given branch.
+ *
+ * @throws ReferenceConflictException If the drop has a conflict on the given branch
+ * @throws ReferenceNotFoundException If the given branch cannot be found
+ */
+ void dropBranch(String branchName, String branchHash);
+
+ /**
+ * Drop the given tag.
+ *
+ * @throws ReferenceConflictException If the drop has a conflict on the given tag
+ * @throws ReferenceNotFoundException If the given tag cannot be found
+ */
+ void dropTag(String tagName, String tagHash);
+
+ /**
+ * Merge the source branch into the target branch.
+ *
+ * @param sourceBranchName The source branch we are merging from
+ * @param targetBranchName The target branch we are merging into
+ *
+ * @throws ReferenceConflictException If the target branch hash changes during merging
+ * @throws ReferenceNotFoundException If the source/target branch cannot be found
+ */
+ void mergeBranch(String sourceBranchName, String targetBranchName);
+
+ /**
+ * Update the reference for the given branch.
+ *
+ * @param branchName The branch whose reference we want to update
+ * @param sourceVersion The source reference name
+ *
+ * @throws ReferenceConflictException If the branch hash or source reference hash changes during update
+ * @throws ReferenceNotFoundException If the given branch or source reference cannot be found
+ */
+
+ void assignBranch(String branchName, VersionContext sourceVersion)
+ throws ReferenceConflictException, ReferenceNotFoundException;
+
+ /**
+ * Update the reference for the given tag.
+ *
+ * @param tagName The tag whose reference we want to update
+ * @param sourceVersion The reference we want to update to
+ *
+ * @throws ReferenceConflictException If the tag hash or source reference hash changes during update
+ * @throws ReferenceNotFoundException If the given tag or source reference cannot be found
+ */
+
+ void assignTag(String tagName, VersionContext sourceVersion)
+ throws ReferenceConflictException, ReferenceNotFoundException;
+
+ /**
+ * Deletes an empty folder at the given path for the given version.
+ * @param namespaceKey The namespace key of the folder in Nessie.
+ * @param sourceVersion If the version is NOT_SPECIFIED, the default branch is used (if it exists).
+ *
+ * @throws ReferenceNotFoundException If the given reference cannot be found.
+ * @throws UserException If the requested folder to be deleted is not empty.
+ */ + + void deleteFolder(NamespaceKey namespaceKey, VersionContext sourceVersion) + throws ReferenceNotFoundException, UserException; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/AWSRegionSelection.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/AWSRegionSelection.java index 0defe2ba18..6dd1647966 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/AWSRegionSelection.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/AWSRegionSelection.java @@ -34,8 +34,8 @@ public enum AWSRegionSelection { @Tag(11) @DisplayMetadata(label = "Asia Pacific (Sydney) ap-southeast-2") AP_SOUTHEAST_2("ap-southeast-2"), @Tag(12) @DisplayMetadata(label = "Asia Pacific (Tokyo) ap-northeast-1") AP_NORTHEAST_1("ap-northeast-1"), @Tag(13) @DisplayMetadata(label = "Canada (Central) ca-central-1") CA_CENTRAL_1("ca-central-1"), - @Tag(14) @DisplayMetadata(label = "China (Beijing) ca-north-1") CA_NORTH_1("ca-north-1"), - @Tag(15) @DisplayMetadata(label = "China (Ningxia) ca-northwest-1") CA_NORTHWEST_1("ca-northwest-1"), + @Tag(14) @DisplayMetadata(label = "China (Beijing) cn-north-1") CN_NORTH_1("cn-north-1"), + @Tag(15) @DisplayMetadata(label = "China (Ningxia) cn-northwest-1") CN_NORTHWEST_1("cn-northwest-1"), @Tag(16) @DisplayMetadata(label = "Europe (Frankfurt) eu-central-1") EU_CENTRAL_1("eu-central-1"), @Tag(17) @DisplayMetadata(label = "Europe (Ireland) eu-west-1") EU_WEST_1("eu-west-1"), @Tag(18) @DisplayMetadata(label = "Europe (London) eu-west-2") EU_WEST_2("eu-west-2"), diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/NessieAuthType.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/NessieAuthType.java new file mode 100644 index 0000000000..46ea814bce --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/conf/NessieAuthType.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.conf; + +import io.protostuff.Tag; + +/** + * Types of authentication to use with Nessie server. + */ +public enum NessieAuthType { + /** + * Access Nessie with no auth. + */ + @Tag(1) @DisplayMetadata(label = "None") NONE, + + /** + * Access Nessie with auth enabled mode. + */ + @Tag(2) @DisplayMetadata(label = "Bearer") BEARER +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/CorrelatedUdfDetector.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/CorrelatedUdfDetector.java new file mode 100644 index 0000000000..365ccb59b2 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/CorrelatedUdfDetector.java @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.catalog.udf; + +import org.apache.calcite.rel.RelHomogeneousShuttle; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.rex.RexSubQuery; + +public final class CorrelatedUdfDetector extends RexShuttle { + private static final class RexDetector extends RexShuttle { + private boolean hasRexSubQuery; + + private RexDetector() { + } + + @Override + public RexNode visitSubQuery(RexSubQuery subQuery) { + this.hasRexSubQuery = true; + return subQuery; + } + } + + private static final class RelDetector extends RelHomogeneousShuttle { + private final RexDetector rexDetector; + + public RelDetector() { + rexDetector = new RexDetector(); + } + + @Override + public RelNode visit(RelNode other) { + other.accept(rexDetector); + if (rexDetector.hasRexSubQuery) { + return other; + } + + return visitChildren(other); + } + } + + public static boolean hasCorrelatedUdf(RexNode rexNode) { + RexDetector detector = new RexDetector(); + rexNode.accept(detector); + + return detector.hasRexSubQuery; + } + + public static boolean hasCorrelatedUdf(RelNode relNode) { + RelDetector detector = new RelDetector(); + relNode.accept(detector); + return detector.rexDetector.hasRexSubQuery; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioScalarUserDefinedFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioScalarUserDefinedFunction.java index 6872fb377d..7626d72a8c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioScalarUserDefinedFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioScalarUserDefinedFunction.java @@ -17,14 +17,24 @@ import java.util.List; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexSubQuery; import org.apache.calcite.schema.FunctionParameter; import org.apache.calcite.schema.ScalarFunction; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlSelect; import com.dremio.exec.catalog.CatalogIdentity; import com.dremio.exec.planner.sql.CalciteArrowHelper; +import com.dremio.exec.planner.sql.SqlConverter; +import com.dremio.exec.planner.sql.SqlValidatorAndToRelContext; +import com.dremio.exec.store.sys.udf.FunctionOperatorTable; import com.dremio.exec.store.sys.udf.UserDefinedFunction; +import com.google.common.collect.ImmutableList; public class DremioScalarUserDefinedFunction implements ScalarFunction { private final CatalogIdentity owner; @@ -54,4 +64,49 @@ public CatalogIdentity getOwner() { public String getName(){ return userDefinedFunction.getName(); } + + public RexNode extractExpression(SqlConverter sqlConverter) { + // TODO: Use the cached function plan to avoid this reparsing logic + SqlNode functionSqlNode = parse(sqlConverter); + RelNode functionPlan = SqlValidatorAndToRelContext + .builder(sqlConverter) + 
+      .withSchemaPath(ImmutableList.of())
+      .withUser(owner)
+      .withContextualSqlOperatorTable(new FunctionOperatorTable(
+        userDefinedFunction.getName(),
+        getParameters()))
+      .disallowSubqueryExpansion()
+      .build()
+      .getPlanForFunctionExpression(functionSqlNode);
+
+    if (functionPlan instanceof Project) {
+      // We always convert scalar UDFs to SQL queries / plans
+      // One quirk to this is that we only allow for a single (hence scalar) expression
+      // But the plan returns a whole table (but with only 1 column ever)
+      // So we just strip out the only project node.
+      Project project = (Project) functionPlan;
+      assert project.getProjects().size() == 1;
+      return project.getProjects().get(0);
+    }
+
+    // We have a scalar subquery
+    return RexSubQuery.scalar(functionPlan, null);
+  }
+
+  private SqlSelect parse(SqlConverter sqlConverter) {
+    // For a scalar udf the body can either be:
+    // 1) a + b
+    // 2) SELECT a + b
+    // The first form is not parseable as a query on its own,
+    // so we normalize it to the second form by prepending SELECT
+    String sqlQueryText = getFunctionSql();
+    if (!sqlQueryText.toUpperCase().startsWith("SELECT ")) {
+      sqlQueryText = "SELECT " + sqlQueryText;
+    }
+
+    SqlNode sqlNode = sqlConverter.parse(sqlQueryText);
+    assert sqlNode instanceof SqlSelect;
+    SqlSelect sqlSelect = (SqlSelect) sqlNode;
+    return sqlSelect;
+  }
 }
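Editor's note: the two accepted body forms above are easy to get wrong when reproducing this logic elsewhere. A minimal, self-contained sketch of the same normalization follows; the class and method names are invented for illustration and are not Dremio API.

    import java.util.Locale;

    public final class UdfBodySketch {
      // Mirrors parse(...) above: a bare expression body is wrapped in a SELECT,
      // an explicit "SELECT ..." body is left untouched.
      static String normalizeUdfBody(String body) {
        String trimmed = body.trim();
        return trimmed.toUpperCase(Locale.ROOT).startsWith("SELECT ") ? trimmed : "SELECT " + trimmed;
      }

      public static void main(String[] args) {
        System.out.println(normalizeUdfBody("a + b"));        // SELECT a + b
        System.out.println(normalizeUdfBody("SELECT a + b")); // SELECT a + b
      }
    }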
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioTabularUserDefinedFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioTabularUserDefinedFunction.java
new file mode 100644
index 0000000000..5f6be92918
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/DremioTabularUserDefinedFunction.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.catalog.udf;
+
+import java.lang.reflect.Type;
+import java.util.List;
+
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.FunctionParameter;
+import org.apache.calcite.schema.TableFunction;
+import org.apache.calcite.sql.SqlNode;
+
+import com.dremio.exec.catalog.CatalogIdentity;
+import com.dremio.exec.planner.sql.CalciteArrowHelper;
+import com.dremio.exec.planner.sql.SqlConverter;
+import com.dremio.exec.planner.sql.SqlValidatorAndToRelContext;
+import com.dremio.exec.store.sys.udf.FunctionOperatorTable;
+import com.dremio.exec.store.sys.udf.UserDefinedFunction;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Wraps a UserDefinedFunction to meet the TableFunction API
+ */
+public final class DremioTabularUserDefinedFunction implements TableFunction {
+  private final CatalogIdentity owner;
+  private final UserDefinedFunction userDefinedFunction;
+
+  public DremioTabularUserDefinedFunction(
+    CatalogIdentity owner,
+    UserDefinedFunction userDefinedFunction) {
+    this.owner = owner;
+    this.userDefinedFunction = userDefinedFunction;
+  }
+
+  @Override
+  public RelDataType getRowType(RelDataTypeFactory typeFactory, List<Object> arguments) {
+    return CalciteArrowHelper
+      .wrap(userDefinedFunction.getReturnType())
+      .toCalciteType(typeFactory, true);
+  }
+
+  @Override
+  public Type getElementType(List<Object> arguments) {
+    return Object[].class;
+  }
+
+  @Override
+  public List<FunctionParameter> getParameters() {
+    return FunctionParameterImpl.createParameters(userDefinedFunction.getFunctionArgsList());
+  }
+
+  public RelNode extractFunctionPlan(SqlConverter sqlConverter) {
+    // TODO: Use the cached function plan to avoid this reparsing logic
+    String functionSql = userDefinedFunction.getFunctionSql();
+    SqlNode functionExpression = sqlConverter.parse(functionSql);
+    return SqlValidatorAndToRelContext
+      .builder(sqlConverter)
+      .withSchemaPath(ImmutableList.of())
+      .withUser(owner)
+      .withContextualSqlOperatorTable(new FunctionOperatorTable(
+        userDefinedFunction.getName(),
+        getParameters()))
+      .build()
+      .getPlanForFunctionExpression(functionExpression);
+  }
+}
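Editor's note: a short sketch of how the planner is expected to exercise this TableFunction contract. Every input here (owner, udf, typeFactory) is assumed to be available from planner context, and the class name is illustrative only; per the implementation above, getRowType ignores its argument list.

    import java.util.List;

    import org.apache.calcite.rel.type.RelDataType;
    import org.apache.calcite.rel.type.RelDataTypeFactory;
    import org.apache.calcite.schema.FunctionParameter;

    import com.dremio.exec.catalog.CatalogIdentity;
    import com.dremio.exec.catalog.udf.DremioTabularUserDefinedFunction;
    import com.dremio.exec.store.sys.udf.UserDefinedFunction;
    import com.google.common.collect.ImmutableList;

    final class TabularUdfContractSketch {
      static void describe(CatalogIdentity owner, UserDefinedFunction udf, RelDataTypeFactory typeFactory) {
        DremioTabularUserDefinedFunction fn = new DremioTabularUserDefinedFunction(owner, udf);
        RelDataType rowType = fn.getRowType(typeFactory, ImmutableList.of()); // derived from the UDF's return type
        List<FunctionParameter> params = fn.getParameters();                  // one per declared UDF argument
        System.out.println(rowType.getFieldCount() + " columns, " + params.size() + " parameters");
      }
    }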
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ParameterizedQueryParameterReplacer.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ParameterizedQueryParameterReplacer.java
new file mode 100644
index 0000000000..b3fca480d6
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ParameterizedQueryParameterReplacer.java
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.catalog.udf;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.RelShuttle;
+import org.apache.calcite.rel.logical.LogicalFilter;
+import org.apache.calcite.rel.logical.LogicalProject;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexShuttle;
+import org.apache.calcite.schema.FunctionParameter;
+import org.apache.calcite.sql.type.SqlTypeName;
+
+import com.dremio.exec.planner.StatelessRelShuttleImpl;
+import com.dremio.exec.planner.types.JavaTypeFactoryImpl;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Replaces the parameter references inside a parameterized query (such as a UDF body)
+ * with the concrete argument expressions supplied at the call site.
+ */
+public final class ParameterizedQueryParameterReplacer {
+  public static RelNode replaceParameters(
+    RelNode functionPlan,
+    List<FunctionParameter> functionParameters,
+    List<RexNode> values,
+    RexBuilder rexBuilder) {
+    return functionPlan.accept(
+      createRelParameterReplacer(
+        functionParameters,
+        values,
+        rexBuilder));
+  }
+
+  public static RelShuttle createRelParameterReplacer(
+    List<FunctionParameter> functionParameters,
+    List<RexNode> values,
+    RexBuilder rexBuilder) {
+    return new RelArgumentReplacer((RexArgumentReplacer)
+      createRexParameterReplacer(
+        functionParameters,
+        values,
+        rexBuilder));
+  }
+
+  public static RexShuttle createRexParameterReplacer(
+    List<FunctionParameter> functionParameters,
+    List<RexNode> values,
+    RexBuilder rexBuilder) {
+    Map<String, RexNode> replacementMapping = createReplacementMapping(functionParameters, values, rexBuilder);
+    RexArgumentReplacer replacer = new RexArgumentReplacer(replacementMapping);
+    return replacer;
+  }
+
+  public static RexNode replaceParameters(
+    RexNode functionExpression,
+    List<FunctionParameter> functionParameters,
+    List<RexNode> values,
+    RexBuilder rexBuilder) {
+    return functionExpression.accept(
+      createRexParameterReplacer(
+        functionParameters,
+        values,
+        rexBuilder));
+  }
+
+  private static Map<String, RexNode> createReplacementMapping(
+    List<FunctionParameter> functionParameters,
+    List<RexNode> values,
+    RexBuilder rexBuilder) {
+    if (functionParameters.size() != values.size()) {
+      throw new UnsupportedOperationException("Parameters and Replacements weren't the same size");
+    }
+
+    Map<String, RexNode> replacementMapping = new HashMap<>();
+    for (int i = 0; i < functionParameters.size(); i++) {
+      FunctionParameter functionParameter = functionParameters.get(i);
+      RelDataType parameterType = functionParameter.getType(JavaTypeFactoryImpl.INSTANCE);
+      String key = functionParameter.getName().toUpperCase();
+      RexNode value = values.get(i);
+      RelDataType valueType = value.getType();
+      if ((valueType.getSqlTypeName() != SqlTypeName.ANY) && (valueType != parameterType)) {
+        // If value type is ANY, then don't even bother with the cast
+        // It's probably a function like CONVERT_FROM that has a type validation bug that needs to be fixed.
+        // Now if the types don't match and we know the correct type, then add a cast
+        // If it's not castable, then we will get the appropriate error message.
+        value = rexBuilder.makeCast(parameterType, value);
+      }
+
+      replacementMapping.put(key, value);
+    }
+
+    return replacementMapping;
+  }
+
+  // TODO: If calcite took the nameToNodeMap and didn't throw an exception if the name didn't exist,
+  // then we could call into convertSelect(...) with the mapping, but for now we will manually replace the UDF arguments.
+  private static final class RelArgumentReplacer extends StatelessRelShuttleImpl {
+    private final RexArgumentReplacer rexArgumentReplacer;
+
+    public RelArgumentReplacer(RexArgumentReplacer rexArgumentReplacer) {
+      this.rexArgumentReplacer = rexArgumentReplacer;
+    }
+
+    @Override
+    public RelNode visit(LogicalProject project) {
+      RelNode rewrittenInput = project.getInput().accept(this);
+      LogicalProject rewrittenProject = (LogicalProject) project.accept(rexArgumentReplacer);
+      boolean rewriteHappened = (rewrittenInput != project.getInput()) || (rewrittenProject != project);
+      if (!rewriteHappened) {
+        return project;
+      }
+
+      return rewrittenProject.copy(
+        rewrittenProject.getTraitSet(),
+        ImmutableList.of(rewrittenInput));
+    }
+
+    @Override
+    public RelNode visit(LogicalFilter filter) {
+      RelNode rewrittenInput = filter.getInput().accept(this);
+      LogicalFilter rewrittenFilter = (LogicalFilter) filter.accept(rexArgumentReplacer);
+      boolean rewriteHappened = (rewrittenInput != filter.getInput()) || (rewrittenFilter != filter);
+      if (!rewriteHappened) {
+        return filter;
+      }
+
+      return rewrittenFilter.copy(
+        rewrittenFilter.getTraitSet(),
+        rewrittenInput,
+        rewrittenFilter.getCondition());
+    }
+  }
+
+  private static final class RexArgumentReplacer extends RexShuttle {
+    private final Map<String, RexNode> mapping;
+
+    public RexArgumentReplacer(Map<String, RexNode> mapping) {
+      this.mapping = mapping;
+    }
+
+    @Override public RexNode visitCall(RexCall call) {
+      if (mapping.isEmpty()) {
+        return call;
+      }
+
+      RexNode replacement = mapping.get(call.getOperator().getName().toUpperCase());
+      if (replacement == null) {
+        return super.visitCall(call);
+      }
+
+      return replacement;
+    }
+  }
+}
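Editor's note: a sketch of how this replacer is driven from the expansion rules that follow. The variable names (udfBody, callArgs) are placeholders for values available during UDF expansion; the CAST behavior is the one implemented by createReplacementMapping above.

    import java.util.List;

    import org.apache.calcite.rex.RexBuilder;
    import org.apache.calcite.rex.RexNode;
    import org.apache.calcite.schema.Function;
    import org.apache.calcite.schema.FunctionParameter;

    import com.dremio.exec.catalog.udf.ParameterizedQueryParameterReplacer;

    final class ReplaceParametersSketch {
      // udfBody is the materialized function body in which each parameter appears as a
      // call to its argument operator; callArgs are the operands from the call site.
      static RexNode expand(Function function, RexNode udfBody, List<RexNode> callArgs, RexBuilder rexBuilder) {
        List<FunctionParameter> params = function.getParameters();
        // Replacement is keyed on the upper-cased parameter name; a CAST to the declared
        // parameter type is inserted when the argument type differs.
        return ParameterizedQueryParameterReplacer.replaceParameters(udfBody, params, callArgs, rexBuilder);
      }
    }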
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ScalarUserDefinedFunctionExpanderRule.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ScalarUserDefinedFunctionExpanderRule.java
index 085353096a..2b69325f98 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ScalarUserDefinedFunctionExpanderRule.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/ScalarUserDefinedFunctionExpanderRule.java
@@ -15,222 +15,257 @@
  */
 package com.dremio.exec.catalog.udf;
 
-import java.util.ArrayList;
 import java.util.List;
-import java.util.function.Supplier;
+import java.util.stream.Collectors;
 
-import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.plan.RelRule;
-import org.apache.calcite.rel.RelHomogeneousShuttle;
 import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.RelShuttle;
 import org.apache.calcite.rel.core.CorrelationId;
 import org.apache.calcite.rel.core.Filter;
-import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexCorrelVariable;
+import org.apache.calcite.rex.RexInputRef;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexShuttle;
 import org.apache.calcite.rex.RexSubQuery;
 import org.apache.calcite.schema.Function;
 import org.apache.calcite.schema.FunctionParameter;
-import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperator;
-import org.apache.calcite.sql.SqlSelect;
+import org.apache.calcite.sql.fun.SqlQuantifyOperator;
 import org.apache.calcite.sql.validate.SqlUserDefinedFunction;
 import org.apache.calcite.util.Pair;
 
 import com.dremio.exec.planner.sql.SqlConverter;
-import com.dremio.exec.planner.sql.SqlValidatorAndToRelContext;
-import com.dremio.exec.store.sys.udf.FunctionOperatorTable;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 
-public abstract class ScalarUserDefinedFunctionExpanderRule
-  extends RelRule<RelRule.Config> {
+public class ScalarUserDefinedFunctionExpanderRule extends RelRule<RelRule.Config> {
+  private final SqlConverter sqlConverter;
 
-  private final Supplier<SqlValidatorAndToRelContext.Builder> sqlSubQueryConverterBuilderSupplier;
-
-  public ScalarUserDefinedFunctionExpanderRule(
-    Config config,
-    Supplier<SqlValidatorAndToRelContext.Builder> sqlSubQueryConverterBuilderSupplier) {
-    super(config);
-    this.sqlSubQueryConverterBuilderSupplier = sqlSubQueryConverterBuilderSupplier;
+  public ScalarUserDefinedFunctionExpanderRule(SqlConverter sqlConverter) {
+    super(Config.EMPTY
+      .withDescription("ScalarUserDefinedFunctionExpanderRuleFilter")
+      .withOperandSupplier(op1 ->
+        op1.operand(RelNode.class).anyInputs()));
+    this.sqlConverter = sqlConverter;
   }
 
+  @Override public void onMatch(RelOptRuleCall relOptRuleCall) {
+    RelNode rel = relOptRuleCall.rel(0);
+    Pair<RelNode, CorrelationId> relAndCor = convert(rel);
+    if (relAndCor.left == rel) {
+      return;
+    }
 
-  protected Pair<RelNode, CorrelationId> convert(RelNode rel) {
-    CorrelationId correlationId = rel.getVariablesSet().isEmpty()
-      ? rel.getCluster().createCorrel()
-      : Iterables.getOnlyElement(rel.getVariablesSet());
+    RelNode transformedRel;
+    if (!(rel instanceof Filter)) {
+      // Don't need to do anything with the correlate id.
+      transformedRel = relAndCor.left;
+    } else {
+      Filter filterRel = (Filter) rel;
+      transformedRel = relOptRuleCall
+        .builder()
+        .push(filterRel.getInput())
+        .filter(ImmutableList.of(relAndCor.right), ImmutableList.of(((Filter) relAndCor.left).getCondition()))
+        .build();
+    }
 
-    ScalarParserValidator scalarParserValidator = new ScalarParserValidator(sqlSubQueryConverterBuilderSupplier.get());
-    UdfExpander udfExpander = new UdfExpander(scalarParserValidator, rel.getCluster().getRexBuilder());
-    return Pair.of(rel.accept(udfExpander), correlationId);
+    relOptRuleCall.transformTo(transformedRel);
   }
 
-  public static RelOptRule createFilterRule(
-    Supplier<SqlValidatorAndToRelContext.Builder> sqlSubQueryConverterBuilderSupplier){
-    return new ScalarUserDefinedFunctionExpanderRule(
-      Config.EMPTY
-        .withDescription("ScalarUserDefinedFunctionExpanderRuleFilter")
-        .withOperandSupplier(op1 ->
-          op1.operand(Filter.class).anyInputs()),
-      sqlSubQueryConverterBuilderSupplier) {
-
-      @Override public void onMatch(RelOptRuleCall relOptRuleCall) {
-        Filter rel = relOptRuleCall.rel(0);
-        Pair<RelNode, CorrelationId> relAndCor = convert(rel);
-        if (relAndCor.left == rel) {
-          return;
-        }
-
-        relOptRuleCall.transformTo(relOptRuleCall.builder()
-          .push(rel.getInput())
-          .filter(ImmutableList.of(relAndCor.right), ImmutableList.of(((Filter) relAndCor.left).getCondition()))
-          .build());
-      }
-    };
-  }
+  private Pair<RelNode, CorrelationId> convert(RelNode relNode) {
+    RexBuilder rexBuilder = relNode.getCluster().getRexBuilder();
+    CorrelationId correlationId = relNode.getVariablesSet().isEmpty()
+      ? relNode.getCluster().createCorrel()
+      : Iterables.getOnlyElement(relNode.getVariablesSet());
+    RexCorrelVariable rexCorrelVariable = relNode.getInputs().isEmpty() ?
+      null : (RexCorrelVariable)rexBuilder.makeCorrel(
+        relNode.getInput(0).getRowType(),
+        correlationId);
 
-  public static RelOptRule createProjectRule(
-    Supplier<SqlValidatorAndToRelContext.Builder> sqlSubQueryConverterBuilderSupplier){
-    return new ScalarUserDefinedFunctionExpanderRule(
-      Config.EMPTY
-        .withDescription("ScalarUserDefinedFunctionExpanderRuleProject")
-        .withOperandSupplier(op1 ->
-          op1.operand(Project.class).anyInputs()),
-      sqlSubQueryConverterBuilderSupplier) {
-
-      @Override public void onMatch(RelOptRuleCall relOptRuleCall) {
-        Project rel = relOptRuleCall.rel(0);
-        Pair<RelNode, CorrelationId> relAndCor = convert(rel);
-        if (relAndCor.left == rel) {
-          return;
-        }
-
-        //TODO project drop correlate node.....
-        relOptRuleCall.transformTo(relAndCor.left);
-      }
-    };
+    UdfExpander udfExpander = new UdfExpander(sqlConverter, rexBuilder, rexCorrelVariable);
+    RelNode transformedRelNode = relNode.accept(udfExpander);
+    return Pair.of(transformedRelNode, correlationId);
   }
-}
-
-class ReplaceArgumentsVisitor extends RexShuttle {
-  private final List<RexNode> arguments;
-  private final List<SqlOperator> argumentsOperators;
 
-  public ReplaceArgumentsVisitor(List<RexNode> arguments,
-    List<SqlOperator> argumentsOperators) {
-    this.arguments = arguments;
-    this.argumentsOperators = argumentsOperators;
-  }
-
-  @Override public RexNode visitCall(RexCall call) {
-    int index = argumentsOperators.indexOf(call.getOperator());
-    if(index == -1) {
-      return super.visitCall(call);
-    } else {
-      return arguments.get(index);
+  private static final class UdfExpander extends RexShuttle {
+    private final SqlConverter sqlConverter;
+    private final RexBuilder rexBuilder;
+    private final RexCorrelVariable rexCorrelVariable;
+
+    public UdfExpander(
+      SqlConverter sqlConverter,
+      RexBuilder rexBuilder,
+      RexCorrelVariable rexCorrelVariable) {
+      this.sqlConverter = sqlConverter;
+      this.rexBuilder = rexBuilder;
+      this.rexCorrelVariable = rexCorrelVariable;
     }
-  }
-
-  @Override public RexNode visitSubQuery(RexSubQuery subQuery) {
-    RexShuttle rexShuttle = this;
-    RelNode relNode = subQuery.rel.accept(new RelHomogeneousShuttle() {
-      @Override public RelNode visit(RelNode other) {
-        return other.accept(rexShuttle);
-      }
-    });
-    List<RexNode> rexNodes = subQuery.getOperands().stream()
-      .map(o -> o.accept(rexShuttle))
-      .collect(ImmutableList.toImmutableList());
-
-    return subQuery
-      .clone(subQuery.type, rexNodes)
-      .clone(relNode);
-  }
-}
-class ScalarParserValidator {
-  private final SqlValidatorAndToRelContext.Builder sqlSubQueryConverterBuilder;
 
+    @Override public RexNode visitCall(RexCall call) {
+      SqlOperator operator = call.getOperator();
 
-  public ScalarParserValidator(SqlValidatorAndToRelContext.Builder sqlSubQueryConverterBuilder) {
-    this.sqlSubQueryConverterBuilder = sqlSubQueryConverterBuilder;
-  }
+      // Preorder traversal to handle nested UDFs
+      RexCall visitedCall = (RexCall) super.visitCall(call);
+      if (!(operator instanceof SqlUserDefinedFunction)) {
+        return visitedCall;
+      }
 
-  public SqlValidatorAndToRelContext.FunctionBodyAndArguments expand(
-    DremioScalarUserDefinedFunction dremioScalarUserDefinedFunction) {
-    SqlValidatorAndToRelContext sqlValidatorAndToRelContext = sqlSubQueryConverterBuilder
-      .withSchemaPath(ImmutableList.of())
-      .withUser(dremioScalarUserDefinedFunction.getOwner())
-      .withContextualSqlOperatorTable(new FunctionOperatorTable(
-        dremioScalarUserDefinedFunction.getName(),
-        dremioScalarUserDefinedFunction.getParameters()))
-      .build();
+      Function function = ((SqlUserDefinedFunction) operator).getFunction();
+      if (!(function instanceof DremioScalarUserDefinedFunction)) {
+        return visitedCall;
+      }
+      DremioScalarUserDefinedFunction dremioScalarUserDefinedFunction = (DremioScalarUserDefinedFunction) function;
+
+      RexNode udfExpression = dremioScalarUserDefinedFunction.extractExpression(sqlConverter);
+      RexNode rewrittenUdfExpression;
+      if (!CorrelatedUdfDetector.hasCorrelatedUdf(udfExpression)) {
+        rewrittenUdfExpression = ParameterizedQueryParameterReplacer.replaceParameters(
+          udfExpression,
+          function.getParameters(),
+          visitedCall.getOperands(),
+          rexBuilder);
+      } else {
+        RexInputRefToFieldAccess replacer = new RexInputRefToFieldAccess(
+          rexBuilder,
+          rexCorrelVariable);
+        List<RexNode> rewrittenCorrelateOperands = visitedCall
+          .getOperands()
+          .stream()
+          .map(operand -> operand.accept(replacer))
+          .collect(Collectors.toList());
+        rewrittenUdfExpression = RexArgumentReplacer.replaceArguments(
+          udfExpression,
+          function.getParameters(),
+          rewrittenCorrelateOperands,
+          visitedCall.getOperands(),
+          rexCorrelVariable.id,
+          rexBuilder);
+      }
 
-    SqlNode sqlNode = parse(sqlValidatorAndToRelContext.getSqlConverter(), dremioScalarUserDefinedFunction);
-    return sqlValidatorAndToRelContext.validateAndConvertScalarFunction(sqlNode, dremioScalarUserDefinedFunction.getName(), dremioScalarUserDefinedFunction.getParameters());
-  }
+      RexNode castedNode = rexBuilder.makeCast(
+        call.getType(),
+        rewrittenUdfExpression,
+        true);
 
-  private SqlNode parse(SqlConverter sqlConverter, DremioScalarUserDefinedFunction udf) {
-    SqlNode sqlNode = sqlConverter.parse(udf.getFunctionSql());
-    if (sqlNode instanceof SqlSelect) {
-      SqlSelect sqlSelect = (SqlSelect) sqlNode;
-      Preconditions.checkState(null == sqlSelect.getFrom());
-      Preconditions.checkState(sqlSelect.getSelectList().size() == 1);
-      return sqlSelect.getSelectList().get(0);
-    } else {
-      throw new RuntimeException();
+      return castedNode;
     }
   }
-}
-
+  private static final class RexInputRefToFieldAccess extends RexShuttle {
+    private final RexBuilder rexBuilder;
+    private final RexNode rexCorrelVariable;
 
-class UdfExpander extends RexShuttle {
-  private final ScalarParserValidator scalarParserValidator;
-  private final RexBuilder rexBuilder;
+    public RexInputRefToFieldAccess(RexBuilder rexBuilder, RexNode rexCorrelVariable) {
+      this.rexBuilder = rexBuilder;
+      this.rexCorrelVariable = rexCorrelVariable;
+    }
 
-  public UdfExpander(ScalarParserValidator scalarParserValidator, RexBuilder rexBuilder) {
-    this.scalarParserValidator = scalarParserValidator;
-    this.rexBuilder = rexBuilder;
+    @Override
+    public RexNode visitInputRef(RexInputRef inputRef) {
+      return rexBuilder.makeFieldAccess(rexCorrelVariable, inputRef.getIndex());
+    }
   }
 
-  @Override public RexNode visitCall(RexCall call) {
-    SqlOperator operator = call.getOperator();
-    RexCall converted = (RexCall) super.visitCall(call);
+  private static final class RexArgumentReplacer extends RexShuttle {
+    private final RelShuttle correlateRelReplacer;
+    private final RelShuttle refIndexRelReplacer;
+    private final RexShuttle correlateRexReplacer;
+    private final RexShuttle refIndexRexReplacer;
+    private final CorrelationId correlationId;
+
+    private RexArgumentReplacer(
+      RelShuttle correlateRelReplacer,
+      RelShuttle refIndexRelReplacer,
+      RexShuttle correlateRexReplacer,
+      RexShuttle refIndexRexReplacer,
+      CorrelationId correlationId) {
+      this.correlateRelReplacer = correlateRelReplacer;
+      this.refIndexRelReplacer = refIndexRelReplacer;
+      this.correlateRexReplacer = correlateRexReplacer;
+      this.refIndexRexReplacer = refIndexRexReplacer;
+      this.correlationId = correlationId;
+    }
 
-    if (operator instanceof SqlUserDefinedFunction) {
-      Function function = ((SqlUserDefinedFunction) operator).getFunction();
-      if(function instanceof DremioScalarUserDefinedFunction) {
-
-        DremioScalarUserDefinedFunction dremioScalarUserDefinedFunction =
-          (DremioScalarUserDefinedFunction) function;
-
-        SqlValidatorAndToRelContext.FunctionBodyAndArguments functionBodyAndArguments =
-          scalarParserValidator.expand(dremioScalarUserDefinedFunction);
-        List<RexNode> paramRexList = converted.getOperands();
-        List<RexNode> transformedArguments = new ArrayList<>();
-        Preconditions.checkState(function.getParameters().size() == paramRexList.size());
-        for (int i = 0; i < paramRexList.size(); i++) {
-          RexNode paramRex = paramRexList.get(i);
-          FunctionParameter param = dremioScalarUserDefinedFunction.getParameters().get(i);
-          transformedArguments.add(rexBuilder.makeCast(param.getType(rexBuilder.getTypeFactory()), paramRex));
-        }
-
-        ReplaceArgumentsVisitor replaceArgumentsVisitor = new ReplaceArgumentsVisitor(
-          transformedArguments,
-          functionBodyAndArguments.getUserDefinedFunctionArgumentOperators());
-        RexNode expandedNode = functionBodyAndArguments.getFunctionBody()
-          .accept(replaceArgumentsVisitor)
-          .accept(this);
-        return rexBuilder.makeCast(call.getType(), expandedNode, true);
+    @Override
+    public RexNode visitSubQuery(RexSubQuery subQuery) {
+      // For a subquery we want to rewrite the RelNode using correlates:
+      RelNode rewrittenRelNode = subQuery.rel.accept(correlateRelReplacer);
+      boolean relRewritten = rewrittenRelNode != subQuery.rel;
+
+      // And the operands with ref indexes:
+      List<RexNode> rewrittenOperands = subQuery
+        .getOperands()
+        .stream()
+        .map(operand -> operand.accept(refIndexRexReplacer))
+        .collect(ImmutableList.toImmutableList());
+
+      // This is because the operands are in relation to the outer query
+      // And the RelNode is in relation to the inner query
+
+      // This is clearer in the case of IN vs EXISTS:
+      // IN($0, {
+      //   LogicalProject(DEPTNO=[$6])
+      //     ScanCrel(table=[cp.scott."EMP.json"], columns=[`EMPNO`, `ENAME`, `JOB`, `MGR`, `HIREDATE`, `SAL`, `DEPTNO`, `COMM`], splits=[1])
+      // })
+      //
+      // EXISTS({
+      //   LogicalFilter(condition=[=($6, $cor1.DEPTNO)])
+      //     ScanCrel(table=[cp.scott."EMP.json"], columns=[`EMPNO`, `ENAME`, `JOB`, `MGR`, `HIREDATE`, `SAL`, `DEPTNO`, `COMM`], splits=[1])
+      // })
+
+      CorrelationId rewrittenCorrelateId = relRewritten ? correlationId : null;
+      // TODO: add RexSubQuery.clone(CorrelationId) so we don't need this switch case
+      SqlKind kind = subQuery.op.kind;
+      switch (kind) {
+      case SCALAR_QUERY:
+        return RexSubQuery.scalar(rewrittenRelNode, rewrittenCorrelateId);
+
+      case EXISTS:
+        return RexSubQuery.exists(rewrittenRelNode, rewrittenCorrelateId);
+
+      case IN:
+        return RexSubQuery.in(rewrittenRelNode, rewrittenOperands, rewrittenCorrelateId);
+
+      case SOME:
+        return RexSubQuery.some(rewrittenRelNode, rewrittenOperands, (SqlQuantifyOperator) subQuery.op, rewrittenCorrelateId);
+
+      default:
+        throw new UnsupportedOperationException("Can not support kind: " + kind);
      }
    }
-    return converted;
+
+    @Override
+    public RexNode visitCall(final RexCall call) {
+      RexNode visitedCall = super.visitCall(call);
+      // For regular calls we replace with ref indexes
+      return visitedCall.accept(refIndexRexReplacer);
+    }
+
+    public static RexNode replaceArguments(
+      RexNode rexNode,
+      List<FunctionParameter> functionParameters,
+      List<RexNode> correlateReplacements,
+      List<RexNode> refIndexReplacements,
+      CorrelationId correlationId,
+      RexBuilder rexBuilder) {
+      RelShuttle correlateRelReplacer = ParameterizedQueryParameterReplacer.createRelParameterReplacer(functionParameters, correlateReplacements, rexBuilder);
+      RelShuttle refIndexRelReplacer = ParameterizedQueryParameterReplacer.createRelParameterReplacer(functionParameters, refIndexReplacements, rexBuilder);
+      RexShuttle correlateRexReplacer = ParameterizedQueryParameterReplacer.createRexParameterReplacer(functionParameters, correlateReplacements, rexBuilder);
+      RexShuttle refIndexRexReplacer = ParameterizedQueryParameterReplacer.createRexParameterReplacer(functionParameters, refIndexReplacements, rexBuilder);
+      RexArgumentReplacer compositeReplacer = new RexArgumentReplacer(
+        correlateRelReplacer,
+        refIndexRelReplacer,
+        correlateRexReplacer,
+        refIndexRexReplacer,
+        correlationId);
+      return rexNode.accept(compositeReplacer);
+    }
   }
 }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/TabularUserDefinedFunctionExpanderRule.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/TabularUserDefinedFunctionExpanderRule.java
new file mode 100644
index 0000000000..bb35c58795
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/TabularUserDefinedFunctionExpanderRule.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.catalog.udf;
+
+
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.plan.RelRule;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.TableFunctionScan;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.schema.Function;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.validate.SqlUserDefinedFunction;
+
+import com.dremio.exec.planner.common.MoreRelOptUtil;
+import com.dremio.exec.planner.sql.SqlConverter;
+
+public final class TabularUserDefinedFunctionExpanderRule extends RelRule<RelRule.Config> {
+  private final SqlConverter sqlConverter;
+
+  public TabularUserDefinedFunctionExpanderRule(SqlConverter sqlConverter) {
+    super(Config.EMPTY
+      .withDescription("TabularUserDefinedFunctionExpanderRule")
+      .withOperandSupplier(op1 ->
+        op1.operand(TableFunctionScan.class).anyInputs()));
+    this.sqlConverter = sqlConverter;
+  }
+
+  @Override
+  public void onMatch(RelOptRuleCall call) {
+    TableFunctionScan tableFunctionScan = call.rel(0);
+    RexCall rexCall = (RexCall) tableFunctionScan.getCall();
+    SqlOperator operator = rexCall.getOperator();
+    if (!(operator instanceof SqlUserDefinedFunction)) {
+      return;
+    }
+
+    SqlUserDefinedFunction userDefinedFunction = (SqlUserDefinedFunction) operator;
+    Function function = userDefinedFunction.getFunction();
+    if (!(function instanceof DremioTabularUserDefinedFunction)) {
+      return;
+    }
+
+    DremioTabularUserDefinedFunction tabularFunction = (DremioTabularUserDefinedFunction) function;
+    RelNode tabularFunctionPlan = tabularFunction.extractFunctionPlan(sqlConverter);
+    tabularFunctionPlan = ParameterizedQueryParameterReplacer.replaceParameters(
+      tabularFunctionPlan,
+      tabularFunction.getParameters(),
+      rexCall.operands,
+      tableFunctionScan.getCluster().getRexBuilder());
+    RelNode castProject = MoreRelOptUtil.createCastRel(
+      tabularFunctionPlan,
+      tableFunctionScan.getRowType());
+    call.transformTo(castProject);
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/UserDefinedFunctionArgumentOperator.java b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/UserDefinedFunctionArgumentOperator.java
index cc816066da..78471c1ef4 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/UserDefinedFunctionArgumentOperator.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/catalog/udf/UserDefinedFunctionArgumentOperator.java
@@ -15,8 +15,6 @@
  */
 package com.dremio.exec.catalog.udf;
 
-import java.util.List;
-
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.schema.FunctionParameter;
 import org.apache.calcite.sql.SqlFunctionCategory;
@@ -24,33 +22,39 @@ import org.apache.calcite.sql.fun.SqlBaseContextVariable;
 
 import com.dremio.exec.planner.types.SqlTypeFactoryImpl;
-import com.google.common.collect.ImmutableList;
 
 /**
  * This is a work around since for creating a different rex node type
  */
 public class UserDefinedFunctionArgumentOperator {
 
-  public static List<SqlOperator> createArgumentOperator(
+  public static SqlOperator createArgumentOperator(
     String udfName,
-    List<FunctionParameter> parameters) {
-
-    return parameters.stream()
-      .map(p ->
-        new ScalarArgumentOperator(
-          udfName + p.getName(),
-          p.getName(),
-          p.getType(SqlTypeFactoryImpl.INSTANCE)))
-      .collect(ImmutableList.toImmutableList());
+    FunctionParameter parameter) {
+    return new ScalarArgumentOperator(
+      udfName + "::" + parameter.getName(),
+      parameter.getName(),
+      parameter.getOrdinal(),
+      parameter.getType(SqlTypeFactoryImpl.INSTANCE));
   }
 
  public abstract static class ArgumentOperator extends SqlBaseContextVariable {
+    private final int ordinal;
     private final RelDataType returnRelDataType;
-    public ArgumentOperator(String name,
-      RelDataType returnRelDataType, SqlFunctionCategory category) {
+
+    public ArgumentOperator(
+      int ordinal,
+      String name,
+      RelDataType returnRelDataType,
+      SqlFunctionCategory category) {
       super(name, (sqlOperatorBinding)-> returnRelDataType, category);
+      this.ordinal = ordinal;
       this.returnRelDataType = returnRelDataType;
     }
 
+    public int getOrdinal() {
+      return ordinal;
+    }
+
     public RelDataType getReturnRelDataType(){
       return returnRelDataType;
     }
@@ -59,8 +63,8 @@ public RelDataType getReturnRelDataType(){
 class ScalarArgumentOperator extends UserDefinedFunctionArgumentOperator.ArgumentOperator {
   public final String namePath;
 
-  public ScalarArgumentOperator(String namePath, String name, RelDataType relDataType) {
-    super(name, relDataType, SqlFunctionCategory.USER_DEFINED_FUNCTION);
+  public ScalarArgumentOperator(String namePath, String name, int ordinal, RelDataType relDataType) {
+    super(ordinal, name, relDataType, SqlFunctionCategory.USER_DEFINED_FUNCTION);
     this.namePath = namePath;
   }
 
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/dotfile/View.java b/sabot/kernel/src/main/java/com/dremio/exec/dotfile/View.java
index 70da32b456..15927ae616 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/dotfile/View.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/dotfile/View.java
@@ -142,6 +142,7 @@ public FieldType(String name, RelDataType dataType) {
       case INTERVAL_MINUTE_SECOND:
       case INTERVAL_SECOND:
         p = dataType.getIntervalQualifier().getStartPrecisionPreservingDefault();
+        break;
       default:
         break;
       }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/CaseFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/CaseFunctions.java
index d143b5a9ec..d4be953eb7 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/CaseFunctions.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/CaseFunctions.java
@@ -86,7 +86,7 @@ public static void loadInstance(OperatorContext context) {
     final AbstractFunctionHolder gtFnHolder = lookupIntFunction(GT_FUNCTION_NAME, context);
     final AbstractFunctionHolder ltFnHolder = lookupIntFunction(LT_FUNCTION_NAME, context);
     if (eqFnHolder != null && gtFnHolder != null && ltFnHolder != null) {
-      INSTANCE = lref = new CaseFunctions(eqFnHolder, gtFnHolder, ltFnHolder);
+      INSTANCE = new CaseFunctions(eqFnHolder, gtFnHolder, ltFnHolder);
     } else {
       throw new RuntimeException("Unable to load/find basic functions from registry. Should never happen");
Should never happen"); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/CodeGenContext.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/CodeGenContext.java index a39956b996..e1414b224a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/CodeGenContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/CodeGenContext.java @@ -202,6 +202,7 @@ public int getSizeOfChildren() { } + @Override public String toString() { LogicalExpression childWithoutContext = CodeGenerationContextRemover.removeCodeGenContext (child); @@ -218,6 +219,7 @@ public boolean equals(Object obj) { return child.equals(obj); } + @Override public T accept(ExprVisitor visitor, V value) throws E { return child.accept(visitor, value); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/DirectExpression.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/DirectExpression.java index d38b212241..fe85a567bf 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/DirectExpression.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/DirectExpression.java @@ -28,6 +28,7 @@ private DirectExpression(final String source) { this.source = source; } + @Override public void generate( JFormatter f ) { f.p('(').p(source).p(')'); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/EvaluationVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/EvaluationVisitor.java index 0a35aa00bc..769b417318 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/EvaluationVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/EvaluationVisitor.java @@ -852,6 +852,7 @@ private HoldingContainer visitReturnValueExpression(ReturnValueExpression e, Cla return null; } + @Override public HoldingContainer visitQuotedStringConstant(QuotedString e, ClassGenerator generator) throws RuntimeException { CompleteType completeType = CompleteType.VARCHAR; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionMaterializationVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionMaterializationVisitor.java index fba958328d..d7555b0716 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionMaterializationVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionMaterializationVisitor.java @@ -373,8 +373,6 @@ private LogicalExpression getIsTypeExpressionForType(MinorType type, LogicalExpr public LogicalExpression visitCaseExpression(CaseExpression caseExpression, FunctionLookupContext functionLookupContext) throws RuntimeException { List newConditions = new ArrayList<>(); LogicalExpression newElseExpr = caseExpression.elseExpr.accept(this, functionLookupContext); - final CompleteType elseType = newElseExpr.getCompleteType(); - final MinorType elseMinor = elseType.toMinorType(); CompleteType outputType = caseExpression.outputType; boolean newElseExprReWritten = false; @@ -383,31 +381,33 @@ public LogicalExpression visitCaseExpression(CaseExpression caseExpression, Func LogicalExpression newThen = conditionNode.thenExpr.accept(this, functionLookupContext); CaseExpression.CaseConditionNode condition = new CaseExpression.CaseConditionNode(newWhen, newThen); - final CompleteType thenType = newThen.getCompleteType(); - final MinorType thenMinor = thenType.toMinorType(); + final CompleteType newelseType = newElseExpr.getCompleteType(); + final MinorType newelseMinor = newelseType.toMinorType(); + final CompleteType newthenType = newThen.getCompleteType(); + final MinorType newthenMinor = newthenType.toMinorType(); // 
if the types aren't equal (and one of them isn't null), we need to unify them. - if(!thenType.equals(elseType) && !thenType.isNull() && !elseType.isNull()){ + if(!newthenType.equals(newelseType) && !newthenType.isNull() && !newelseType.isNull()){ final MinorType leastRestrictive = TypeCastRules.getLeastRestrictiveType((Arrays.asList - (thenMinor, elseMinor))); - if (leastRestrictive != thenMinor && leastRestrictive != elseMinor && leastRestrictive != + (newthenMinor, newelseMinor))); + if (leastRestrictive != newthenMinor && leastRestrictive != newelseMinor && leastRestrictive != null) { // Implicitly cast then and else to common type CompleteType toType = CompleteType.fromMinorType(leastRestrictive); condition = new CaseExpression.CaseConditionNode(newWhen, ExpressionTreeMaterializer .addImplicitCastExact(newThen, toType, functionLookupContext, errorCollector, allowGandivaFunctions)); newElseExpr = ExpressionTreeMaterializer.addImplicitCastExact(newElseExpr, toType, functionLookupContext, errorCollector, allowGandivaFunctions); - }else if (leastRestrictive != thenMinor) { + }else if (leastRestrictive != newthenMinor) { // Implicitly cast the then expression condition = new CaseExpression.CaseConditionNode(newWhen, ExpressionTreeMaterializer .addImplicitCastExact(newThen, newElseExpr.getCompleteType(), functionLookupContext, errorCollector, allowGandivaFunctions)); - } else if (leastRestrictive != elseMinor) { + } else if (leastRestrictive != newelseMinor) { // Implicitly cast the else expression newElseExpr = ExpressionTreeMaterializer.addImplicitCastExact(newElseExpr, newThen.getCompleteType(), functionLookupContext, errorCollector, allowGandivaFunctions); } else{ // casting didn't work, now we need to merge the types. - outputType = thenType.merge(elseType, ALLOW_MIXED_DECIMALS); + outputType = newthenType.merge(newelseType, ALLOW_MIXED_DECIMALS); condition = new CaseExpression.CaseConditionNode(newWhen, ExpressionTreeMaterializer .addImplicitCastExact(newElseExpr, outputType, functionLookupContext, errorCollector, allowGandivaFunctions)); newElseExpr = ExpressionTreeMaterializer.addImplicitCastExact(newElseExpr, outputType, functionLookupContext, errorCollector, allowGandivaFunctions); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionSplit.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionSplit.java index 23a758f75f..7da544ca84 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionSplit.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/ExpressionSplit.java @@ -109,6 +109,7 @@ public void setDependsOnSplits(Set dependsOnSplits) { this.dependsOnSplits = dependsOnSplits; } + @Override public String toString() { StringBuilder dependsOn = new StringBuilder(); for (String str : getDependsOnSplits()) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/ReturnValueExpression.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/ReturnValueExpression.java index 80e6b69fa0..275cdb09f0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/ReturnValueExpression.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/ReturnValueExpression.java @@ -70,10 +70,12 @@ public boolean isReturnTrueOnOne() { return returnTrueOnOne; } + @Override public int getSelfCost() { throw new UnsupportedOperationException(String.format("The type of %s doesn't currently support LogicalExpression.getSelfCost().", this.getClass().getCanonicalName())); } + @Override public int getCumulativeCost() { throw new 
UnsupportedOperationException(String.format("The type of %s doesn't currently support LogicalExpression.getCumulativeCost().", this.getClass().getCanonicalName())); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/BaseFunctionHolder.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/BaseFunctionHolder.java index f50c671b4b..0d9eb3a016 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/BaseFunctionHolder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/BaseFunctionHolder.java @@ -215,7 +215,7 @@ protected JVar[] declareWorkspaceVariables(ClassGenerator g, FunctionErrorCon protected void generateBody(ClassGenerator g, BlockType bt, String body, HoldingContainer[] inputVariables, JVar[] workspaceJVars, boolean decConstantInputOnly) { final String trimmedBody = Strings.nullToEmpty(body).trim(); - if (!trimmedBody.isEmpty() && !trimmedBody.equals("{}")) { + if (!trimmedBody.isEmpty() && !"{}".equals(trimmedBody)) { JBlock sub = new JBlock(true, true); if (decConstantInputOnly) { addProtectedBlock(g, sub, body, inputVariables, workspaceJVars, true); @@ -312,6 +312,10 @@ public boolean isFieldReader(int i) { @Override public CompleteType getReturnType(final List args) { + if (derivation instanceof OutputDerivation.Dummy) { + String functionName = registeredNames.length != 0 ? registeredNames[0] : "unknown"; + throw new UnsupportedOperationException(String.format("Unable to determine output type for %s function.", functionName)); + } return derivation.getOutputType(returnValue.type, args); } @@ -319,6 +323,7 @@ public String getReturnName(){ return returnValue.name; } + @Override public NullHandling getNullHandling() { return attributes.getNullHandling(); } @@ -447,6 +452,7 @@ public String getName() { } } + @Override public boolean checkPrecisionRange() { return false; } @@ -454,6 +460,7 @@ public boolean checkPrecisionRange() { /** * Does this function always return the same type, no matter the inputs? */ + @Override public boolean isReturnTypeIndependent(){ return derivation.getClass() == OutputDerivation.Default.class; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionErrorContextImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionErrorContextImpl.java index 27d85706fb..671e4a2713 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionErrorContextImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionErrorContextImpl.java @@ -69,28 +69,33 @@ private static class ExceptionBuilderImpl implements FunctionErrorContext.Except b = UserException.functionError(cause); } + @Override public ExceptionBuilder message(final String message) { b.message(message); return this; } + @Override @FormatMethod public ExceptionBuilder message(final String format, final Object... args) { b.message(format, args); return this; } + @Override public ExceptionBuilder addContext(final String value) { b.addContext(value); return this; } + @Override @FormatMethod public ExceptionBuilder addContext(final String value, Object... 
      b.addContext(value, args);
      return this;
    }
 
+    @Override
    public ExceptionBuilder addContext(final String name, final String value) {
      b.addContext(name, value);
      return this;
@@ -99,6 +104,7 @@ public ExceptionBuilder addContext(final String name, final String value) {
    /**
     * Builds an exception that can be thrown by the caller
     */
+    @Override
    public RuntimeException build() {
      return b.build(logger);
    }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionGenerationHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionGenerationHelper.java
index 91e8a3de18..6735ad71c4 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionGenerationHelper.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionGenerationHelper.java
@@ -33,7 +33,11 @@
 import com.dremio.exec.expr.HoldingContainerExpression;
 import com.google.common.collect.Lists;
 
-public class FunctionGenerationHelper {
+public final class FunctionGenerationHelper {
+  private FunctionGenerationHelper() {
+    // Utility class
+  }
+
   public static final String COMPARE_TO_NULLS_HIGH = "compare_to_nulls_high";
   public static final String COMPARE_TO_NULLS_LOW = "compare_to_nulls_low";
 
@@ -53,13 +57,13 @@ public static LogicalExpression getOrderingComparator(
       HoldingContainer left,
       HoldingContainer right,
       ClassProducer producer) {
-    final String comparator_name =
+    final String comparatorName =
       null_high ? COMPARE_TO_NULLS_HIGH : COMPARE_TO_NULLS_LOW;
 
     if (!left.getCompleteType().isComparable() || ! right.getCompleteType().isComparable()){
       throw new UnsupportedOperationException(formatCanNotCompareMsg(left.getCompleteType(), right.getCompleteType()));
     }
-    LogicalExpression comparisonFunctionExpression = getFunctionExpression(comparator_name, Types.required(MinorType.INT),
+    LogicalExpression comparisonFunctionExpression = getFunctionExpression(comparatorName, Types.required(MinorType.INT),
       left, right);
 
     if (!left.getCompleteType().isUnion() && !right.getCompleteType().isUnion()) {
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionRegistry.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionRegistry.java
index 62ca45712e..19042c5990 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionRegistry.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionRegistry.java
@@ -92,8 +92,8 @@ public FunctionRegistry(ScanResult classpathScan) {
         String functionName = name.toLowerCase();
         registeredFunctions.put(functionName, holder);
         String functionSignature = functionName + functionInput;
-        String existingImplementation;
-        if ((existingImplementation = functionSignatureMap.get(functionSignature)) != null) {
+        String existingImplementation = functionSignatureMap.get(functionSignature);
+        if (existingImplementation != null) {
           throw new AssertionError(
             String.format(
              "Conflicting functions with similar signature found. Func Name: %s, Class name: %s " +
Func Name: %s, Class name: %s " + @@ -129,6 +129,7 @@ public List getMethods(String name) { return this.registeredFunctions.get(name.toLowerCase()); } + @Override public void register(OperatorTable operatorTable, boolean isDecimalV2Enabled) { for (Entry> function : registeredFunctions.asMap().entrySet()) { final ArrayListMultimap, BaseFunctionHolder> functions = ArrayListMultimap.create(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/ItemsSketch/ItemsSketchFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/ItemsSketch/ItemsSketchFunctions.java index 467ff55540..4de571c1c2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/ItemsSketch/ItemsSketchFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/ItemsSketch/ItemsSketchFunctions.java @@ -83,6 +83,7 @@ public static class NullableIntegerItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -129,6 +130,7 @@ public static class NullableBigIntItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -175,6 +177,7 @@ public static class NullableInternalDayItemsSketchFunction implements AggrFuncti @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -221,6 +224,7 @@ public static class NullableIntervalYearsItemsSketchFunction implements AggrFunc @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -267,6 +271,7 @@ public static class NullableFloat4ItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -313,6 +318,7 @@ public static class NullableFloat8ItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -359,6 +365,7 @@ public static class NullableDateItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -406,6 +413,7 @@ public static class NullableTimeItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -452,6 +460,7 @@ public static class NullableTimeStampItemsSketchFunction implements AggrFunction @Inject OptionResolver options; + @Override public void setup() { sketch = new ObjectHolder(); maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE); @@ -498,6 +507,7 @@ public static class NullableBitItemsSketchFunction implements AggrFunction { @Inject OptionResolver options; + @Override 
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -544,6 +554,7 @@ public static class NullableVarCharItemsSketchFunction {
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -594,6 +605,7 @@ public static class NullableVarBinaryItemsSketchMergeNumber implements AggrFunct
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -642,6 +654,7 @@ public static class NullableVarBinaryItemsSketchMergeDouble implements AggrFunct
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -690,6 +703,7 @@ public static class NullableVarBinaryItemsSketchMergeVarchar implements AggrFunc
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -738,6 +752,7 @@ public static class NullableVarBinaryItemsSketchMergeLong implements AggrFunctio
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
@@ -787,6 +802,7 @@ public static class NullableVarBinaryItemsSketchMergeBoolean implements AggrFunc
 
     @Inject OptionResolver options;
 
+    @Override
     public void setup() {
       sketch = new ObjectHolder();
       maxSize.value = (int) options.getOption(com.dremio.exec.ExecConstants.ITEMS_SKETCH_MAX_SIZE);
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/OutputDerivation.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/OutputDerivation.java
index ca0e146fa3..c1554f6b7d 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/OutputDerivation.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/OutputDerivation.java
@@ -131,6 +131,7 @@ public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     assert (args.size() == 2) && (args.get(1) instanceof ValueExpressions.IntExpression);
     // Get the scale from the second argument which should be a constant
@@ -142,6 +143,7 @@ public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     assert (args.size() == 2) && (args.get(1) instanceof ValueExpressions.IntExpression);
     // Get the scale from the second argument which should be a constant
@@ -153,6 +155,7 @@ public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     ArrowType.Decimal type = getDecimalOutputTypeForRound(prec(args.get(0)),
       scale(args.get(0)), 0);
@@ -161,12 +164,14 @@ public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     return CompleteType.fromDecimalPrecisionScale(prec(args.get(0)), scale(args.get(0)));
   }
 }
 
 class DecimalZeroScaleTruncate implements OutputDerivation {
+  @Override
   public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     ArrowType.Decimal type = getDecimalOutputTypeForTruncate(prec(args.get(0)),
       scale(args.get(0)), 0);
@@ -216,6 +221,7 @@ static ArrowType.Decimal getDecimalOutputTypeForRound(int arg1Precision, int arg
  * trunc and round functions with single argument use this
  */
 class DecimalZeroScale implements OutputDerivation {
+  @Override
   public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) {
     int precision = 0;
     for (LogicalExpression e : args) {
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Alternator.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Alternator.java
index f3ff5a074b..173c4c3442 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Alternator.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Alternator.java
@@ -29,11 +29,13 @@ public static class Alternate2 implements SimpleFunction{
     @Workspace int val;
     @Output NullableBigIntHolder out;
 
+    @Override
     public void setup() {
       val = 0;
     }
 
+    @Override
     public void eval() {
       out.isSet = 1;
       out.value = val;
@@ -50,11 +52,13 @@ public static class Alternate3 implements SimpleFunction{
     @Workspace int val;
     @Output NullableBigIntHolder out;
 
+    @Override
     public void setup() {
       val = 0;
     }
 
+    @Override
     public void eval() {
       out.isSet = 1;
       out.value = val;
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ArrayContains.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ArrayContains.java
index 86d35ebae6..6c5e5f16fb 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ArrayContains.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ArrayContains.java
@@ -39,10 +39,12 @@ public static class ArrayContain implements SimpleFunction {
     @Inject private FunctionErrorContext errCtx;
     @Workspace private Object inputValue;
 
+    @Override
     public void setup() {
       inputValue = value.readObject();
     }
 
+    @Override
     public void eval() {
       if (!in.isSet() || in.readObject() == null || inputValue == null) {
         out.isSet = 0;
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BitFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BitFunctions.java
index f0e8a49d0f..c9f570d0ba 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BitFunctions.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BitFunctions.java
@@ -41,10 +41,12 @@ public static class BitCounter implements SimpleFunction {
     @Workspace int partitionNumber;
     @Output IntHolder out;
 
+    @Override
     public void setup() {
       partitionNumber = 0;
     }
 
+    @Override
     public void eval() {
       partitionNumber += input.value;
       out.value = partitionNumber;
@@ -59,8 +61,10 @@ public static class BitOr implements SimpleFunction {
     @Param BitHolder right;
     @Output BitHolder out;
 
+    @Override
     public void setup() {}
 
+    @Override
     public void eval() {
       out.value = left.value | right.value;
     }
@@ -74,8 +78,10 @@ public static class BitAnd implements SimpleFunction {
     @Param BitHolder right;
     @Output BitHolder out;
 
+    @Override
     public void setup() {}
 
+    @Override
     public void eval() {
       out.value = left.value & right.value;
     }
@@ -91,8 +97,10 @@ public static class IntXor implements SimpleFunction {
     @Param IntHolder right;
     @Output IntHolder out;
 
+    @Override
     public void setup() {}
 
+    @Override
     public void eval() {
       out.value = left.value ^ right.value;
     }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BooleanAggrFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BooleanAggrFunctions.java
index 2222b8999c..61b1e52615 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BooleanAggrFunctions.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/BooleanAggrFunctions.java
@@ -42,6 +42,7 @@ public static class NullableBitBooleanOr implements AggrFunction{
     @Workspace BitHolder inter;
     @Output NullableBitHolder out;
 
+    @Override
     public void setup() {
       inter = new BitHolder();
@@ -81,6 +82,7 @@ public static class NullableBitBooleanAnd implements AggrFunction{
     @Workspace BitHolder inter;
NullableBitHolder out; + @Override public void setup() { inter = new BitHolder(); @@ -81,6 +82,7 @@ public static class NullableBitBooleanAnd implements AggrFunction{ @Workspace BitHolder inter; @Output NullableBitHolder out; + @Override public void setup() { inter = new BitHolder(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Cardinality.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Cardinality.java new file mode 100644 index 0000000000..6f67745f6a --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Cardinality.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.expr.fn.impl; + + +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.holders.NullableIntHolder; + +import com.dremio.exec.expr.SimpleFunction; +import com.dremio.exec.expr.annotations.FunctionTemplate; +import com.dremio.exec.expr.annotations.Output; +import com.dremio.exec.expr.annotations.Param; + +public final class Cardinality { + + @FunctionTemplate(name = "cardinality", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL) + public static class ListCardinality implements SimpleFunction { + @Param + private FieldReader input; + @Output + private NullableIntHolder out; + + @Override + public void setup() { + } + + @Override + public void eval() { + //Runtime type-checking in order to disallow STRUCT + if (input.getMinorType() == org.apache.arrow.vector.types.Types.MinorType.LIST || + input.getMinorType() == org.apache.arrow.vector.types.Types.MinorType.MAP) { + if (input.isSet()) { + out.isSet = 1; + out.value = input.size(); + } else { + out.isSet = 0; + } + } else { + throw new UnsupportedOperationException( + String.format("Cannot apply 'CARDINALITY' to arguments of type 'CARDINALITY(<%s>)'. 
" + + "Supported form(s): 'CARDINALITY(|)'", input.getMinorType().toString() + ) + ); + } + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastIntervalDayToBigInt.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastIntervalDayToBigInt.java index 56cbdce1e1..09ea04e250 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastIntervalDayToBigInt.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastIntervalDayToBigInt.java @@ -36,9 +36,11 @@ public class CastIntervalDayToBigInt implements SimpleFunction { @Output BigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = (long) in.milliseconds + (long) in.days * (long) org.apache.arrow.vector.util.DateUtility.daysToStandardMillis; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastTimestampToBigInt.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastTimestampToBigInt.java index 3e720935ea..dd9fa6fb74 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastTimestampToBigInt.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/CastTimestampToBigInt.java @@ -33,9 +33,11 @@ public class CastTimestampToBigInt implements SimpleFunction { @Output BigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = in.value; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ContainsFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ContainsFunctions.java index ee9ec0e2dc..ab471cba1d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ContainsFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ContainsFunctions.java @@ -41,9 +41,11 @@ public static class Contains implements SimpleFunction { @Inject FunctionErrorContext errCtx; + @Override public void setup() { } + @Override public void eval() { if (true) { throw errCtx.error() diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DateTimestampMinusFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DateTimestampMinusFunctions.java index cf400aba1d..f46ca0e3bd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DateTimestampMinusFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DateTimestampMinusFunctions.java @@ -36,9 +36,11 @@ public static class DateDiff implements SimpleFunction { @Output IntervalDayHolder out; + @Override public void setup() { } + @Override public void eval() { out.days = (int) ((input1.value - input2.value) / org.apache.arrow.vector.util.DateUtility.daysToStandardMillis); out.milliseconds = 0; @@ -53,9 +55,11 @@ public static class TimestampDiff implements SimpleFunction { @Output IntervalDayHolder out; + @Override public void setup() { } + @Override public void eval() { long difference = (input1.value - input2.value); out.milliseconds = (int) (difference % org.apache.arrow.vector.util.DateUtility.daysToStandardMillis); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DecimalFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DecimalFunctions.java index 4cbcb778f5..84d99ae8f3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DecimalFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DecimalFunctions.java @@ -457,6 +457,7 @@ public static class NullableDecimalSum implements AggrFunction { @Workspace NullableBigIntHolder 
nonNullCount; @Output NullableFloat8Holder out; + @Override public void setup() { sum = new NullableFloat8Holder(); sum.isSet = 1; @@ -465,6 +466,7 @@ public void setup() { nonNullCount.isSet = 1; nonNullCount.value = 0; } + @Override public void add() { if (in.isSet != 0) { long index = (in.start / (org.apache.arrow.vector.DecimalVector.TYPE_WIDTH)); @@ -473,6 +475,7 @@ public void add() { nonNullCount.value++; } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -482,6 +485,7 @@ public void output() { out.isSet = 0; } } + @Override public void reset() { sum.value = 0; nonNullCount.value = 0; @@ -495,11 +499,13 @@ public static class NullableDecimalSumZero implements AggrFunction { @Workspace NullableFloat8Holder sum; @Output NullableFloat8Holder out; + @Override public void setup() { sum = new NullableFloat8Holder(); sum.isSet = 1; sum.value = 0; } + @Override public void add() { if (in.isSet == 1) { long index = (in.start / (org.apache.arrow.vector.DecimalVector.TYPE_WIDTH)); @@ -507,10 +513,12 @@ public void add() { sum.value += bd.doubleValue(); } } + @Override public void output() { out.isSet = 1; out.value = sum.value; } + @Override public void reset() { sum.value = 0; } @@ -524,6 +532,7 @@ public static class NullableDecimalMin implements AggrFunction { @Workspace NullableBigIntHolder nonNullCount; @Output NullableFloat8Holder out; + @Override public void setup() { minVal = new NullableFloat8Holder(); minVal.isSet = 1; @@ -532,6 +541,7 @@ public void setup() { nonNullCount.isSet = 1; nonNullCount.value = 0; } + @Override public void add() { if (in.isSet != 0) { nonNullCount.value = 1; @@ -543,6 +553,7 @@ public void add() { } } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -552,6 +563,7 @@ public void output() { out.isSet = 0; } } + @Override public void reset() { minVal.value = 0; nonNullCount.value = 0; @@ -566,6 +578,7 @@ public static class NullableDecimalMax implements AggrFunction { @Workspace NullableBigIntHolder nonNullCount; @Output NullableFloat8Holder out; + @Override public void setup() { maxVal = new NullableFloat8Holder(); maxVal.isSet = 1; @@ -574,6 +587,7 @@ public void setup() { nonNullCount.isSet = 1; nonNullCount.value = 0; } + @Override public void add() { if (in.isSet != 0) { nonNullCount.value = 1; @@ -585,6 +599,7 @@ public void add() { } } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -594,6 +609,7 @@ public void output() { out.isSet = 0; } } + @Override public void reset() { maxVal.value = 0; nonNullCount.value = 0; @@ -610,6 +626,7 @@ public static class NullableDecimalSumV2 implements AggrFunction { @Output NullableDecimalHolder out; @Inject ArrowBuf buffer; + @Override public void setup() { sum = new NullableDecimalHolder(); sum.isSet = 1; @@ -623,6 +640,7 @@ public void setup() { nonNullCount.value = 0; } + @Override public void add() { if (in.isSet == 1) { com.dremio.exec.util.DecimalUtils.addSignedDecimalInLittleEndianBytes(sum.buffer, org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt(sum.start), in.buffer, @@ -631,6 +649,7 @@ public void add() { } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -642,6 +661,7 @@ public void output() { } } + @Override public void reset() { nonNullCount.value = 0; java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0); @@ -662,6 +682,7 @@ public static class NullableDecimalSumZeroV2 implements AggrFunction { @Output NullableDecimalHolder 
out; @Inject ArrowBuf buffer; + @Override public void setup() { sum = new NullableDecimalHolder(); sum.isSet = 1; @@ -672,6 +693,7 @@ public void setup() { org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0, org.apache.arrow.vector.DecimalVector.TYPE_WIDTH); } + @Override public void add() { if (in.isSet == 1) { com.dremio.exec.util.DecimalUtils.addSignedDecimalInLittleEndianBytes(sum.buffer, org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt(sum.start), in.buffer, @@ -679,12 +701,14 @@ public void add() { } } + @Override public void output() { out.isSet = 1; out.buffer = sum.buffer; out.start = sum.start; } + @Override public void reset() { java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0); org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0, org.apache.arrow.vector.DecimalVector.TYPE_WIDTH); @@ -701,6 +725,7 @@ public static class NullableDecimalMinV2 implements AggrFunction { @Output NullableDecimalHolder out; @Inject ArrowBuf buffer; + @Override public void setup() { minVal = new NullableDecimalHolder(); minVal.isSet = 1; @@ -713,6 +738,7 @@ public void setup() { nonNullCount.value = 0; } + @Override public void add() { if (in.isSet != 0) { nonNullCount.value = 1; @@ -723,6 +749,7 @@ public void add() { } } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -734,6 +761,7 @@ public void output() { } } + @Override public void reset() { org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MAX_DECIMAL, minVal.buffer, 0, org.apache.arrow.vector.DecimalVector.TYPE_WIDTH); nonNullCount.value = 0; @@ -751,6 +779,7 @@ public static class NullableDecimalMaxV2 implements AggrFunction { @Output NullableDecimalHolder out; @Inject ArrowBuf buffer; + @Override public void setup() { maxVal = new NullableDecimalHolder(); maxVal.isSet = 1; @@ -762,6 +791,7 @@ public void setup() { nonNullCount.isSet = 1; nonNullCount.value = 0; } + @Override public void add() { if (in.isSet != 0) { nonNullCount.value = 1; @@ -772,6 +802,7 @@ public void add() { } } } + @Override public void output() { if (nonNullCount.value > 0) { out.isSet = 1; @@ -782,6 +813,7 @@ public void output() { out.isSet = 0; } } + @Override public void reset() { org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MIN_DECIMAL, maxVal.buffer, 0, org.apache.arrow.vector.DecimalVector.TYPE_WIDTH); nonNullCount.value = 0; @@ -803,8 +835,10 @@ public static class CompareDecimalVsDecimalNullsHigh implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; outside: @@ -843,8 +877,10 @@ public static class CompareDecimalVsDecimalNullsLow implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; outside: diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DremioArgChecker.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DremioArgChecker.java index 09322d6f7b..0ffd806bdc 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DremioArgChecker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DremioArgChecker.java @@ -49,10 +49,12 @@ public DremioArgChecker(boolean allowAny, Checker... 
checkers) { this.allowAny = allowAny; } + @Override public boolean isOptional(int i) { return false; } + @Override public boolean checkSingleOperandType( SqlCallBinding callBinding, SqlNode node, @@ -128,6 +130,7 @@ private boolean checkOp(Checker checker, SqlCallBinding callBinding, SqlNode nod return true; } + @Override public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure) { if (checkers.size() != callBinding.getOperandCount()) { // assume this is an inapplicable sub-rule of a composite rule; @@ -147,14 +150,17 @@ public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFail return true; } + @Override public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.between(0, checkers.size()); } + @Override public String getAllowedSignatures(SqlOperator op, String opName) { return String.format(signature, opName); } + @Override public Consistency getConsistency() { return Consistency.NONE; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/FloatingPointDivideFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/FloatingPointDivideFunctions.java index f0e2f51132..7a6a8b28a0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/FloatingPointDivideFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/FloatingPointDivideFunctions.java @@ -46,9 +46,11 @@ public static class Float4Float4Divide implements SimpleFunction { @Inject FunctionErrorContext errCtx; + @Override public void setup() { } + @Override public void eval() { if(in2.value == 0.0) { throw errCtx.error() @@ -69,9 +71,11 @@ public static class Float8Float8Divide implements SimpleFunction { @Inject FunctionErrorContext errCtx; + @Override public void setup() { } + @Override public void eval() { if(in2.value == 0.0) { throw errCtx.error() diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoFunctions.java index eb131b950c..9ba6d8fdc9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoFunctions.java @@ -15,35 +15,60 @@ */ package com.dremio.exec.expr.fn.impl; +import java.util.ArrayList; +import java.util.List; + +import javax.inject.Inject; + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.holders.BigIntHolder; import org.apache.arrow.vector.holders.BitHolder; import org.apache.arrow.vector.holders.Float4Holder; import org.apache.arrow.vector.holders.Float8Holder; +import org.apache.arrow.vector.holders.NullableVarCharHolder; +import org.apache.arrow.vector.holders.VarCharHolder; +import org.apache.arrow.vector.types.FloatingPointPrecision; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import com.dremio.common.expression.CompleteType; +import com.dremio.common.expression.LogicalExpression; import com.dremio.exec.expr.SimpleFunction; import com.dremio.exec.expr.annotations.FunctionTemplate; import com.dremio.exec.expr.annotations.FunctionTemplate.NullHandling; import com.dremio.exec.expr.annotations.Output; import com.dremio.exec.expr.annotations.Param; +import com.dremio.exec.expr.annotations.Workspace; +import com.dremio.exec.expr.fn.FunctionErrorContext; +import com.dremio.exec.expr.fn.OutputDerivation; 
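// A minimal, self-contained sketch (hypothetical class name GeohashSketch, not part of this change)
// of the bisection algorithm that GeoHelper.encodeGeohash below implements: even-numbered bits
// refine the longitude interval, odd-numbered bits the latitude interval, and every 5 bits emit
// one base32 character.
final class GeohashSketch {
  // Same base32 alphabet as GeoHelper.baseHashValues ('a', 'i', 'l' and 'o' are excluded).
  private static final char[] BASE32 = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray();

  static String encode(double lat, double lon, int precision) {
    double latLo = -90.0, latHi = 90.0, lonLo = -180.0, lonHi = 180.0;
    StringBuilder hash = new StringBuilder(precision);
    int ch = 0;             // 5-bit accumulator for the next output character
    int bits = 0;           // number of bits currently in the accumulator
    boolean evenBit = true; // even bits bisect longitude, odd bits latitude
    while (hash.length() < precision) {
      double mid;
      if (evenBit) {
        mid = (lonLo + lonHi) / 2;
        if (lon > mid) { ch = (ch << 1) | 1; lonLo = mid; } else { ch = ch << 1; lonHi = mid; }
      } else {
        mid = (latLo + latHi) / 2;
        if (lat > mid) { ch = (ch << 1) | 1; latLo = mid; } else { ch = ch << 1; latHi = mid; }
      }
      evenBit = !evenBit;
      if (++bits == 5) { hash.append(BASE32[ch]); bits = 0; ch = 0; }
    }
    return hash.toString();
  }

  public static void main(String[] args) {
    // The classic Wikipedia example point encodes to "ezs42" at precision 5.
    System.out.println(GeohashSketch.encode(42.605, -5.603, 5));
  }
}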
/** * Geo functions */ public class GeoFunctions { - - + public static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(com.dremio.exec.expr.fn.impl.GeoFunctions.class); @FunctionTemplate(name = "geo_distance", nulls = NullHandling.NULL_IF_NULL) public static class GeoDistance implements SimpleFunction { - @Param Float4Holder lat1; - @Param Float4Holder lon1; - @Param Float4Holder lat2; - @Param Float4Holder lon2; - @Output Float8Holder out; + @Param + Float4Holder lat1; + @Param + Float4Holder lon1; + @Param + Float4Holder lat2; + @Param + Float4Holder lon2; + @Output + Float8Holder out; + @Override public void setup() { } + @Override public void eval() { - double distance = com.dremio.exec.expr.fn.impl.GeoHelper.distance(lat1.value,lon1.value,lat2.value,lon2.value); + double distance = com.dremio.exec.expr.fn.impl.GeoHelper.distance(lat1.value, lon1.value, lat2.value, lon2.value); out.value = distance; } @@ -51,16 +76,24 @@ public void eval() { @FunctionTemplate(name = "geo_nearby", nulls = NullHandling.NULL_IF_NULL) public static class GeoNearby implements SimpleFunction { - @Param Float4Holder lat1; - @Param Float4Holder lon1; - @Param Float4Holder lat2; - @Param Float4Holder lon2; - @Param(constant = true) Float8Holder distance; - @Output BitHolder out; + @Param + Float4Holder lat1; + @Param + Float4Holder lon1; + @Param + Float4Holder lat2; + @Param + Float4Holder lon2; + @Param(constant = true) + Float8Holder distance; + @Output + BitHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = com.dremio.exec.expr.fn.impl.GeoHelper.isNear(lat1.value, lon1.value, lat2.value, lon2.value, distance.value) ? 1 : 0; } @@ -69,18 +102,163 @@ public void eval() { @FunctionTemplate(name = "geo_beyond", nulls = NullHandling.NULL_IF_NULL) public static class GeoBeyond implements SimpleFunction { - @Param Float4Holder lat1; - @Param Float4Holder lon1; - @Param Float4Holder lat2; - @Param Float4Holder lon2; - @Param(constant = true) Float8Holder distance; - @Output BitHolder out; + @Param + Float4Holder lat1; + @Param + Float4Holder lon1; + @Param + Float4Holder lat2; + @Param + Float4Holder lon2; + @Param(constant = true) + Float8Holder distance; + @Output + BitHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = com.dremio.exec.expr.fn.impl.GeoHelper.isNear(lat1.value, lon1.value, lat2.value, lon2.value, distance.value) ? 
0 : 1; } } + @FunctionTemplate(name = "st_geohash", nulls = NullHandling.NULL_IF_NULL) + public static class GeoHashEncode implements SimpleFunction { + @Param + Float8Holder lat; + @Param + Float8Holder lon; + @Output + VarCharHolder out; + @Inject + ArrowBuf buffer; + @Inject + FunctionErrorContext errCtx; + + @Override + public void setup() { + } + + @Override + public void eval() { + out.start = 0; + out.end = 0; + String result = com.dremio.exec.expr.fn.impl.GeoHelper.encodeGeohash(lat.value, lon.value, errCtx); + byte[] byteString = result.getBytes(); + + buffer = buffer.reallocIfNeeded(byteString.length); + out.buffer = buffer; + out.buffer.setBytes(out.start, byteString); + out.end = byteString.length; + out.buffer.readerIndex(out.start); + out.buffer.writerIndex(byteString.length); + + } + } + @FunctionTemplate(name = "st_geohash", nulls = NullHandling.NULL_IF_NULL) + public static class GeoHashEncodePrecision implements SimpleFunction { + @Param + Float8Holder lat; + @Param + Float8Holder lon; + @Param + BigIntHolder precision; + @Output + VarCharHolder out; + @Inject + ArrowBuf buffer; + @Inject + FunctionErrorContext errCtx; + @Override + public void setup() { + } + @Override + public void eval() { + out.start = 0; + out.end = 0; + String result = com.dremio.exec.expr.fn.impl.GeoHelper.encodeGeohash(lat.value, lon.value, precision.value, errCtx); + byte[] byteString = result.getBytes(); + + buffer = buffer.reallocIfNeeded(byteString.length); + out.buffer = buffer; + out.buffer.setBytes(out.start, byteString); + out.end = byteString.length; + out.buffer.readerIndex(out.start); + out.buffer.writerIndex(byteString.length); + + } + } + @FunctionTemplate(name = "st_fromgeohash", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, isDeterministic = false, derivation = ListOfGeo.class) + public static class GeoHashDecode implements SimpleFunction { + @Param + NullableVarCharHolder encoded; + @Output + BaseWriter.ComplexWriter out; + @Inject + ArrowBuf buffer; + @Inject + FunctionErrorContext errCtx; + //Cache decode results since clients may need to access struct elements repeatedly. + @Workspace + java.util.Map decodedHashes; + @Workspace + int cacheMisses; + @Workspace + int cacheHits; + @Override + public void setup() { + decodedHashes = new java.util.HashMap<>(); + cacheMisses = 0; + cacheHits = 0; + } + @Override + public void eval() { + final int maxCacheSize = 100; + if (encoded.end <= encoded.start || encoded.isSet == 0) { + throw errCtx.error() + .message(com.dremio.exec.expr.fn.impl.GeoHelper.INVALID_HASH_MSG) + .build(); + } + String hash = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(encoded.start, encoded.end, encoded.buffer); + + double[] res; + if (decodedHashes.containsKey(hash)) { + res = (double[])decodedHashes.get(hash); + cacheHits++; + } else { + cacheMisses++; + res = com.dremio.exec.expr.fn.impl.GeoHelper.decodeGeohash(hash, errCtx); + if (decodedHashes.size() <= maxCacheSize) { + decodedHashes.put(hash, res); + } else { + com.dremio.exec.expr.fn.impl.GeoFunctions.logger.debug("Geohash cache has reached the maximum size:" + maxCacheSize); + } + } + + if (res.length != 2) { + //This shouldn't happen, but check the size for safety. 
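+ // decodeGeohash returns a two-element {latitude, longitude} array, so any other length signals an internal error rather than bad user input.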
+ throw errCtx.error() + .message("st_fromgeohash computed results in the wrong format") + .build(); + } + com.dremio.exec.expr.fn.impl.GeoFunctions.logger.debug("Geohash cache hit/miss:" + cacheHits + "/" + cacheMisses); + org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter positionWriter = out.rootAsStruct(); + positionWriter.start(); + positionWriter.float8("Latitude").writeFloat8(res[0]); + positionWriter.float8("Longitude").writeFloat8(res[1]); + positionWriter.end(); + } + } + public static class ListOfGeo implements OutputDerivation { + @Override + public CompleteType getOutputType(CompleteType baseReturn, List<LogicalExpression> args) { + List<Field> children = new ArrayList<>(); + children.add(new Field("Latitude", FieldType.notNullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null)); + children.add(new Field("Longitude", FieldType.notNullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null)); + Field f = new Field("Results", FieldType.notNullable(new ArrowType.Struct()), children); + return CompleteType.fromField(f); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoHelper.java index 8f46cbecfa..579a398d22 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/GeoHelper.java @@ -21,8 +21,19 @@ import static java.lang.Math.sqrt; import static java.lang.Math.toRadians; +import java.util.HashMap; +import java.util.Map; + +import com.dremio.exec.expr.fn.FunctionErrorContext; + public class GeoHelper { + static final long DEFAULT_GEO_HASH_PRECISION = 20; + static final long GEO_HASH_PRECISION_MIN = 1; + static final long GEO_HASH_PRECISION_MAX = 20; + public static final String INVALID_HASH_MSG = "geohash must be a valid, base32-encoded geohash"; + static final Character[] baseHashValues = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'}; + /** * Return a Haversine distance in meters between two points. 
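* Uses a mean Earth radius R of 6,371,000 m, so a one-degree arc (pi/180 radians) works out to roughly 111,195 m.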
* @param lat1 Lat of point 1 @@ -37,27 +48,169 @@ public class GeoHelper { * @return distance between two points in meters */ public static double distance(final double lat1, final double lon1, final double lat2, final double lon2) { - double R = 6_371_000.0d; // Radius of the earth in m - double dLat = toRadians(lat2 - lat1); // deg2rad below - double dLon = toRadians(lon2 - lon1); - double sinDLat = sin(dLat / 2.d); - double sinDLon = sin(dLon / 2.d); - double a = sinDLat * sinDLat + cos(toRadians(lat1)) * cos(toRadians(lat2)) * sinDLon * sinDLon; - double c = 2.d * atan2(sqrt(a), sqrt(1.d - a)); - return R * c; // Distance in Km + double R = 6_371_000.0d; // Radius of the earth in m + double dLat = toRadians(lat2 - lat1); // deg2rad below + double dLon = toRadians(lon2 - lon1); + double sinDLat = sin(dLat / 2.d); + double sinDLon = sin(dLon / 2.d); + double a = sinDLat * sinDLat + cos(toRadians(lat1)) * cos(toRadians(lat2)) * sinDLon * sinDLon; + double c = 2.d * atan2(sqrt(a), sqrt(1.d - a)); + return R * c; // Distance in m + } + /** + * Encode a point as a geohash at the default precision. + * @param lat latitude in degrees + * @param lon longitude in degrees + * @param errCtx error context used to report invalid input + * @return the encoded geohash + */ + public static String encodeGeohash(final double lat, final double lon, FunctionErrorContext errCtx) { + return encodeGeohash(lat, lon, DEFAULT_GEO_HASH_PRECISION, errCtx); + } + /** + * Return the GeoHash of a point, with an optional level of precision. + * The 'official' site (http://geohash.org/site/tips.html) is sparse and points to Wikipedia for documentation. + * https://en.wikipedia.org/wiki/Geohash + * @param lat latitude in degrees + * @param lon longitude in degrees + * @param precision number of base32 characters to produce + * @param errCtx error context used to report invalid input + * @return the encoded geohash as a String + */ + public static String encodeGeohash(final double lat, final double lon, final long precision, FunctionErrorContext errCtx) { + + if (precision < GEO_HASH_PRECISION_MIN || precision > GEO_HASH_PRECISION_MAX){ + String errorMessage = "precision must be between " + GEO_HASH_PRECISION_MIN + " and " + GEO_HASH_PRECISION_MAX; + throw errCtx.error() + .message(errorMessage) + .build(); + } + if (lat < -90.0 || lat > 90.0) { + throw errCtx.error() + .message("latitude must be between -90° and +90°") + .build(); + } + if (lon < -180.0 || lon > 180.0) { + throw errCtx.error() + .message("longitude must be between -180° and +180°") + .build(); + } + + int dstEnd = 0; + double latLow = -90.0; + double latHigh = 90.0; + double longLow = -180.0; + double longHigh = 180.0; + int c = 0; + int[] evenBits = {16, 8, 4, 2, 1}; + int bit = 0; + + String hash = ""; + int index = 0; + while (hash.length() < precision) { + if (index % 2 == 0) { + //even + double midValue = (longLow + longHigh) / 2; + if (lon > midValue) { + c |= evenBits[bit]; + longLow = midValue; + } else { + longHigh = midValue; + } + } else { + //odd + double midValue = (latLow + latHigh) / 2; + if (lat > midValue) { + c |= evenBits[bit]; + latLow = midValue; + } else { + latHigh = midValue; + } + } + index++; + + if (bit < 4) { + bit += 1; + } else { + hash += baseHashValues[c]; + bit = 0; + c = 0; + } + } + return hash; } + /** + * Decode a geohash into lat, lon values. + * @param in the base32-encoded geohash + * @param errCtx error context used to report an invalid hash + * @return a two-element array {latitude, longitude} + */ + public static double[] decodeGeohash(String in, FunctionErrorContext errCtx) { + Map<Character, Integer> codeMap = buildCodeMap(); + + String decoded = ""; + //Only the character values in baseHashValues are accepted. 
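+ //Each accepted character maps back to its 5-bit index, e.g. 'e' -> 13 -> "01101"; bits at even positions then re-bisect the longitude range and bits at odd positions the latitude range, mirroring encodeGeohash above.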
+ for (int id = 0; id < in.length(); id++) { + char currentChar = in.charAt(id); + if ( (Character.isLetter(currentChar) || Character.isDigit(currentChar)) && + codeMap.containsKey(currentChar) ) { + String c = Integer.toBinaryString(codeMap.get(currentChar)); + //Pad to 5 bits. + while (c.length() < 5) { + c = '0' + c; + } + decoded += c; + } else { + throw errCtx.error() + .message(INVALID_HASH_MSG) + .build(); + } + } + + double latLow = -90.0; + double latHigh = 90.0; + double lonLow = -180.0; + double lonHigh = 180.0; + + byte[] decodedBytes = decoded.getBytes(); + + for (int i = 0; i < decodedBytes.length; i++) { + byte b = decodedBytes[i]; + if (i % 2 == 0) { + if (b == '1') { + lonLow = (lonLow + lonHigh) / 2; + } else { + lonHigh = (lonLow + lonHigh) / 2; + } + } else { + if (b == '1') { + latLow = (latLow + latHigh) / 2; + } else { + latHigh = (latLow + latHigh) / 2; + } + } + } + double[] result = {(latLow + latHigh) / 2, (lonLow + lonHigh) / 2}; + return result; + } + private static Map<Character, Integer> buildCodeMap() { + Map<Character, Integer> codeMap = new HashMap<>(); + for (int i = 0; i < baseHashValues.length; i++) { + codeMap.put(baseHashValues[i], i); + } + return codeMap; + } /** * Given two points, determine if they are within distance of each other. - * @param lat1 Latitude of point 1 - * @param lon1 Longitude of point 1 - * @param lat2 Latitude of point 2 - * @param lon2 Longitude of point 2 + * + * @param lat1 Latitude of point 1 + * @param lon1 Longitude of point 1 + * @param lat2 Latitude of point 2 + * @param lon2 Longitude of point 2 * @param distance The distance in meters between the two points. * @return True if the two points are within the desired distance. */ public static boolean isNear(double lat1, double lon1, double lat2, double lon2, double distance) { - return distance(lat1,lon1, lat2, lon2) < distance; + return distance(lat1, lon1, lat2, lon2) < distance; - } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32AsDouble.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32AsDouble.java index 9f85a3176f..82b6b8b095 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32AsDouble.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32AsDouble.java @@ -40,9 +40,11 @@ public static class NullableFloatHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -61,9 +63,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -83,9 +87,11 @@ public static class NullableBigIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -103,9 +109,11 @@ public static class NullableIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32Functions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32Functions.java index 7374866861..6fdc7a97c8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32Functions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32Functions.java @@ -41,9 +41,11 @@ public 
static class NullableFloatHash implements SimpleFunction { @Param NullableFloat4Holder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -60,9 +62,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Param NullableFloat8Holder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -79,9 +83,11 @@ public static class NullableVarBinaryHash implements SimpleFunction { @Param NullableVarBinaryHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -98,9 +104,11 @@ public static class NullableVarCharHash implements SimpleFunction { @Param NullableVarCharHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -117,15 +125,16 @@ public static class NullableBigIntHash implements SimpleFunction { @Param NullableBigIntHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = 0; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32(in.value, 0); } } @@ -136,15 +145,16 @@ public static class NullableIntHash implements SimpleFunction { @Param NullableIntHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = 0; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32(in.value, 0); } } @@ -155,9 +165,11 @@ public static class NullableDateHash implements SimpleFunction { @Param NullableDateMilliHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -173,9 +185,11 @@ public static class NullableTimeStampHash implements SimpleFunction { @Param NullableTimeStampMilliHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -191,9 +205,11 @@ public static class NullableTimeHash implements SimpleFunction { @Param NullableTimeMilliHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -209,9 +225,11 @@ public static class NullableDecimalHash implements SimpleFunction { @Param NullableDecimalHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -229,9 +247,11 @@ public static class NullableBitHash implements SimpleFunction { @Param NullableBitHolder in; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32FunctionsWithSeed.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32FunctionsWithSeed.java index c32926cc82..3c4e0a84cd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32FunctionsWithSeed.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32FunctionsWithSeed.java @@ -46,9 +46,11 @@ public static class NullableFloatHash implements SimpleFunction { @Output 
NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -67,9 +69,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -88,9 +92,11 @@ public static class NullableVarBinaryHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -109,9 +115,11 @@ public static class NullableVarCharHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -130,15 +138,16 @@ public static class NullableBigIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32(in.value, seed.value); } } @@ -151,15 +160,16 @@ public static class NullableIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32(in.value, seed.value); } } @@ -172,9 +182,11 @@ public static class NullableDateHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -192,9 +204,11 @@ public static class NullableTimeStampHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -212,9 +226,11 @@ public static class NullableTimeHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -232,9 +248,11 @@ public static class NullableDecimalHash implements SimpleFunction { @Param IntHolder seed; @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -254,9 +272,11 @@ public static class NullableBitHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32WithSeedAsDouble.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32WithSeedAsDouble.java index 2258fb0ac5..5eec5386c7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32WithSeedAsDouble.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash32WithSeedAsDouble.java @@ -40,9 +40,11 @@ public static class NullableFloatHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -61,9 +63,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -82,15 +86,16 @@ public static class 
NullableBigIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32((double) in.value, seed.value); } } @@ -103,15 +108,16 @@ public static class NullableIntHash implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash32((double) in.value, seed.value); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64Functions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64Functions.java index fa467dd86a..66bce76aa3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64Functions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64Functions.java @@ -45,9 +45,11 @@ public static class NullableFloatHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -66,9 +68,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -87,9 +91,11 @@ public static class NullableVarBinaryHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -108,9 +114,11 @@ public static class NullableVarCharHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -129,9 +137,11 @@ public static class NullableBigIntHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -149,9 +159,11 @@ public static class NullableIntHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -169,9 +181,11 @@ public static class NullableDateHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -189,9 +203,11 @@ public static class NullableTimeStampHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -209,9 +225,11 @@ public static class NullableTimeHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -228,9 +246,11 @@ public static class NullableDecimalHash implements SimpleFunction { NullableDecimalHolder in; @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -250,9 +270,11 @@ public static class NullableBitHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() 
{ out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64FunctionsWithSeed.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64FunctionsWithSeed.java index 90aaafee2b..bb5d06205a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64FunctionsWithSeed.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64FunctionsWithSeed.java @@ -47,9 +47,11 @@ public static class NullableFloatHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -68,9 +70,11 @@ public static class NullableDoubleHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -89,9 +93,11 @@ public static class NullableVarBinaryHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -110,9 +116,11 @@ public static class NullableVarCharHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -131,15 +139,16 @@ public static class NullableBigIntHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash64(in.value, seed.value); } } @@ -152,15 +161,16 @@ public static class NullableIntHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash64(in.value, seed.value); } } @@ -173,9 +183,11 @@ public static class NullableDateHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -193,9 +205,11 @@ public static class NullableTimeStampHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -213,9 +227,11 @@ public static class NullableTimeHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -233,9 +249,11 @@ public static class NullableDecimalHash implements SimpleFunction { @Param BigIntHolder seed; @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -255,9 +273,11 @@ public static class NullableBitHash implements SimpleFunction { @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64WithSeedAsDouble.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64WithSeedAsDouble.java index c6e0d6001a..5bcf260ec2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64WithSeedAsDouble.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Hash64WithSeedAsDouble.java @@ -99,8 +99,7 @@ public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash64((double) in.value, seed.value); } } @@ -122,8 +121,7 @@ public void eval() { out.isSet = 1; if (in.isSet == 0) { out.value = seed.value; - } - else { + } else { out.value = com.dremio.common.expression.fn.impl.HashHelper.hash64((double) in.value, seed.value); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsFalse.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsFalse.java index db7beaedfc..0abe3cae66 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsFalse.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsFalse.java @@ -31,8 +31,10 @@ public static class Optional implements SimpleFunction { @Param NullableBitHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotFalse.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotFalse.java index abbabd036b..4d89fa88f8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotFalse.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotFalse.java @@ -31,8 +31,10 @@ public static class Optional implements SimpleFunction { @Param NullableBitHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotTrue.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotTrue.java index 822c1ebf8d..17685f9d68 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotTrue.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsNotTrue.java @@ -33,8 +33,10 @@ public static class Optional implements SimpleFunction { @Param NullableBitHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -51,8 +53,10 @@ public static class OptionalInt implements SimpleFunction { @Param NullableIntHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 1 && in.value != 0) { @@ -69,8 +73,10 @@ public static class OptionalLong implements SimpleFunction { @Param NullableBigIntHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 1 && in.value != 0L) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsTrue.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsTrue.java index 40df120c5b..75c2ef72a6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsTrue.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/IsTrue.java @@ -33,8 +33,10 @@ public static class Optional implements SimpleFunction { @Param NullableBitHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 0) { @@ -51,8 +53,10 @@ public static class OptionalInt implements SimpleFunction { @Param NullableIntHolder in; @Output NullableBitHolder out; + @Override 
public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 1 && in.value != 0) { @@ -69,8 +73,10 @@ public static class OptionalLong implements SimpleFunction { @Param NullableBigIntHolder in; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; if (in.isSet == 1 && in.value != 0L) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MapFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MapFunctions.java index 8f527c113d..093047a67f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MapFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MapFunctions.java @@ -45,6 +45,7 @@ public static class GetMapKeys implements SimpleFunction { @Output BaseWriter.ComplexWriter out; + @Override public void setup() { } @@ -68,6 +69,7 @@ public static class GetMapValues implements SimpleFunction { @Output BaseWriter.ComplexWriter out; + @Override public void setup() { } @@ -91,6 +93,7 @@ public static class GetMapSize implements SimpleFunction { @Output NullableIntHolder out; + @Override public void setup() { } @@ -107,6 +110,7 @@ public void eval() { } public static class KeyValueOutputLastMatching implements OutputDerivation { + @Override public CompleteType getOutputType(CompleteType baseReturn, List args) { Field entryStruct = getEntryStruct(args, "GetLastMatchingMapEntryForKey"); return CompleteType.fromField(entryStruct); @@ -114,6 +118,7 @@ public CompleteType getOutputType(CompleteType baseReturn, List args) { Field entryStruct = getEntryStruct(args, "getMapKeys"); return CompleteType.fromField(entryStruct.getChildren().get(0)).asList(); @@ -121,6 +126,7 @@ public CompleteType getOutputType(CompleteType baseReturn, List args) { Field entryStruct = getEntryStruct(args, "getMapValues"); return CompleteType.fromField(entryStruct.getChildren().get(1)).asList(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Mappify.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Mappify.java index ae3e65bcdf..8fe95e0ece 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Mappify.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Mappify.java @@ -68,15 +68,18 @@ public static class ConvertMapToKeyValuePairs implements SimpleFunction { @Output ComplexWriter writer; @Inject FunctionErrorContext errorContext; + @Override public void setup() { } + @Override public void eval() { buffer = com.dremio.exec.expr.fn.impl.MappifyUtility.mappify(reader, writer, buffer, errorContext); } } public static class KvGenOutput implements OutputDerivation { + @Override public CompleteType getOutputType(CompleteType baseReturn, List args) { Preconditions.checkArgument(args.size() == 1); CompleteType type = args.get(0).getCompleteType(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MathFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MathFunctions.java index 968ae52f07..fe558b90e1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MathFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/MathFunctions.java @@ -52,8 +52,10 @@ public static class Negative implements SimpleFunction{ @Param BigIntHolder input; @Output BigIntHolder out; + @Override public void setup(){} + @Override public void eval(){ out.value = -input.value; return; @@ -67,8 +69,10 @@ public static class NegativeFloat8 implements SimpleFunction{ @Param 
Float8Holder input; @Output Float8Holder out; + @Override public void setup(){} + @Override public void eval(){ out.value = -input.value; return; @@ -82,8 +86,10 @@ public static class NegativeFloat4 implements SimpleFunction{ @Param Float4Holder input; @Output Float4Holder out; + @Override public void setup(){} + @Override public void eval(){ out.value = -input.value; return; @@ -143,8 +149,10 @@ public static class Power implements SimpleFunction{ @Param Float8Holder b; @Output Float8Holder out; + @Override public void setup(){} + @Override public void eval(){ out.value = java.lang.Math.pow(a.value, b.value); } @@ -158,10 +166,12 @@ public static class Random implements SimpleFunction { @Workspace java.util.Random random; + @Override public void setup() { random = new java.util.Random(); } + @Override public void eval() { out.isSet = 1; out.value = random.nextDouble(); @@ -175,11 +185,13 @@ public static class Sample implements SimpleFunction { @Workspace private ObjectHolder random; @Workspace private IntHolder samplingRate; + @Override public void setup() { random = new ObjectHolder(); random.obj = new java.util.SplittableRandom(); } + @Override public void eval() { out.isSet = 1; out.value = ((java.util.SplittableRandom) random.obj).nextDouble() * 100 < rate.value ? 1 : 0; @@ -196,6 +208,7 @@ public static class RandomWithSeed implements SimpleFunction { @Workspace java.util.Random random; + @Override public void setup() { int seed = 0; if (seedHolder.isSet == 1) { @@ -204,6 +217,7 @@ public void setup() { random = new java.util.Random(seed); } + @Override public void eval() { out.isSet = 1; out.value = random.nextDouble(); @@ -251,6 +265,7 @@ public static class ToNumber implements SimpleFunction { @Output Float8Holder out; @Inject FunctionErrorContext errCtx; + @Override public void setup() { byte[] buf = new byte[right.end - right.start]; right.buffer.getBytes(right.start, buf, 0, right.end - right.start); @@ -258,6 +273,7 @@ public void setup() { decimalDigits = inputFormat.getMaximumFractionDigits(); } + @Override public void eval() { byte[] buf1 = new byte[left.end - left.start]; left.buffer.getBytes(left.start, buf1, 0, left.end - left.start); @@ -281,9 +297,11 @@ public static class Pi implements SimpleFunction { @Output Float8Holder out; + @Override public void setup() { } + @Override public void eval() { out.value = java.lang.Math.PI; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ModFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ModFunctions.java index 9d58c1cda3..76d2e61e40 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ModFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/ModFunctions.java @@ -42,9 +42,11 @@ public static class ModInt implements SimpleFunction { @Param IntHolder input2; @Output IntHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = (int) (input2.value == 0 ? input1.value : (input1.value % input2.value)); } @@ -57,9 +59,11 @@ public static class ModBigInt implements SimpleFunction { @Param BigIntHolder input2; @Output BigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = (long) (input2.value == 0 ? 
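/* As in ModInt above, mod(x, 0) is defined to return x rather than raise a divide-by-zero error. */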
input1.value : (input1.value % input2.value)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Not.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Not.java index 54f43ebdb3..aba3c8f30d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Not.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Not.java @@ -30,8 +30,10 @@ public static class Required implements SimpleFunction { @Param BitHolder in; @Output BitHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = in.value == 0 ? 1 : 0; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NullOpMap.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NullOpMap.java index 8b7dc4ff16..8c7a449f6d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NullOpMap.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NullOpMap.java @@ -30,8 +30,10 @@ public static class IsNull implements SimpleFunction { @Param FieldReader fieldReader; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = fieldReader.isSet() ? 0 : 1; @@ -44,8 +46,10 @@ public static class IsNotNull implements SimpleFunction { @Param FieldReader fieldReader; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = fieldReader.isSet() ? 1 : 0; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NumericFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NumericFunctions.java index 973c1b8834..d66d262de4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NumericFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/NumericFunctions.java @@ -37,9 +37,11 @@ public static class NullableBigIntIsNumeric implements SimpleFunction { @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { if (in.isSet == 0) { out.value = 0; @@ -58,9 +60,11 @@ public static class NullableIntIsNumeric implements SimpleFunction { @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { if (in.isSet == 0) { out.value = 0; @@ -83,11 +87,13 @@ public static class NullableVarCharIsNumeric implements SimpleFunction { @Workspace java.util.regex.Matcher matcher; + @Override public void setup() { pattern = java.util.regex.Pattern.compile("[-+]?\\d+(\\.\\d+)?"); matcher = pattern.matcher(""); } + @Override public void eval() { if (in.isSet == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/SimpleCastFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/SimpleCastFunctions.java index 76e5c514f5..ae2b208cf6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/SimpleCastFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/SimpleCastFunctions.java @@ -41,10 +41,12 @@ public static class CastVarCharBoolean implements SimpleFunction { @Output BitHolder out; @Inject FunctionErrorContext errCtx; + @Override public void setup() { } + @Override public void eval() { byte[] buf = new byte[in.end - in.start]; in.buffer.getBytes(in.start, buf, 0, in.end - in.start); @@ -69,8 +71,10 @@ public static class CastBooleanVarChar implements SimpleFunction { @Output VarCharHolder out; @Inject ArrowBuf buffer; + @Override public void setup() {} + @Override public void eval() { byte[] outB = in.value 
== 1 ? com.dremio.exec.expr.fn.impl.SimpleCastFunctions.TRUE : com.dremio.exec.expr.fn.impl.SimpleCastFunctions.FALSE; buffer.setBytes(0, outB); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Soundex.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Soundex.java index e106e06955..5d5a690fb0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Soundex.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/Soundex.java @@ -77,16 +77,20 @@ public void eval() { final byte[] outBytea; if (in.end <= in.start || in.isSet == 0) { - out.buffer = buffer = buffer.reallocIfNeeded(0); - out.start = out.end = 0; + buffer = buffer.reallocIfNeeded(0); + out.buffer = buffer; + out.start = 0; + out.end = 0; out.isSet = 1; } else { String text = com.dremio.exec.expr.fn.impl.StringFunctionUtil.soundexCleanUtf8(in, errCtx); int len = text.length(); if (len == 0) { - out.buffer = buffer = buffer.reallocIfNeeded(0); - out.start = out.end = 0; + buffer = buffer.reallocIfNeeded(0); + out.buffer = buffer; + out.start = 0; + out.end = 0; out.isSet = 0; } else { byte[] soundex = new byte[len]; @@ -136,7 +140,8 @@ public void eval() { } outBytea = new String(ret).getBytes(java.nio.charset.StandardCharsets.UTF_8); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/StringFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/StringFunctions.java index 88c0f23e5c..ce7ec29f8f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/StringFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/StringFunctions.java @@ -107,6 +107,42 @@ public void eval() { } } + @FunctionTemplate(name = "col_like", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) + public static class ColLike implements SimpleFunction { + @Param VarCharHolder input; + @Param VarCharHolder pattern; + @Output BitHolder out; + @Workspace java.util.Map compiledPatternCache; + @Inject FunctionErrorContext errCtx; + + @Override + public void setup() { + compiledPatternCache = new java.util.HashMap<>(); + } + + @Override + public void eval() { + final int maxPatternCacheSize = 100; + + com.dremio.exec.expr.fn.impl.CharSequenceWrapper charSequenceWrapper = + new com.dremio.exec.expr.fn.impl.CharSequenceWrapper(input.start, input.end, input.buffer); + String pat = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer); + java.util.regex.Pattern compiledPattern; + if (compiledPatternCache.containsKey(pat)) { + compiledPattern = (java.util.regex.Pattern)compiledPatternCache.get(pat); + } else { + compiledPattern = com.dremio.exec.expr.fn.impl.StringFunctionUtil.compilePattern( + com.dremio.exec.expr.fn.impl.RegexpUtil.sqlToRegexLike(pat, errCtx), java.util.regex.Pattern.DOTALL, errCtx); + // bounding the size of the cache to avoid excess heap usage + if (compiledPatternCache.size() < maxPatternCacheSize) { + compiledPatternCache.put(pat, compiledPattern); + } + } + java.util.regex.Matcher matcher = compiledPattern.matcher(charSequenceWrapper); + out.value = matcher.matches()? 
1:0; + } + } + @FunctionTemplate(name = "ilike", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class ILike implements SimpleFunction { @@ -275,13 +311,14 @@ public void eval() { } while (result); matcher.appendTail(sb); final byte [] bytea = sb.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8); - out.buffer = buffer = buffer.reallocIfNeeded(bytea.length); + buffer = buffer.reallocIfNeeded(bytea.length); + out.buffer = buffer; out.buffer.setBytes(out.start, bytea); out.end = bytea.length; - } - else { + } else { // There is no matches, copy the input bytes into the output buffer - out.buffer = buffer = buffer.reallocIfNeeded(input.end - input.start); + buffer = buffer.reallocIfNeeded(input.end - input.start); + out.buffer = buffer; out.buffer.setBytes(0, input.buffer, input.start, input.end - input.start); out.end = input.end - input.start; } @@ -298,7 +335,6 @@ public static class RegexpMatches implements SimpleFunction { @Param VarCharHolder input; @Param(constant=true) VarCharHolder pattern; - @Inject ArrowBuf buffer; @Workspace java.util.regex.Matcher matcher; @Workspace com.dremio.exec.expr.fn.impl.CharSequenceWrapper charSequenceWrapper; @Output BitHolder out; @@ -322,6 +358,45 @@ public void eval() { } } + /* + * Match the given input against a regular expression specified by a column. + * + * This is similar to regexp_like, except that its 2nd argument is a column and not a literal constant + */ + @FunctionTemplate(names = {"regexp_col_like", "regexp_col_matches"}, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) + public static class RegexpColLike implements SimpleFunction { + @Param VarCharHolder input; + @Param VarCharHolder pattern; + @Output BitHolder out; + @Workspace java.util.Map compiledPatternCache; + @Inject FunctionErrorContext errCtx; + + @Override + public void setup() { + compiledPatternCache = new java.util.HashMap<> (); + } + + @Override + public void eval() { + final int maxPatternCacheSize = 100; + + com.dremio.exec.expr.fn.impl.CharSequenceWrapper charSequenceWrapper = + new com.dremio.exec.expr.fn.impl.CharSequenceWrapper(input.start, input.end, input.buffer); + String pat = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer); + java.util.regex.Pattern compiledPattern = (java.util.regex.Pattern)compiledPatternCache.get(pat); + if (compiledPattern == null) { + compiledPattern = com.dremio.exec.expr.fn.impl.StringFunctionUtil.compilePattern( + pat, java.util.regex.Pattern.DOTALL, errCtx); + if (compiledPatternCache.size() < maxPatternCacheSize) { + compiledPatternCache.put(pat, compiledPattern); + } + } + + java.util.regex.Matcher matcher = compiledPattern.matcher(charSequenceWrapper); + out.value = matcher.find()? 
1:0; + } + } + @FunctionTemplate(names = {"char_length", "character_length", "length"}, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class CharLength implements SimpleFunction { @Param VarCharHolder input; @@ -555,13 +630,15 @@ public void setup() { @Override public void eval() { - out.buffer = buffer = buffer.reallocIfNeeded(input.end - input.start); + buffer = buffer.reallocIfNeeded(input.end - input.start); + out.buffer = buffer; out.start = 0; out.end = input.end - input.start; final String toLower = (com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer)).toLowerCase(); final byte[] outBytea = toLower.getBytes(java.nio.charset.StandardCharsets.UTF_8); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; @@ -584,13 +661,15 @@ public void setup() { @Override public void eval() { - out.buffer = buffer = buffer.reallocIfNeeded(input.end- input.start); + buffer = buffer.reallocIfNeeded(input.end- input.start); + out.buffer = buffer; out.start = 0; out.end = input.end - input.start; final String toUpper = (com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer)).toUpperCase(); final byte[] outBytea = toUpper.getBytes(java.nio.charset.StandardCharsets.UTF_8); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; @@ -620,7 +699,8 @@ public void eval() { out.buffer = string.buffer; // if length is NOT positive, or input string is empty, return empty string. if (length.value <= 0 || string.end <= string.start) { - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { //Do 1st scan to counter # of character in string. final int charCount = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength @@ -636,7 +716,8 @@ public void eval() { } if (fromCharIdx <= 0 || fromCharIdx > charCount ) { // invalid offset, return empty string. - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { out.start = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharPosition( io.netty.buffer.NettyArrowBuf.unwrapBuffer(string.buffer), string.start, string.end, fromCharIdx-1, errCtx); @@ -669,7 +750,8 @@ public void eval() { out.buffer = string.buffer; // If the input string is empty, return empty string. if (string.end <= string.start) { - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { //Do 1st scan to counter # of character in string. final int charCount = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength @@ -685,7 +767,8 @@ public void eval() { } if (fromCharIdx <= 0 || fromCharIdx > charCount ) { // invalid offset, return empty string. - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { out.start = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharPosition( io.netty.buffer.NettyArrowBuf.unwrapBuffer(string.buffer), string.start, string.end, fromCharIdx-1, errCtx); @@ -759,7 +842,8 @@ public void eval() { out.buffer = string.buffer; // if length is 0, or input string is empty, return empty string. 
if (length.value == 0 || string.end <= string.start) { - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { //Do 1st scan to counter # of character in string. final int charCount = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength @@ -799,7 +883,8 @@ public void eval() { out.buffer = string.buffer; // invalid length. if (length.value == 0 || string.end <= string.start) { - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { //Do 1st scan to counter # of character in string. final int charCount = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength @@ -816,7 +901,8 @@ public void eval() { // invalid length : right('abc', -5) -> '' if (charLen <= 0) { - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else { //Do 2nd scan of string. Get bytes corresponding chars in range. out.start = com.dremio.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharPosition( @@ -841,7 +927,8 @@ public void setup() { @Override public void eval() { - out.buffer = buffer = buffer.reallocIfNeeded(input.end - input.start); + buffer = buffer.reallocIfNeeded(input.end - input.start); + out.buffer = buffer; out.start = 0; out.end = input.end - input.start; com.dremio.exec.expr.fn.impl.StringFunctionHelpers.initCap(input.start, input.end, input.buffer, out.buffer); @@ -866,7 +953,8 @@ public void setup() { @Override public void eval() { out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; int fromL = from.end - from.start; int textL = text.end - text.start; @@ -946,7 +1034,8 @@ public void eval() { if (theLength <= 0) { //case 1: target length is <=0, then return an empty string. out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else if (theLength == textCharCount || (theLength > textCharCount && fillCharCount == 0) ) { //case 2: target length is same as text's length, or need fill into text but "fill" is empty, then return text directly. out.buffer = text.buffer; @@ -962,7 +1051,8 @@ public void eval() { //case 4: copy "fill" on left. Total # of char to copy : theLength - textCharCount int count = 0; out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; while (count < theLength - textCharCount) { for (id = fill.start; id < fill.end; id++) { @@ -1020,7 +1110,8 @@ public void eval() { if (theLength <= 0) { //case 1: target length is <=0, then return an empty string. out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else if (theLength == textCharCount) { //case 2: target length is same as text's length. out.buffer = text.buffer; @@ -1036,7 +1127,8 @@ public void eval() { //case 4: copy " " on left. Total # of char to copy : theLength - textCharCount int count = 0; out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; while (count < theLength - textCharCount) { out.buffer.setByte(out.end++, spaceInByte); @@ -1088,7 +1180,8 @@ public void eval() { if (theLength <= 0) { //case 1: target length is <=0, then return an empty string. out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else if (theLength == textCharCount || (theLength > textCharCount && fillCharCount == 0) ) { //case 2: target length is same as text's length, or need fill into text but "fill" is empty, then return text directly. out.buffer = text.buffer; @@ -1103,7 +1196,8 @@ public void eval() { } else if (theLength > textCharCount) { //case 4: copy "text" into "out", then copy "fill" on the right. 
out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; for (id = text.start; id < text.end; id++) { out.buffer.setByte(out.end++, text.buffer.getByte(id)); @@ -1165,7 +1259,8 @@ public void eval() { if (theLength <= 0) { //case 1: target length is <=0, then return an empty string. out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; } else if (theLength == textCharCount) { //case 2: target length is same as text's length. out.buffer = text.buffer; @@ -1180,7 +1275,8 @@ public void eval() { } else if (theLength > textCharCount) { //case 4: copy "text" into "out", then copy " " on the right. out.buffer = buffer; - out.start = out.end = 0; + out.start = 0; + out.end = 0; for (int id = text.start; id < text.end; id++) { out.buffer.setByte(out.end++, text.buffer.getByte(id)); @@ -1216,7 +1312,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.end; + out.start = text.end; + out.end = text.end; int bytePerChar = 0; //Scan from left of "text", stop until find a char not in "from" @@ -1251,7 +1348,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.end; + out.start = text.end; + out.end = text.end; //Scan from left of "text", stop until find a char not " " for (int id = text.start; id < text.end; ++id) { @@ -1281,7 +1379,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.start; + out.start = text.start; + out.end = text.start; int bytePerChar = 0; //Scan from right of "text", stop until find a char not in "from" @@ -1319,7 +1418,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.start; + out.start = text.start; + out.end = text.start; //Scan from right of "text", stop until find a char not in " " for (int id = text.end - 1; id >= text.start; --id) { @@ -1352,7 +1452,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.start; + out.start = text.start; + out.end = text.start; int bytePerChar = 0; //Scan from left of "text", stop until find a char not in "from" @@ -1402,7 +1503,8 @@ public void setup() { @Override public void eval() { out.buffer = text.buffer; - out.start = out.end = text.start; + out.start = text.start; + out.end = text.start; //Scan from left of "text", stop until find a char not " " for (int id = text.start; id < text.end; ++id) { @@ -1438,8 +1540,10 @@ public void setup() { @Override public void eval() { - out.buffer = buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start)); - out.start = out.end = 0; + buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start)); + out.buffer = buffer; + out.start = 0; + out.end = 0; int id = 0; for (id = left.start; id < left.end; id++) { @@ -1464,8 +1568,10 @@ public void setup() {} @Override public void eval() { - out.buffer = buffer = buffer.reallocIfNeeded(in.end - in.start); - out.start = out.end = 0; + buffer = buffer.reallocIfNeeded(in.end - in.start); + out.buffer = buffer; + out.start = 0; + out.end = 0; out.end = com.dremio.common.util.DremioStringUtils.parseBinaryString(io.netty.buffer.NettyArrowBuf.unwrapBuffer(in.buffer), in.start, in.end, io.netty.buffer.NettyArrowBuf.unwrapBuffer(out.buffer)); out.buffer.readerIndex(out.start); @@ -1485,8 +1591,10 @@ public void setup() {} @Override public void eval() { - out.buffer = buffer 
= buffer.reallocIfNeeded(in.end - in.start); - out.start = out.end = 0; + buffer = buffer.reallocIfNeeded(in.end - in.start); + out.buffer = buffer; + out.start = 0; + out.end = 0; out.end = com.dremio.exec.expr.fn.impl.StringFunctionUtil.parseBinaryStringNoFormat(io.netty.buffer.NettyArrowBuf.unwrapBuffer(in.buffer), in.start, in.end, io.netty.buffer.NettyArrowBuf.unwrapBuffer(out.buffer), errCtx); out.buffer.readerIndex(out.start); @@ -1510,7 +1618,8 @@ public void setup() { public void eval() { byte[] buf = com.dremio.common.util.DremioStringUtils.toBinaryStringNoFormat(io.netty.buffer.NettyArrowBuf.unwrapBuffer(in.buffer), in .start, in.end).getBytes(charset); - out.buffer = buffer = buffer.reallocIfNeeded(buf.length); + buffer = buffer.reallocIfNeeded(buf.length); + out.buffer = buffer; buffer.setBytes(0, buf); buffer.readerIndex(0); buffer.writerIndex(buf.length); @@ -1583,7 +1692,8 @@ public void setup() { @Override public void eval() { out.buffer = buf; - out.start = out.end = 0; + out.start = 0; + out.end = 0; out.buffer.setByte(0, in.value); ++out.end; } @@ -1609,7 +1719,8 @@ public void eval() { final int len = in.end - in.start; final int num = nTimes.value; out.start = 0; - out.buffer = buffer = buffer.reallocIfNeeded( len * num ); + buffer = buffer.reallocIfNeeded( len * num ); + out.buffer = buffer; for (int i =0; i < num; i++) { in.buffer.getBytes(in.start, out.buffer, i * len, len); } @@ -1641,7 +1752,8 @@ public void eval() { bytea[index] = in.buffer.getByte(i); } final byte[] outBytea = new String(bytea, inCharset).getBytes(java.nio.charset.StandardCharsets.UTF_8); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; @@ -1667,15 +1779,16 @@ public void eval() { final int len = in.end - in.start; out.start = 0; out.end = len; - out.buffer = buffer = buffer.reallocIfNeeded(len); + buffer = buffer.reallocIfNeeded(len); + out.buffer = buffer; int charlen = 0; int index = len; int innerindex = 0; for (int id = in.start; id < in.end; id += charlen) { - innerindex = charlen = com.dremio.exec.expr.fn.impl.StringFunctionUtil.utf8CharLen(io.netty.buffer.NettyArrowBuf.unwrapBuffer(in.buffer), - id, errCtx); + charlen = com.dremio.exec.expr.fn.impl.StringFunctionUtil.utf8CharLen(io.netty.buffer.NettyArrowBuf.unwrapBuffer(in.buffer), id, errCtx); + innerindex = charlen; // retain byte order of multibyte characters while (innerindex > 0) { @@ -1706,7 +1819,8 @@ public void setup() {} public void eval() { final byte[] outBytea = org.apache.commons.lang3.StringUtils.replaceChars(getStringFromVarCharHolder(in), getStringFromVarCharHolder(searchChars), getStringFromVarCharHolder(replaceChars)).getBytes(); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; @@ -1735,7 +1849,8 @@ public void setup() { @Override public void eval() { - out.start = out.end = 0; + out.start = 0; + out.end = 0; out.isSet = 0; if(separator.isSet == 0){ @@ -1753,7 +1868,8 @@ public void eval() { } outputLength += (separator.end - separator.start) * (numValidInput > 1 ? 
numValidInput - 1 : 0); - out.buffer = buffer = buffer.reallocIfNeeded(outputLength); + buffer = buffer.reallocIfNeeded(outputLength); + out.buffer = buffer; com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word1, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word2, separator); out.isSet = 1; @@ -1785,7 +1901,8 @@ public void setup() { @Override public void eval() { - out.start = out.end = 0; + out.start = 0; + out.end = 0; out.isSet = 0; if(separator.isSet == 0){ @@ -1807,7 +1924,8 @@ public void eval() { } outputLength += (separator.end - separator.start) * (numValidInput > 1 ? numValidInput - 1 : 0); - out.buffer = buffer = buffer.reallocIfNeeded(outputLength); + buffer = buffer.reallocIfNeeded(outputLength); + out.buffer = buffer; com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word1, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word2, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word3, separator); @@ -1842,7 +1960,8 @@ public void setup() { @Override public void eval() { - out.start = out.end = 0; + out.start = 0; + out.end = 0; out.isSet = 0; if(separator.isSet == 0){ @@ -1868,7 +1987,8 @@ public void eval() { } outputLength += (separator.end - separator.start) * (numValidInput > 1 ? numValidInput - 1 : 0); - out.buffer = buffer = buffer.reallocIfNeeded(outputLength); + buffer = buffer.reallocIfNeeded(outputLength); + out.buffer = buffer; com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word1, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word2, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word3, separator); @@ -1906,7 +2026,8 @@ public void setup() { @Override public void eval() { - out.start = out.end = 0; + out.start = 0; + out.end = 0; out.isSet = 0; if(separator.isSet == 0){ @@ -1936,7 +2057,8 @@ public void eval() { } outputLength += (separator.end - separator.start) * (numValidInput > 1 ? numValidInput - 1 : 0); - out.buffer = buffer = buffer.reallocIfNeeded(outputLength); + buffer = buffer.reallocIfNeeded(outputLength); + out.buffer = buffer; com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word1, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word2, separator); com.dremio.exec.expr.fn.impl.StringFunctionUtil.concatWsWord(out, word3, separator); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/UnionFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/UnionFunctions.java index 655a10dd3c..528b88cb0d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/UnionFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/UnionFunctions.java @@ -64,8 +64,10 @@ public static class CompareType implements SimpleFunction { @Param FieldReader input2; @Output NullableIntHolder out; + @Override public void setup() {} + @Override public void eval() { org.apache.arrow.vector.types.Types.MinorType type1; if (input1.isSet()) { @@ -142,8 +144,10 @@ public static class GetType implements SimpleFunction { @Inject ArrowBuf buf; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; @@ -168,8 +172,10 @@ public static class CastUnionToUnion implements SimpleFunction{ @Output UnionHolder out; + @Override public void setup() {} + @Override public void eval() { out.reader = in; out.isSet = in.isSet() ? 
1 : 0; @@ -183,8 +189,10 @@ public static class CastUnionList implements SimpleFunction { @Output ComplexWriter out; @Inject FunctionErrorContext errorContext; + @Override public void setup() {} + @Override public void eval() { if (in.isSet == 1) { if (in.reader.getMinorType() != org.apache.arrow.vector.types.Types.MinorType.LIST) { @@ -236,8 +244,10 @@ public static class UnionIsList implements SimpleFunction { @Param UnionHolder in; @Output NullableBitHolder out; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; if (in.isSet == 1) { @@ -256,8 +266,10 @@ public static class CastUnionDecimal implements SimpleFunction { @Output NullableDecimalHolder out; + @Override public void setup() {} + @Override public void eval() { if (in.isSet == 1) { in.reader.read(out); @@ -299,8 +311,10 @@ public static class UnionIsDecimal implements SimpleFunction { @Param UnionHolder in; @Output NullableBitHolder out; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; if (in.isSet == 1) { @@ -321,8 +335,10 @@ public static class CastUnionStruct implements SimpleFunction { @Output ComplexWriter out; @Inject FunctionErrorContext errorContext; + @Override public void setup() {} + @Override public void eval() { if (in.isSet == 1) { if (in.reader.getMinorType() != org.apache.arrow.vector.types.Types.MinorType.STRUCT) { @@ -374,8 +390,10 @@ public static class UnionIsStruct implements SimpleFunction { @Param UnionHolder in; @Output NullableBitHolder out; + @Override public void setup() {} + @Override public void eval() { out.isSet = 1; if (in.isSet == 1) { @@ -392,8 +410,10 @@ public static class IsNotNull implements SimpleFunction { @Param UnionHolder input; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = input.isSet == 1 ? 1 : 0; @@ -406,8 +426,10 @@ public static class IsNull implements SimpleFunction { @Param UnionHolder input; @Output NullableBitHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = input.isSet == 1 ? 
0 : 1; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/Base64ConvertTo.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/Base64ConvertTo.java index c5cfc9f044..1762db4732 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/Base64ConvertTo.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/Base64ConvertTo.java @@ -44,7 +44,8 @@ public void setup() { public void eval() { final String inputStr = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.getStringFromNullableVarCharHolder(in); final byte[] outBytea = javax.xml.bind.DatatypeConverter.parseBase64Binary(inputStr); - out.buffer = buffer = buffer.reallocIfNeeded(outBytea.length); + buffer = buffer.reallocIfNeeded(outBytea.length); + out.buffer = buffer; out.buffer.setBytes(0, outBytea); out.start = 0; out.end = outBytea.length; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertFrom.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertFrom.java index 0670833111..740b7fbc8c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertFrom.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertFrom.java @@ -50,12 +50,14 @@ public static class ConvertFromJson implements SimpleFunction { @Inject FunctionErrorContext errCtx; + @Override public void setup() { throw errCtx.error() .message("Operation not supported") .build(); } + @Override public void eval() { throw errCtx.error() .message("Operation not supported") @@ -78,12 +80,14 @@ public static class ConvertFromJsonVarchar implements SimpleFunction { @Inject FunctionErrorContext errCtx; + @Override public void setup() { throw errCtx.error() .message("Operation not supported") .build(); } + @Override public void eval() { throw errCtx.error() .message("Operation not supported") diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertTo.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertTo.java index 125f6a198e..72c7249833 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertTo.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/JsonConvertTo.java @@ -73,8 +73,8 @@ public void eval(){ } byte[] bytea = stream.toByteArray(); - - out.buffer = buffer = buffer.reallocIfNeeded(bytea.length); + buffer = buffer.reallocIfNeeded(bytea.length); + out.buffer = buffer; out.buffer.setBytes(0, bytea); out.end = bytea.length; } @@ -109,8 +109,8 @@ public void eval(){ } byte [] bytea = stream.toByteArray(); - - out.buffer = buffer = buffer.reallocIfNeeded(bytea.length); + buffer = buffer.reallocIfNeeded(bytea.length); + out.buffer = buffer; out.buffer.setBytes(0, bytea); out.end = bytea.length; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/RoundFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/RoundFunctions.java index 821f69b1b3..f5e755d9c0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/RoundFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/conv/RoundFunctions.java @@ -42,9 +42,11 @@ public static class RoundInt implements SimpleFunction { @Param IntHolder in; @Output IntHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = in.value; } @@ -56,9 +58,11 @@ public static class RoundBigInt implements SimpleFunction { @Param BigIntHolder in; @Output BigIntHolder 
out; + @Override public void setup() { } + @Override public void eval() { out.value = in.value; } @@ -70,9 +74,11 @@ public static class RoundFloat4 implements SimpleFunction { @Param Float4Holder in; @Output Float4Holder out; + @Override public void setup() { } + @Override public void eval() { java.math.BigDecimal input = java.math.BigDecimal.valueOf(in.value); out.value = input.setScale(0, java.math.RoundingMode.HALF_UP).floatValue(); @@ -85,9 +91,11 @@ public static class RoundFloat8 implements SimpleFunction { @Param Float8Holder in; @Output Float8Holder out; + @Override public void setup() { } + @Override public void eval() { java.math.BigDecimal input = java.math.BigDecimal.valueOf(in.value); out.value = input.setScale(0, java.math.RoundingMode.HALF_UP).doubleValue(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/interpreter/InterpreterEvaluator.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/interpreter/InterpreterEvaluator.java index 79b9ee3876..e4c36258b2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/interpreter/InterpreterEvaluator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/interpreter/InterpreterEvaluator.java @@ -447,7 +447,10 @@ private ValueHolder visitBooleanAnd(BooleanOperator op, Integer inIndex) { return TypeHelper.nullify(ValueHolderHelper.getBitHolder(0)); case NULL: hasNull = true; + break; case TRUE: + default: + break; } } @@ -478,7 +481,10 @@ private ValueHolder visitBooleanOr(BooleanOperator op, Integer inIndex) { return TypeHelper.nullify(ValueHolderHelper.getBitHolder(1)); case NULL: hasNull = true; + break; case FALSE: + default: + break; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/tdigest/TDigestFunctions.java b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/tdigest/TDigestFunctions.java index e56110b707..6c0e3ed4b8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/tdigest/TDigestFunctions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/expr/fn/tdigest/TDigestFunctions.java @@ -60,6 +60,7 @@ public static class NullableFloat8TDigestSampleFunction implements AggrFunction @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); @@ -108,6 +109,7 @@ public static class NullableDateTDigestSampleFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); @@ -157,6 +159,7 @@ public static class NullableTimeTDigestSampleFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); @@ -205,6 +208,7 @@ public static class NullableTimeStampTDigestSampleFunction implements AggrFuncti @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); @@ -253,6 +257,7 @@ public static class NullableBitTDigestSampleFunction implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); @@ -302,6 +307,7 @@ public static class 
NullableVarBinaryTDigestMerge implements AggrFunction { @Inject OptionResolver options; + @Override public void setup() { digest = new ObjectHolder(); compression.value = (int) options.getOption(com.dremio.exec.ExecConstants.TDIGEST_COMPRESSION); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystem.java b/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystem.java index a412c89a96..7428735470 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystem.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystem.java @@ -361,9 +361,9 @@ public DirectoryStream glob(Path pattern, Predicate filter throws FileNotFoundException, IOException { try (WaitRecorder metaRecorder = OperatorStats.getMetadataWaitRecorder(operatorStats, pattern)) { FileStatus[] fileStatuses = underlyingFs.globStatus(toHadoopPath(pattern), toPathFilter(filter)); - if (logger.isTraceEnabled()) { + if (fileStatuses != null && logger.isTraceEnabled()) { for (FileStatus fileStatus : fileStatuses) { - logger.trace(fileStatus.toString()); + logger.trace("HFS glob file status: {}", fileStatus.toString()); } } return new ArrayDirectoryStream(fileStatuses); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystemConfigurationAdapter.java b/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystemConfigurationAdapter.java new file mode 100644 index 0000000000..29514d07c4 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFileSystemConfigurationAdapter.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.hadoop; + +import org.apache.hadoop.conf.Configuration; + +import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter; + +/** + * A FileSystemConfigurationAdapter that exposes Hadoop configuration using the global Hadoop version. + */ +public class HadoopFileSystemConfigurationAdapter implements FileSystemConfigurationAdapter { + + private final Configuration conf; + + public HadoopFileSystemConfigurationAdapter(Configuration conf) { + this.conf = conf; + } + + @Override + public String get(String name) { + return conf.get(name); + } + + @Override + public String get(String name, String defaultValue) { + return conf.get(name, defaultValue); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsCacheWrapperDremioClassLoader.java b/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsCacheWrapperDremioClassLoader.java deleted file mode 100644 index 727378148b..0000000000 --- a/sabot/kernel/src/main/java/com/dremio/exec/hadoop/HadoopFsCacheWrapperDremioClassLoader.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.hadoop; - -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.function.Supplier; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.dremio.common.util.Closeable; -import com.dremio.common.util.concurrent.ContextClassLoaderSwapper; -import com.dremio.exec.store.iceberg.DremioFileIO; -import com.dremio.exec.store.iceberg.HadoopFsCacheKey; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.cache.RemovalListener; -import com.google.common.cache.RemovalNotification; - -/** - * - * This class is wrapper for the cache which holds the FileSystem objects created using dremio class loader - */ -public class HadoopFsCacheWrapperDremioClassLoader implements HadoopFsSupplierProviderDremioClassLoader { - private static final Logger logger = LoggerFactory.getLogger(HadoopFsCacheWrapperDremioClassLoader.class); - private LoadingCache cache = CacheBuilder.newBuilder() - .softValues() - .removalListener(new RemovalListener() { - @Override - public void onRemoval(RemovalNotification notification) { - try { - notification.getValue().close(); - } catch (IOException e) { - // Ignore - logger.error("Failed to remove fs in HadoopFsCacheWrapperDremioClassLoader" , e); - } - } - }) - .build(new CacheLoader() { - @Override - public org.apache.hadoop.fs.FileSystem load(HadoopFsCacheKey key) throws Exception { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { - final String disableCacheName = String.format("fs.%s.impl.disable.cache", key.getUri().getScheme()); - // Clone the conf and set cache to disable, so that a new instance is created rather than returning an existing - final Configuration cloneConf = new Configuration(key.getConf()); - cloneConf.set(disableCacheName, "true"); - return org.apache.hadoop.fs.FileSystem.get(key.getUri(), cloneConf); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - }); - - @Override - public Supplier getHadoopFsSupplierDremioClassLoader(String path, Iterable> conf) { - return () -> { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { - return cache.get(new HadoopFsCacheKey(new Path(path).toUri(), conf)); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - }; - } - - @Override - public void close() throws Exception { - // Empty cache - cache.invalidateAll(); - cache.cleanUp(); - } -} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/FragmentStarter.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/FragmentStarter.java index 5b41926593..156dfd804e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/FragmentStarter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/FragmentStarter.java @@ -59,7 +59,7 @@ import com.google.protobuf.MessageLite; import 
io.grpc.stub.StreamObserver; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Class used to start remote fragment execution. diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroForwarderImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroForwarderImpl.java index 8eb3036ba2..7c521749ea 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroForwarderImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroForwarderImpl.java @@ -79,6 +79,7 @@ public MaestroForwarderImpl( this.allocator = maestroForwarderAllocator; } + @Override public void screenCompleted(NodeQueryScreenCompletion completion) { if (mustForwardRequest(completion.getForeman())) { logger.debug("Forwarding NodeQueryScreenCompletion request for Query {} from {} to target {}", @@ -93,6 +94,7 @@ public void screenCompleted(NodeQueryScreenCompletion completion) { } } + @Override public void nodeQueryCompleted(NodeQueryCompletion completion) { if (mustForwardRequest(completion.getForeman())) { logger.debug("Forwarding NodeQueryCompletion request for Query {} from {} to target {}", @@ -107,6 +109,7 @@ public void nodeQueryCompleted(NodeQueryCompletion completion) { } } + @Override public void nodeQueryMarkFirstError(NodeQueryFirstError error) { if (mustForwardRequest(error.getForeman())) { logger.debug("Forwarding NodeQueryFirstError request for Query {} from {} to target {}", @@ -121,11 +124,13 @@ public void nodeQueryMarkFirstError(NodeQueryFirstError error) { } } + @Override public void dataArrived(JobResultsRequestWrapper jobResultsRequestWrapper, ResponseSender sender) { logger.debug("MaestroForwarder dataArrived.requestWrapper"); dataArrived(null, jobResultsRequestWrapper, sender); } + @Override public void dataArrived(JobResultsRequest jobResultsRequest, ResponseSender sender) { logger.debug("MaestroForwarder dataArrived.request"); dataArrived(jobResultsRequest, null, sender); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroObserver.java index 784286e755..c08d3182da 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroObserver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroObserver.java @@ -47,7 +47,6 @@ public interface MaestroObserver { /** * The planning and parallelization phase of the query is completed. - * * An {@link ExecutionPlan execution plan} is provided to observer. */ void planCompleted(ExecutionPlan plan); @@ -71,7 +70,7 @@ public interface MaestroObserver { /** * The decisions made for parallelizations and fragments were completed. - * @param planningSet + * @param planningSet parallelized execution plan */ void planParallelized(PlanningSet planningSet); @@ -107,25 +106,32 @@ public interface MaestroObserver { /** * Time taken for sending start fragment rpcs to all nodes. - * @param millisTaken + * @param millisTaken time in millis */ void fragmentsStarted(long millisTaken, FragmentRpcSizeStats stats); /** * Time taken for sending activate fragment rpcs to all nodes. - * @param millisTaken + * @param millisTaken time in millis */ void fragmentsActivated(long millisTaken); /** * Failed to activate fragment. 
- * @param ex + * @param ex actual cause of failure */ void activateFragmentFailed(Exception ex); /** * ResourceScheduling related information - * @param resourceSchedulingDecisionInfo + * @param resourceSchedulingDecisionInfo information about the completed resource allocation */ void resourcesScheduled(ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo); + + /** + * Signals movement to the next stage within maestro + */ + interface ExecutionStageChangeListener { + void moveToNextStage(AttemptEvent.State nextStage); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroService.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroService.java index 17cb1cc564..a22c549029 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroService.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroService.java @@ -20,6 +20,7 @@ import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.physical.PhysicalPlan; +import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.proto.UserBitShared.QueryId; import com.dremio.exec.work.SafeExit; import com.dremio.exec.work.foreman.CompletionListener; @@ -53,8 +54,8 @@ public interface MaestroService extends Service, SafeExit { * @param observer observer to notify on state changes, and progress. * @param listener listener to notify on completion or failures. * - * @throws ExecutionSetupException - * @throws ResourceAllocationException + * @throws ExecutionSetupException failure in execution planning + * @throws ResourceAllocationException failure in resource allocation */ void executeQuery( QueryId queryId, @@ -65,6 +66,15 @@ void executeQuery( CompletionListener listener) throws ExecutionSetupException, ResourceAllocationException; + /** + * Interrupts the execution if it is in a wait state. Different stages of query execution require different actions, as + * they may be waiting/blocking for different resources. + * + * @param queryId Id of the query whose execution needs to be interrupted + * @param currentStage current stage of the query (interrupt actions may depend on the stage of the query) + */ + void interruptExecutionInWaitStates(QueryId queryId, UserBitShared.AttemptEvent.State currentStage); + /** * Cancel a previously triggered query. 
* diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroServiceImpl.java index 0dd940ab58..e79e50552e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/MaestroServiceImpl.java @@ -15,10 +15,10 @@ */ package com.dremio.exec.maestro; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; import javax.inject.Provider; @@ -123,7 +123,7 @@ public MaestroServiceImpl( @Override public void start() throws Exception { - Metrics.newGauge(Metrics.join("maestro", "active"), () -> activeQueryMap.size()); + Metrics.newGauge(Metrics.join("maestro", "active"), activeQueryMap::size); execToCoordStatusHandlerImpl = new ExecToCoordStatusHandlerImpl(jobTelemetryClient); reader = sabotContext.get().getPlanReader(); @@ -180,6 +180,28 @@ public void executeQuery( ExecutionSetupException.class); } + @Override + public void interruptExecutionInWaitStates(QueryId queryId, AttemptEvent.State currentStage) { + final QueryTracker queryTracker = activeQueryMap.get(queryId); + if (queryTracker == null) { + return; + } + switch (currentStage) { + case ENGINE_START: + case QUEUED: + if (logger.isDebugEnabled()) { + logger.debug("Interrupting allocation {} {}", QueryIdHelper.getQueryId(queryId), currentStage); + } + queryTracker.interruptAllocation(); + break; + + default: + // TODO: support interruptions in other interruptible states of query execution within maestro + // as well (future PRs). + break; + } + } + @Override public void cancelQuery(QueryId queryId) { QueryTracker queryTracker = activeQueryMap.get(queryId); @@ -223,10 +245,7 @@ public void close() throws Exception { @Override public List getActiveQueryIds() { - return activeQueryMap - .keySet() - .stream() - .collect(Collectors.toList()); + return new ArrayList<>(activeQueryMap.keySet()); } /** @@ -241,12 +260,17 @@ public ExecToCoordStatusHandlerImpl(Provider jobTelemetryCli @Override public void screenCompleted(NodeQueryScreenCompletion completion) throws RpcException { - logger.debug("Screen complete message came in for id {}", QueryIdHelper.getQueryId(completion.getId())); + if (logger.isDebugEnabled()) { + logger.debug("Screen complete message came in for id {}", QueryIdHelper.getQueryId(completion.getId())); + } QueryTracker queryTracker = activeQueryMap.get(completion.getId()); if (queryTracker != null) { - logger.debug("Received NodeQueryScreenCompletion request for Query {} from {} in {}", - QueryIdHelper.getQueryId(completion.getId()), completion.getEndpoint().getAddress(), completion.getForeman().getAddress()); + if (logger.isDebugEnabled()) { + logger.debug("Received NodeQueryScreenCompletion request for Query {} from {} in {}", + QueryIdHelper.getQueryId(completion.getId()), completion.getEndpoint().getAddress(), + completion.getForeman().getAddress()); + } queryTracker.screenCompleted(completion); } else { @@ -256,13 +280,19 @@ public void screenCompleted(NodeQueryScreenCompletion completion) throws RpcExce @Override public void nodeQueryCompleted(NodeQueryCompletion completion) throws RpcException { - logger.debug("Node query complete message came in for id {}", QueryIdHelper.getQueryId(completion.getId())); + if (logger.isDebugEnabled()) { + logger.debug("Node query complete message came in for id {}", + 
QueryIdHelper.getQueryId(completion.getId())); + } updateFinalExecutorProfile(completion); QueryTracker queryTracker = activeQueryMap.get(completion.getId()); if (queryTracker != null) { - logger.debug("Received NodeQueryCompletion request for Query {} from {} in {}", - QueryIdHelper.getQueryId(completion.getId()), completion.getEndpoint().getAddress(), completion.getForeman().getAddress()); + if (logger.isDebugEnabled()) { + logger.debug("Received NodeQueryCompletion request for Query {} from {} in {}", + QueryIdHelper.getQueryId(completion.getId()), completion.getEndpoint().getAddress(), + completion.getForeman().getAddress()); + } queryTracker.nodeCompleted(completion); } else { @@ -293,12 +323,17 @@ private void updateFinalExecutorProfile(NodeQueryCompletion completion) { @Override public void nodeQueryMarkFirstError(NodeQueryFirstError error) throws RpcException { - logger.debug("Node Query error came in for id {} ", QueryIdHelper.getQueryId(error.getHandle().getQueryId())); + if (logger.isDebugEnabled()) { + logger.debug("Node Query error came in for id {} ", QueryIdHelper.getQueryId(error.getHandle().getQueryId())); + } QueryTracker queryTracker = activeQueryMap.get(error.getHandle().getQueryId()); if (queryTracker != null) { - logger.debug("Received NodeQueryFirstError request for Query {} from {} in {}", - QueryIdHelper.getQueryId(error.getHandle().getQueryId()), error.getEndpoint().getAddress(), error.getForeman().getAddress()); + if (logger.isDebugEnabled()) { + logger.debug("Received NodeQueryFirstError request for Query {} from {} in {}", + QueryIdHelper.getQueryId(error.getHandle().getQueryId()), error.getEndpoint().getAddress(), + error.getForeman().getAddress()); + } queryTracker.nodeMarkFirstError(error); } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTracker.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTracker.java index e1a17b8ece..25e8cc6472 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTracker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTracker.java @@ -34,6 +34,11 @@ interface QueryTracker extends AutoCloseable { */ void allocateResources() throws ExecutionSetupException, ResourceAllocationException; + /** + * Interrupts allocation, provided we are in an interruptible stage within allocation. + */ + void interruptAllocation(); + /** * Execution planning include parallelization of the query fragments. 
* @throws ExecutionSetupException diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTrackerImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTrackerImpl.java index 9451f3547f..a1e47cb6b5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTrackerImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/QueryTrackerImpl.java @@ -42,7 +42,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; public class QueryTrackerImpl implements QueryTracker { @VisibleForTesting @@ -104,7 +104,15 @@ public class QueryTrackerImpl implements QueryTracker { @WithSpan("allocate-resources") @Override public void allocateResources() throws ExecutionSetupException, ResourceAllocationException { - resourceTracker = new ResourceTracker(physicalPlan, context, queryResourceManager, observer); + resourceTracker = new ResourceTracker(context, queryResourceManager); + resourceTracker.allocate(physicalPlan, observer); + } + + @Override + public void interruptAllocation() { + if (resourceTracker != null) { + resourceTracker.interruptAllocation(); + } } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/maestro/ResourceTracker.java b/sabot/kernel/src/main/java/com/dremio/exec/maestro/ResourceTracker.java index 7dccd4f704..e6954e3008 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/maestro/ResourceTracker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/maestro/ResourceTracker.java @@ -45,7 +45,9 @@ public class ResourceTracker implements AutoCloseable { private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(ResourceTracker.class); - private ResourceSet resourceSet; + private final ResourceAllocator resourceAllocator; + private final QueryContext context; + private volatile ResourceSet resourceSet; private ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo; @VisibleForTesting @@ -63,19 +65,25 @@ public class ResourceTracker implements AutoCloseable { @VisibleForTesting public static final String INJECTOR_QUEUED_PAUSE = "queued-pause"; - ResourceTracker( - PhysicalPlan physicalPlan, - QueryContext context, - ResourceAllocator resourceAllocator, - MaestroObserver observer) throws ExecutionSetupException, ResourceAllocationException { + ResourceTracker(QueryContext context, ResourceAllocator resourceAllocator) { + this.resourceAllocator = resourceAllocator; + this.resourceSet = null; + this.context = context; + } - allocate(physicalPlan, context, resourceAllocator, observer); + /** + * Interrupts the allocation, esp if it is in any of the wait states. + *
+ * Typically called when the cancel thread knows from the execution stage that we are somewhere in the + * allocator. + *
        + */ + void interruptAllocation() { + resourceAllocator.cancel(context); } - private void allocate( + void allocate( PhysicalPlan physicalPlan, - QueryContext context, - ResourceAllocator resourceAllocator, MaestroObserver observer) throws ExecutionSetupException, ResourceAllocationException { final double planCost = physicalPlan.getCost(); @@ -139,6 +147,8 @@ ResourceSet getResources() { @Override public void close() throws Exception { - resourceSet.close(); + if (resourceSet != null) { + resourceSet.close(); + } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/ops/QueryContext.java b/sabot/kernel/src/main/java/com/dremio/exec/ops/QueryContext.java index 80cde5e15a..aac7083fdd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/ops/QueryContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/ops/QueryContext.java @@ -162,17 +162,18 @@ public QueryContext( final SabotContext sabotContext, QueryId queryId ) { - this(session, sabotContext, queryId, Optional.empty()); + this(session, sabotContext, queryId, Optional.empty(), Optional.empty()); } public QueryContext( final UserSession session, final SabotContext sabotContext, QueryId queryId, - Optional checkMetadataValidity + Optional checkMetadataValidity, + Optional neverPromote ) { this(session, sabotContext, queryId, null, Long.MAX_VALUE, Predicates.alwaysTrue(), - checkMetadataValidity); + checkMetadataValidity, neverPromote); } public QueryContext( @@ -183,7 +184,7 @@ public QueryContext( long maxAllocation, Predicate datasetValidityChecker ) { - this(session, sabotContext, queryId, priority, maxAllocation, datasetValidityChecker, Optional.empty()); + this(session, sabotContext, queryId, priority, maxAllocation, datasetValidityChecker, Optional.empty(), Optional.empty()); } public QueryContext( @@ -195,7 +196,7 @@ public QueryContext( Predicate datasetValidityChecker, PlanCache planCache ) { - this(session, sabotContext, queryId, priority, maxAllocation, datasetValidityChecker, Optional.empty()); + this(session, sabotContext, queryId, priority, maxAllocation, datasetValidityChecker, Optional.empty(), Optional.empty()); this.planCache = planCache; } @@ -206,7 +207,9 @@ private QueryContext( QueryPriority priority, long maxAllocation, Predicate datasetValidityChecker, - Optional checkMetadataValidity + Optional checkMetadataValidity, + Optional neverPromote + ) { this.sabotContext = sabotContext; this.session = session; @@ -249,10 +252,17 @@ private QueryContext( .setDatasetValidityChecker(datasetValidityChecker) .build(); + // Using caching namespace for query planning. The lifecycle of the cache is associated with the life cycle of + // the Catalog. 
final ImmutableMetadataRequestOptions.Builder requestOptions = MetadataRequestOptions.newBuilder() .setSchemaConfig(schemaConfig) - .setSourceVersionMapping(CaseInsensitiveMap.newImmutableMap(session.getSourceVersionMapping())); + .setSourceVersionMapping(CaseInsensitiveMap.newImmutableMap(session.getSourceVersionMapping())) + .setUseCachingNamespace(true); checkMetadataValidity.ifPresent(requestOptions::setCheckValidity); + neverPromote.ifPresent(requestOptions::setNeverPromote); + if (priority != null && priority.getWorkloadType() == WorkloadType.ACCELERATOR) { + requestOptions.setErrorOnUnspecifiedSourceVersion(true); + } this.catalog = sabotContext.getCatalogService() .getCatalog(requestOptions.build()); this.substitutionProviderFactory = sabotContext.getConfig() @@ -342,6 +352,7 @@ public String getQueryUserName() { /** * Get the OptionManager for this context. */ + @Override public OptionManager getOptions() { return optionManager; } @@ -550,7 +561,7 @@ public Provider getNessieClientProvider() { } @Override - public Pair getSurvivingRowCountWithPruneFilter(ScanRelBase scan, PruneFilterCondition pruneCondition) { + public Pair getSurvivingRowCountWithPruneFilter(ScanRelBase scan, PruneFilterCondition pruneCondition) throws Exception { if (pruneCondition != null && getPlannerSettings().getOptions().getOption(ENABLE_PARTITION_STATS_USAGE)) { List table = scan.getTableMetadata().getName().getPathComponents(); if (!survivingRowCountsWithPruneFilter.containsKey(table)) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/ops/ReflectionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/ops/ReflectionContext.java index 71c47614b9..504162853e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/ops/ReflectionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/ops/ReflectionContext.java @@ -38,4 +38,6 @@ public String getUserName() { public boolean isAdmin() { return isAdmin; } + + } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/ops/ViewExpansionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/ops/ViewExpansionContext.java index 7511a4231f..2a46450455 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/ops/ViewExpansionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/ops/ViewExpansionContext.java @@ -70,6 +70,8 @@ public class ViewExpansionContext { private final CatalogIdentity catalogIdentity; private final ObjectIntHashMap userTokens = new ObjectIntHashMap<>(); + private boolean substitutedWithDRR = false; + public ViewExpansionContext(CatalogIdentity catalogIdentity) { super(); @@ -146,4 +148,15 @@ public void release() { public CatalogIdentity getQueryUser() { return catalogIdentity; } + + public boolean isSubstitutedWithDRR() { + return substitutedWithDRR; + } + + /** + * Indicates that query has been substituted with a default raw reflection during conversion. 
+ */ + public void setSubstitutedWithDRR() { + this.substitutedWithDRR = true; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractReceiver.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractReceiver.java index c44e2ee7ec..531eadf8fb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractReceiver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractReceiver.java @@ -59,6 +59,7 @@ public T accept(PhysicalVisitor physicalVis return physicalVisitor.visitReceiver(this, value); } + @Override public int getSenderMajorFragmentId() { return senderMajorFragmentId; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractSubScan.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractSubScan.java index 05a798f0c0..55f11cb957 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractSubScan.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/AbstractSubScan.java @@ -108,6 +108,7 @@ public Iterator iterator() { return Collections.emptyIterator(); } + @Override @JsonProperty("fullSchema") public BatchSchema getFullSchema() { return fullSchema; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/PhysicalOperator.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/PhysicalOperator.java index 577a393030..53d184e078 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/PhysicalOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/PhysicalOperator.java @@ -56,6 +56,7 @@ public interface PhysicalOperator extends GraphValue { @JsonIgnore PhysicalOperator getNewWithChildren(List children) throws ExecutionSetupException; + @Override default void accept(GraphVisitor visitor) { visitor.enter(this); if (this.iterator() == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/TableFormatWriterOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/TableFormatWriterOptions.java index c430180eae..4885c817c4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/TableFormatWriterOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/TableFormatWriterOptions.java @@ -34,7 +34,8 @@ enum TableFormatOperation { INSERT, MERGE, UPDATE, - OPTIMIZE + OPTIMIZE, + VACUUM } /** @@ -52,6 +53,14 @@ default TableFormatOperation getOperation() { @Nullable Long getMinInputFilesBeforeOptimize(); + /** + * SnapshotId for operation. + * Consuming operation: Starting snapshot ID for conflict + * resolution with delete files in optimization. + */ + @Nullable + Long getSnapshotId(); + /** * Target file size to be used by writers. * System defaults to be used in case the value is null. 
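TableFormatWriterOptions above gains a VACUUM operation and a starting snapshot id documented as the basis for conflict resolution with delete files during optimization. A hedged sketch of how such a snapshot id can be applied at commit time with the Apache Iceberg API; the helper class and wiring are illustrative, not Dremio's actual committer:

```java
import java.util.Set;

import org.apache.iceberg.DataFile;
import org.apache.iceberg.RewriteFiles;
import org.apache.iceberg.Table;

final class OptimizeCommitSketch {
  static void commitRewrite(Table table, Long startingSnapshotId,
                            Set<DataFile> rewritten, Set<DataFile> added) {
    RewriteFiles rewrite = table.newRewrite()
        .rewriteFiles(rewritten, added);
    if (startingSnapshotId != null) {
      // Fail the commit if conflicting changes (e.g. new positional deletes)
      // landed after the rewrite started from this snapshot.
      rewrite = rewrite.validateFromSnapshot(startingSnapshotId);
    }
    rewrite.commit();
  }
}
```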
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/ViewOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/ViewOptions.java index d6748faa61..7db9313cf6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/base/ViewOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/base/ViewOptions.java @@ -15,6 +15,9 @@ */ package com.dremio.exec.physical.base; +import java.util.HashMap; +import java.util.Map; + import com.dremio.exec.catalog.ResolvedVersionContext; import com.dremio.exec.record.BatchSchema; import com.google.common.base.Preconditions; @@ -25,26 +28,43 @@ public class ViewOptions { private final ResolvedVersionContext version; private final BatchSchema batchSchema; // tracks the columns of the table to create from - private final boolean isViewUpdate; + private final ActionType actionType; + private final Map properties; private ViewOptions(ViewOptionsBuilder builder) { this.version = builder.version; this.batchSchema = builder.batchSchema; - this.isViewUpdate = builder.isViewUpdate; + this.actionType = builder.actionType; + this.properties = builder.properties; + } + + public enum ActionType { + CREATE_VIEW, + UPDATE_VIEW, + ALTER_VIEW } public ResolvedVersionContext getVersion(){ return version; } - public BatchSchema getBatchSchema() {return batchSchema;} + public BatchSchema getBatchSchema() { return batchSchema; } + + public ActionType getActionType() { return actionType; } + + public boolean isViewCreate() { return actionType == ActionType.CREATE_VIEW; } - public boolean isViewUpdate() { return isViewUpdate; } + public boolean isViewUpdate() { return actionType == ActionType.UPDATE_VIEW; } - public static class ViewOptionsBuilder{ + public boolean isViewAlter() { return actionType == ActionType.ALTER_VIEW; } + + public Map getProperties() { return properties; } + + public static class ViewOptionsBuilder { private ResolvedVersionContext version; private BatchSchema batchSchema; - private boolean isViewUpdate; + private ActionType actionType; + private Map properties; public ViewOptionsBuilder() { } @@ -61,8 +81,14 @@ public ViewOptionsBuilder batchSchema(BatchSchema schema){ return this; } - public ViewOptionsBuilder viewUpdate(boolean isViewUpdate) { - this.isViewUpdate = isViewUpdate; + public ViewOptionsBuilder actionType(ActionType actionType) { + this.actionType = actionType; + return this; + } + + public ViewOptionsBuilder properties(Map properties) { + Preconditions.checkArgument(!properties.isEmpty()); + this.properties = new HashMap<>(properties); return this; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/DeletedFilesMetadataTableFunctionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/DeletedFilesMetadataTableFunctionContext.java new file mode 100644 index 0000000000..a33cd8b85e --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/DeletedFilesMetadataTableFunctionContext.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.physical.config; + +import java.util.List; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.planner.physical.visitor.GlobalDictionaryFieldInfo; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.OperationType; +import com.dremio.exec.store.ScanFilter; +import com.dremio.service.namespace.dataset.proto.UserDefinedSchemaSettings; +import com.dremio.service.namespace.file.proto.FileConfig; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; + +import io.protostuff.ByteString; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonTypeName("deleted-files-metadata-table-function") +public class DeletedFilesMetadataTableFunctionContext extends TableFunctionContext{ + + private final OperationType operationType; + + public DeletedFilesMetadataTableFunctionContext(@JsonProperty("operationType") OperationType operationType, + @JsonProperty("formatSettings") FileConfig formatSettings, + @JsonProperty("schema") BatchSchema fullSchema, + @JsonProperty("tableschema") BatchSchema tableSchema, + @JsonProperty("referencedTables") List> tablePath, + @JsonProperty("scanFilter") ScanFilter scanFilter, + @JsonProperty("pluginId") StoragePluginId pluginId, + @JsonProperty("internalTablePluginId") StoragePluginId internalTablePluginId, + @JsonProperty("columns") List columns, + @JsonProperty("partitionColumns") List partitionColumns, + @JsonProperty("globalDictionaryEncodedColumns") List globalDictionaryEncodedColumns, + @JsonProperty("extendedProperty") ByteString extendedProperty, + @JsonProperty("arrowCachingEnabled") boolean arrowCachingEnabled, + @JsonProperty("convertedIcebergDataset") boolean isConvertedIcebergDataset, + @JsonProperty("icebergMetadata") boolean isIcebergMetadata, + @JsonProperty("userDefinedSchemaSettings") UserDefinedSchemaSettings userDefinedSchemaSettings) { + super(formatSettings, fullSchema, tableSchema, tablePath, scanFilter, pluginId, internalTablePluginId, columns, partitionColumns, globalDictionaryEncodedColumns, extendedProperty, arrowCachingEnabled, isConvertedIcebergDataset, isIcebergMetadata, userDefinedSchemaSettings); + this.operationType = operationType; + } + + public OperationType getOperationType() { + return operationType; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/EmptyValues.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/EmptyValues.java index e588b9f3b8..2a3876f294 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/EmptyValues.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/EmptyValues.java @@ -78,6 +78,7 @@ public boolean mayLearnSchema() { return false; } + @Override public BatchSchema getFullSchema(){ return schema; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ExtendedFormatOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ExtendedFormatOptions.java index f7db499233..35e54b51a5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ExtendedFormatOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ExtendedFormatOptions.java @@ -22,6 +22,20 @@ @JsonTypeName("ExtendedFormatOptions") public class ExtendedFormatOptions { + /* + 
areStringTransformationsNeeded is a special flag variable that we use to keep track of whether certain string transformations + like NULL_IF are set in the query. This is useful to us in scenarios where we can follow optimised code paths with fewer steps + and save time. + + One example of this in action is SchemaImposedOutput::writeValueInCurrentVector where we follow an optimised path + provided the following conditions are met: + 1. The target column data type is VARCHAR + 2. Transformations like NULL_IF are not used. + + In future, if we add more string transformations or modify any existing ones, it is recommended that we also explore whether + this flag can be used in that case for any optimisations. + */ + private Boolean areStringTransformationsNeeded = false; private Boolean trimSpace; private Boolean emptyAsNull = true; private String dateFormat; @@ -88,6 +102,19 @@ public List getNullIfExpressions() { public void setNullIfExpressions(final List nullIfExpressions) { this.nullIfExpressions = nullIfExpressions; + // Check if nullIfExpressions is non-null and non-empty. + final boolean nullIfExpressionsContainsData = (nullIfExpressions != null && !nullIfExpressions.isEmpty()); + // In case 'nullIfExpressionsContainsData' is false, we want to preserve areStringTransformationsNeeded's original value. + // Hence, use logical OR here while setting areStringTransformationsNeeded. + setAreStringTransformationsNeeded(areStringTransformationsNeeded || nullIfExpressionsContainsData); + } + + public boolean getAreStringTransformationsNeeded() { + return areStringTransformationsNeeded; + } + + private void setAreStringTransformationsNeeded(final Boolean areStringTransformationsNeeded) { + this.areStringTransformationsNeeded = areStringTransformationsNeeded; } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestListScanTableFunctionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestListScanTableFunctionContext.java new file mode 100644 index 0000000000..a686060131 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestListScanTableFunctionContext.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.physical.config; + +import java.util.List; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.planner.physical.visitor.GlobalDictionaryFieldInfo; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.ScanFilter; +import com.dremio.service.namespace.dataset.proto.UserDefinedSchemaSettings; +import com.dremio.service.namespace.file.proto.FileConfig; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; + +import io.protostuff.ByteString; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonTypeName("manifest-list-scan") +public class ManifestListScanTableFunctionContext extends TableFunctionContext { + private final boolean isCarryForwardEnabled; // Enable TF to carry forward processed rows' info + + public ManifestListScanTableFunctionContext( + @JsonProperty("formatSettings") FileConfig formatSettings, + @JsonProperty("schema") BatchSchema fullSchema, + @JsonProperty("tableschema") BatchSchema tableSchema, + @JsonProperty("referencedTables") List> tablePath, + @JsonProperty("scanFilter") ScanFilter scanFilter, + @JsonProperty("pluginId") StoragePluginId pluginId, + @JsonProperty("internalTablePluginId") StoragePluginId internalTablePluginId, + @JsonProperty("columns") List columns, + @JsonProperty("partitionColumns") List partitionColumns, + @JsonProperty("globalDictionaryEncodedColumns") List globalDictionaryEncodedColumns, + @JsonProperty("extendedProperty") ByteString extendedProperty, + @JsonProperty("arrowCachingEnabled") boolean arrowCachingEnabled, + @JsonProperty("convertedIcebergDataset") boolean isConvertedIcebergDataset, + @JsonProperty("icebergMetadata") boolean isIcebergMetadata, + @JsonProperty("userDefinedSchemaSettings") UserDefinedSchemaSettings userDefinedSchemaSettings, + @JsonProperty("carryForwardEnabled") boolean isCarryForwardEnabled) { + super(formatSettings, fullSchema, tableSchema, tablePath, scanFilter, pluginId, internalTablePluginId, columns, partitionColumns, globalDictionaryEncodedColumns, + extendedProperty, arrowCachingEnabled, isConvertedIcebergDataset, isIcebergMetadata, userDefinedSchemaSettings); + this.isCarryForwardEnabled = isCarryForwardEnabled; + } + + public boolean isCarryForwardEnabled() { + return isCarryForwardEnabled; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestScanTableFunctionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestScanTableFunctionContext.java index acdc1d8499..05e437e3a2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestScanTableFunctionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/ManifestScanTableFunctionContext.java @@ -41,6 +41,7 @@ public class ManifestScanTableFunctionContext extends TableFunctionContext { private String icebergSchema; private final ManifestContent manifestContent; private final ManifestScanFilters manifestScanFilters; + private final boolean isCarryForwardEnabled; // Enable TF to carry forward processed rows' info public ManifestScanTableFunctionContext( @JsonProperty("partitionSpecMap") ByteString partitionSpecMap, @@ -62,13 +63,15 @@ public ManifestScanTableFunctionContext( @JsonProperty("icebergMetadata") boolean isIcebergMetadata, @JsonProperty("userDefinedSchemaSettings") UserDefinedSchemaSettings userDefinedSchemaSettings, 
@JsonProperty("manifestContent") ManifestContent manifestContent, - @JsonProperty("metadataFilters") ManifestScanFilters manifestScanFilters) { + @JsonProperty("metadataFilters") ManifestScanFilters manifestScanFilters, + @JsonProperty("carryForwardEnabled") boolean isCarryForwardEnabled) { super(formatSettings, fullSchema, tableSchema, tablePath, scanFilter, pluginId, internalTablePluginId, columns, partitionColumns, globalDictionaryEncodedColumns, extendedProperty, arrowCachingEnabled, isConvertedIcebergDataset, isIcebergMetadata, userDefinedSchemaSettings); this.partitionSpecMap = partitionSpecMap; this.icebergSchema = icebergSchema; this.jsonPartitionSpecMap = jsonPartitionSpecMap; this.manifestScanFilters = manifestScanFilters; this.manifestContent = manifestContent; + this.isCarryForwardEnabled = isCarryForwardEnabled; } public ByteString getPartitionSpecMap() { @@ -90,4 +93,8 @@ public ManifestScanFilters getManifestScanFilters() { public ManifestContent getManifestContent() { return manifestContent; } + + public boolean isCarryForwardEnabled() { + return isCarryForwardEnabled; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/MergingReceiverPOP.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/MergingReceiverPOP.java index ee138e6da0..07fda9b65e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/MergingReceiverPOP.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/MergingReceiverPOP.java @@ -81,6 +81,7 @@ public int getOperatorType() { return CoreOperatorType.MERGING_RECEIVER_VALUE; } + @Override @JsonProperty("senders") public List getProvidingEndpoints() { return senders; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionConfig.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionConfig.java index c9bef9d7cc..dfb64f87ed 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionConfig.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionConfig.java @@ -39,11 +39,15 @@ public enum FunctionType { SPLIT_ASSIGNMENT, BOOST_TABLE_FUNCTION, ICEBERG_PARTITION_TRANSFORM, - DELETED_DATA_FILES_METADATA, + DELETED_FILES_METADATA, ICEBERG_SPLIT_GEN, ICEBERG_MANIFEST_SCAN, ICEBERG_DELETE_FILE_AGG, - ICEBERG_DML_MERGE_DUPLICATE_CHECK + ICEBERG_DML_MERGE_DUPLICATE_CHECK, + ICEBERG_OPTIMIZE_MANIFESTS, + ICEBERG_ORPHAN_FILE_DELETE, + ICEBERG_MANIFEST_LIST_SCAN, + ICEBERG_PARTITION_STATS_SCAN } private final FunctionType type; private final TableFunctionContext functionContext; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionContext.java index e7269670ef..1314713b27 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/TableFunctionContext.java @@ -23,6 +23,7 @@ import com.dremio.exec.planner.physical.visitor.GlobalDictionaryFieldInfo; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.ScanFilter; +import com.dremio.exec.store.iceberg.OptimizeManifestsTableFunctionContext; import com.dremio.service.namespace.dataset.proto.UserDefinedSchemaSettings; import com.dremio.service.namespace.file.proto.FileConfig; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @@ -45,7 +46,10 @@ @JsonSubTypes.Type(value = BoostTableFunctionContext.class, name = 
"boost"), @JsonSubTypes.Type(value = ManifestScanTableFunctionContext.class, name = "manifest-scan"), @JsonSubTypes.Type(value = PartitionTransformTableFunctionContext.class, name = "partition-transform-table"), - @JsonSubTypes.Type(value = EasyScanTableFunctionContext.class, name = "easy-scan-table-function")} + @JsonSubTypes.Type(value = EasyScanTableFunctionContext.class, name = "easy-scan-table-function"), + @JsonSubTypes.Type(value = DeletedFilesMetadataTableFunctionContext.class, name = "deleted-files-metadata-table-function"), + @JsonSubTypes.Type(value = OptimizeManifestsTableFunctionContext.class, name = "optimize-manifests"), + @JsonSubTypes.Type(value = ManifestListScanTableFunctionContext.class, name = "manifest-list-scan")} ) public class TableFunctionContext { private final List columns; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/UnorderedReceiver.java b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/UnorderedReceiver.java index 8a2ba332b0..ee0cf968a2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/physical/config/UnorderedReceiver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/physical/config/UnorderedReceiver.java @@ -76,6 +76,7 @@ public final PhysicalOperator getNewWithChildren(List children return new UnorderedReceiver(props, getSchema(), getSenderMajorFragmentId(), senders, isSpooling()); } + @Override @JsonIgnore public List getProvidingEndpoints() { return senders; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioHepPlanner.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioHepPlanner.java index c2120bde38..7a5bd59ee7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioHepPlanner.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioHepPlanner.java @@ -104,7 +104,7 @@ public void checkCancel() { if (cancelFlag.isCancelRequested()) { ExceptionUtils.throwUserException(String.format("Query was cancelled because planning time exceeded %d seconds", cancelFlag.getTimeoutInSecs()), - null, plannerSettings, phase, logger); + null, plannerSettings, phase, UserException.AttemptCompletionState.PLANNING_TIMEOUT, logger); } if (executionControls != null) { @@ -115,7 +115,7 @@ public void checkCancel() { super.checkCancel(); } catch (CalciteException e) { if (plannerSettings.isCancelledByHeapMonitor()) { - ExceptionUtils.throwUserException(plannerSettings.getCancelReason(), e, plannerSettings, phase, logger); + ExceptionUtils.throwUserException(plannerSettings.getCancelReason(), e, plannerSettings, phase, UserException.AttemptCompletionState.HEAP_MONITOR_C, logger); } else { ExceptionUtils.throwUserCancellationException(plannerSettings); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioVolcanoPlanner.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioVolcanoPlanner.java index 96695123b9..817cc89345 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioVolcanoPlanner.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/DremioVolcanoPlanner.java @@ -65,7 +65,7 @@ private DremioVolcanoPlanner(RelOptCostFactory costFactory, Context context, Sub this.phase = null; this.maxNodesListener = new MaxNodesListener(plannerSettings.getMaxNodesPerPlan()); this.matchCountListener = new MatchCountListener((int) plannerSettings.getOptions().getOption(PlannerSettings.HEP_PLANNER_MATCH_LIMIT), - plannerSettings.getOptions().getOption(PlannerSettings.VERBOSE_RULE_MATCH_LISTENER)); + 
plannerSettings.getOptions().getOption(PlannerSettings.VERBOSE_PROFILE)); // A hacky way to add listeners to first multicast listener and register that listener to the Volcano planner. // The Volcano planner currently only supports a single listener. Need to update that to use the multi class // listener from its super class AbstractRelOptPlanner. @@ -148,7 +148,7 @@ public void checkCancel() { if (cancelFlag.isCancelRequested()) { ExceptionUtils.throwUserException(String.format("Query was cancelled because planning time exceeded %d seconds", cancelFlag.getTimeoutInSecs()), - null, plannerSettings, phase, logger); + null, plannerSettings, phase, UserException.AttemptCompletionState.PLANNING_TIMEOUT, logger); } if (executionControls != null) { @@ -159,7 +159,7 @@ public void checkCancel() { super.checkCancel(); } catch (CalciteException e) { if (plannerSettings.isCancelledByHeapMonitor()) { - ExceptionUtils.throwUserException(plannerSettings.getCancelReason(), e, plannerSettings, phase, logger); + ExceptionUtils.throwUserException(plannerSettings.getCancelReason(), e, plannerSettings, phase, UserException.AttemptCompletionState.HEAP_MONITOR_C, logger); } else { ExceptionUtils.throwUserCancellationException(plannerSettings); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/ExceptionUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/ExceptionUtils.java index 7f2b2e6f2c..5cb1c273e4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/ExceptionUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/ExceptionUtils.java @@ -31,6 +31,7 @@ public static void throwUserException(String message, Throwable t, PlannerSettings plannerSettings, PlannerPhase phase, + UserException.AttemptCompletionState attemptCompletionState, Logger logger) { UserException.Builder builder; if (t != null) { @@ -39,6 +40,7 @@ public static void throwUserException(String message, builder = UserException.planError(); } builder = builder.message(message); + builder = builder.attemptCompletionState(attemptCompletionState); if (phase != null) { builder = builder.addContext("Planner Phase", phase.description); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/MatchCountListener.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/MatchCountListener.java index 9ceda7e301..fa0f9e8615 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/MatchCountListener.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/MatchCountListener.java @@ -128,6 +128,10 @@ public int getMatchLimit() { return matchLimit; } + public Map getRuleToTotalTime() { + return ruleToTotalTime; + } + public void reset() { attemptCount = 0; matchCount = 0; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizeOutputSchema.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizeOutputSchema.java index fc7dbc55d6..962edca676 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizeOutputSchema.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizeOutputSchema.java @@ -28,11 +28,20 @@ private OptimizeOutputSchema() { } public static final String REWRITTEN_DATA_FILE_COUNT = "rewritten_data_files_count"; + public static final String REWRITTEN_DELETE_FILE_COUNT = "rewritten_delete_files_count"; public static final String NEW_DATA_FILES_COUNT = "new_data_files_count"; + public static final String OPTIMIZE_OUTPUT_SUMMARY = "summary"; + + public static RelDataType getRelDataType(RelDataTypeFactory typeFactory, boolean 
onlyOptimizeManifests) { + if (onlyOptimizeManifests) { + return typeFactory.builder() + .add(OptimizeOutputSchema.OPTIMIZE_OUTPUT_SUMMARY, typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR), true)) + .build(); + } - public static RelDataType getRelDataType(RelDataTypeFactory typeFactory) { return typeFactory.builder() .add(OptimizeOutputSchema.REWRITTEN_DATA_FILE_COUNT, typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BIGINT), true)) + .add(OptimizeOutputSchema.REWRITTEN_DELETE_FILE_COUNT, typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BIGINT), true)) .add(OptimizeOutputSchema.NEW_DATA_FILES_COUNT, typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BIGINT), true)) .build(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizePlanGenerator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizePlanGenerator.java index 83c909b361..dceee5f238 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizePlanGenerator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/OptimizePlanGenerator.java @@ -16,11 +16,18 @@ package com.dremio.exec.planner; import static com.dremio.exec.planner.OptimizeOutputSchema.NEW_DATA_FILES_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.OPTIMIZE_OUTPUT_SUMMARY; import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DATA_FILE_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DELETE_FILE_COUNT; import static com.dremio.exec.store.RecordWriter.OPERATION_TYPE_COLUMN; import static com.dremio.exec.store.RecordWriter.RECORDS_COLUMN; import static com.dremio.exec.store.SystemSchemas.DATAFILE_PATH; +import static com.dremio.exec.store.SystemSchemas.DELETE_FILE_PATH; +import static com.dremio.exec.store.SystemSchemas.FILE_SIZE; import static com.dremio.exec.store.SystemSchemas.ICEBERG_METADATA; +import static com.dremio.exec.store.SystemSchemas.IMPLICIT_SEQUENCE_NUMBER; +import static com.dremio.exec.store.SystemSchemas.PARTITION_SPEC_ID; +import static com.dremio.exec.store.SystemSchemas.POS; import static com.dremio.exec.store.iceberg.IcebergUtils.getCurrentPartitionSpec; import static org.apache.calcite.sql.fun.SqlStdOperatorTable.CASE; import static org.apache.calcite.sql.fun.SqlStdOperatorTable.EQUALS; @@ -30,7 +37,9 @@ import java.math.BigDecimal; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.Function; import org.apache.calcite.plan.RelOptCluster; @@ -47,43 +56,96 @@ import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; +import org.apache.calcite.rex.RexVisitorImpl; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; +import org.apache.iceberg.ManifestContent; +import org.apache.iceberg.PartitionSpec; +import com.dremio.common.JSONOptions; +import com.dremio.common.exceptions.UserException; import com.dremio.exec.ops.OptimizerRulesContext; import com.dremio.exec.physical.config.ImmutableManifestScanFilters; -import com.dremio.exec.physical.config.ManifestScanFilters; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.physical.config.TableFunctionContext; import 
com.dremio.exec.planner.common.MoreRelOptUtil; import com.dremio.exec.planner.logical.CreateTableEntry; +import com.dremio.exec.planner.logical.partition.PruneFilterCondition; +import com.dremio.exec.planner.physical.DistributionTrait; +import com.dremio.exec.planner.physical.FilterPrel; +import com.dremio.exec.planner.physical.HashAggPrel; import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.planner.physical.ProjectPrel; import com.dremio.exec.planner.physical.StreamAggPrel; +import com.dremio.exec.planner.physical.TableFunctionPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; +import com.dremio.exec.planner.physical.UnionAllPrel; +import com.dremio.exec.planner.physical.ValuesPrel; +import com.dremio.exec.planner.sql.CalciteArrowHelper; import com.dremio.exec.planner.sql.handlers.query.OptimizeOptions; +import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.OperationType; +import com.dremio.exec.store.RecordWriter; import com.dremio.exec.store.TableMetadata; import com.dremio.exec.store.iceberg.IcebergScanPlanBuilder; +import com.dremio.exec.store.iceberg.OptimizeManifestsTableFunctionContext; import com.dremio.exec.store.iceberg.model.ImmutableManifestScanOptions; import com.dremio.exec.store.iceberg.model.ManifestScanOptions; import com.dremio.exec.util.ColumnUtils; import com.dremio.exec.util.LongRange; +import com.dremio.service.namespace.dataset.proto.ScanStats; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.TextNode; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; /*** * Expand plans for OPTIMIZE TABLE */ -public class OptimizePlanGenerator extends TableManagementPlanGenerator { +public class +OptimizePlanGenerator extends TableManagementPlanGenerator { + private static ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final List deleteFilesMetadataInputCols = ImmutableList.of(ColumnUtils.ROW_COUNT_COLUMN_NAME, ColumnUtils.FILE_PATH_COLUMN_NAME, ICEBERG_METADATA);; private final IcebergScanPlanBuilder planBuilder; - private final Long minInputFiles; + private final OptimizeOptions optimizeOptions; + private final Integer icebergCurrentPartitionSpecId; - public OptimizePlanGenerator(RelOptTable table, RelOptCluster cluster, RelTraitSet traitSet, RelNode input, - TableMetadata tableMetadata, CreateTableEntry createTableEntry, OptimizerRulesContext context, OptimizeOptions optimizeOptions) { + public OptimizePlanGenerator(RelOptTable table, + RelOptCluster cluster, + RelTraitSet traitSet, + RelNode input, + TableMetadata tableMetadata, + CreateTableEntry createTableEntry, + OptimizerRulesContext context, + OptimizeOptions optimizeOptions, + PruneFilterCondition partitionFilter) { super(table, cluster, traitSet, input, tableMetadata, createTableEntry, context); - ManifestScanFilters manifestScanFilters = new ImmutableManifestScanFilters.Builder() - .setSkipDataFileSizeRange(new LongRange(optimizeOptions.getMinFileSizeBytes(), optimizeOptions.getMaxFileSizeBytes())) - .setMinPartitionSpecId(getCurrentPartitionSpec(tableMetadata.getDatasetConfig().getPhysicalDataset()).specId()).build(); + PartitionSpec currentPartitionSpec = getCurrentPartitionSpec(tableMetadata.getDatasetConfig().getPhysicalDataset()); + validatePruneCondition(partitionFilter); + if (!isPartitionExpressionRequired(partitionFilter, currentPartitionSpec)) { + partitionFilter = new 
PruneFilterCondition(partitionFilter.getPartitionRange(), null, null); + } + this.icebergCurrentPartitionSpecId = currentPartitionSpec != null ? currentPartitionSpec.specId() : 0; + int minSpecId = icebergCurrentPartitionSpecId; + /* + * In case of filter, it should not use all the data files for compaction. + * It filtered out and applies the target file size range. + * If filter is not there, it compacts all the old data files irrespective of the target file size. + * */ + if (partitionFilter != null && (partitionFilter.getPartitionRange() != null || partitionFilter.getPartitionExpression() != null)) { + minSpecId = 0; + } + ScanStats deleteStats = tableMetadata.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getDeleteManifestStats(); + ImmutableManifestScanFilters.Builder manifestScanFiltersBuilder = + (deleteStats != null && deleteStats.getRecordCount() > 0) ? new ImmutableManifestScanFilters.Builder() + : new ImmutableManifestScanFilters.Builder() + .setSkipDataFileSizeRange(new LongRange(optimizeOptions.getMinFileSizeBytes(), optimizeOptions.getMaxFileSizeBytes())) + .setMinPartitionSpecId(minSpecId); this.planBuilder = new IcebergScanPlanBuilder ( cluster, traitSet, @@ -91,9 +153,49 @@ public OptimizePlanGenerator(RelOptTable table, RelOptCluster cluster, RelTraitS tableMetadata, null, context, - manifestScanFilters + manifestScanFiltersBuilder.build(), + partitionFilter ); - this.minInputFiles = optimizeOptions.getMinInputFiles(); + this.optimizeOptions = optimizeOptions; + } + + /** + * Optimize is only supported on partition columns. + * It validates if pruneFilterCondition contains any non-partition columns and throws userException. + * else it returns a list of partition columns from pruneFilterCondition. + */ + private void validatePruneCondition(PruneFilterCondition pruneFilterCondition) { + if (pruneFilterCondition != null && pruneFilterCondition.getNonPartitionRange() != null) { + pruneFilterCondition.getNonPartitionRange().accept(new RexVisitorImpl(true) { + @Override + public Void visitInputRef(RexInputRef inputRef) { + throw UserException.unsupportedError().message(String.format("OPTIMIZE command is only supported on the partition columns - %s", + tableMetadata.getReadDefinition().getPartitionColumnsList())).buildSilently(); + } + }); + } + } + + /** + * Use all the applicable data files in the case filter is on transformed partition expression. 
+ */ + private Boolean isPartitionExpressionRequired(PruneFilterCondition pruneFilterCondition, PartitionSpec partitionSpec) { + Set expressionSourceIds = new HashSet<>(); + if (pruneFilterCondition != null && pruneFilterCondition.getPartitionExpression() != null) { + pruneFilterCondition.getPartitionExpression().accept(new RexVisitorImpl(true) { + @Override + public Void visitInputRef(RexInputRef inputRef) { + expressionSourceIds.add(inputRef.getIndex()+1); + return null; + } + }); + } + for (Integer id: expressionSourceIds) { + if (!partitionSpec.identitySourceIds().contains(id)) { + return false; + } + } + return true; } /* @@ -121,14 +223,117 @@ public OptimizePlanGenerator(RelOptTable table, RelOptCluster cluster, RelTraitS │ IcebergManifestListPrel │ │IcebergManifestListPrel │ └────────────────────────────────┘ └────────────────────────┘ * */ + @Override public Prel getPlan() { try { - return getOutputSummaryPlan(getDataWriterPlan(planBuilder.build(), deleteDataFilePlan())); + // Optimize manifests only + if (optimizeOptions.isOptimizeManifestsOnly()) { + return getOptimizeManifestsOnlyPlan(); + } + + Prel rewritePlan = planBuilder.hasDeleteFiles() ? deleteAwareOptimizePlan() + : getDataWriterPlan(planBuilder.build(), deleteDataFilePlan()); + if (optimizeOptions.isOptimizeManifestFiles()) { // Optimize data files as well as manifests + rewritePlan = getOptimizeManifestTableFunctionPrel(rewritePlan, RecordWriter.SCHEMA); + } + return getOutputSummaryPlan(rewritePlan); } catch (InvalidRelException e) { throw new RuntimeException(e); } } + /* + * Plan for OPTIMIZE TABLE operation when table has positional delete files. + * + * The left side of the plan is used to mark which files need to be rewritten. It has 2 marked boxes for reuse: + * Section *A* + * Left Branch of section *A* is used to scan DATA manifests and filter these data file objects based on input from + * Right Branch which scans DELETE manifests and reads positional delete files. This filtering of data files is done + * based on the following conditions: + * - File size not in ideal range + * - Partition Spec not current + * - Data file has Delete File attached to it + * + * Section *B* + * Used to read DELETE manifests and pass file objects to DELETED_FILES_METADATA table function + * + * The right-most branch of the plan is used to write new data files - It takes as input the plan from boxes A and B + * to mark the input data that needs to be rewritten into ideally sized files. 
+ * + ┌──────────────────────┐ + │IcebergWriterCommitter│ + └─────────▲────────────┘ + ┌─────────┴────────────┐ + │ Union │ + └─────────▲────────────┘ + ┌───────────────────────────────────────────────┴──────────────────────────────────────┐ + │ │ + ┌─────────┴────────────┐ │ + │ Union │ │ + └─────────▲────────────┘ │ + ┌────────────────────────┴───────────────────────────────────────────────────────┐ │ + ┌────────────┴───────────────────┐ ┌────────────┴──────────────┐ ┌──────────┴───────────┐ + │ TableFunctionPrel │ │ TableFunctionPrel │ │ WriterPrel │ + │ (DELETED_FILES_METADATA) │ │ (DELETED_FILES_METADATA) │ └──────────▲───────────┘ + └────────────▲───────────────────┘ └────────────▲──────────────┘ │ + ┌──────────────│─────────────────────────────────────────────────────────────┐ ┌───────────────│───────────────┐ ┌──────────┴───────────┐ + │ ┌──────────┴─────────────┐ *A* │ │ │ *B*│ │ TableFunctionPrel │ + │ │ Filter │ │ │ │ │ │ (DATA_FILE_SCAN) │ + │ └──────────▲─────────────┘ │ │ │ │ └──────────▲───────────┘ + │ ┌──────────┴─────────────┐ │ │ │ │ │ + │ │ HashJoin │──────────────────────────────┐ │ │ │ │ ┌──────────┴───────────┐ + │ └──────────▲─────────────┘ │ │ │ │ │ │ TableFunctionPrel │ + │ │ ┌──────────┴─────────────┐ │ │ │ │ │(IcebergDeleteFileAgg)│ + │ │ │ HashAggPrel │ │ │ │ │ └──────────▲───────────┘ + │ │ └──────────▲─────────────┘ │ │ │ │ │ + │ │ ┌──────────┴─────────────┐ │ │ │ │ ┌──────────┴───────────┐ + │ │ │ TableFunctionPrel │ │ │ │ │ │ HashJoin │ + │ │ │ (DATA_FILE_SCAN) │ │ │ │ │ └──────────▲───────────┘ + │ │ └──────────▲─────────────┘ │ │ │ │ │ + │┌─────────────┴──────────────────┐ ┌──────────┴─────────────┐ │ │ ┌──────────┴─────────────┐ │ │ + ││ IcebergManifestScanPrel │ │ IcebergManifestScanPrel│ │ │ │ IcebergManifestScanPrel│ │ ┌─────┴──────┐ + ││ DATA │ │ DELETE │ │ │ │ DELETE │ │ ┌─────┴────┐ ┌────┴─────┐ + │└──────────────▲─────────────────┘ └───────────▲────────────┘ │ │ └───────────▲────────────┘ │ │ │ │ │ + │┌──────────────┴─────────────────┐ ┌───────────┴────────────┐ │ │ ┌───────────┴────────────┐ │ │ *A* │ │ *B* │ + ││ IcebergManifestListPrel │ │IcebergManifestListPrel │ │ │ │IcebergManifestListPrel │ │ │ │ │ │ + │└────────────────────────────────┘ └────────────────────────┘ │ │ └────────────────────────┘ │ └──────────┘ └──────────┘ + └────────────────────────────────────────────────────────────────────────────┘ └───────────────────────────────┘ + */ + private Prel deleteAwareOptimizePlan() throws InvalidRelException { + return getDataWriterPlan(planBuilder.buildDataScanWithSplitGen( + planBuilder.buildDataAndDeleteFileJoinAndAggregate( + buildRemoveSideDataFilePlan(), buildRemoveSideDeleteFilePlan())), + manifestWriterPlan -> { + try { + return getMetadataWriterPlan(deleteDataFilePlan(), removeDeleteFilePlan(), manifestWriterPlan); + } catch (InvalidRelException e) { + throw new RuntimeException(e); + } + }); + } + + private Prel getOptimizeManifestsOnlyPlan() { + RelTraitSet manifestTraitSet = traitSet.plus(DistributionTrait.SINGLETON).plus(Prel.PHYSICAL); + + // OptimizeManifestTableFunction forwards the input to the next operator. + // Hence, the values provided here will be supplied to the output in happy case. 
+ ObjectNode successMessage = OBJECT_MAPPER.createObjectNode(); + successMessage.set(OPTIMIZE_OUTPUT_SUMMARY, new TextNode("Optimize table successful")); + RelDataType rowType = OptimizeOutputSchema.getRelDataType(cluster.getTypeFactory(), true); + ValuesPrel valuesPrel = new ValuesPrel(cluster, manifestTraitSet, rowType, new JSONOptions(successMessage), 1d); + + return getOptimizeManifestTableFunctionPrel(valuesPrel, CalciteArrowHelper.fromCalciteRowTypeJson(rowType)); + } + + private Prel getOptimizeManifestTableFunctionPrel(Prel input, BatchSchema outputSchema) { + TableFunctionContext functionContext = new OptimizeManifestsTableFunctionContext(tableMetadata, outputSchema, + createTableEntry.getIcebergTableProps()); + + TableFunctionConfig functionConfig = new TableFunctionConfig( + TableFunctionConfig.FunctionType.ICEBERG_OPTIMIZE_MANIFESTS, true, functionContext); + return new TableFunctionPrel(cluster, traitSet, table, input, tableMetadata, functionConfig, input.getRowType()); + } + /** * Scan the manifests to return the deleted data files. * @@ -141,27 +346,160 @@ public Prel getPlan() { * IcebergManifestListPrel */ private Prel deleteDataFilePlan() { - ManifestScanOptions manifestScanOptions = new ImmutableManifestScanOptions.Builder() + ManifestScanOptions manifestScanOptions = new ImmutableManifestScanOptions.Builder() .setIncludesSplitGen(false) .setIncludesIcebergMetadata(true) .build(); - RelNode manifestScan = planBuilder.buildManifestRel(manifestScanOptions); - RexBuilder rexBuilder = cluster.getRexBuilder(); - final List projectFields = ImmutableList.of(ColumnUtils.ROW_COUNT_COLUMN_NAME,ColumnUtils.FILE_PATH_COLUMN_NAME, ICEBERG_METADATA); + RelNode output = planBuilder.hasDeleteFiles() ? buildRemoveSideDataFilePlan() + : planBuilder.buildManifestRel(manifestScanOptions); + RexBuilder rexBuilder = cluster.getRexBuilder(); - Pair datafilePathCol = MoreRelOptUtil.findFieldWithIndex(manifestScan.getRowType().getFieldList(), DATAFILE_PATH); - Pair icebergMetadataCol = MoreRelOptUtil.findFieldWithIndex(manifestScan.getRowType().getFieldList(), ICEBERG_METADATA); - Preconditions.checkNotNull(datafilePathCol, "ManifestScan should always have datafilePath with rowType."); - Preconditions.checkNotNull(icebergMetadataCol, "ManifestScan should always have icebergMetadata with rowType."); + Pair datafilePathCol = MoreRelOptUtil.findFieldWithIndex(output.getRowType().getFieldList(), DATAFILE_PATH); + Pair icebergMetadataCol = MoreRelOptUtil.findFieldWithIndex(output.getRowType().getFieldList(), ICEBERG_METADATA); final List projectExpressions = ImmutableList.of(rexBuilder.makeBigintLiteral(BigDecimal.ONE), rexBuilder.makeInputRef(datafilePathCol.right.getType(), datafilePathCol.left), rexBuilder.makeInputRef(icebergMetadataCol.right.getType(), icebergMetadataCol.left)); - RelDataType newRowType = RexUtil.createStructType(rexBuilder.getTypeFactory(), projectExpressions, projectFields, SqlValidatorUtil.F_SUGGESTER); + RelDataType newRowType = RexUtil.createStructType(rexBuilder.getTypeFactory(), projectExpressions, deleteFilesMetadataInputCols, SqlValidatorUtil.F_SUGGESTER); + + return ProjectPrel.create(output.getCluster(), output.getTraitSet(), output, projectExpressions, newRowType); + } + + /** + * Scan the manifests to return the purged delete files. + * Plan same as {@link #deleteDataFilePlan} with manifest scan operator reading DELETE manifests instead of DATA. 
+ */ + private RelNode removeDeleteFilePlan() { + RexBuilder rexBuilder = cluster.getRexBuilder(); + RelNode output = buildRemoveSideDeleteFilePlan(); + Pair outputFilePathCol = MoreRelOptUtil.findFieldWithIndex(output.getRowType().getFieldList(), DATAFILE_PATH); + Pair outputIcebergMetadataCol = MoreRelOptUtil.findFieldWithIndex(output.getRowType().getFieldList(), ICEBERG_METADATA); + + final List outputExpressions = ImmutableList.of(rexBuilder.makeBigintLiteral(BigDecimal.ONE), + rexBuilder.makeInputRef(outputFilePathCol.right.getType(), outputFilePathCol.left), + rexBuilder.makeInputRef(outputIcebergMetadataCol.right.getType(), outputIcebergMetadataCol.left)); + + RelDataType outputRowType = RexUtil.createStructType(rexBuilder.getTypeFactory(), outputExpressions, deleteFilesMetadataInputCols, SqlValidatorUtil.F_SUGGESTER); - return ProjectPrel.create(manifestScan.getCluster(), manifestScan.getTraitSet(), manifestScan, projectExpressions, newRowType); + return ProjectPrel.create(output.getCluster(), output.getTraitSet(), output, outputExpressions, outputRowType); + } + + private Prel getMetadataWriterPlan(RelNode dataFileAggrPlan, RelNode deleteFileAggrPlan, RelNode manifestWriterPlan) throws InvalidRelException { + // Insert a table function that'll pass the path through and set the OperationType + TableFunctionPrel deletedDataFilesTableFunctionPrel = getDeleteFilesMetadataTableFunctionPrel(dataFileAggrPlan, + getProjectedColumns(), TableFunctionUtil.getDeletedFilesMetadataTableFunctionContext( + OperationType.DELETE_DATAFILE, RecordWriter.SCHEMA, getProjectedColumns(), true)); + TableFunctionPrel deletedDeleteFilesTableFunctionPrel = getDeleteFilesMetadataTableFunctionPrel(deleteFileAggrPlan, + getProjectedColumns(), TableFunctionUtil.getDeletedFilesMetadataTableFunctionContext( + OperationType.DELETE_DELETEFILE, RecordWriter.SCHEMA, getProjectedColumns(), true)); + + RelNode deletedDataAndDeleteFilesTableFunction = new UnionAllPrel(cluster, + deleteFileAggrPlan.getTraitSet(), + ImmutableList.of(deletedDataFilesTableFunctionPrel, deletedDeleteFilesTableFunctionPrel), + true); + + final RelTraitSet traits = traitSet.plus(DistributionTrait.SINGLETON).plus(Prel.PHYSICAL); + + // Union the updating of the deleted data's metadata with the rest + return getUnionPrel(traits, manifestWriterPlan, deletedDataAndDeleteFilesTableFunction); + } + + /** + * @param deleteFileScan DataFileScan table function Prel created by scanning positional delete files + * @return HashAggregate of input on File path with COUNT aggregation on delete positions + */ + public static Prel aggregateDeleteFiles(RelNode deleteFileScan) { + RelDataTypeField filePathField = Preconditions.checkNotNull(deleteFileScan.getRowType() + .getField(DELETE_FILE_PATH, false, false)); + RelDataTypeField implicitSequenceNumberField = Preconditions.checkNotNull(deleteFileScan.getRowType() + .getField(IMPLICIT_SEQUENCE_NUMBER, false, false)); + + AggregateCall aggPosCount = AggregateCall.create( + SqlStdOperatorTable.COUNT, + false, + false, + Collections.emptyList(), + -1, + RelCollations.EMPTY, + 1, + deleteFileScan, + deleteFileScan.getCluster().getTypeFactory().createSqlType(SqlTypeName.BIGINT), + POS + ); + AggregateCall aggSeqNumberMax = AggregateCall.create( + SqlStdOperatorTable.MAX, + false, + ImmutableList.of(implicitSequenceNumberField.getIndex()), + -1, + implicitSequenceNumberField.getType(), + IMPLICIT_SEQUENCE_NUMBER + ); + + ImmutableBitSet groupSet = ImmutableBitSet.of(filePathField.getIndex()); + try { + return 
HashAggPrel.create( + deleteFileScan.getCluster(), + deleteFileScan.getTraitSet(), + deleteFileScan, + groupSet, + ImmutableList.of(groupSet), + ImmutableList.of(aggPosCount, aggSeqNumberMax), + null + ); + } catch (InvalidRelException e) { + throw new RuntimeException("Failed to create HashAggPrel during delete file scan.", e); + } + } + + /** + * @param input Manifest Scan (DATA), joined with Delete file reads - Files that have deletes linked to them + * will have a non-null POS column. + * @return filter input for data files that need to be rewritten by applying the following conditions + *
+   * <ul>
+   *   <li>File size not in ideal range</li>
+   *   <li>Partition spec not matching current partition</li>
+   *   <li>Has delete file(s) attached</li>
+   * </ul>
        + */ + private RelNode subOptimalDataFilesFilter(RelNode input) { + RexBuilder rexBuilder = cluster.getRexBuilder(); + + Pair dataFileSizeCol = MoreRelOptUtil.findFieldWithIndex(input.getRowType().getFieldList(), FILE_SIZE); + Pair dataPartitionSpecIdCol = MoreRelOptUtil.findFieldWithIndex(input.getRowType().getFieldList(), PARTITION_SPEC_ID); + Pair deleteDataFilePosCol = MoreRelOptUtil.findFieldWithIndex(input.getRowType().getFieldList(), POS); + + RexNode posCondition = rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, rexBuilder.makeInputRef(deleteDataFilePosCol.right.getType(), deleteDataFilePosCol.left)); + RexNode partitionSpecIdCondition = rexBuilder.makeCall(SqlStdOperatorTable.NOT_EQUALS, + rexBuilder.makeInputRef(dataPartitionSpecIdCol.right.getType(), dataPartitionSpecIdCol.left), + rexBuilder.makeLiteral(icebergCurrentPartitionSpecId, dataPartitionSpecIdCol.right.getType())); + RexNode minFileSizeCondition = rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, + rexBuilder.makeInputRef(dataFileSizeCol.right.getType(), dataFileSizeCol.left), + rexBuilder.makeLiteral(optimizeOptions.getMinFileSizeBytes(), dataFileSizeCol.right.getType())); + RexNode maxFileSizeCondition = rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN, + rexBuilder.makeInputRef(dataFileSizeCol.right.getType(), dataFileSizeCol.left), + rexBuilder.makeLiteral(optimizeOptions.getMaxFileSizeBytes(), dataFileSizeCol.right.getType())); + + RexNode filterCondition = rexBuilder.makeCall(SqlStdOperatorTable.OR, ImmutableList.of(posCondition, partitionSpecIdCondition, + minFileSizeCondition, maxFileSizeCondition)); + return new FilterPrel(cluster, input.getTraitSet(), input, RexUtil.flatten(rexBuilder, filterCondition)); + } + + /** + * [*A*] from {@link #deleteAwareOptimizePlan} + */ + private RelNode buildRemoveSideDataFilePlan(){ + return subOptimalDataFilesFilter(planBuilder.buildDataManifestScanWithDeleteJoin( + aggregateDeleteFiles(planBuilder.buildDeleteFileScan(context)))); + } + + /** + * [*B*] from {@link #deleteAwareOptimizePlan} + */ + private RelNode buildRemoveSideDeleteFilePlan(){ + return planBuilder.buildManifestRel(new ImmutableManifestScanOptions.Builder().setIncludesSplitGen(false) + .setIncludesIcebergMetadata(true).setManifestContent(ManifestContent.DELETES).build(), false); } /** @@ -183,20 +521,26 @@ private Prel getOutputSummaryPlan(Prel writerPrel) throws InvalidRelException { // Projected conditions RexNode deletedFileOp = rexBuilder.makeCall(EQUALS, opTypeIn, makeLiteral.apply(OperationType.DELETE_DATAFILE.value)); + RexNode removedDeleteFileOp = rexBuilder.makeCall(EQUALS, opTypeIn, makeLiteral.apply(OperationType.DELETE_DELETEFILE.value)); RexNode newFileOp = rexBuilder.makeCall(EQUALS, opTypeIn, makeLiteral.apply(OperationType.ADD_DATAFILE.value)); RexNode flagRewrittenFile = rexBuilder.makeCall(CASE, deletedFileOp, recordsIn, rexBuilder.makeZeroLiteral(nullableBigInt)); + RexNode flagRewrittenDeleteFile = rexBuilder.makeCall(CASE, removedDeleteFileOp, recordsIn, rexBuilder.makeZeroLiteral(nullableBigInt)); RexNode flagNewFile = rexBuilder.makeCall(CASE, newFileOp, recordsIn, rexBuilder.makeZeroLiteral(nullableBigInt)); // Projected new/written data files - List projectExpression = ImmutableList.of(flagRewrittenFile, flagNewFile); - RelDataType projectedRowType = typeFactory.builder().add(REWRITTEN_DATA_FILE_COUNT, nullableBigInt).add(NEW_DATA_FILES_COUNT, nullableBigInt).build(); + List projectExpression = ImmutableList.of(flagRewrittenFile, flagRewrittenDeleteFile, 
flagNewFile); + RelDataType projectedRowType = typeFactory.builder() + .add(REWRITTEN_DATA_FILE_COUNT, nullableBigInt) + .add(REWRITTEN_DELETE_FILE_COUNT, nullableBigInt) + .add(NEW_DATA_FILES_COUNT, nullableBigInt).build(); ProjectPrel project = ProjectPrel.create(cluster, traitSet, writerPrel, projectExpression, projectedRowType); // Aggregated summary AggregateCall totalRewrittenFiles = sum(project, projectedRowType, REWRITTEN_DATA_FILE_COUNT); + AggregateCall totalRewrittenDeleteFiles = sum(project, projectedRowType, REWRITTEN_DELETE_FILE_COUNT); AggregateCall totalNewFiles = sum(project, projectedRowType, NEW_DATA_FILES_COUNT); StreamAggPrel aggregatedCounts = StreamAggPrel.create(cluster, traitSet, project, ImmutableBitSet.of(), - Collections.EMPTY_LIST, ImmutableList.of(totalRewrittenFiles, totalNewFiles), null); + Collections.EMPTY_LIST, ImmutableList.of(totalRewrittenFiles, totalRewrittenDeleteFiles, totalNewFiles), null); return aggregatedCounts; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCache.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCache.java index f6120a5603..9904e34fa6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCache.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCache.java @@ -21,9 +21,13 @@ import java.util.Optional; import java.util.stream.Collectors; +import org.apache.calcite.plan.RelOptUtil; +import org.apache.calcite.rel.RelNode; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.dialect.CalciteSqlDialect; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.catalog.ManagedStoragePlugin; import com.dremio.exec.ops.QueryContext; @@ -90,7 +94,14 @@ public void createNewCachedPlan(Catalog catalog, String cachedKey, String sql, } } - public static boolean supportPlanCache(PlanCache planCache, SqlHandlerConfig config, SqlNode sqlNode) { + public static boolean supportPlanCache(PlanCache planCache, SqlHandlerConfig config, SqlNode sqlNode, Catalog catalog) { + for (DremioTable table : catalog.getAllRequestedTables()) { + if (CatalogUtil.requestedPluginSupportsVersionedTables(table.getPath(), catalog)) { + // Versioned tables don't have a mtime - they have snapshot ids. Since we don't have a way to invalidate + // cache entries containing versioned datasets, don't allow these plans to enter the cache. 
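The comment above states the key constraint: the plan cache invalidates entries by a dataset's modification time, while versioned tables advance by snapshot id instead. A toy illustration of the mismatch, with all names hypothetical:

```java
final class PlanCacheInvalidationSketch {
  // Works for ordinary datasets: any metadata refresh bumps the modification time,
  // so a plan cached before the refresh is detected as stale.
  static boolean isStale(long planCachedAtMillis, long tableLastModifiedMillis) {
    return tableLastModifiedMillis > planCachedAtMillis;
  }
  // A commit to a versioned table produces a new snapshot id but no comparable
  // modification time, so isStale() would keep answering false and a stale plan
  // would be served; excluding such queries from the cache avoids that.
}
```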
+ return false; + } + } if (org.apache.commons.lang3.StringUtils.containsIgnoreCase(sqlNode.toString(), "external_query")) { return false; } @@ -98,13 +109,19 @@ public static boolean supportPlanCache(PlanCache planCache, SqlHandlerConfig con && planCache != null && config.getContext().getPlannerSettings().isPlanCacheEnabled(); } - public static String generateCacheKey(String sql, QueryContext context) { + public static String generateCacheKey(SqlNode sqlNode, RelNode relNode, QueryContext context) { Hasher hasher = Hashing.sha256().newHasher(); - hasher.putString(sql, UTF_8) + hasher + .putString(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql(), UTF_8) + .putString(RelOptUtil.toString(relNode), UTF_8) .putString(context.getWorkloadType().name(), UTF_8) .putString(context.getContextInformation().getCurrentDefaultSchema(), UTF_8); + if (context.getPlannerSettings().isPlanCacheEnableSecuredUserBasedCaching()){ + hasher.putString(context.getQueryUserName(), UTF_8); + } + context.getOptions().getNonDefaultOptions() .stream() // A sanity filter in case an option with default value is put into non-default options diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCaptureAttemptObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCaptureAttemptObserver.java index 6bfe0fe672..f1a906b275 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCaptureAttemptObserver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlanCaptureAttemptObserver.java @@ -101,6 +101,10 @@ public class PlanCaptureAttemptObserver extends AbstractAttemptObserver { private int numPlanCacheUses = 0; + private Integer numJoinsInUserQuery = null; + + private Integer numJoinsInFinalPrel = null; + private final CachedAccelDetails accelDetails = new CachedAccelDetails(); public PlanCaptureAttemptObserver(final boolean verbose, final boolean includeDatasetProfiles, @@ -331,6 +335,14 @@ public int getNumPlanCacheUses() { return numPlanCacheUses; } + public Integer getNumJoinsInUserQuery() { + return numJoinsInUserQuery; + } + + public Integer getNumJoinsInFinalPrel() { + return numJoinsInFinalPrel; + } + // Serializes and stores plans private void serializeAndStoreRel(RelNode converted) throws Exception{ PlannerSettings settings = PrelUtil.getSettings(converted.getCluster()); @@ -381,7 +393,8 @@ public void planConvertedScan(RelNode converted, long millisTaken) { } @Override - public void planRelTransform(final PlannerPhase phase, RelOptPlanner planner, final RelNode before, final RelNode after, final long millisTaken) { + public void planRelTransform(final PlannerPhase phase, RelOptPlanner planner, final RelNode before, + final RelNode after, final long millisTaken, final Map timeBreakdownPerRule) { final boolean noTransform = before == after; final String planAsString = toStringOrEmpty(after, noTransform || phase.forceVerbose()); final long millisTakenFinalize = (phase.useMaterializations) ? 
millisTaken - (findMaterializationMillis + normalizationMillis + substitutionMillis) : millisTaken; @@ -390,12 +403,14 @@ public void planRelTransform(final PlannerPhase phase, RelOptPlanner planner, fi .setPhaseName(PlannerPhase.PLAN_REL_TRANSFORM) .setDurationMillis(substitutionMillis) .setPlan("") + .putAllTimeBreakdownPerRule(timeBreakdownPerRule) .build()); } PlanPhaseProfile.Builder b = PlanPhaseProfile.newBuilder() .setPhaseName(phase.description) .setDurationMillis(millisTakenFinalize) + .putAllTimeBreakdownPerRule(timeBreakdownPerRule) .setPlan(planAsString); // dump state of volcano planner to troubleshoot costing issues (or long planning issues). @@ -549,6 +564,16 @@ public void updateReflectionsWithHints(ReflectionExplanationsAndQueryDistance re detailsPopulator.addReflectionHints(reflectionExplanationsAndQueryDistance); } + @Override + public void setNumJoinsInUserQuery(Integer joins) { + this.numJoinsInUserQuery = joins; + } + + @Override + public void setNumJoinsInFinalPrel(Integer joins) { + this.numJoinsInFinalPrel = joins; + } + public String getText() { return text; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlannerPhase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlannerPhase.java index 1f3a110d51..18f7178464 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/PlannerPhase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/PlannerPhase.java @@ -60,6 +60,7 @@ import org.apache.calcite.tools.RuleSets; import com.dremio.exec.catalog.udf.ScalarUserDefinedFunctionExpanderRule; +import com.dremio.exec.catalog.udf.TabularUserDefinedFunctionExpanderRule; import com.dremio.exec.expr.fn.hll.ConvertCountDistinctToHll; import com.dremio.exec.expr.fn.hll.RewriteNdvAsHll; import com.dremio.exec.ops.OptimizerRulesContext; @@ -99,6 +100,7 @@ import com.dremio.exec.planner.logical.ProjectInputRefPastFilterRule; import com.dremio.exec.planner.logical.ProjectRel; import com.dremio.exec.planner.logical.ProjectRule; +import com.dremio.exec.planner.logical.PushFilterPastFlattenrule; import com.dremio.exec.planner.logical.PushFilterPastProjectRule; import com.dremio.exec.planner.logical.PushJoinFilterIntoProjectRule; import com.dremio.exec.planner.logical.PushProjectForFlattenIntoScanRule; @@ -106,6 +108,7 @@ import com.dremio.exec.planner.logical.PushProjectIntoFilesystemScanRule; import com.dremio.exec.planner.logical.PushProjectIntoScanRule; import com.dremio.exec.planner.logical.PushProjectPastFlattenRule; +import com.dremio.exec.planner.logical.RegexpLikeToLikeRule; import com.dremio.exec.planner.logical.RemoveEmptyScansRule; import com.dremio.exec.planner.logical.RewriteProjectToFlattenRule; import com.dremio.exec.planner.logical.SampleRule; @@ -115,6 +118,7 @@ import com.dremio.exec.planner.logical.TableOptimizeRule; import com.dremio.exec.planner.logical.UnionAllRule; import com.dremio.exec.planner.logical.UnionRel; +import com.dremio.exec.planner.logical.VacuumTableRule; import com.dremio.exec.planner.logical.ValuesRule; import com.dremio.exec.planner.logical.WindowRule; import com.dremio.exec.planner.logical.rule.GroupSetToCrossJoinCaseStatement; @@ -152,7 +156,6 @@ import com.dremio.exec.planner.physical.rule.computation.NestedLoopJoinComputationExtractionRule; import com.dremio.exec.planner.rules.DremioRelRules; import com.dremio.exec.planner.sql.SqlConverter; -import com.dremio.exec.planner.sql.SqlValidatorAndToRelContext; import com.dremio.exec.planner.tablefunctions.ExternalQueryScanPrule; import 
com.dremio.exec.planner.tablefunctions.ExternalQueryScanRule; import com.dremio.exec.store.mfunctions.MFunctionQueryScanPrule; @@ -167,15 +170,16 @@ public enum PlannerPhase { public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter) { //TODO there is a bug in the HEP planner where we need sub queries expanded first return RuleSets.ofList( - ScalarUserDefinedFunctionExpanderRule.createFilterRule(() -> SqlValidatorAndToRelContext.builder(sqlConverter)), - ScalarUserDefinedFunctionExpanderRule.createProjectRule(() -> SqlValidatorAndToRelContext.builder(sqlConverter)), + new ScalarUserDefinedFunctionExpanderRule(sqlConverter), + new TabularUserDefinedFunctionExpanderRule(sqlConverter), DremioRelRules.JOIN_SUB_QUERY_TO_CORRELATE, CoreRules.FILTER_SUB_QUERY_TO_CORRELATE, CoreRules.PROJECT_SUB_QUERY_TO_CORRELATE, CALC_REDUCE_EXPRESSIONS_CALCITE_RULE, CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW, DremioRelRules.REDUCE_FUNCTIONS_FOR_GROUP_SETS, - GroupSetToCrossJoinCaseStatement.RULE + GroupSetToCrossJoinCaseStatement.RULE, + RewriteProjectToFlattenRule.INSTANCE ); } }, @@ -240,16 +244,7 @@ public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter PUSH_PROJECT_PAST_FILTER_LOGICAL_INSTANCE, PUSH_PROJECT_PAST_JOIN_RULE_WITH_EXPR_JOIN, MergeProjectRule.LOGICAL_INSTANCE - ); - } - }, - - EXPAND_OPERATORS("Expands Operators"){ - @Override - public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter){ - ImmutableList.Builder<RelOptRule> b = ImmutableList.builder(); - b.add(RewriteProjectToFlattenRule.INSTANCE); - return RuleSets.ofList(b.build()); + ); } }, @@ -277,6 +272,56 @@ public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter } }, + FILTER_CONSTANT_RESOLUTION_PUSHDOWN("Filter Constant Resolution Pushdown"){ + @Override + public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter) { + ImmutableList.Builder<RelOptRule> b = ImmutableList.builder(); + PlannerSettings ps = context.getPlannerSettings(); + b.add( + PushFilterPastProjectRule.CALCITE_NO_CHILD_CHECK, + JoinFilterCanonicalizationRule.INSTANCE, + FILTER_SET_OP_TRANSPOSE_CALCITE_RULE, + FILTER_AGGREGATE_TRANSPOSE_CALCITE_RULE, + FILTER_MERGE_CALCITE_RULE, + FilterWindowTransposeRule.INSTANCE, + LOGICAL_FILTER_CORRELATE_RULE + ); + + if (ps.isPushFilterPastFlattenEnabled()) { + b.add(PushFilterPastFlattenrule.INSTANCE); + } + + if (ps.isEnhancedFilterJoinPushdownEnabled()) { + b.add(EnhancedFilterJoinRule.WITH_FILTER); + b.add(EnhancedFilterJoinRule.NO_FILTER); + } + + if (ps.isTransitiveFilterPushdownEnabled()) { + // Add reduce expression rules to reduce any filters after applying the transitive rule.
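[Review sketch] Transitive filter pushdown derives new predicates, for example from a.x = b.y together with b.y = 5 it derives a.x = 5, and those derived filters can contain foldable expressions, which is why the reduce-expression rules are re-added in the settings-gated block that follows. A tiny plain-Java illustration of the propagation step itself; this is not Calcite rule code, and the column names are made up:

    import java.util.HashMap;
    import java.util.Map;

    class TransitiveFilterSketch {
      public static void main(String[] args) {
        Map<String, String> joinEqualities = new HashMap<>();
        joinEqualities.put("a.x", "b.y");          // join condition: a.x = b.y
        Map<String, Long> constantFilters = new HashMap<>();
        constantFilters.put("b.y", 5L);            // filter: b.y = 5
        // Transitive step: a column equal to a constant-bound column is also bound.
        joinEqualities.forEach((left, right) -> {
          if (constantFilters.containsKey(right)) {
            constantFilters.putIfAbsent(left, constantFilters.get(right));
          }
        });
        System.out.println(constantFilters);       // now also binds a.x to 5, pushable below the join
      }
    }
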
+ if (ps.options.getOption(PlannerSettings.REDUCE_ALGEBRAIC_EXPRESSIONS)) { + b.add(ReduceTrigFunctionsRule.INSTANCE); + } + + if (ps.isConstantFoldingEnabled()) { + if (ps.isTransitiveReduceProjectExpressionsEnabled()) { + b.add(PROJECT_REDUCE_EXPRESSIONS_CALCITE_RULE); + } + if (ps.isTransitiveReduceFilterExpressionsEnabled()) { + b.add(FILTER_REDUCE_EXPRESSIONS_CALCITE_RULE); + } + if (ps.isTransitiveReduceCalcExpressionsEnabled()) { + b.add(CALC_REDUCE_EXPRESSIONS_CALCITE_RULE); + } + } + } else { + b.add(FILTER_INTO_JOIN_CALCITE_RULE, + JOIN_CONDITION_PUSH_CALCITE_RULE, + JOIN_PUSH_EXPRESSIONS_RULE); + } + return RuleSets.ofList(b.build()); + } + }, + FILESYSTEM_PROJECT_PUSHDOWN("FileSystem Project Pushdown") { @Override public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter) { @@ -291,13 +336,9 @@ public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter @Override public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter) { ImmutableList.Builder b = ImmutableList.builder(); - PlannerSettings ps = context.getPlannerSettings(); ImmutableList commonRules = getPreLogicalCommonRules(context); b.addAll(commonRules); - if (ps.isEnhancedFilterJoinPushdownEnabled()) { - b.add(EnhancedFilterJoinRule.WITH_FILTER); - b.add(EnhancedFilterJoinRule.NO_FILTER); - } + b.add(PlannerPhase.PUSH_PROJECT_PAST_JOIN_CALCITE_RULE); return RuleSets.ofList(b.build()); } }, @@ -305,11 +346,7 @@ public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter PRE_LOGICAL_TRANSITIVE("Pre-Logical Transitive Filter Pushdown") { @Override public RuleSet getRules(OptimizerRulesContext context, SqlConverter sqlConverter) { - ImmutableList.Builder b = ImmutableList.builder(); - ImmutableList commonRules = getPreLogicalCommonRules(context); - b.addAll(commonRules); - b.add(PlannerPhase.PUSH_PROJECT_PAST_JOIN_CALCITE_RULE); - return RuleSets.ofList(b.build()); + return RuleSets.ofList(getPreLogicalCommonRules(context)); } }, @@ -840,31 +877,35 @@ static RuleSet getEnabledReduceExpressionsRules(OptimizerRulesContext optimizerR } } + if (ps.isDistinctAggWithGroupingSetsEnabled()) { + userConfigurableRules.add(CoreRules.AGGREGATE_EXPAND_DISTINCT_AGGREGATES); + } + userConfigurableRules.add(AggregateFilterToCaseRule.INSTANCE); userConfigurableRules.add(MedianRewriteRule.INSTANCE); userConfigurableRules.add(PercentileFunctionsRewriteRule.INSTANCE); + if (ps.isRegexpLikeToLikeEnabled()) { + userConfigurableRules.add(RegexpLikeToLikeRule.INSTANCE); + } + return RuleSets.ofList(userConfigurableRules.build()); } static ImmutableList getPreLogicalCommonRules(OptimizerRulesContext context) { ImmutableList.Builder b = ImmutableList.builder(); - PlannerSettings ps = context.getPlannerSettings(); b.add( DremioAggregateProjectPullUpConstantsRule.INSTANCE2_REMOVE_ALL, LogicalAggregateGroupKeyFixRule.RULE, ConvertCountDistinctToHll.INSTANCE, RewriteNdvAsHll.INSTANCE, + // Need to remove this rule as it has already been applied in the filter pushdown phase. + // However, while removing this rule, some acceleration tests are failing. 
DX-64115 PushFilterPastProjectRule.CALCITE_NO_CHILD_CHECK, - JoinFilterCanonicalizationRule.INSTANCE, - - FILTER_SET_OP_TRANSPOSE_CALCITE_RULE, - FILTER_AGGREGATE_TRANSPOSE_CALCITE_RULE, - FILTER_MERGE_CALCITE_RULE, CoreRules.INTERSECT_TO_DISTINCT, MinusToJoin.RULE, @@ -873,35 +914,9 @@ static ImmutableList getPreLogicalCommonRules(OptimizerRulesContext CoreRules.PROJECT_WINDOW_TRANSPOSE, CoreRules.PROJECT_SET_OP_TRANSPOSE, MergeProjectRule.CALCITE_INSTANCE, - RemoveEmptyScansRule.INSTANCE, - FilterWindowTransposeRule.INSTANCE + RemoveEmptyScansRule.INSTANCE ); - b.add(LOGICAL_FILTER_CORRELATE_RULE); - - if (ps.isTransitiveFilterPushdownEnabled()) { - // Add reduce expression rules to reduce any filters after applying transitive rule. - if (ps.options.getOption(PlannerSettings.REDUCE_ALGEBRAIC_EXPRESSIONS)) { - b.add(ReduceTrigFunctionsRule.INSTANCE); - } - - if (ps.isConstantFoldingEnabled()) { - if (ps.isTransitiveReduceProjectExpressionsEnabled()) { - b.add(PROJECT_REDUCE_EXPRESSIONS_CALCITE_RULE); - } - if (ps.isTransitiveReduceFilterExpressionsEnabled()) { - b.add(FILTER_REDUCE_EXPRESSIONS_CALCITE_RULE); - } - if (ps.isTransitiveReduceCalcExpressionsEnabled()) { - b.add(CALC_REDUCE_EXPRESSIONS_CALCITE_RULE); - } - } - } else { - b.add(FILTER_INTO_JOIN_CALCITE_RULE, - JOIN_CONDITION_PUSH_CALCITE_RULE, - JOIN_PUSH_EXPRESSIONS_RULE); - } - return b.build(); } @@ -971,7 +986,8 @@ static ImmutableList getPreLogicalCommonRules(OptimizerRulesContext CorrelateRule.INSTANCE, TableModifyRule.INSTANCE, TableOptimizeRule.INSTANCE, - CopyIntoTableRule.INSTANCE + CopyIntoTableRule.INSTANCE, + VacuumTableRule.INSTANCE ).build()); static final RuleSet getPhysicalRules(OptimizerRulesContext optimizerRulesContext) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/TableManagementPlanGenerator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/TableManagementPlanGenerator.java index 086da26a91..af78736ce9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/TableManagementPlanGenerator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/TableManagementPlanGenerator.java @@ -15,6 +15,8 @@ */ package com.dremio.exec.planner; +import java.util.function.Function; + import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.plan.RelTraitSet; @@ -30,9 +32,11 @@ import com.dremio.exec.planner.physical.DistributionTrait; import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.planner.physical.TableFunctionPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; import com.dremio.exec.planner.physical.UnionAllPrel; import com.dremio.exec.planner.physical.UnionExchangePrel; import com.dremio.exec.planner.physical.WriterPrule; +import com.dremio.exec.store.OperationType; import com.dremio.exec.store.RecordWriter; import com.dremio.exec.store.TableMetadata; import com.google.common.base.Preconditions; @@ -71,7 +75,7 @@ public TableManagementPlanGenerator(RelOptTable table, RelOptCluster cluster, Re * UnionAllPrel ---------------------------------------------| * | | * | | - * WriterPrel TableFunctionPrel (DELETED_DATA_FILES_METADATA) + * WriterPrel TableFunctionPrel (DELETED_FILES_METADATA) * | this converts a path into required IcebergMetadata blob * | | * (input from copyOnWriteResultsPlan) (deleted data files list from dataFileAggrPlan) @@ -91,34 +95,66 @@ protected Prel getDataWriterPlan(RelNode copyOnWriteResultsPlan, final RelNode d }); } + /** + * UnionAllPrel <------ WriterPrel + * | + * 
| + * UnionAllPrel ---------------------------------------------| + * | | + * | | + * TableFunctionPrel (DELETED_FILES_METADATA) TableFunctionPrel (DELETED_FILES_METADATA) + * | this converts deleteFile paths into required IcebergMetadata blob + * | | + * (deleted data files list from dataFileAggrPlan) (deleted data files list from deleteFileAggrPlan) + */ + protected Prel getDataWriterPlan(RelNode copyOnWriteResultsPlan, final Function metadataWriterFunction) { + return WriterPrule.createWriter( + copyOnWriteResultsPlan, + copyOnWriteResultsPlan.getRowType(), + tableMetadata.getDatasetConfig(), + createTableEntry, + metadataWriterFunction); + } + private Prel getMetadataWriterPlan(RelNode dataFileAggrPlan, RelNode manifestWriterPlan) throws InvalidRelException { - ImmutableList projectedCols = RecordWriter.SCHEMA.getFields().stream() + // Insert a table function that'll pass the path through and set the OperationType + TableFunctionPrel deletedFilesTableFunctionPrel = getDeleteFilesMetadataTableFunctionPrel(dataFileAggrPlan, + getProjectedColumns(), TableFunctionUtil.getDeletedFilesMetadataTableFunctionContext( + OperationType.DELETE_DATAFILE, RecordWriter.SCHEMA, getProjectedColumns(), true)); + + final RelTraitSet traits = traitSet.plus(DistributionTrait.SINGLETON).plus(Prel.PHYSICAL); + + // Union the updating of the deleted data's metadata with the rest + return getUnionPrel(traits, manifestWriterPlan, deletedFilesTableFunctionPrel); + } + + protected ImmutableList getProjectedColumns() { + return RecordWriter.SCHEMA.getFields().stream() .map(f -> SchemaPath.getSimplePath(f.getName())) .collect(ImmutableList.toImmutableList()); + } - // Insert a table function that'll pass the path through and set the OperationType - TableFunctionPrel deletedDataFilesTableFunctionPrel = new TableFunctionPrel( - dataFileAggrPlan.getCluster(), - dataFileAggrPlan.getTraitSet(), + protected TableFunctionPrel getDeleteFilesMetadataTableFunctionPrel(RelNode input, ImmutableList projectedCols, TableFunctionContext tableFunctionContext) { + return new TableFunctionPrel( + input.getCluster(), + input.getTraitSet(), table, - dataFileAggrPlan, + input, tableMetadata, new TableFunctionConfig( - TableFunctionConfig.FunctionType.DELETED_DATA_FILES_METADATA, + TableFunctionConfig.FunctionType.DELETED_FILES_METADATA, true, - new TableFunctionContext(RecordWriter.SCHEMA, projectedCols, true)), + tableFunctionContext), ScanRelBase.getRowTypeFromProjectedColumns(projectedCols, - RecordWriter.SCHEMA, dataFileAggrPlan.getCluster())); - - final RelTraitSet traits = traitSet.plus(DistributionTrait.SINGLETON).plus(Prel.PHYSICAL); + RecordWriter.SCHEMA, input.getCluster())); + } - // Union the updating of the deleted data's metadata with the rest + protected Prel getUnionPrel(RelTraitSet traits, RelNode manifestWriterPlan, RelNode input) throws InvalidRelException { return new UnionAllPrel(cluster, traits, ImmutableList.of(manifestWriterPlan, new UnionExchangePrel(cluster, traits, - deletedDataFilesTableFunctionPrel)), + input)), false); } - } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumOutputSchema.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumOutputSchema.java new file mode 100644 index 0000000000..a3301dcfe6 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumOutputSchema.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner; + + +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; + +import com.dremio.common.expression.CompleteType; +import com.dremio.exec.planner.sql.CalciteArrowHelper; +import com.dremio.exec.record.BatchSchema; + +/** + * Static schema from VACUUM TABLE. + */ +public class VacuumOutputSchema { + + private VacuumOutputSchema() { + } + + public static final String DELETE_DATA_FILE_COUNT = "deleted_data_files_count"; + public static final String DELETE_POSITION_DELETE_FILES_COUNT = "deleted_position_delete_files_count"; + public static final String DELETE_EQUALITY_DELETE_FILES_COUNT = "deleted_equality_delete_files_count"; + public static final String DELETE_MANIFEST_FILES_COUNT = "deleted_manifest_files_count"; + public static final String DELETE_MANIFEST_LISTS_COUNT = "deleted_manifest_lists_count"; + public static final String DELETE_PARTITION_STATS_FILES_COUNT = "deleted_partition_stats_files_count"; + + public static final BatchSchema OUTPUT_SCHEMA = BatchSchema.newBuilder() + .addField(Field.nullable(DELETE_DATA_FILE_COUNT, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(DELETE_POSITION_DELETE_FILES_COUNT, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(DELETE_EQUALITY_DELETE_FILES_COUNT, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(DELETE_MANIFEST_FILES_COUNT, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(DELETE_MANIFEST_LISTS_COUNT, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(DELETE_PARTITION_STATS_FILES_COUNT, Types.MinorType.BIGINT.getType())) + .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) + .build(); + + public static RelDataType getRelDataType(RelDataTypeFactory typeFactory) { + return getRowType(OUTPUT_SCHEMA, typeFactory); + } + + public static RelDataType getRowType(BatchSchema schema, RelDataTypeFactory factory) { + final RelDataTypeFactory.FieldInfoBuilder builder = new RelDataTypeFactory.FieldInfoBuilder(factory); + for(Field field : schema){ + builder.add(field.getName(), CalciteArrowHelper.wrap(CompleteType.fromField(field)).toCalciteType(factory, true)); + } + return builder.build(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumPlanGenerator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumPlanGenerator.java new file mode 100644 index 0000000000..33cd86ba5e --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/VacuumPlanGenerator.java @@ -0,0 +1,545 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner; + +import static com.dremio.exec.store.RecordReader.SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA; +import static com.dremio.exec.store.SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA; +import static com.dremio.exec.store.SystemSchemas.DATAFILE_PATH; +import static com.dremio.exec.store.SystemSchemas.FILE_CONTENT; +import static com.dremio.exec.store.SystemSchemas.FILE_PATH; +import static com.dremio.exec.store.SystemSchemas.FILE_TYPE; +import static com.dremio.exec.store.SystemSchemas.ICEBERG_SNAPSHOTS_SCAN_SCHEMA; +import static com.dremio.exec.store.SystemSchemas.RECORDS; +import static com.dremio.exec.store.iceberg.model.IcebergConstants.ADDED_DATA_FILES; +import static com.dremio.exec.store.iceberg.model.IcebergConstants.DELETED_DATA_FILES; +import static org.apache.calcite.sql.fun.SqlStdOperatorTable.CASE; +import static org.apache.calcite.sql.fun.SqlStdOperatorTable.EQUALS; +import static org.apache.calcite.sql.fun.SqlStdOperatorTable.SUM; +import static org.apache.calcite.sql.type.SqlTypeName.BIGINT; +import static org.apache.calcite.sql.type.SqlTypeName.VARCHAR; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.InvalidRelException; +import org.apache.calcite.rel.RelCollations; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.core.AggregateCall; +import org.apache.calcite.rel.core.JoinRelType; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rel.type.RelDataTypeField; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexInputRef; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexUtil; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.calcite.sql.validate.SqlValidatorUtil; +import org.apache.calcite.util.ImmutableBitSet; +import org.apache.calcite.util.Pair; +import org.apache.iceberg.ManifestContent; +import org.apache.iceberg.Snapshot; +import org.apache.iceberg.Table; + +import com.dremio.common.exceptions.UserException; +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.VacuumOptions; +import com.dremio.exec.physical.config.ImmutableManifestScanFilters; +import com.dremio.exec.planner.common.MoreRelOptUtil; +import com.dremio.exec.planner.logical.CreateTableEntry; +import com.dremio.exec.planner.physical.DistributionTrait; +import com.dremio.exec.planner.physical.FilterPrel; +import com.dremio.exec.planner.physical.HashAggPrel; +import com.dremio.exec.planner.physical.HashJoinPrel; +import com.dremio.exec.planner.physical.HashToRandomExchangePrel; +import com.dremio.exec.planner.physical.Prel; 
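[Review sketch] The VacuumPlanGenerator below plans orphan-file collection as a set difference: file paths reachable from expired snapshots are LEFT JOINed against paths reachable from live snapshots, and the orphans are the rows where the live side is NULL. The same logic over plain sets, with illustrative file paths:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    class OrphanFileSketch {
      public static void main(String[] args) {
        Set<String> expiredSide = new HashSet<>(Arrays.asList("data/a.parquet", "data/b.parquet", "meta/m1.avro"));
        Set<String> liveSide = new HashSet<>(Arrays.asList("data/a.parquet"));
        Set<String> orphans = new HashSet<>(expiredSide);
        orphans.removeAll(liveSide); // LEFT JOIN on file_path ... WHERE live.file_path IS NULL
        System.out.println(orphans); // only files referenced by no live snapshot are deleted
      }
    }

Running both sides through the same manifest/manifest-list/partition-stats scan stack, and deduplicating only the live side, keeps the join symmetric while bounding the build side of the hash join.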
+import com.dremio.exec.planner.physical.ProjectPrel; +import com.dremio.exec.planner.physical.StreamAggPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; +import com.dremio.exec.planner.physical.UnionExchangePrel; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.SystemSchemas; +import com.dremio.exec.store.TableMetadata; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.exec.store.iceberg.IcebergFileType; +import com.dremio.exec.store.iceberg.IcebergManifestListScanPrel; +import com.dremio.exec.store.iceberg.IcebergManifestScanPrel; +import com.dremio.exec.store.iceberg.IcebergOrphanFileDeletePrel; +import com.dremio.exec.store.iceberg.IcebergSnapshotsPrel; +import com.dremio.exec.store.iceberg.PartitionStatsScanPrel; +import com.dremio.exec.store.iceberg.SnapshotsScanOptions; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; +import com.dremio.exec.store.iceberg.model.IcebergModel; +import com.dremio.io.file.FileSystem; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; + +/*** + * Expand plans for VACUUM TABLE. + */ +public class VacuumPlanGenerator { + private static final long ESTIMATED_RECORDS_PER_MANIFEST = 330000; + private final RelOptTable table; + private final RelOptCluster cluster; + private final RelTraitSet traitSet; + private final TableMetadata tableMetadata; + private final VacuumOptions vacuumOptions; + private final CreateTableEntry createTableEntry; + private Table icebergTable = null; + private long snapshotsCount = 0L; + private long dataFileEstimatedCount = 0L; + private long manifestFileEstimatedCount = 0L; + + public VacuumPlanGenerator(RelOptTable table, RelOptCluster cluster, RelTraitSet traitSet, TableMetadata tableMetadata, + CreateTableEntry createTableEntry, VacuumOptions vacuumOptions) { + this.table = Preconditions.checkNotNull(table); + this.cluster = cluster; + this.traitSet = traitSet; + this.tableMetadata = Preconditions.checkNotNull(tableMetadata, "TableMetadata cannot be null."); + this.createTableEntry = createTableEntry; + this.vacuumOptions = Preconditions.checkNotNull(vacuumOptions, "VacuumOption cannot be null."); + loadIcebergTable(); + } + + /* + * UnionExchangePrel + * │ + * │ + * IcebergOrphanFileDeleteTF + * │ + * │ + * Filter (live.filePath = null) + * │ + * │ + * HashJoin (expired.filePath=live.filePath (LEFT)) + * │ │ + * ┌─────────────────────┘ └──────────┐ + * │ │ + * Project (filepath, filetype) Project (filepath, filetype) + * │ │ + * │ │ + * │ HashAgg(filepath [deduplicate]) + * │ │ + * │ │ + * Project Project + * [ (filepath, filetype) [ (filepath, filetype) + * │ │ │ │ + * (datafilepath, filecontent) ] (datafilepath, filecontent) ] + * │ │ + * │ │ + * IcebergManifestScanTF IcebergManifestScanTF + * │ │ + * │ │ + * IcebergManifestListScanTF IcebergManifestListScanTF + * │ │ + * │ │ + * PartitionStatsScanTF PartitionStatsScanTF + * │ │ + * │ │ + * ExpireSnapshotScan ExpiredSnapshotScan + * (Expired snapshot ids) (Live snapshot ids) + */ + + public Prel buildPlan() { + try { + Prel expiredSnapshotFilesPlan = filePathAndTypeScanPlan(SnapshotsScanOptions.Mode.EXPIRED_SNAPSHOTS); + Prel liveSnapshotsFilesPlan = deDupFilePathAndTypeScanPlan(SnapshotsScanOptions.Mode.LIVE_SNAPSHOTS); + Prel orphanFilesPlan = orphanFilesPlan(expiredSnapshotFilesPlan, liveSnapshotsFilesPlan); + Prel deleteOrphanFilesPlan = deleteOrphanFilesPlan(orphanFilesPlan); + return outputSummaryPlan(deleteOrphanFilesPlan); + } catch 
(InvalidRelException e) { + throw new RuntimeException(e); + } + } + + private Prel orphanFilesPlan(Prel expired, Prel live) { + Prel joinPlan = joinLiveAndExpiredPlan(expired, live); + // Need to count left side fields. + final int leftFieldCount = joinPlan.getInput(0).getRowType().getFieldCount(); + + // Orphan file paths: right.FILE_PATH IS NULL + RelDataTypeField rightRowIndexField = joinPlan.getInput(1).getRowType() + .getField(FILE_PATH, false, false); + + return addColumnIsNullFilter(joinPlan, rightRowIndexField.getType(), leftFieldCount + rightRowIndexField.getIndex()); + } + + private Prel joinLiveAndExpiredPlan(Prel expired, Prel live) { + RexBuilder rexBuilder = expired.getCluster().getRexBuilder(); + + // Left side: Source files from expired snapshots + DistributionTrait leftDistributionTrait = getHashDistributionTraitForFields(expired.getRowType(), ImmutableList.of(FILE_PATH)); + RelTraitSet leftTraitSet = cluster.getPlanner().emptyTraitSet().plus(Prel.PHYSICAL) + .plus(leftDistributionTrait); + HashToRandomExchangePrel leftSourceFilePathHashExchange = new HashToRandomExchangePrel(cluster, leftTraitSet, + expired, leftDistributionTrait.getFields()); + + // Right side: Source files from live snapshots + DistributionTrait rightDistributionTrait = getHashDistributionTraitForFields(live.getRowType(), ImmutableList.of(FILE_PATH)); + RelTraitSet rightTraitSet = cluster.getPlanner().emptyTraitSet().plus(Prel.PHYSICAL) + .plus(rightDistributionTrait); + HashToRandomExchangePrel rightSourceFilePathHashExchange = new HashToRandomExchangePrel(cluster, rightTraitSet, + live, rightDistributionTrait.getFields()); + + // hash join on FILE_PATH == FILE_PATH + + RelDataTypeField leftSourceFilePathField = leftSourceFilePathHashExchange.getRowType() + .getField(FILE_PATH, false, false); + RelDataTypeField rightSourceFilePathField = rightSourceFilePathHashExchange.getRowType() + .getField(FILE_PATH, false, false); + + int leftFieldCount = leftSourceFilePathHashExchange.getRowType().getFieldCount(); + RexNode joinCondition = rexBuilder.makeCall( + SqlStdOperatorTable.EQUALS, + rexBuilder.makeInputRef(leftSourceFilePathField.getType(), leftSourceFilePathField.getIndex()), + rexBuilder.makeInputRef(rightSourceFilePathField.getType(), leftFieldCount + rightSourceFilePathField.getIndex())); + + return HashJoinPrel.create( + leftSourceFilePathHashExchange.getCluster(), + leftSourceFilePathHashExchange.getTraitSet(), + leftSourceFilePathHashExchange, + rightSourceFilePathHashExchange, + joinCondition, + null, + JoinRelType.LEFT); + } + + private Prel deDupFilePathAndTypeScanPlan(SnapshotsScanOptions.Mode scanMode) throws InvalidRelException { + Prel manifestPlan = filePathAndTypePlanFromManifest(scanMode); + Prel filePathAndTypeProject = projectDataFileAndType(manifestPlan); + Prel deDupFilePathPlan = reduceDuplicateFilePaths(filePathAndTypeProject); + return projectFilePathAndType(deDupFilePathPlan); + } + + private Prel filePathAndTypeScanPlan(SnapshotsScanOptions.Mode scanMode) throws InvalidRelException { + Prel manifestPlan = filePathAndTypePlanFromManifest(scanMode); + Prel filePathAndTypeProject = projectDataFileAndType(manifestPlan); + return projectFilePathAndType(filePathAndTypeProject); + } + + private Prel filePathAndTypePlanFromManifest(SnapshotsScanOptions.Mode scanMode) throws InvalidRelException { + Prel snapshotsScanPlan = snapshotsScanPlan(scanMode); + Prel partitionStatsScan = getPartitionStatsScanPrel(snapshotsScanPlan); + Prel manifestListScan = 
getManifestListScanPrel(partitionStatsScan); + return getManifestScanPrel(manifestListScan); + } + + private Prel snapshotsScanPlan(SnapshotsScanOptions.Mode scanMode) { + SnapshotsScanOptions snapshotsOption = new SnapshotsScanOptions(scanMode, vacuumOptions.getOlderThanInMillis(), vacuumOptions.getRetainLast()); + return new IcebergSnapshotsPrel( + cluster, + traitSet, + tableMetadata, + createTableEntry.getIcebergTableProps(), + snapshotsOption, + snapshotsCount, + 1); + } + + private Prel getManifestListScanPrel(Prel input) { + BatchSchema manifestListsReaderSchema = SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA.merge(CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA); + List manifestListsReaderColumns = manifestListsReaderSchema.getFields().stream().map(f -> SchemaPath.getSimplePath(f.getName())).collect(Collectors.toList()); + return new IcebergManifestListScanPrel( + input.getCluster(), + input.getTraitSet(), + table, + input, + tableMetadata, + manifestListsReaderSchema, + manifestListsReaderColumns, + input.getEstimatedSize() + manifestFileEstimatedCount); + } + + private Prel getPartitionStatsScanPrel(Prel input) { + BatchSchema partitionStatsScanSchema = ICEBERG_SNAPSHOTS_SCAN_SCHEMA.merge(CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA); + // TODO: it could be further improved whether it needs to apply PartitionStatsScan, if table is written by other engines, + // or the partition stats metadata entry is not present. + long estimatedRows = 2 * input.getEstimatedSize(); + return new PartitionStatsScanPrel(input.getCluster(), input.getTraitSet(), table, input, partitionStatsScanSchema, tableMetadata, estimatedRows); + } + + private Prel getManifestScanPrel(Prel input) { + DistributionTrait.DistributionField distributionField = new DistributionTrait.DistributionField(0); + DistributionTrait distributionTrait = new DistributionTrait(DistributionTrait.DistributionType.HASH_DISTRIBUTED, ImmutableList.of(distributionField)); + HashToRandomExchangePrel manifestSplitsExchange = new HashToRandomExchangePrel(input.getCluster(), input.getTraitSet(), + input, distributionTrait.getFields(), TableFunctionUtil.getHashExchangeTableFunctionCreator(tableMetadata, true)); + + BatchSchema manifestFileReaderSchema = SystemSchemas.ICEBERG_MANIFEST_SCAN_SCHEMA.merge(CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA); + List manifestFileReaderColumns = manifestFileReaderSchema.getFields().stream().map(f -> SchemaPath.getSimplePath(f.getName())).collect(Collectors.toList()); + + return new IcebergManifestScanPrel(manifestSplitsExchange.getCluster(), manifestSplitsExchange.getTraitSet().plus(DistributionTrait.ANY), table, + manifestSplitsExchange, tableMetadata, manifestFileReaderSchema, manifestFileReaderColumns, + new ImmutableManifestScanFilters.Builder().build(), input.getEstimatedSize() + dataFileEstimatedCount, ManifestContent.DATA, true); + } + + private Prel projectDataFileAndType(Prel manifestPrel) { + // Project condition might not be correct + final List projectFields = ImmutableList.of(FILE_PATH, FILE_TYPE); + Pair implicitFilePathCol = MoreRelOptUtil.findFieldWithIndex(manifestPrel.getRowType().getFieldList(), FILE_PATH); + Pair implicitFileTypeCol = MoreRelOptUtil.findFieldWithIndex(manifestPrel.getRowType().getFieldList(), FILE_TYPE); + Pair dataFilePathCol = MoreRelOptUtil.findFieldWithIndex(manifestPrel.getRowType().getFieldList(), DATAFILE_PATH); + Pair fileContentCol = MoreRelOptUtil.findFieldWithIndex(manifestPrel.getRowType().getFieldList(), FILE_CONTENT); + Preconditions.checkNotNull(implicitFilePathCol, "ManifestScan should always 
have implicitFilePath with rowType."); + Preconditions.checkNotNull(implicitFileTypeCol, "ManifestScan should always have implicitFileType with rowType."); + Preconditions.checkNotNull(dataFilePathCol, "ManifestScan should always have dataFileType with rowType."); + Preconditions.checkNotNull(fileContentCol, "ManifestScan should always have fileContent with rowType."); + + RexBuilder rexBuilder = cluster.getRexBuilder(); + + // if filePathCol is null, then use dataFilePathCol and fileContentCol values as file path and file type values. + RexNode implicitFilePathNullCheck = rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, + rexBuilder.makeInputRef(implicitFilePathCol.right.getType(), implicitFilePathCol.left)); + + RexNode filePathExpr = rexBuilder.makeCall(SqlStdOperatorTable.CASE, implicitFilePathNullCheck, + rexBuilder.makeInputRef(dataFilePathCol.right.getType(), dataFilePathCol.left), + rexBuilder.makeInputRef(implicitFilePathCol.right.getType(), implicitFilePathCol.left)); + RexNode fileTypeExpr = rexBuilder.makeCall(SqlStdOperatorTable.CASE, implicitFilePathNullCheck, + rexBuilder.makeInputRef(fileContentCol.right.getType(), fileContentCol.left), + rexBuilder.makeInputRef(implicitFileTypeCol.right.getType(), implicitFileTypeCol.left)); + + final List projectExpressions = ImmutableList.of(filePathExpr, fileTypeExpr); + RelDataType newRowType = RexUtil.createStructType(rexBuilder.getTypeFactory(), projectExpressions, projectFields, SqlValidatorUtil.F_SUGGESTER); + return ProjectPrel.create(manifestPrel.getCluster(), manifestPrel.getTraitSet(), manifestPrel, projectExpressions, newRowType); + } + + private Prel projectFilePathAndType(Prel input) { + final List projectFields = ImmutableList.of(FILE_PATH, FILE_TYPE); + Pair filePathCol = MoreRelOptUtil.findFieldWithIndex(input.getRowType().getFieldList(), FILE_PATH); + Pair fileTypeCol = MoreRelOptUtil.findFieldWithIndex(input.getRowType().getFieldList(), FILE_TYPE); + RexBuilder rexBuilder = cluster.getRexBuilder(); + RexNode filePathExpr = rexBuilder.makeInputRef(filePathCol.right.getType(), filePathCol.left); + RexNode fileContentExpr = rexBuilder.makeInputRef(fileTypeCol.right.getType(), fileTypeCol.left); + + final List projectExpressions = ImmutableList.of(filePathExpr, fileContentExpr); + RelDataType newRowType = RexUtil.createStructType(rexBuilder.getTypeFactory(), projectExpressions, projectFields, SqlValidatorUtil.F_SUGGESTER); + return ProjectPrel.create(input.getCluster(), input.getTraitSet(), input, projectExpressions, newRowType); + } + + private Prel reduceDuplicateFilePaths(Prel input) { + AggregateCall aggOnFilePath = AggregateCall.create( + SqlStdOperatorTable.COUNT, + true, + false, + Collections.emptyList(), + -1, + RelCollations.EMPTY, + 1, + input, + input.getCluster().getTypeFactory().createSqlType(SqlTypeName.BIGINT), + FILE_PATH + ); + + ImmutableBitSet groupSet = ImmutableBitSet.of( + input.getRowType().getField(FILE_PATH, false, false).getIndex(), + input.getRowType().getField(FILE_TYPE, false, false).getIndex()); + try { + return HashAggPrel.create( + input.getCluster(), + input.getTraitSet(), + input, + groupSet, + ImmutableList.of(groupSet), + ImmutableList.of(aggOnFilePath), + null + ); + } catch (InvalidRelException e) { + throw new RuntimeException("Failed to create HashAggPrel during delete file scan.", e); + } + } + + private Prel deleteOrphanFilesPlan(Prel input) { + // We do overestimate instead of underestimate. 
1) Use file counts from ALL snapshot; 2) consider every snapshot has partition stats files. + long estimatedRows = dataFileEstimatedCount + manifestFileEstimatedCount + snapshotsCount /*Manifest list file*/ + snapshotsCount * 2 /*Partition stats files*/; + return new IcebergOrphanFileDeletePrel( + input.getCluster(), input.getTraitSet(), table, input, tableMetadata, estimatedRows); + } + + private Prel outputSummaryPlan(Prel input) throws InvalidRelException { + RelOptCluster cluster = input.getCluster(); + RelDataTypeFactory typeFactory = cluster.getTypeFactory(); + + // Use single thread to collect deleted orphan files. + input = new UnionExchangePrel(input.getCluster(), input.getTraitSet().plus(DistributionTrait.SINGLETON), input); + + // Projected conditions + RexNode dataFileCondition = buildCaseCall(input, IcebergFileType.DATA); + RexNode positionDeleteCondition = buildCaseCall(input, IcebergFileType.POSITION_DELETES); + RexNode equalityDeleteCondition = buildCaseCall(input, IcebergFileType.EQUALITY_DELETES); + RexNode manifestCondition = buildCaseCall(input, IcebergFileType.MANIFEST); + RexNode manifestListCondition = buildCaseCall(input, IcebergFileType.MANIFEST_LIST); + RexNode partitionStatsCondition = buildCaseCall(input, IcebergFileType.PARTITION_STATS); + + // Projected deleted data files + RelDataType nullableBigInt = typeFactory.createTypeWithNullability(typeFactory.createSqlType(BIGINT), true); + List projectExpression = ImmutableList.of(dataFileCondition, positionDeleteCondition, equalityDeleteCondition, + manifestCondition, manifestListCondition, partitionStatsCondition); + + List summaryCols = VacuumOutputSchema.OUTPUT_SCHEMA.getFields().stream().map(Field::getName).collect(Collectors.toList()); + + RelDataTypeFactory.FieldInfoBuilder fieldInfoBuilder = typeFactory.builder(); + summaryCols.forEach(c -> fieldInfoBuilder.add(c, nullableBigInt)); + RelDataType projectedRowType = fieldInfoBuilder.build(); + + ProjectPrel project = ProjectPrel.create(cluster, traitSet, input, projectExpression, projectedRowType); + + // Aggregated summary + List aggs = summaryCols.stream().map(c -> buildAggregateCall(project, projectedRowType, c)).collect(Collectors.toList()); + Prel agg = StreamAggPrel.create(cluster, project.getTraitSet(), project, ImmutableBitSet.of(), Collections.EMPTY_LIST, aggs, null); + + // Project: return 0 as row count in case there is no Agg record (i.e., no orphan files to delete) + List projectExprs = summaryCols.stream().map(c -> notNullProjectExpr(agg, c)).collect(Collectors.toList()); + RelDataType projectRowType = RexUtil.createStructType(agg.getCluster().getTypeFactory(), projectExprs, + summaryCols, null); + return ProjectPrel.create(cluster, agg.getTraitSet(), agg, projectExprs, projectRowType); + } + + private RexNode notNullProjectExpr(Prel input, String fieldName) { + RexBuilder rexBuilder = cluster.getRexBuilder(); + RelDataTypeFactory typeFactory = cluster.getTypeFactory(); + + final RexNode zeroLiteral = rexBuilder.makeLiteral(0, typeFactory.createSqlType(SqlTypeName.INTEGER), true); + RelDataTypeField field = input.getRowType().getField(fieldName, false, false); + RexInputRef inputRef = rexBuilder.makeInputRef(field.getType(), field.getIndex()); + RexNode rowCountRecordExistsCheckCondition = rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, inputRef); + + // case when the count of row count records is 0, return 0, else return aggregated row count + return rexBuilder.makeCall(SqlStdOperatorTable.CASE, rowCountRecordExistsCheckCondition, zeroLiteral, + 
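[Review sketch] When there are no orphan files the stream aggregate produces no input rows and SUM yields NULL, so each summary column is wrapped as CASE WHEN col IS NULL THEN 0 ELSE col END (the call being completed just below). The plain-Java equivalent of that defaulting:

    class NullToZeroSketch {
      public static void main(String[] args) {
        Long aggregated = null; // SUM over zero input rows yields NULL
        long reported = (aggregated == null) ? 0L : aggregated;
        System.out.println(reported); // VACUUM reports 0 instead of null
      }
    }
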
rexBuilder.makeInputRef(field.getType(), field.getIndex())); + } + + private AggregateCall buildAggregateCall(Prel relNode, RelDataType projectRowType, String fieldName) { + RelDataTypeField aggField = projectRowType.getField(fieldName, false, false); + return AggregateCall.create( + SUM, + false, + false, + ImmutableList.of(aggField.getIndex()), + -1, + RelCollations.EMPTY, + 1, + relNode, + cluster.getTypeFactory().createTypeWithNullability(cluster.getTypeFactory().createSqlType(SqlTypeName.BIGINT), true), + fieldName); + } + + private RexNode buildCaseCall(Prel orphanFileDeleteRel, IcebergFileType icebergFileType) { + RexBuilder rexBuilder = cluster.getRexBuilder(); + RelDataTypeFactory typeFactory = cluster.getTypeFactory(); + Function makeLiteral = i -> rexBuilder.makeLiteral(i, typeFactory.createSqlType(VARCHAR), false); + RelDataType nullableBigInt = typeFactory.createTypeWithNullability(typeFactory.createSqlType(BIGINT), true); + + RelDataTypeField orphanFileTypeField = orphanFileDeleteRel.getRowType().getField(FILE_TYPE, false, false); + RexInputRef orphanFileTypeIn = rexBuilder.makeInputRef(orphanFileTypeField.getType(), orphanFileTypeField.getIndex()); + RelDataTypeField recordsField = orphanFileDeleteRel.getRowType().getField(RECORDS, false, false); + RexNode recordsIn = rexBuilder.makeCast(nullableBigInt, rexBuilder.makeInputRef(recordsField.getType(), recordsField.getIndex())); + + RexNode equalsCall = rexBuilder.makeCall(EQUALS, orphanFileTypeIn, makeLiteral.apply(icebergFileType.name())); + return rexBuilder.makeCall(CASE, equalsCall, recordsIn, rexBuilder.makeZeroLiteral(nullableBigInt)); + } + + private DistributionTrait getHashDistributionTraitForFields(RelDataType rowType, List columnNames) { + ImmutableList fields = columnNames.stream() + .map(n -> new DistributionTrait.DistributionField( + Preconditions.checkNotNull(rowType.getField(n, false, false)).getIndex())) + .collect(ImmutableList.toImmutableList()); + return new DistributionTrait(DistributionTrait.DistributionType.HASH_DISTRIBUTED, fields); + } + + /** + * Utility function to apply IS_NULL(col) filter for the given input node + */ + private Prel addColumnIsNullFilter(RelNode inputNode, RelDataType fieldType, int fieldIndex) { + RexBuilder rexBuilder = cluster.getRexBuilder(); + + RexNode filterCondition = rexBuilder.makeCall( + SqlStdOperatorTable.IS_NULL, + rexBuilder.makeInputRef(fieldType, fieldIndex)); + + return FilterPrel.create( + inputNode.getCluster(), + inputNode.getTraitSet(), + inputNode, + filterCondition); + } + + /** + * Here is a suboptimal plan to estimate the row accounts for Prels used in ExpireSnapshots plan. The 'suboptimal' mean + * to directly load the Iceberg table and read back its all snapshots and stats of each snapshot for row estimates. + * Another approach is tracked in DX-63280. 
+ */ + private void loadIcebergTable() { + if (icebergTable == null) { + IcebergTableProps icebergTableProps = createTableEntry.getIcebergTableProps(); + Preconditions.checkState(createTableEntry.getPlugin() instanceof SupportsIcebergMutablePlugin, "Plugin not instance of SupportsIcebergMutablePlugin"); + SupportsIcebergMutablePlugin plugin = (SupportsIcebergMutablePlugin) createTableEntry.getPlugin(); + try (FileSystem fs = plugin.createFS(icebergTableProps.getTableLocation(), createTableEntry.getUserName(), null)) { + IcebergModel icebergModel = plugin.getIcebergModel(icebergTableProps, createTableEntry.getUserName(), null, fs); + icebergTable = icebergModel.getIcebergTable(icebergModel.getTableIdentifier(icebergTableProps.getTableLocation())); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + Iterator iterator = icebergTable.snapshots().iterator(); + while (iterator.hasNext()) { + Snapshot snapshot = iterator.next(); + snapshotsCount++; + estimateFilesFromSnapshot(snapshot, snapshotsCount); + } + + if (snapshotsCount == 1 || vacuumOptions.getRetainLast() >= snapshotsCount) { + throw UserException.unsupportedError() + .message("Vacuum table succeeded, and the operation did not change the number of snapshots.") + .buildSilently(); + } + } + + private void estimateFilesFromSnapshot(Snapshot snapshot, long snapshotsCount) { + // First snapshot + if (1 == snapshotsCount) { + long numDataFiles = snapshot != null ? + Long.parseLong(snapshot.summary().getOrDefault("total-data-files", "0")) : 0L; + dataFileEstimatedCount += numDataFiles; + long numPositionDeletes = snapshot != null ? + Long.parseLong(snapshot.summary().getOrDefault("total-position-deletes", "0")) : 0L; + dataFileEstimatedCount += numPositionDeletes; + long numEqualityDeletes = snapshot != null ? + Long.parseLong(snapshot.summary().getOrDefault("total-equality-deletes", "0")) : 0L; + dataFileEstimatedCount += numEqualityDeletes; + + manifestFileEstimatedCount += Math.max(dataFileEstimatedCount / ESTIMATED_RECORDS_PER_MANIFEST, 1); + } else { + long numAddedDataFiles = snapshot != null ? + Long.parseLong(snapshot.summary().getOrDefault(ADDED_DATA_FILES, "0")) : 0L; + dataFileEstimatedCount += numAddedDataFiles; + long numAddedDeleteFiles = snapshot != null ? 
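[Review sketch] The estimate accumulates the totals recorded in the first snapshot's summary and then per-snapshot added-file deltas, assuming roughly ESTIMATED_RECORDS_PER_MANIFEST (330,000) entries per manifest with a floor of one manifest per snapshot. The condensed arithmetic, with made-up counts:

    class VacuumEstimateSketch {
      static final long RECORDS_PER_MANIFEST = 330_000L;

      public static void main(String[] args) {
        // First entry: total files in the first snapshot; later entries: files added per snapshot.
        long[] filesPerSnapshot = {500_000L, 40_000L, 12_000L};
        long dataFiles = 0L;
        long manifests = 0L;
        for (long files : filesPerSnapshot) {
          dataFiles += files;
          manifests += Math.max(files / RECORDS_PER_MANIFEST, 1L); // at least one manifest per snapshot
        }
        System.out.println(dataFiles + " files, ~" + manifests + " manifests");
      }
    }

As the javadoc above notes, this deliberately overestimates; the cost of reading the table's snapshot list up front is the trade-off tracked in DX-63280.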
+ Long.parseLong(snapshot.summary().getOrDefault(DELETED_DATA_FILES, "0")) : 0L; + dataFileEstimatedCount += numAddedDeleteFiles; + + manifestFileEstimatedCount += Math.max((numAddedDataFiles + numAddedDeleteFiles) / ESTIMATED_RECORDS_PER_MANIFEST, 1); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/CachedMaterializationDescriptor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/CachedMaterializationDescriptor.java index c4b9c1af3c..a741f6f319 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/CachedMaterializationDescriptor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/CachedMaterializationDescriptor.java @@ -15,7 +15,6 @@ */ package com.dremio.exec.planner.acceleration; -import java.util.List; import java.util.Set; import org.apache.calcite.plan.CopyWithCluster; @@ -106,7 +105,7 @@ public DremioMaterialization getMaterialization() { * @return */ @Override - public boolean isApplicable(Set> queryTablesUsed, Set> queryVdsUsed, Set externalQueries) { + public boolean isApplicable(Set queryTablesUsed, Set queryVdsUsed, Set externalQueries) { return SubstitutionUtils.usesTableOrVds(queryTablesUsed, queryVdsUsed, externalQueries, materialization.getQueryRel()); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/DefaultExpansionNode.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/DefaultExpansionNode.java index 790d261984..465da73a00 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/DefaultExpansionNode.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/DefaultExpansionNode.java @@ -23,18 +23,21 @@ import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.service.namespace.NamespaceKey; /** * Represents a location where the query was expanded from a VDS to a default reflection */ public class DefaultExpansionNode extends ExpansionNode { - protected DefaultExpansionNode(NamespaceKey path, RelDataType rowType, RelOptCluster cluster, RelTraitSet traits, RelNode input, boolean contextSensitive) { - super(path, rowType, cluster, traits, input, contextSensitive); + protected DefaultExpansionNode(NamespaceKey path, RelDataType rowType, RelOptCluster cluster, RelTraitSet traits, RelNode input, + boolean contextSensitive, TableVersionContext versionContext) { + super(path, rowType, cluster, traits, input, contextSensitive, versionContext); } - public static DefaultExpansionNode wrap(NamespaceKey path, RelNode node, RelDataType rowType, boolean contextSensitive) { - return new DefaultExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive); + public static DefaultExpansionNode wrap(NamespaceKey path, RelNode node, RelDataType rowType, + boolean contextSensitive, TableVersionContext versionContext) { + return new DefaultExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive, versionContext); } @Override @@ -44,11 +47,13 @@ public boolean isDefault() { @Override public RelNode copy(RelTraitSet traitSet, List inputs) { - return new DefaultExpansionNode(getPath(), rowType, this.getCluster(), traitSet, inputs.get(0), isContextSensitive()); + return new DefaultExpansionNode(getPath(), rowType, this.getCluster(), traitSet, inputs.get(0), + isContextSensitive(), getVersionContext()); } @Override public RelNode 
copyWith(CopyWithCluster copier) { - return new DefaultExpansionNode(getPath(), rowType, copier.getCluster(), copier.copyOf(getTraitSet()), getInput().accept(copier), isContextSensitive()); + return new DefaultExpansionNode(getPath(), rowType, copier.getCluster(), copier.copyOf(getTraitSet()), + getInput().accept(copier), isContextSensitive(), getVersionContext()); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/ExpansionNode.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/ExpansionNode.java index 0000efdbdc..604a26f443 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/ExpansionNode.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/ExpansionNode.java @@ -36,6 +36,7 @@ import org.apache.calcite.sql2rel.RelStructuredTypeFlattener; import org.apache.calcite.sql2rel.RelStructuredTypeFlattener.SelfFlatteningRel; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.StatelessRelShuttleImpl; import com.dremio.service.Pointer; import com.dremio.service.namespace.NamespaceKey; @@ -48,25 +49,30 @@ public class ExpansionNode extends SingleRel implements CopyToCluster, SelfFlatt private final NamespaceKey path; private final boolean contextSensitive; + private final TableVersionContext versionContext; - protected ExpansionNode(NamespaceKey path, RelDataType rowType, RelOptCluster cluster, RelTraitSet traits, RelNode input, boolean contextSensitive) { + protected ExpansionNode(NamespaceKey path, RelDataType rowType, RelOptCluster cluster, RelTraitSet traits, RelNode input, + boolean contextSensitive, TableVersionContext versionContext) { super(cluster, traits, input); this.path = path; this.contextSensitive = contextSensitive; this.rowType = rowType; + this.versionContext = versionContext; } - public static RelNode wrap(NamespaceKey path, RelNode node, RelDataType rowType, boolean contextSensitive, boolean isDefault) { + public static RelNode wrap(NamespaceKey path, RelNode node, RelDataType rowType, boolean contextSensitive, + boolean isDefault, TableVersionContext versionContext) { if (isDefault) { - return new DefaultExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive); + return new DefaultExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive, versionContext); } else { - return new ExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive); + return new ExpansionNode(path, rowType, node.getCluster(), node.getTraitSet(), node, contextSensitive, versionContext); } } @Override public RelNode copyWith(CopyWithCluster copier) { - return new ExpansionNode(path, rowType, copier.getCluster(), copier.copyOf(getTraitSet()), getInput().accept(copier), contextSensitive); + return new ExpansionNode(path, rowType, copier.getCluster(), copier.copyOf(getTraitSet()), getInput().accept(copier), + contextSensitive, versionContext); } @Override @@ -78,7 +84,8 @@ public void flattenRel(RelStructuredTypeFlattener flattener) { public RelWriter explainTerms(RelWriter pw) { return super.explainTerms(pw) .item("path", path.toUnescapedString()) - .itemIf("contextSensitive", contextSensitive, contextSensitive); + .itemIf("contextSensitive", contextSensitive, contextSensitive) + .itemIf("version", versionContext, versionContext != null); } public boolean isContextSensitive() { @@ -91,7 +98,7 @@ public boolean isDefault() { @Override public RelNode copy(RelTraitSet traitSet, List 
inputs) { - return new ExpansionNode(path, rowType, this.getCluster(), traitSet, inputs.get(0), contextSensitive); + return new ExpansionNode(path, rowType, this.getCluster(), traitSet, inputs.get(0), contextSensitive, versionContext); } @Override @@ -108,6 +115,8 @@ public NamespaceKey getPath() { return path; } + public TableVersionContext getVersionContext() { return versionContext; } + public static RelNode removeFromTree(RelNode tree) { return tree.accept(new RelShuttleImpl() { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/JoinDependencyProperties.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/JoinDependencyProperties.java index c217842faa..4709a69cf0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/JoinDependencyProperties.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/JoinDependencyProperties.java @@ -17,6 +17,7 @@ import java.util.List; +import com.dremio.exec.catalog.TableVersionContext; import com.google.common.collect.ImmutableList; /** @@ -42,17 +43,28 @@ public List getDependencies() { public static class Dependency { public List foreignKeyTable; public List uniqueKeyTable; + public TableVersionContext foreignKeyTableVersionContext = null; + public TableVersionContext uniqueKeyTableVersionContext = null; - public Dependency(List foreignKeyTable, List uniqueKeyTable) { + public Dependency(List foreignKeyTable, String foreignKeyTableVersionContextValue, + List uniqueKeyTable, String uniqueKeyTableVersionContextValue) { this.foreignKeyTable = foreignKeyTable; + if (foreignKeyTableVersionContextValue != null) { + foreignKeyTableVersionContext = TableVersionContext.deserialize(foreignKeyTableVersionContextValue); + } this.uniqueKeyTable = uniqueKeyTable; + if (uniqueKeyTableVersionContextValue != null) { + uniqueKeyTableVersionContext = TableVersionContext.deserialize(uniqueKeyTableVersionContextValue); + } } @Override public String toString() { return "Dependency{" + "foreignKeyTable=" + foreignKeyTable + + (foreignKeyTableVersionContext != null ? ", foreignKeyTableVersionContext=" + foreignKeyTableVersionContext.serialize() : "") + ", uniqueKeyTable=" + uniqueKeyTable + + (uniqueKeyTableVersionContext != null ? 
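[Review sketch] Version contexts are optional on both sides of a join dependency, so Dependency deserializes them only when a value is supplied and serializes them back into toString() only when present. The null-guard pattern in isolation; the serialized form shown is hypothetical:

    class DependencyToStringSketch {
      static String describe(String table, String serializedVersionContext) {
        return "table=" + table
            + (serializedVersionContext != null ? ", versionContext=" + serializedVersionContext : "");
      }

      public static void main(String[] args) {
        System.out.println(describe("fk_table", "BRANCH:main")); // hypothetical serialized form
        System.out.println(describe("uk_table", null));          // omitted when absent
      }
    }
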
", uniqueKeyTableVersionContext=" + uniqueKeyTableVersionContext.serialize() : "") + '}'; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationDescriptor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationDescriptor.java index 1d05d38447..f1c1e79798 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationDescriptor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationDescriptor.java @@ -170,7 +170,7 @@ public List getPartition() { return partition; } - public boolean isApplicable(Set> queryTablesUsed, Set> queryVdsUsed, Set externalQueries) { + public boolean isApplicable(Set queryTablesUsed, Set queryVdsUsed, Set externalQueries) { return true; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationList.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationList.java index 115629fb41..1462e3610a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationList.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/MaterializationList.java @@ -23,6 +23,8 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.substitution.MaterializationProvider; import com.dremio.exec.planner.acceleration.substitution.SubstitutionUtils; import com.dremio.exec.planner.sql.SqlConverter; @@ -68,8 +70,10 @@ public List getApplicableMaterializations() { } @Override - public java.util.Optional getDefaultRawMaterialization(NamespaceKey path, List vdsFields) { - return getDefaultRawMaterialization(provider, path, vdsFields); + public java.util.Optional getDefaultRawMaterialization(NamespaceKey path, + TableVersionContext versionContext, + List vdsFields, Catalog catalog) { + return getDefaultRawMaterialization(provider, path, versionContext, vdsFields, catalog); } public Optional getDescriptor(final List path) { @@ -83,8 +87,8 @@ public Optional getDescriptor(final TablePath path) { @Override public List buildApplicableMaterializations(RelNode userQueryNode) { - final Set> queryTablesUsed = SubstitutionUtils.findTables(userQueryNode); - final Set> queryVdsUsed = SubstitutionUtils.findExpansionNodes(userQueryNode); + final Set queryTablesUsed = SubstitutionUtils.findTables(userQueryNode); + final Set queryVdsUsed = SubstitutionUtils.findExpansionNodes(userQueryNode); final Set externalQueries = SubstitutionUtils.findExternalQueries(userQueryNode); final Set exclusions = Sets.newHashSet(session.getSubstitutionSettings().getExclusions()); @@ -132,11 +136,15 @@ public List buildApplicableMaterializations(RelNode userQ * @return materializations used by planner */ @VisibleForTesting - protected java.util.Optional getDefaultRawMaterialization(final MaterializationDescriptorProvider provider, NamespaceKey path, List vdsFields) { + protected java.util.Optional getDefaultRawMaterialization( + final MaterializationDescriptorProvider provider, NamespaceKey path, + TableVersionContext versionContext, List vdsFields, Catalog catalog) { + final Set exclusions = Sets.newHashSet(session.getSubstitutionSettings().getExclusions()); final Set inclusions = Sets.newHashSet(session.getSubstitutionSettings().getInclusions()); final boolean hasInclusions = !inclusions.isEmpty(); - final java.util.Optional opt = 
provider.getDefaultRawMaterialization(path, vdsFields); + final java.util.Optional opt = provider.getDefaultRawMaterialization(path, + versionContext, vdsFields, catalog); if (opt.isPresent()) { MaterializationDescriptor descriptor = opt.get(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AbstractSubstitutionProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AbstractSubstitutionProvider.java index 670baef3e9..92a2c6f67b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AbstractSubstitutionProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AbstractSubstitutionProvider.java @@ -21,6 +21,8 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.DremioMaterialization; import com.dremio.exec.planner.sql.handlers.RelTransformer; import com.dremio.service.namespace.NamespaceKey; @@ -52,8 +54,9 @@ public List buildApplicableMaterializations(final RelNode return getMaterializationProvider().buildApplicableMaterializations(origRoot); } - public Optional getDefaultRawMaterialization(NamespaceKey path, List vdsFields) { - return getMaterializationProvider().getDefaultRawMaterialization(path, vdsFields); + public Optional getDefaultRawMaterialization(NamespaceKey path, + TableVersionContext versionContext, List vdsFields, Catalog catalog) { + return getMaterializationProvider().getDefaultRawMaterialization(path, versionContext, vdsFields, catalog); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AccelerationAwareSubstitutionProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AccelerationAwareSubstitutionProvider.java index 6061348699..13f72f35c1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AccelerationAwareSubstitutionProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AccelerationAwareSubstitutionProvider.java @@ -22,6 +22,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.observer.AttemptObserver; import com.dremio.exec.planner.sql.handlers.RelTransformer; import com.dremio.service.namespace.NamespaceKey; @@ -78,8 +80,9 @@ public void setPostSubstitutionTransformers(List transformers) { } @Override - public RelNode wrapExpansionNode(NamespaceKey path, final RelNode query, List vdsFields, RelDataType rowType, boolean contextSensitive) { - return delegate.wrapExpansionNode(path, query, vdsFields, rowType, contextSensitive); + public RelNode wrapExpansionNode(NamespaceKey path, final RelNode query, List vdsFields, RelDataType rowType, + boolean contextSensitive, TableVersionContext versionContext, Catalog catalog) { + return delegate.wrapExpansionNode(path, query, vdsFields, rowType, contextSensitive, versionContext, catalog); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AggJoinFinder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AggJoinFinder.java index b0df999234..c72f9917b1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AggJoinFinder.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/AggJoinFinder.java @@ -57,6 +57,7 @@ public boolean isFoundAggOnJoin() { return foundAggOnJoin; } + @Override public RelNode visit(LogicalAggregate aggregate) { toState(aggregate, State.AGG); return visitChildren(aggregate); @@ -98,6 +99,7 @@ private RelNode visitJoin(RelNode join) { return visitChildren(join); } + @Override public RelNode visit(LogicalJoin join) { return visitJoin(join); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/CorrelateFinder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/CorrelateFinder.java index 1a94922d76..cced88b87d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/CorrelateFinder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/CorrelateFinder.java @@ -36,6 +36,7 @@ public RelNode visit(RelNode other) { return super.visit(other); } + @Override public RelNode visit(LogicalCorrelate correlate) { foundCorrelate = true; return correlate; // No need to recurse any further. We found our first completion state. diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/DistinctFinder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/DistinctFinder.java index 9429ddac45..1526fcff13 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/DistinctFinder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/DistinctFinder.java @@ -33,6 +33,7 @@ public boolean isFoundDistinct() { return foundDistinct; } + @Override public RelNode visit(LogicalAggregate aggregate) { List aggCallList = aggregate.getAggCallList(); @@ -46,6 +47,7 @@ public RelNode visit(LogicalAggregate aggregate) { return visitChildren(aggregate); } + @Override public RelNode visit(RelNode node) { return visitChildren(node); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/MaterializationProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/MaterializationProvider.java index edca8e580e..c8485a6e6c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/MaterializationProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/MaterializationProvider.java @@ -20,6 +20,8 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.DremioMaterialization; import com.dremio.service.namespace.NamespaceKey; @@ -49,6 +51,8 @@ public interface MaterializationProvider { * for the VDS with the given path * @return The default reflection for the VDS */ - Optional getDefaultRawMaterialization(NamespaceKey path, List displayFields); + Optional getDefaultRawMaterialization(NamespaceKey path, + TableVersionContext versionContext, + List displayFields, Catalog catalog); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionProvider.java index 008216e47f..ec39989f15 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionProvider.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionProvider.java @@ -23,6 +23,8 @@ import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.sql.handlers.RelTransformer; import com.dremio.service.namespace.NamespaceKey; @@ -85,10 +87,12 @@ public static SubstitutionStream empty() { * @param vdsFields List of all the fields in the VDS * @param rowType Row data type * @param contextSensitive If the expansion node is context sensitive + * @param catalog caching catalog to use for table lookups * @return Wrapped RelNode */ - default RelNode wrapExpansionNode(NamespaceKey path, final RelNode query, List vdsFields, RelDataType rowType, boolean contextSensitive) { - return ExpansionNode.wrap(path, query, rowType, contextSensitive, false); + default RelNode wrapExpansionNode(NamespaceKey path, final RelNode query, List vdsFields, RelDataType rowType, + boolean contextSensitive, TableVersionContext versionContext, Catalog catalog) { + return ExpansionNode.wrap(path, query, rowType, contextSensitive, false, versionContext); } default boolean isDefaultRawReflectionEnabled() { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionUtils.java index fd6b712923..5606c01cae 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/acceleration/substitution/SubstitutionUtils.java @@ -24,14 +24,18 @@ import java.util.Objects; import java.util.Set; +import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelShuttle; import org.apache.calcite.rel.RelVisitor; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.externalize.RelWriterImpl; import org.apache.calcite.sql.SqlExplainLevel; +import org.apache.calcite.util.Pair; import com.dremio.exec.calcite.logical.ScanCrel; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.RoutingShuttle; import com.dremio.exec.planner.StatelessRelShuttleImpl; import com.dremio.exec.planner.acceleration.ExpansionNode; @@ -58,14 +62,14 @@ public RelNode visit(RelNode other) { private SubstitutionUtils() { } - public static Set> findExpansionNodes(final RelNode node) { - final Set> usedVdsPaths = new LinkedHashSet<>(); + public static Set findExpansionNodes(final RelNode node) { + final Set usedVdsPaths = new LinkedHashSet<>(); final RelVisitor visitor = new RelVisitor() { @Override public void visit(final RelNode node, final int ordinal, final RelNode parent) { if (node instanceof ExpansionNode) { ExpansionNode expansionNode = (ExpansionNode) node; - usedVdsPaths.add(expansionNode.getPath().getPathComponents()); + usedVdsPaths.add(VersionedPath.of(expansionNode.getPath().getPathComponents(), expansionNode.getVersionContext())); } super.visit(node, ordinal, parent); } @@ -78,12 +82,16 @@ public static boolean isSubstitutableScan(RelNode node) { return node instanceof TableScan && !(node instanceof ScanCrel && !((ScanCrel) node).isSubstitutable()); } - public static Set> findTables(final RelNode node) { - final Set> 
usedTables = Sets.newLinkedHashSet(); + public static Set<VersionedPath> findTables(final RelNode node) { + final Set<VersionedPath> usedTables = Sets.newLinkedHashSet(); final RelVisitor visitor = new RelVisitor() { @Override public void visit(final RelNode node, final int ordinal, final RelNode parent) { if (isSubstitutableScan(node)) { - usedTables.add(node.getTable().getQualifiedName()); + TableVersionContext versionContext = null; + if (node instanceof ScanCrel) { + versionContext = ((ScanCrel)node).getTableMetadata().getVersionContext(); + } + usedTables.add(VersionedPath.of(node.getTable().getQualifiedName(), versionContext)); } super.visit(node, ordinal, parent); } @@ -123,12 +131,16 @@ public void close() throws IOException { } * Returns whether {@code table} uses one or more of the tables in * {@code usedTables}. */ - public static boolean usesTableOrVds(final Set<List<String>> tables, final Set<List<String>> vdsPaths, final Set externalQueries, final RelNode rel) { + public static boolean usesTableOrVds(final Set<VersionedPath> tables, final Set<VersionedPath> vdsPaths, final Set externalQueries, final RelNode rel) { final Pointer<Boolean> used = new Pointer<>(false); rel.accept(new RoutingShuttle() { @Override public RelNode visit(TableScan scan) { - if (tables.contains(scan.getTable().getQualifiedName())) { + TableVersionContext versionContext = null; + if (scan instanceof ScanCrel) { + versionContext = ((ScanCrel)scan).getTableMetadata().getVersionContext(); + } + if (tables.contains(VersionedPath.of(scan.getTable().getQualifiedName(), versionContext))) { used.value = true; } return scan; @@ -147,7 +159,7 @@ public RelNode visit(RelNode other) { } if (other instanceof ExpansionNode) { ExpansionNode expansionNode = (ExpansionNode) other; - if (vdsPaths.contains(expansionNode.getPath().getPathComponents())) { + if (vdsPaths.contains(VersionedPath.of(expansionNode.getPath().getPathComponents(), expansionNode.getVersionContext()))) { used.value = true; return other; } @@ -219,4 +231,33 @@ public static boolean arePlansEqualIgnoringReplacementPointer(RelNode query, Rel return queryCode == candidateCode; } + /** + * VersionedPath is a table/view path with an optional TableVersionContext. + * For example, an Arctic table could have a "schema"."table" path with a "BRANCH main" table version context. + * Non-versioned tables such as RDBMS or filesystem parquet will have a null TableVersionContext. + * + * Since VersionedPath extends {@link Pair}, we can conveniently use VersionedPath as a key in various Java collections. + */ + public static final class VersionedPath extends Pair<List<String>, TableVersionContext> { + /** + * Creates a VersionedPath. 
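+ * <p>Illustrative usage (variable names assumed, not from this patch): equality covers both halves of the pair,
+ * so the same path under different version contexts forms distinct keys:
+ * <pre>{@code
+ * Set<VersionedPath> used = new LinkedHashSet<>();
+ * used.add(VersionedPath.of(pathComponents, versionContext));
+ * used.contains(VersionedPath.of(pathComponents)); // false unless versionContext is null
+ * }</pre>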
+ * + * @param path table/view path components (left value) + * @param versionContext version context, or null for a non-versioned table (right value) + */ + private VersionedPath(List<String> path, TableVersionContext versionContext) { + super(path, versionContext); + } + public static VersionedPath of(List<String> path, TableVersionContext versionContext) { + return new VersionedPath(path, versionContext); + } + public static VersionedPath of(List<String> path) { + return new VersionedPath(path, null); + } + } + + public static TableVersionContext getVersionContext(RelOptTable table) { + DremioTable dremioTable = Preconditions.checkNotNull(table.unwrap(DremioTable.class)); + return dremioTable.getDataset().getVersionContext(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/DremioFileAttrs.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/DremioFileAttrs.java new file mode 100644 index 0000000000..bd5a536e60 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/DremioFileAttrs.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.common; + +import org.immutables.value.Value; + +/** + * Utility class to return a subset of the file attributes + */ +@Value.Immutable +public abstract class DremioFileAttrs { + public abstract String fileName(); + public abstract Long fileLength(); +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/FlattenRelBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/FlattenRelBase.java index eaf33cb3dc..35dc3f8706 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/FlattenRelBase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/FlattenRelBase.java @@ -63,6 +63,8 @@ public List getToFlatten() { return toFlatten; } + public abstract FlattenRelBase copy(List<RelNode> inputs, List<RexInputRef> toFlatten); + @Override protected RelDataType deriveRowType() { if (PrelUtil.getPlannerSettings(getCluster()).isFullNestedSchemaSupport()) { @@ -92,7 +94,8 @@ protected RelDataType deriveRowType() { return super.deriveRowType(); } - @Override public double estimateRowCount(RelMetadataQuery mq) { + @Override + public double estimateRowCount(RelMetadataQuery mq) { // We expect the flatten output to be expanding. Use a constant to expand the data. 
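// Illustrative arithmetic (assumed values): with an input row count of 1000, two flattened
// columns, and a flatten expansion amount of 10, this returns 1000 * 2 * 10 = 20000.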
return mq.getRowCount(input) * toFlatten.size() * PrelUtil.getPlannerSettings(getCluster().getPlanner()).getFlattenExpansionAmount(); } @@ -124,6 +127,7 @@ public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) { } } + @Override public RelWriter explainTerms(RelWriter pw) { return super.explainTerms(pw).item("flattenField", this.toFlatten); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/MoreRelOptUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/MoreRelOptUtil.java index 7aebdf67f8..4c29ee124f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/MoreRelOptUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/MoreRelOptUtil.java @@ -102,7 +102,7 @@ import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Triple; @@ -134,6 +134,22 @@ public final class MoreRelOptUtil { private MoreRelOptUtil() {} + /** + * Finds all columns used by {@link RexInputRef} in a {@link RexNode}. + * @param rexNode {@link RexNode} to find inputs for + * @return set of columns used by the rexNode + */ + public static ImmutableBitSet findColumnsUsed(RexNode rexNode) { + ImmutableBitSet.Builder inputs = ImmutableBitSet.builder(); + rexNode.accept(new RexVisitorImpl(true){ + @Override public Void visitInputRef(RexInputRef inputRef) { + inputs.set(inputRef.getIndex()); + return super.visitInputRef(inputRef); + } + }); + return inputs.build(); + } + /** * Computes the height of the rel tree under the input rel node. * @param rel RelNode to compute the minimum height of the tree underneath it @@ -1271,6 +1287,7 @@ private ConditionFlattenter(RexBuilder builder) { this.builder = builder; } + @Override public RexNode visitCall(RexCall rexCall) { if (rexCall.isA(SqlKind.COMPARISON)) { if(rexCall.getOperands().get(0).getType().isStruct()) { @@ -1490,24 +1507,28 @@ public static boolean isDatetimeIntervalArithmetic(RexCall call) { public static long longHashCode(RelNode relNode) { Hasher hasher = Hashing.sha256().newHasher(); relNode.explain(new RelWriter() { - @Override public void explain(RelNode rel, List> valueList) { + @Override + public void explain(RelNode rel, List> valueList) { for(Pair pair: valueList) { item(pair.left, pair.right); } done(relNode); } - @Override public SqlExplainLevel getDetailLevel() { + @Override + public SqlExplainLevel getDetailLevel() { return SqlExplainLevel.DIGEST_ATTRIBUTES; } - @Override public RelWriter input(String term, RelNode input) { + @Override + public RelWriter input(String term, RelNode input) { hasher.putString(term, StandardCharsets.UTF_8); input.explain(this); return this; } - @Override public RelWriter item(String term, Object value) { + @Override + public RelWriter item(String term, Object value) { if(value instanceof RelNode) { input(term, (RelNode) value); } else { @@ -1517,7 +1538,8 @@ public static long longHashCode(RelNode relNode) { return this; } - @Override public RelWriter itemIf(String term, Object value, boolean condition) { + @Override + public RelWriter itemIf(String term, Object value, boolean condition) { if(condition) { return item(term, value); } else { @@ -1525,12 +1547,14 @@ public static long longHashCode(RelNode relNode) { } } - @Override public RelWriter done(RelNode node) { + @Override + 
public RelWriter done(RelNode node) { hasher.putString(node.getClass().toString(), StandardCharsets.UTF_8); return this; } - @Override public boolean nest() { + @Override + public boolean nest() { return true; } }); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/PartitionStatsHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/PartitionStatsHelper.java new file mode 100644 index 0000000000..b15a8eaf47 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/PartitionStatsHelper.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.common; + +import com.dremio.datastore.LegacyProtobufSerializer; +import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf; +import com.dremio.service.namespace.DatasetHelper; +import com.dremio.service.namespace.dataset.proto.IcebergMetadata; +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Utility class to provide helper methods to read the partition stats file + */ +public class PartitionStatsHelper { + public static ImmutableDremioFileAttrs getPartitionStatsFileAttrs(ScanRelBase drel) { + String fileName; + Long fileLength; + + if(DatasetHelper.isInternalIcebergTable(drel.getTableMetadata().getDatasetConfig())) { + IcebergMetadata icebergMetadata = drel.getTableMetadata().getDatasetConfig().getPhysicalDataset().getIcebergMetadata(); + fileName = icebergMetadata.getPartitionStatsFile(); + fileLength = icebergMetadata.getPartitionStatsFileSize(); + } else { + byte[] byteBuffer = drel.getTableMetadata().getReadDefinition().getExtendedProperty().toByteArray(); + + IcebergProtobuf.IcebergDatasetXAttr icebergDatasetXAttr; + try { + icebergDatasetXAttr = LegacyProtobufSerializer.parseFrom(IcebergProtobuf.IcebergDatasetXAttr.PARSER, byteBuffer); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + fileName = icebergDatasetXAttr.getPartitionStatsFile(); + fileLength = (icebergDatasetXAttr.hasPartitionStatsFileSize()) ? icebergDatasetXAttr.getPartitionStatsFileSize() : null; + } + + return new ImmutableDremioFileAttrs.Builder() + .setFileName(fileName) + .setFileLength(fileLength) + .build(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/RelNodeCounter.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/RelNodeCounter.java new file mode 100644 index 0000000000..70ad7d9716 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/RelNodeCounter.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.common; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.logical.LogicalJoin; + +import com.dremio.exec.planner.StatelessRelShuttleImpl; +import com.dremio.exec.planner.physical.JoinPrel; + +/** + Generic counter for counting the number of nodes of certain type + in the logical plan. Primarily written for reporting / logging. +*/ +public class RelNodeCounter extends StatelessRelShuttleImpl { + private int nodesCount = 0; + + protected void increment(int value) { + nodesCount += value; + } + + public int getCount() { + return nodesCount; + } + + public static class LogicalJoinCounter extends RelNodeCounter { + @Override + public RelNode visit(LogicalJoin join) { + super.increment(1); + return super.visit(join); + } + } + + public static class JoinPrelCounter extends RelNodeCounter { + @Override + public RelNode visit(RelNode other) { + if (other instanceof JoinPrel) { + super.increment(1); + } + return super.visit(other); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/ScanRelBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/ScanRelBase.java index dda636d676..fc00133712 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/ScanRelBase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/ScanRelBase.java @@ -141,6 +141,11 @@ public static RelWriter explainScanRel(RelWriter pw, .map(IcebergMetadata::getSnapshotId) .ifPresent(snapshotId -> pw.item("snapshot", snapshotId)); + Optional.ofNullable(tableMetadata) + .map(TableMetadata::getVersionContext) + .filter(x -> x != null) + .ifPresent(versionContext -> pw.item("version", versionContext)); + if(projectedColumns != null){ pw.item("columns", FluentIterable.from(projectedColumns).transform(new Function(){ diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/TableOptimizeRelBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/TableOptimizeRelBase.java index b93e647f73..029c5ef23a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/TableOptimizeRelBase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/TableOptimizeRelBase.java @@ -67,7 +67,7 @@ public RelWriter explainTerms(RelWriter pw) { @Override protected RelDataType deriveRowType() { - return getRelDataType(getCluster().getTypeFactory()); + return getRelDataType(getCluster().getTypeFactory(), optimizeOptions.isOptimizeManifestsOnly()); } public CreateTableEntry getCreateTableEntry() { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/common/VacuumTableRelBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/VacuumTableRelBase.java new file mode 100644 index 0000000000..0e2db4aefe --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/common/VacuumTableRelBase.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.common; + +import static com.dremio.exec.planner.VacuumOutputSchema.getRelDataType; +import static com.dremio.exec.planner.sql.handlers.SqlHandlerUtil.getTimestampFromMillis; + +import org.apache.calcite.plan.Convention; +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.RelWriter; +import org.apache.calcite.rel.SingleRel; +import org.apache.calcite.rel.type.RelDataType; + +import com.dremio.exec.catalog.VacuumOptions; +import com.dremio.exec.planner.logical.CreateTableEntry; +import com.google.common.base.Preconditions; + +/** + * Base class for 'VACUUM' query. + */ +public class VacuumTableRelBase extends SingleRel { + private final RelOptTable table; + private final CreateTableEntry createTableEntry; + private final VacuumOptions vacuumOptions; + + protected VacuumTableRelBase(Convention convention, + RelOptCluster cluster, + RelTraitSet traitSet, + RelNode input, + RelOptTable table, + CreateTableEntry createTableEntry, + VacuumOptions vacuumOptions) { + super(cluster, traitSet, input); + assert getConvention() == convention; + this.table = table; + this.createTableEntry = createTableEntry; + this.vacuumOptions = Preconditions.checkNotNull(vacuumOptions, "Vacuum option can't be null!"); + } + + @Override + public RelWriter explainTerms(RelWriter pw) { + super.explainTerms(pw); + if (table != null) { + pw.item("table", table.getQualifiedName()); + } + pw.item("older_than", getTimestampFromMillis(vacuumOptions.getOlderThanInMillis())); + pw.item("retain_last", vacuumOptions.getRetainLast()); + return pw; + } + + @Override + protected RelDataType deriveRowType() { + return getRelDataType(getCluster().getTypeFactory()); + } + + @Override + public RelOptTable getTable() { + return table; + } + + public CreateTableEntry getCreateTableEntry() { + return createTableEntry; + } + + public VacuumOptions getVacuumOptions() { + return vacuumOptions; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/DremioRelMetadataCache.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/DremioRelMetadataCache.java index de8daca169..79ca9f4ba2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/DremioRelMetadataCache.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/DremioRelMetadataCache.java @@ -97,16 +97,17 @@ public Object put(RelNode relNode, Object args, Object value) { if (value != NullSentinel.ACTIVE || relNode instanceof RelSubset || relNode instanceof HepRelVertex) { Map row = map.get(relNode); if (row == null) { - //Only check when a we see a new RelNode to make sure the overhead is minimized. 
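[Illustrative rendering of the VacuumTableRelBase.explainTerms implementation above, with an assumed node name and assumed option values; a RelWriter prints each item as name=[value]:

    VacuumTableRel(table=[[s1, t1]], older_than=[2023-01-01 00:00:00.000], retain_last=[5])

The older_than item is passed through getTimestampFromMillis, so the retention cutoff renders as a timestamp rather than raw milliseconds.]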
- final PlannerSettings settings; - if (planner instanceof AbstractRelOptPlanner - && null != (settings = PrelUtil.getPlannerSettings(planner))) { - long maxCallCount = settings.maxMetadataCallCount(); - if (pcc > maxCallCount) { - throw UserException.planError() + //Only check when we see a new RelNode to make sure the overhead is minimized. + if (planner instanceof AbstractRelOptPlanner) { + PlannerSettings settings = PrelUtil.getPlannerSettings(planner); + if (settings != null) { + long maxCallCount = settings.maxMetadataCallCount(); + if (pcc > maxCallCount) { + throw UserException.planError() .message(MAX_METADATA_CALL_ERROR_MESSAGE).buildSilently(); + } + ((AbstractRelOptPlanner) planner).checkCancel(); } - ((AbstractRelOptPlanner) planner).checkCancel(); } row = new HashMap<>(); map.put(relNode, row); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdDistinctRowCount.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdDistinctRowCount.java index 3e0529df04..d4d83bb11a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdDistinctRowCount.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdDistinctRowCount.java @@ -452,6 +452,7 @@ public Double getDistinctRowCount(RelSubset rel, RelMetadataQuery mq, return super.getDistinctRowCount(rel, mq, groupKey, predicate); } + @Override public Double getDistinctRowCount(Project rel, RelMetadataQuery mq, ImmutableBitSet groupKey, RexNode predicate) { if (predicate == null || predicate.isAlwaysTrue()) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdPopulationSize.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdPopulationSize.java index 1efc378285..0d9403c5e5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdPopulationSize.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdPopulationSize.java @@ -66,6 +66,7 @@ public RelMdPopulationSize(StatisticsService statisticsService) { this.isNoOp = statisticsService == StatisticsService.NO_OP; } + @Override public MetadataDef getDef() { return PopulationSize.DEF; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdSelectivity.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdSelectivity.java index 866d8103e1..82a9ef51f8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdSelectivity.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/RelMdSelectivity.java @@ -103,6 +103,7 @@ public Double getSelectivity(TableFunctionPrel rel, RelMetadataQuery mq, RexNode return super.getSelectivity(rel, mq, predicate); } + @Override public Double getSelectivity(Join rel, RelMetadataQuery mq, RexNode predicate) { if (DremioRelMdUtil.isStatisticsEnabled(rel.getCluster().getPlanner(), isNoOp)) { double sel = 1.0; @@ -336,14 +337,8 @@ private double computeNotNullSelectivity(TableMetadata tableMetadata, RexNode or if (pred.isA(RANGE_PREDICATE) && !isMultiColumnPredicate(pred)) { String col = getColumn(pred, fieldNames); if (col != null) { - List predList = null; - if ((predList = colToRangePredicateMap.get(col)) != null) { - predList.add(pred); - } else { - predList = new ArrayList<>(); - predList.add(pred); - colToRangePredicateMap.put(col, predList); - } + List predList = colToRangePredicateMap.computeIfAbsent(col, s -> new ArrayList<>()); + predList.add(pred); } else { nonRangePredList.add(pred); } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/janio/DremioRelMetadataHandlerCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/janio/DremioRelMetadataHandlerCreator.java index 9567ea5051..523fe43ac5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/janio/DremioRelMetadataHandlerCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/cost/janio/DremioRelMetadataHandlerCreator.java @@ -154,7 +154,7 @@ private static String simpleNameForHandler(Class> c String simpleName = clazz.getSimpleName(); //Previously the pattern was to have a nested inner class named Handler //So we need to add the parent class to get a unique name - if (simpleName.equals("Handler")) { + if ("Handler".equals(simpleName)) { String[] parts = clazz.getName().split("\\.|\\$"); return parts[parts.length - 2] + parts[parts.length - 1]; } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/AssignFragmentPriorityVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/AssignFragmentPriorityVisitor.java index 4117a2c7ff..0ec8b1dd9d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/AssignFragmentPriorityVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/AssignFragmentPriorityVisitor.java @@ -123,9 +123,11 @@ public int getFragmentWeight(int majorFragmentId) { final int maxAssignPriority = Math.max(maxPrioritySeenAtExchange, 1); Integer prio = majorFragmentToPriorityMap.get(majorFragmentId); if (prio == null) { - // this should not happen, but let us not make it fatal if it does - logger.warn("Assigned Priority not found for major fragment {}. Defaulting to {}", majorFragmentId, - maxAssignPriority); + if (maxPrioritySeenAtExchange > 0 || majorFragmentId > 0) { + // this should not happen, except for single phase profiles, but let us not make it fatal if it does + logger.warn("Assigned Priority not found for major fragment {}. 
Defaulting to {}", majorFragmentId, + maxAssignPriority); + } return maxAssignPriority; } else { return maxAssignPriority - prio + 1; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/ExecutionPlanningResources.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/ExecutionPlanningResources.java index 1a6e201e21..61ffd5332c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/ExecutionPlanningResources.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/fragment/ExecutionPlanningResources.java @@ -44,6 +44,7 @@ public com.dremio.resource.GroupResourceInformation getGroupResourceInformation( return groupResourceInformation; } + @Override public void close() throws Exception { executorSelectionHandle.close(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/CorrelateRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/CorrelateRel.java index 51b9a9d2d1..622c2da394 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/CorrelateRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/CorrelateRel.java @@ -46,6 +46,7 @@ public Correlate copy(RelTraitSet traitSet, correlationId, requiredColumns, joinType); } + @Override public RelNode accept(RelShuttle shuttle) { return shuttle.visit(this); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioAggregateProjectPullUpConstantsRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioAggregateProjectPullUpConstantsRule.java index 3651d8618b..e2dd519a38 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioAggregateProjectPullUpConstantsRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioAggregateProjectPullUpConstantsRule.java @@ -103,6 +103,7 @@ public DremioAggregateProjectPullUpConstantsRule( //~ Methods ---------------------------------------------------------------- + @Override public void onMatch(RelOptRuleCall call) { final Aggregate aggregate = call.rel(0); final RelNode input = call.rel(1); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioJoinPushTransitivePredicatesRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioJoinPushTransitivePredicatesRule.java index cce0b40860..28c46a87fa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioJoinPushTransitivePredicatesRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioJoinPushTransitivePredicatesRule.java @@ -28,19 +28,14 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexChecker; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; -import org.apache.calcite.rex.RexVisitorImpl; -import org.apache.calcite.sql.SqlKind; import org.apache.calcite.util.Litmus; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.exec.planner.common.MoreRelOptUtil; import com.dremio.exec.planner.logical.partition.FindSimpleFilters; -import com.dremio.exec.planner.physical.PrelUtil; -import com.dremio.service.Pointer; /** * Dremio version of JoinPushTransitivePredicatesRule extended from Calcite @@ -125,12 +120,6 @@ public static List getCanonicalizedPredicates(RelNode join, RexBuilder // To simplify 
predicates of the type "Input IS NOT DISTINCT FROM Constant" so that we don't end up // with filter conditions like: condition=[AND(=($0, 3), IS NOT DISTINCT FROM($0, CAST(3):INTEGER))] for (RexNode predicate : inferredPredicates) { - if (!isTransitiveFilterNotNullExprPushdownEnabled(join, predicate)) { - // If the filter condition contains "IS NOT NULL($*)", it generates - // illegal SQL during JDBC RelToSql, and fails to run the query. - // Disable pushing IS NOT NULL until DX-26452 is fixed. - continue; - } predicate = predicate.accept(new MoreRelOptUtil.RexLiteralCanonicalizer(builder)); final FindSimpleFilters.StateHolder holder = predicate.accept(new FindSimpleFilters(builder, false)); if (holder.hasConditions()) { @@ -142,25 +131,6 @@ public static List getCanonicalizedPredicates(RelNode join, RexBuilder return predicates; } - public static boolean isTransitiveFilterNotNullExprPushdownEnabled(RelNode join, RexNode predicate) { - // TODO: Remove this when DX-26452 is fixed - Pointer found = new Pointer<>(false); - predicate.accept(new RexVisitorImpl(true) { - @Override - public Void visitCall(RexCall call) { - if (call.getKind() == SqlKind.IS_NOT_NULL) { - found.value = true; - return null; - } - return super.visitCall(call); - } - }); - if (found.value) { - return PrelUtil.getPlannerSettings(join.getCluster()).isTransitiveFilterNotNullExprPushdownEnabled(); - } - return true; - } - private static boolean isValidFilter(List predicates) { // A valid filter should not contain a flatten in it if (CollectionUtils.isEmpty(predicates)) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioProjectJoinTransposeRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioProjectJoinTransposeRule.java index dccce8f7bb..949d7c3010 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioProjectJoinTransposeRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioProjectJoinTransposeRule.java @@ -49,6 +49,7 @@ private DremioProjectJoinTransposeRule(Class projectClass, Cl this.preserveExprCondition = preserveExprCondition; } + @Override public void onMatch(final RelOptRuleCall call) { Project origProj = (Project)call.rel(0); Join join = (Join)call.rel(1); @@ -56,6 +57,7 @@ public void onMatch(final RelOptRuleCall call) { if (!join.isSemiJoin()) { RexNode joinFilter = (RexNode)join.getCondition().accept(new RexShuttle() { + @Override public RexNode visitCall(RexCall rexCall) { RexNode node = super.visitCall(rexCall); return (RexNode)(!(node instanceof RexCall) ? 
node : RelOptUtil.collapseExpandedIsNotDistinctFromExpr((RexCall)node, call.builder().getRexBuilder())); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioRelFactories.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioRelFactories.java index 0062fe455e..fc256eb3d5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioRelFactories.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioRelFactories.java @@ -194,6 +194,7 @@ public RelNode createCorrelate(RelNode left, RelNode right, CorrelationId correl */ private static class SetOpFactoryImpl implements RelFactories.SetOpFactory { + @Override public RelNode createSetOp(SqlKind kind, List inputs, boolean all) { final RelOptCluster cluster = inputs.get(0).getCluster(); final RelTraitSet traitSet = cluster.traitSetOf(Rel.LOGICAL); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioSortMergeRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioSortMergeRule.java index c24c1aaf4e..1ae2caf2d1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioSortMergeRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/DremioSortMergeRule.java @@ -61,6 +61,7 @@ public boolean matches(RelOptRuleCall call) { return limitRelNode(bottomSort); } + @Override public void onMatch(RelOptRuleCall call) { final Sort topSort = call.rel(0); final Sort bottomSort = call.rel(1); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/EnhancedFilterJoinSimplifier.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/EnhancedFilterJoinSimplifier.java index 204c047acf..5da24e3b4f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/EnhancedFilterJoinSimplifier.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/EnhancedFilterJoinSimplifier.java @@ -105,12 +105,11 @@ public static RexNode simplifyDisjunction(RexBuilder rexBuilder, List o // If no common extraction, return the original filter if (commonExtractedFilter == null || commonExtractedFilter.isAlwaysTrue()) { return RexUtil.composeDisjunction(rexBuilder, originalFilters, false); - } + } else { + // Else, return disjunction of simplified non-entirely pushed child nodes (need to supply + // non-common part) and entirely pushed child nodes. We need to preserve the order of child nodes + // in the disjunction because we have checks whether a child gets entirely pushed elsewhere - // Else, return disjunction of simplified non-entirely pushed child nodes (need to supply - // non-common part) and entirely pushed child nodes. 
We need to preserve the order of child nodes - // in the disjunction because we have checks whether a child gets entirely pushed elsewhere - else { // Record the order of extractions Map extractionIndex = Maps.newHashMap(); for (int i = 0; i < extractions.size(); ++i) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FilterMergeCrule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FilterMergeCrule.java index b3da6dd0a0..2c1b647102 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FilterMergeCrule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FilterMergeCrule.java @@ -36,6 +36,7 @@ public FilterMergeCrule(Class clazz, RelBuilderFactory relBuil super(operand(clazz, operand(clazz, any())), relBuilderFactory, null); } + @Override public void onMatch(RelOptRuleCall call) { final Filter topFilter = call.rel(0); final Filter bottomFilter = call.rel(1); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FlattenRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FlattenRel.java index 2f958a316b..35a0ca18d3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FlattenRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/FlattenRel.java @@ -38,4 +38,9 @@ public FlattenRel(final RelOptCluster cluster, RelTraitSet traits, RelNode child public RelNode copy(RelTraitSet traitSet, List inputs) { return new FlattenRel(getCluster(), traitSet, sole(inputs), toFlatten, numProjectsPushed); } + + @Override + public FlattenRelBase copy(List inputs, List toFlatten) { + return new FlattenRel(getCluster(), getTraitSet(), sole(inputs), toFlatten, numProjectsPushed); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/InvalidViewRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/InvalidViewRel.java index ebeb0b7f68..b7d3bc3cb7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/InvalidViewRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/InvalidViewRel.java @@ -137,7 +137,7 @@ public static void checkForInvalid(Catalog viewCatalog, SqlConverter sqlConverte } currVersionContext = VersionContext.ofBranch(resolvedVersionContext.getRefName()); viewOptions = new ViewOptions.ViewOptionsBuilder() - .viewUpdate(true) + .actionType(ViewOptions.ActionType.UPDATE_VIEW) .version(resolvedVersionContext) .batchSchema(CalciteArrowHelper.fromCalciteRowType(rowType)) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PreProcessRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PreProcessRel.java index 09fb564909..c92b74224d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PreProcessRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PreProcessRel.java @@ -24,32 +24,22 @@ import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.logical.LogicalProject; import org.apache.calcite.rel.logical.LogicalUnion; -import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; -import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexShuttle; import org.apache.calcite.rex.RexUtil; -import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; 
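[A sketch of the convert-function rewrite that this refactor relocates into RewriteConvertFunctionVisitor.RenameConvertToConvertFromVisitor (SQL values illustrative; function names taken from the code below):

    convert_from(col, 'JSON')          -- rewritten to a convert_fromJSON(col) call
    convert_from(col, 'utf8', '?')     -- rewritten to convert_replaceUTF8(col, '?')

Because the visitor is now also applied in visit(LogicalFilter) and visit(LogicalJoin), convert calls inside filter and join conditions receive the same rewrite that projected expressions already did.]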
-import org.apache.calcite.sql.SqlOperatorBinding; -import org.apache.calcite.sql.type.SqlReturnTypeInference; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.calcite.util.NlsString; import com.dremio.common.exceptions.UserException; import com.dremio.exec.exception.UnsupportedOperatorCollector; import com.dremio.exec.planner.StarColumnHelper; import com.dremio.exec.planner.StatelessRelShuttleImpl; -import com.dremio.exec.planner.sql.Checker; import com.dremio.exec.planner.sql.OperatorTable; -import com.dremio.exec.planner.sql.SqlFunctionImpl; -import com.dremio.exec.util.ApproximateStringMatcher; import com.dremio.exec.work.foreman.SqlUnsupportedException; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; /** @@ -58,9 +48,9 @@ * Eg: convert_from(EXPR, 'JSON') is rewritten as convert_fromjson(EXPR) * * With the actual method name we can find out if the function has a complex - * output type and we will fire/ ignore certain rules (merge project rule) based on this fact. + * output type, and we will fire/ ignore certain rules (merge project rule) based on this fact. */ -public class PreProcessRel extends StatelessRelShuttleImpl { +public final class PreProcessRel extends StatelessRelShuttleImpl { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PreProcessRel.class); @@ -68,6 +58,7 @@ public class PreProcessRel extends StatelessRelShuttleImpl { private final UnsupportedOperatorCollector unsupportedOperatorCollector; private final UnwrappingExpressionVisitor unwrappingExpressionVisitor; private final ConvertItemToInnerMapFunctionVisitor.RewriteItemOperatorVisitor rewriteItemOperatorVisitor; + private final RewriteConvertFunctionVisitor.RenameConvertToConvertFromVisitor renameConvertToConvertFromVisitor; public static PreProcessRel createVisitor(OperatorTable table, RexBuilder rexBuilder) { @@ -80,11 +71,11 @@ private PreProcessRel(OperatorTable table, RexBuilder rexBuilder) { this.unsupportedOperatorCollector = new UnsupportedOperatorCollector(); this.unwrappingExpressionVisitor = new UnwrappingExpressionVisitor(rexBuilder); this.rewriteItemOperatorVisitor = new ConvertItemToInnerMapFunctionVisitor.RewriteItemOperatorVisitor(rexBuilder); + this.renameConvertToConvertFromVisitor = new RewriteConvertFunctionVisitor.RenameConvertToConvertFromVisitor(rexBuilder, table); } @Override public RelNode visit(LogicalProject project) { - final RenameConvertToConvertFromVisitor renameVisitor = new RenameConvertToConvertFromVisitor(project.getCluster().getRexBuilder(), table); final List projExpr = Lists.newArrayList(); for(RexNode rexNode : project.getProjects()) { projExpr.add(rexNode.accept(unwrappingExpressionVisitor).accept(rewriteItemOperatorVisitor)); @@ -99,7 +90,7 @@ public RelNode visit(LogicalProject project) { boolean rewrite = false; for (RexNode rex : project.getProjects()) { - RexNode newExpr = rex.accept(renameVisitor); + RexNode newExpr = rex.accept(renameConvertToConvertFromVisitor); if (newExpr != rex) { if (newExpr instanceof RexCall) { RexCall newExprCall = ((RexCall) newExpr); @@ -122,7 +113,7 @@ public RelNode visit(LogicalProject project) { @Override public RelNode visit(LogicalFilter filter) { - final RexNode condition = filter.getCondition().accept(unwrappingExpressionVisitor).accept(rewriteItemOperatorVisitor); + final RexNode condition = filter.getCondition().accept(unwrappingExpressionVisitor).accept(rewriteItemOperatorVisitor).accept(renameConvertToConvertFromVisitor); filter = 
filter.copy( filter.getTraitSet(), filter.getInput(), @@ -150,7 +141,7 @@ public RelNode visit(LogicalAggregate aggregate) { @Override public RelNode visit(LogicalJoin join) { - final RexNode conditionExpr = join.getCondition().accept(unwrappingExpressionVisitor).accept(rewriteItemOperatorVisitor); + final RexNode conditionExpr = join.getCondition().accept(unwrappingExpressionVisitor).accept(rewriteItemOperatorVisitor).accept(renameConvertToConvertFromVisitor); join = join.copy(join.getTraitSet(), conditionExpr, join.getLeft(), @@ -181,147 +172,7 @@ public void convertException() throws SqlUnsupportedException { unsupportedOperatorCollector.convertException(); } - private static class RenameConvertToConvertFromVisitor extends RexShuttle { - - private final RexBuilder builder; - private final OperatorTable table; - - public RenameConvertToConvertFromVisitor(RexBuilder builder, OperatorTable table) { - this.builder = builder; - this.table = table; - } - - @Override - public RexNode visitCall(final RexCall call) { - final String functionName = call.getOperator().getName(); - - // check if its a convert_from or convert_to function - if (!functionName.equalsIgnoreCase("convert_from") && !functionName.equalsIgnoreCase("convert_to")) { - return super.visitCall(call); - } - - boolean[] update = {false}; - final List clonedOperands = visitList(call.getOperands(), update); - - final int nArgs = clonedOperands.size(); - - if (nArgs < 2) { - // Second operand is missing - throw UserException.parseError() - .message("'%s' expects a string literal as a second argument.", functionName) - .build(logger); - } else if (nArgs > 3 || (nArgs > 2 && functionName.equalsIgnoreCase("convert_to"))) { - // Too many operands (>2 for 'convert_to', or >3 for 'convert_from') - throw UserException.parseError() - .message("Too many operands (%d) for '%s'", nArgs, functionName) - .build(logger); - } - - if (!(clonedOperands.get(1) instanceof RexLiteral)) { - // caused by user entering a non-literal - throw getConvertFunctionInvalidTypeException(call); - } - - if (nArgs == 3 && !(clonedOperands.get(2) instanceof RexLiteral)) { - // caused by user entering a non-literal - throw getConvertFunctionInvalidTypeException(call); - } - - String literal; - try { - literal = ((NlsString) (((RexLiteral) clonedOperands.get(1)).getValue())).getValue(); - } catch (final ClassCastException e) { - // Caused by user entering a value with a non-string literal - throw getConvertFunctionInvalidTypeException(call); - } - - // construct the new function name based on the input argument - String newFunctionName = functionName + literal; - if (nArgs == 3) { - if (!literal.equalsIgnoreCase("utf8")) { - throw UserException.parseError() - .message("3-argument convert_from only supported for utf8 encoding. Instead, got %s", literal) - .build(logger); - } - newFunctionName = "convert_replaceUTF8"; - } - - // Look up the new function name in the operator table - List operatorList = table.getSqlOperator(newFunctionName); - if (operatorList.size() == 0) { - // User typed in an invalid type name - throw getConvertFunctionException(functionName, literal); - } - SqlFunction newFunction = null; - - // Find the SqlFunction with the correct args - for (SqlOperator op : operatorList) { - if (op.getOperandTypeChecker().getOperandCountRange().isValidCount(nArgs - 1)) { - newFunction = (SqlFunction) op; - break; - } - } - if (newFunction == null) { - // we are here because we found some dummy convert function. 
(See DummyConvertFrom and DummyConvertTo) - throw getConvertFunctionException(functionName, literal); - } - - SqlFunction sqlOperator = SqlFunctionImpl.create( - newFunctionName, - new SqlReturnTypeInference() { - @Override - public RelDataType inferReturnType(SqlOperatorBinding opBinding) { - return call.getType(); - } - }, - Checker.between(1, nArgs - 1)); - - // create the new expression to be used in the rewritten project - if (nArgs == 2) { - return builder.makeCall(sqlOperator, clonedOperands.subList(0, 1)); - } else { - // 3 arguments. The two arguments passed to the function are the first and last argument (the middle is the type) - return builder.makeCall(sqlOperator, ImmutableList.of(clonedOperands.get(0), clonedOperands.get(2))); - } - } - - private UserException getConvertFunctionInvalidTypeException(final RexCall function) { - // Caused by user entering a value with a numeric type - final String functionName = function.getOperator().getName(); - final String typeName = function.getOperands().get(1).getType().getFullTypeString(); - return UserException.parseError() - .message("Invalid type %s passed as second argument to function '%s'. " + - "The function expects a literal argument.", - typeName, - functionName) - .build(logger); - } - - private UserException getConvertFunctionException(final String functionName, final String typeName) { - final String newFunctionName = functionName + typeName; - final boolean emptyTypeName = typeName.isEmpty(); - final String typeNameToPrint = emptyTypeName ? "" : typeName; - final UserException.Builder exceptionBuilder = UserException.unsupportedError() - .message("%s does not support conversion %s type '%s'.", functionName, functionName.substring(8).toLowerCase(), typeNameToPrint); - // Build a nice error message - if (!emptyTypeName) { - List ops = new ArrayList<>(); - for (SqlOperator op : table.getOperatorList()) { - ops.add(op.getName()); - } - final String bestMatch = ApproximateStringMatcher.getBestMatch(ops, newFunctionName); - if (bestMatch != null && bestMatch.length() > 0 && bestMatch.toLowerCase().startsWith("convert")) { - final StringBuilder s = new StringBuilder("Did you mean ") - .append(bestMatch.substring(functionName.length())) - .append("?"); - exceptionBuilder.addContext(s.toString()); - } - } - return exceptionBuilder.build(logger); - } - } - - private static class UnwrappingExpressionVisitor extends RexShuttle { + private static final class UnwrappingExpressionVisitor extends RexShuttle { private final RexBuilder rexBuilder; private UnwrappingExpressionVisitor(RexBuilder rexBuilder) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PushFilterPastFlattenrule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PushFilterPastFlattenrule.java new file mode 100644 index 0000000000..b2e3af194a --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/PushFilterPastFlattenrule.java @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.logical; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.calcite.plan.RelOptPredicateList; +import org.apache.calcite.plan.RelOptRule; +import org.apache.calcite.plan.RelOptRuleCall; +import org.apache.calcite.plan.RelOptUtil; +import org.apache.calcite.plan.RelOptUtil.InputFinder; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.core.Filter; +import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexUtil; +import org.apache.calcite.util.ImmutableBitSet; + +import com.dremio.exec.planner.common.FlattenRelBase; +import com.dremio.extra.exec.store.dfs.parquet.pushdownfilter.FilterExtractor; + +public class PushFilterPastFlattenrule extends RelOptRule { + public static final RelOptRule INSTANCE = new PushFilterPastFlattenrule(); + + private PushFilterPastFlattenrule() { + super(RelOptHelper.any(Filter.class, FlattenRelBase.class), "PushFilterPastFlattenRule"); + } + + @Override + public void onMatch(RelOptRuleCall call) { + final Filter filter = call.rel(0); + final FlattenRelBase flatten = call.rel(1); + final RexBuilder rexBuilder = filter.getCluster().getRexBuilder(); + + final ImmutableBitSet flattenIndices = ImmutableBitSet.of(flatten.getFlattenedIndices()); + final RexNode filterCondition = filter.getCondition(); + + // Extract the parts of filter which can be pushed down the flatten node. + RexNode belowFilter = FilterExtractor.extractFilter(rexBuilder, filterCondition, + rexNode -> { + InputFinder inputFinder = InputFinder.analyze(rexNode); + return !flattenIndices.intersects(inputFinder.build()); + }); + + // We have nothing to push down. + if(belowFilter.isAlwaysTrue()){ + return; + } + + List belowFilters = RelOptUtil.conjunctions(belowFilter); + + // Remove the conditions that we already pushed down from the original filter to simplify it. + final RexNode aboveFilter = simplifyFilterCondition(rexBuilder, filterCondition, + belowFilters.stream().collect(Collectors.toSet())); + + final RelMetadataQuery mq = flatten.getCluster().getMetadataQuery(); + // Retrieve predicates which are already present below the flatten + final RelOptPredicateList preds = mq.getPulledUpPredicates(flatten.getInput()); + + // filter out the predicates which are already present below the flatten, so that we don't push it down again. + belowFilter = RexUtil.composeConjunction(rexBuilder, belowFilters.stream() + .filter(rexNode -> !preds.pulledUpPredicates.contains(rexNode)) + .collect(Collectors.toList()), false); + + //create a filter from the pushed down predicates. + RelNode filterBelowFlatten = belowFilter.isAlwaysTrue()? flatten.getInput(): + filter.copy(filter.getTraitSet(), flatten.getInput(), belowFilter); + //change the input of flatten to the above filter. + RelNode flattenWithNewInput = flatten.copy(flatten.getTraitSet(), Arrays.asList(filterBelowFlatten)); + //change the original filter with this new simplified filter after push down. + RelNode filterAboveFlatten = aboveFilter.isAlwaysTrue() ? 
flattenWithNewInput : filter.copy(filter.getTraitSet(), + flattenWithNewInput, aboveFilter); + call.transformTo(filterAboveFlatten); + } + + /** + * Simplifies the filter condition to exclude the pushed down predicates. + * @param rexBuilder + * @param filterCondition condition to simplify. + * @param belowPredicates predicates that need to be excluded. + * @return simplified filter condition. + */ + private RexNode simplifyFilterCondition(RexBuilder rexBuilder, RexNode filterCondition, Set<RexNode> belowPredicates){ + if(belowPredicates.contains(filterCondition)){ + return rexBuilder.makeLiteral(true); + } + if(filterCondition instanceof RexCall){ + RexCall rexCall = (RexCall) filterCondition; + switch (rexCall.getOperator().getKind()){ + case AND:{ + List<RexNode> nodeList = new ArrayList<>(); + for(RexNode rexNode : rexCall.getOperands()){ + nodeList.add(simplifyFilterCondition(rexBuilder, rexNode, belowPredicates)); + } + return RexUtil.composeConjunction(rexBuilder, nodeList, false); + } + case OR: { + List<RexNode> nodeList = new ArrayList<>(); + for(RexNode rexNode : rexCall.getOperands()){ + RexNode sub = simplifyFilterCondition(rexBuilder, rexNode, belowPredicates); + if(sub.isAlwaysTrue()){ + return rexBuilder.makeLiteral(true); + } + nodeList.add(sub); + } + return RexUtil.composeDisjunction(rexBuilder, nodeList, false); + } + } + } + return filterCondition; + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RedundantSortEliminator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RedundantSortEliminator.java new file mode 100644 index 0000000000..0e170096a6 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RedundantSortEliminator.java @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.logical; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.logical.LogicalAggregate; +import org.apache.calcite.rel.logical.LogicalJoin; +import org.apache.calcite.rel.logical.LogicalSort; +import org.apache.calcite.rel.type.RelDataTypeField; + +import com.dremio.exec.planner.StatelessRelShuttleImpl; +import com.google.common.collect.ImmutableList; + +/** + * When a user writes a query that has NO-OP sort operations, + * then we can remove them to increase performance. + * + * For example, a user can write a query: + * + * SELECT * FROM ( + * SELECT ENAME, SAL + * FROM EMP + * ORDER BY ENAME) + * ORDER BY SAL + * + * Which will have the following plan: + * + * LogicalSort(sort0=[$1], dir0=[ASC]) + * LogicalProject(ENAME=[$0], SAL=[$1]) + * LogicalSort(sort0=[$0], dir0=[ASC]) + * LogicalProject(ENAME=[$1], SAL=[$5]) + * LogicalTableScan(table=[[EMP]]) + * + * Notice that the above plan has a SORT on ENAME followed by a SORT on SAL. + * It is redundant to sort on ENAME and then to just sort on SAL at the end. + * An optimization is to just remove the sort on ENAME. + * + * The general rule is to remove sorts that have parent operations that make them redundant (negate them). + * There are a few exceptions like if the SORT operation has an OFFSET or a FETCH, + * since those operations change the result set and rely on the ordering of the input. + */ + +public final class RedundantSortEliminator { + private RedundantSortEliminator() {} + + public static RelNode apply(RelNode relNode) { + return relNode.accept(RedundantSortEliminatorRelShuttle.INSTANCE); + } + + private static final class RedundantSortEliminatorRelShuttle extends StatelessRelShuttleImpl { + private static final RedundantSortEliminatorRelShuttle HAS_NEGATING_PARENT_TRUE = new RedundantSortEliminatorRelShuttle(true); + private static final RedundantSortEliminatorRelShuttle HAS_NEGATING_PARENT_FALSE = new RedundantSortEliminatorRelShuttle(false); + public static final RedundantSortEliminatorRelShuttle INSTANCE = HAS_NEGATING_PARENT_FALSE; + + private final boolean hasNegatingParent; + + private RedundantSortEliminatorRelShuttle(boolean hasNegatingParent) { + this.hasNegatingParent = hasNegatingParent; + } + + @Override + public RelNode visit(LogicalSort sort) { + boolean isPureSortOperation = (sort.fetch == null) && (sort.offset == null); + if (isPureSortOperation && hasNegatingParent) { + // Note that even if we are under an aggregate or join, if there is a fetch / offset in between, + // then we no longer have a redundant parent. + return sort.getInput().accept(this); + } + + // Calcite represents LIMIT rels as LogicalSorts. + // The quirk is that we are matching the LIMIT operation in this sort logic, + // but we don't want to treat it like a sort (since there is no sorting going on). + boolean isRealSortOperation = !sort.collation.getFieldCollations().isEmpty(); + RedundantSortEliminatorRelShuttle shuttle = isRealSortOperation ?
HAS_NEGATING_PARENT_TRUE : HAS_NEGATING_PARENT_FALSE; + RelNode prunedInput = sort.getInput().accept(shuttle); + boolean inputWasPruned = prunedInput != sort.getInput(); + if (!inputWasPruned) { + return sort; + } + + return LogicalSort.create( + prunedInput, + sort.collation, + sort.offset, + sort.fetch); + } + + @Override + public RelNode visit(LogicalAggregate logicalAggregate) { + // Any sorts that come below this node should be removed. + RelNode prunedInput = logicalAggregate.getInput().accept(HAS_NEGATING_PARENT_TRUE); + boolean inputWasPruned = prunedInput != logicalAggregate.getInput(); + if (!inputWasPruned) { + return logicalAggregate; + } + + return LogicalAggregate.create( + prunedInput, + logicalAggregate.getHints(), + logicalAggregate.getGroupSet(), + logicalAggregate.getGroupSets(), + logicalAggregate.getAggCallList()); + } + + @Override + public RelNode visit(LogicalJoin logicalJoin) { + // Any sorts that come below this node should be removed. + RelNode leftPrunedInput = logicalJoin.getLeft().accept(HAS_NEGATING_PARENT_TRUE); + RelNode rightPrunedInput = logicalJoin.getRight().accept(HAS_NEGATING_PARENT_TRUE); + + boolean leftInputWasPruned = leftPrunedInput != logicalJoin.getLeft(); + boolean rightInputWasPruned = rightPrunedInput != logicalJoin.getRight(); + if (!leftInputWasPruned && !rightInputWasPruned) { + return logicalJoin; + } + + return LogicalJoin.create( + leftPrunedInput, + rightPrunedInput, + logicalJoin.getHints(), + logicalJoin.getCondition(), + logicalJoin.getVariablesSet(), + logicalJoin.getJoinType(), + logicalJoin.isSemiJoinDone(), + (ImmutableList) logicalJoin.getSystemFieldList()); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RegexpLikeToLikeRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RegexpLikeToLikeRule.java new file mode 100644 index 0000000000..aba93fba7c --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RegexpLikeToLikeRule.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.exec.planner.logical; + +import java.util.List; +import java.util.Set; + +import org.apache.calcite.plan.RelOptRule; +import org.apache.calcite.plan.RelOptRuleCall; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexLiteral; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; + +import com.google.common.collect.ImmutableSet; + +/** + * "REGEXP_LIKE is similar to the LIKE condition, + * except REGEXP_LIKE performs regular expression matching instead of the simple pattern matching performed by LIKE." + * + * We noticed that for simple patterns LIKE performs about twice as fast as REGEXP_LIKE. 
+ * This is most likely due to the fact that LIKE is running less code, since it has a simpler pattern language. + * + * Since we don't have access to either implementation + * we can instead write a rule to use LIKE in place of REGEXP_LIKE in certain scenarios. + * + * REGEXP_LIKE has the following special characters ., +, *, ?, ^, $, (, ), [, ], {, }, |, \ + * which means that none of the other characters have special meaning. + * If a pattern doesn't have any special characters, then we can safely convert REGEXP_LIKE to LIKE. + * + * For example: + * + * SELECT * + * FROM EMP + * WHERE REGEXP_LIKE(name, 'asdf') + * + * Can be rewritten to: + * + * SELECT * + * FROM EMP + * WHERE LIKE(name, '%asdf%') + */ +public final class RegexpLikeToLikeRule extends RelOptRule { + public static final RegexpLikeToLikeRule INSTANCE = new RegexpLikeToLikeRule(); + + private RegexpLikeToLikeRule() { + super(operand(RelNode.class, any()), "RegexpLikeToLikeRule"); + } + + @Override + public void onMatch(RelOptRuleCall relOptRuleCall) { + RelNode relNode = relOptRuleCall.rel(0); + RexBuilder rexBuilder = relNode.getCluster().getRexBuilder(); + InternalRexShuttle shuttle = new InternalRexShuttle(rexBuilder); + + RelNode rewrittenQuery = relNode.accept(shuttle); + relOptRuleCall.transformTo(rewrittenQuery); + } + + private static final class InternalRexShuttle extends RexShuttle { + private static final Set<Character> SPECIAL_CHARACTERS = ImmutableSet.of( + '.', '+', '*', '?', + '^','$', '(', ')', + '[', ']', '{', '}', '|', '\\'); + + private final RexBuilder rexBuilder; + + public InternalRexShuttle(RexBuilder rexBuilder) { + this.rexBuilder = rexBuilder; + } + + @Override + public RexNode visitCall(RexCall call) { + // If the call isn't REGEXP_LIKE, then just recurse on the children calls. + if (!call.op.getName().equalsIgnoreCase("REGEXP_LIKE")) { + boolean[] update = new boolean[]{false}; + List<RexNode> clonedOperands = this.visitList(call.operands, update); + return update[0] ? rexBuilder.makeCall(call.op, clonedOperands) : call; + } + + // At this point we know it's a REGEXP_LIKE call. + // If the pattern has special characters, then don't do a rewrite. + String pattern = ((RexLiteral)call.getOperands().get(1)).getValueAs(String.class); + for (int i = 0; i < pattern.length(); i++) { + Character patternCharacter = pattern.charAt(i); + if (SPECIAL_CHARACTERS.contains(patternCharacter)) { + return call; + } + } + + // The pattern doesn't have special characters, + // so just convert it to a regular LIKE call. + RexNode sourceString = call.getOperands().get(0); + RexNode newPattern = rexBuilder.makeLiteral("%" + pattern + "%"); + return rexBuilder.makeCall(SqlStdOperatorTable.LIKE, sourceString, newPattern); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RewriteConvertFunctionVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RewriteConvertFunctionVisitor.java new file mode 100644 index 0000000000..377afe40b4 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RewriteConvertFunctionVisitor.java @@ -0,0 +1,263 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.logical; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.logical.LogicalFilter; +import org.apache.calcite.rel.logical.LogicalJoin; +import org.apache.calcite.rel.logical.LogicalProject; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexLiteral; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlOperatorBinding; +import org.apache.calcite.sql.type.SqlReturnTypeInference; +import org.apache.calcite.util.NlsString; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.planner.StatelessRelShuttleImpl; +import com.dremio.exec.planner.sql.Checker; +import com.dremio.exec.planner.sql.OperatorTable; +import com.dremio.exec.planner.sql.SqlFunctionImpl; +import com.dremio.exec.util.ApproximateStringMatcher; +import com.google.common.collect.ImmutableList; + +public final class RewriteConvertFunctionVisitor extends StatelessRelShuttleImpl { + + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RewriteConvertFunctionVisitor.class); + private final RenameConvertToConvertFromVisitor renameConvertToConvertFromVisitor; + + public RewriteConvertFunctionVisitor(RexBuilder rexBuilder, OperatorTable table) { + renameConvertToConvertFromVisitor = new RenameConvertToConvertFromVisitor(rexBuilder, table); + } + + public static RelNode process(RelNode relNode, OperatorTable table){ + RewriteConvertFunctionVisitor convertRewriteVisitor = new RewriteConvertFunctionVisitor(relNode.getCluster().getRexBuilder(), table); + return relNode.accept(convertRewriteVisitor); + } + + @Override + public RelNode visit(LogicalFilter logicalFilter) { + RexBuilder rexBuilder = logicalFilter.getCluster().getRexBuilder(); + + RelNode rewrittenInput = logicalFilter.getInput().accept(this); + RexNode rewrittenCondition = logicalFilter.getCondition().accept(renameConvertToConvertFromVisitor); + boolean rewriteHappened = (rewrittenInput != logicalFilter.getInput()) || (rewrittenCondition != logicalFilter.getCondition()); + if (!rewriteHappened) { + return logicalFilter; + } + + return logicalFilter.copy( + logicalFilter.getTraitSet(), + rewrittenInput, + rewrittenCondition); + } + + @Override + public RelNode visit(LogicalProject logicalProject) { + RexBuilder rexBuilder = logicalProject.getCluster().getRexBuilder(); + + RelNode rewrittenInput = logicalProject.getInput().accept(this); + + boolean rewriteHappened = rewrittenInput != logicalProject.getInput(); + List rewrittenProjects = new ArrayList<>(); + for (RexNode project : logicalProject.getProjects()) { + RexNode rewrittenProject = project.accept(renameConvertToConvertFromVisitor); + if (rewrittenProject != project) { + rewriteHappened = true; + } + + rewrittenProjects.add(rewrittenProject); + } + + if 
(!rewriteHappened) { + return logicalProject; + } + + return logicalProject.copy( + logicalProject.getTraitSet(), + logicalProject.getInput(), + rewrittenProjects, + logicalProject.getRowType()); + } + + @Override + public RelNode visit(LogicalJoin logicalJoin) { + RexBuilder rexBuilder = logicalJoin.getCluster().getRexBuilder(); + + RelNode rewrittenLeft = logicalJoin.getLeft().accept(this); + RelNode rewrittenRight = logicalJoin.getRight().accept(this); + RexNode rewrittenCondition = logicalJoin.getCondition().accept(renameConvertToConvertFromVisitor); + + boolean rewriteHappened = (rewrittenLeft != logicalJoin.getLeft()) + || (rewrittenRight != logicalJoin.getRight()) + || (rewrittenCondition != logicalJoin.getCondition()); + + if (!rewriteHappened) { + return logicalJoin; + } + + return logicalJoin.copy( + logicalJoin.getTraitSet(), + rewrittenCondition, + rewrittenLeft, rewrittenRight, + logicalJoin.getJoinType(), + logicalJoin.isSemiJoinDone()); + } + + public static class RenameConvertToConvertFromVisitor extends RexShuttle { + + private final RexBuilder builder; + private final OperatorTable table; + + public RenameConvertToConvertFromVisitor(RexBuilder builder, OperatorTable table) { + this.builder = builder; + this.table = table; + } + + @Override + public RexNode visitCall(final RexCall call) { + final String functionName = call.getOperator().getName(); + + // check if it's a convert_from or convert_to function + if (!"convert_from".equalsIgnoreCase(functionName) && !"convert_to".equalsIgnoreCase(functionName)) { + return super.visitCall(call); + } + + boolean[] update = {false}; + final List clonedOperands = visitList(call.getOperands(), update); + + final int nArgs = clonedOperands.size(); + + if (nArgs < 2) { + // Second operand is missing + throw UserException.parseError() + .message("'%s' expects a string literal as a second argument.", functionName) + .build(logger); + } else if (nArgs > 3 || (nArgs > 2 && "convert_to".equalsIgnoreCase(functionName))) { + // Too many operands (>2 for 'convert_to', or >3 for 'convert_from') + throw UserException.parseError() + .message("Too many operands (%d) for '%s'", nArgs, functionName) + .build(logger); + } + + if (!(clonedOperands.get(1) instanceof RexLiteral)) { + // caused by user entering a non-literal + throw getConvertFunctionInvalidTypeException(call); + } + + if (nArgs == 3 && !(clonedOperands.get(2) instanceof RexLiteral)) { + // caused by user entering a non-literal + throw getConvertFunctionInvalidTypeException(call); + } + + String literal; + try { + literal = ((NlsString) (((RexLiteral) clonedOperands.get(1)).getValue())).getValue(); + } catch (final ClassCastException e) { + // Caused by user entering a value with a non-string literal + throw getConvertFunctionInvalidTypeException(call); + } + + // construct the new function name based on the input argument + String newFunctionName = functionName + literal; + if (nArgs == 3) { + if (!"utf8".equalsIgnoreCase(literal)) { + throw UserException.parseError() + .message("3-argument convert_from only supported for utf8 encoding. 
Instead, got %s", literal) + .build(logger); + } + newFunctionName = "convert_replaceUTF8"; + } + + // Look up the new function name in the operator table + List operatorList = table.getSqlOperator(newFunctionName); + if (operatorList.size() == 0) { + // User typed in an invalid type name + throw getConvertFunctionException(functionName, literal); + } + + // Find the SqlFunction with the correct args + Optional newFunctionOperator = table + .getSqlOperator(newFunctionName) + .stream() + .filter(op -> op.getOperandTypeChecker().getOperandCountRange().isValidCount(nArgs - 1)) + .findAny(); + + if (!newFunctionOperator.isPresent()) { + // we are here because we found some dummy convert function. (See DummyConvertFrom and DummyConvertTo) + throw getConvertFunctionException(functionName, literal); + } + + SqlFunction sqlOperator = SqlFunctionImpl.create( + newFunctionName, + new SqlReturnTypeInference() { + @Override + public RelDataType inferReturnType(SqlOperatorBinding opBinding) { + return call.getType(); + } + }, + Checker.between(1, nArgs - 1)); + + List operands = nArgs == 2 ? clonedOperands.subList(0, 1) : ImmutableList.of(clonedOperands.get(0), clonedOperands.get(2)); + + return builder.makeCall(sqlOperator, operands); + } + + private static UserException getConvertFunctionInvalidTypeException(final RexCall function) { + // Caused by user entering a value with a numeric type + final String functionName = function.getOperator().getName(); + final String typeName = function.getOperands().get(1).getType().getFullTypeString(); + return UserException.parseError() + .message("Invalid type %s passed as second argument to function '%s'. " + + "The function expects a literal argument.", + typeName, + functionName) + .build(logger); + } + + private UserException getConvertFunctionException(final String functionName, final String typeName) { + final String newFunctionName = functionName + typeName; + final boolean emptyTypeName = typeName.isEmpty(); + final String typeNameToPrint = emptyTypeName ? "" : typeName; + final UserException.Builder exceptionBuilder = UserException.unsupportedError() + .message("%s does not support conversion %s type '%s'.", functionName, functionName.substring(8).toLowerCase(), typeNameToPrint); + // Build a nice error message + if (!emptyTypeName) { + List ops = new ArrayList<>(); + for (SqlOperator op : table.getOperatorList()) { + ops.add(op.getName()); + } + final String bestMatch = ApproximateStringMatcher.getBestMatch(ops, newFunctionName); + if (bestMatch != null && bestMatch.length() > 0 && bestMatch.toLowerCase().startsWith("convert")) { + final StringBuilder s = new StringBuilder("Did you mean ") + .append(bestMatch.substring(functionName.length())) + .append("?"); + exceptionBuilder.addContext(s.toString()); + } + } + return exceptionBuilder.build(logger); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RexToExpr.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RexToExpr.java index 16c77c109a..d5828303dd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RexToExpr.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/RexToExpr.java @@ -93,7 +93,12 @@ /** * Utilities for Dremio's planner. */ -public class RexToExpr { +public final class RexToExpr { + + private RexToExpr() { + // utility class + } + public static final String UNSUPPORTED_REX_NODE_ERROR = "Cannot convert RexNode to equivalent Dremio expression. 
"; private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RexToExpr.class); private static boolean warnDecimal = true; @@ -273,8 +278,9 @@ public LogicalExpression visitCall(RexCall call) { case IS_FALSE: case OTHER: return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(), call.getOperands().get(0).accept(this)); + default: + throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")"); } - throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")"); case PREFIX: LogicalExpression arg = call.getOperands().get(0).accept(this); switch(call.getKind()){ @@ -288,8 +294,9 @@ public LogicalExpression visitCall(RexCall call) { return visitCall((RexCall) rexBuilder.makeCall( SqlStdOperatorTable.MULTIPLY, operands)); + default: + throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")"); } - throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")"); case SPECIAL: switch(call.getKind()){ case CAST: @@ -331,6 +338,8 @@ public LogicalExpression visitCall(RexCall call) { } } return elseExpression; + default: + break; } if (call.getOperator() == SqlStdOperatorTable.ITEM) { @@ -359,7 +368,7 @@ public LogicalExpression visitCall(RexCall call) { case VARCHAR: return left.getChild(literal.getValue2().toString()); default: - // fall through + break; } } @@ -375,7 +384,7 @@ public LogicalExpression visitCall(RexCall call) { case DECIMAL: case INTEGER: default: - // fall through + break; } } } @@ -394,6 +403,8 @@ public LogicalExpression visitCall(RexCall call) { case INTERVAL_MONTH: case INTERVAL_DAY: return FunctionCallFactory.createCast(Types.required(MinorType.DATE), dtPlus); + default: + break; } } @@ -403,7 +414,6 @@ public LogicalExpression visitCall(RexCall call) { if (MoreRelOptUtil.isDatetimeMinusInterval(call)) { return doFunction(call, "-"); } - // fall through default: throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")"); @@ -430,6 +440,7 @@ private LogicalExpression doFunction(RexCall call, String funcName) { } } + private LogicalExpression doUnknown(RexNode o){ final String message = String.format(UNSUPPORTED_REX_NODE_ERROR + "RexNode Class: %s, RexNode Digest: %s", o.getClass().getName(), o.toString()); if(throwUserException) { @@ -475,6 +486,7 @@ public LogicalExpression visitFieldAccess(RexFieldAccess fieldAccess) { return reference.getChild(fieldAccess.getField().getName()); } + @SuppressWarnings("FallThrough") // FIXME: remove suppression by properly handling switch fallthrough private LogicalExpression getCastFunction(RexCall call){ Preconditions.checkArgument(call.getOperands().size() == 1); @@ -568,6 +580,8 @@ private LogicalExpression getCastFunction(RexCall call){ case INTERVAL_SECOND: multiplier = org.apache.arrow.vector.util.DateUtility.secondsToMillis; break; + default: + break; } arg = FunctionCallFactory.createExpression("multiply", arg, ValueExpressions.getBigInt(multiplier)); @@ -626,11 +640,15 @@ private LogicalExpression getCastFunction(RexCall call){ final List args = ImmutableList.of(castExpr, divider); return FunctionCallFactory.createExpression("divide", args); } + break; + default: + break; } return FunctionCallFactory.createCast(castType, arg); } + @SuppressWarnings("checkstyle:EqualsAvoidNull") // "functionName" is never null private LogicalExpression getFunction(RexCall call) { List args = new ArrayList<>(); @@ -762,9 +780,10 @@ private LogicalExpression handleExtractFunction(final List ar } private 
LogicalExpression handleDateTruncFunction(final List args) { - // Assert that the first argument to extract is a QuotedString - assert args.get(0) instanceof ValueExpressions.QuotedString; - + if (!(args.get(0) instanceof ValueExpressions.QuotedString)){ + throw new UnsupportedOperationException("The first argument of date_trunc function must be time units. Expecting " + + "YEAR, MONTH, DAY, HOUR, MINUTE, SECOND, WEEK, QUARTER, DECADE, CENTURY, MILLENNIUM"); + } // Get the unit of time to be extracted String timeUnitStr = ((ValueExpressions.QuotedString)args.get(0)).value.toUpperCase(); @@ -782,8 +801,9 @@ private LogicalExpression handleDateTruncFunction(final List case ("MILLENNIUM"): final String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase(); return FunctionCallFactory.createExpression("date_trunc_" + functionPostfix, args.subList(1, 2)); + default: + break; } - throw new UnsupportedOperationException("date_trunc function supports the following time units: " + "YEAR, MONTH, DAY, HOUR, MINUTE, SECOND, WEEK, QUARTER, DECADE, CENTURY, MILLENNIUM"); } @@ -926,24 +946,21 @@ public LogicalExpression visitLiteral(RexLiteral literal) { case NULL: return NullExpression.INSTANCE; case ANY: + case ROW: + case ARRAY: if (isLiteralNull(literal)) { return NullExpression.INSTANCE; } + break; case VARBINARY: if (isLiteralNull(literal)) { return createNullExpr(MinorType.VARBINARY); } - case ROW: - if (isLiteralNull(literal)) { - return NullExpression.INSTANCE; - } - case ARRAY: - if (isLiteralNull(literal)) { - return NullExpression.INSTANCE; - } + break; default: - throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Dremio constant expression.", literal, literal.getType().getSqlTypeName())); + break; } + throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Dremio constant expression.", literal, literal.getType().getSqlTypeName())); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRel.java index 8c34945951..496c872b6b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRel.java @@ -22,26 +22,59 @@ import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; +import com.dremio.common.exceptions.UserException; import com.dremio.exec.planner.common.TableOptimizeRelBase; +import com.dremio.exec.planner.logical.partition.PruneFilterCondition; import com.dremio.exec.planner.sql.handlers.query.OptimizeOptions; +import com.dremio.exec.store.dfs.FilterableScan; /** * Drel for 'OPTIMIZE TABLE'. 
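 * For example (illustrative only, not from this change): a statement such as {@code OPTIMIZE TABLE t} is planned into this Drel, and the {@code partitionFilter} field added below carries any partition predicate so that compaction can be limited to the matching partitions.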
*/ public class TableOptimizeRel extends TableOptimizeRelBase implements Rel { + private final PruneFilterCondition partitionFilter; + public TableOptimizeRel(RelOptCluster cluster, - RelTraitSet traitSet, - RelNode input, - RelOptTable table, - CreateTableEntry createTableEntry, - OptimizeOptions optimizeOptions) { + RelTraitSet traitSet, + RelNode input, + RelOptTable table, + CreateTableEntry createTableEntry, + OptimizeOptions optimizeOptions, + PruneFilterCondition partitionFilter) { super(LOGICAL, cluster, traitSet, input, table, createTableEntry, optimizeOptions); + this.partitionFilter = partitionFilter; } @Override public RelNode copy(RelTraitSet traitSet, List inputs) { - return new TableOptimizeRel(getCluster(), traitSet, sole(inputs), getTable(), getCreateTableEntry(), getOptimizeOptions()); + RelNode relNode = sole(inputs); + PruneFilterCondition pruneFilterCondition = partitionFilter == null ? computePartitionFilter(relNode) : partitionFilter; + return new TableOptimizeRel(getCluster(), traitSet, relNode, getTable(), getCreateTableEntry(), getOptimizeOptions(), pruneFilterCondition); + } + + /** + * Rule for filters with ICEBERG Compaction. + */ + private PruneFilterCondition computePartitionFilter(RelNode relNode) { + PruneFilterCondition pruneFilterCondition = null; + if (relNode instanceof FilterableScan) { + // All filters have been resolved by pruning; In this case, there is nothing to push down for select scan + pruneFilterCondition = ((FilterableScan) relNode).getPartitionFilter(); + } else if (relNode instanceof FilterRel) { + // Things have been pruned; In this case, select scan does parquet push down too. + FilterableScan scan = (FilterableScan) ((FilterRel) relNode).getInput(); + pruneFilterCondition = scan.getPartitionFilter(); + if ((pruneFilterCondition == null || pruneFilterCondition.isEmpty())) { + //Nothing was pruned, In this case, select scan does parquet push down only. 
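+ //The filter could not be mapped onto the partition columns, so compaction cannot be restricted by it; fail fast with a descriptive error.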
+ throw UserException.unsupportedError().message(String.format("OPTIMIZE command is only supported on the partition columns - %s", + scan.getTableMetadata().getReadDefinition().getPartitionColumnsList())).buildSilently(); + } + } + return pruneFilterCondition; } + public PruneFilterCondition getPartitionFilter() { + return partitionFilter; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRule.java index 4d1a240b5a..9654757df2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/TableOptimizeRule.java @@ -42,6 +42,7 @@ public void onMatch(RelOptRuleCall call) { call.transformTo(new TableOptimizeRel( optimizeCrel.getCluster(), optimizeCrel.getTraitSet().plus(Rel.LOGICAL), - convertedInput, optimizeCrel.getTable(), optimizeCrel.getCreateTableEntry(), optimizeCrel.getOptimizeOptions())); + convertedInput, optimizeCrel.getTable(), optimizeCrel.getCreateTableEntry(), + optimizeCrel.getOptimizeOptions(), null)); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRel.java new file mode 100644 index 0000000000..e7f13a7d7b --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRel.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.logical; + +import java.util.List; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; + +import com.dremio.exec.catalog.VacuumOptions; +import com.dremio.exec.planner.common.VacuumTableRelBase; + +/** + * Drel for 'VACUUM' query. 
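+ * For example (assumed syntax, for illustration): {@code VACUUM TABLE t} produces this Drel, carrying the parsed {@link VacuumOptions} through logical planning.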
+ */ +public class VacuumTableRel extends VacuumTableRelBase implements Rel { + + public VacuumTableRel(RelOptCluster cluster, + RelTraitSet traitSet, + RelNode input, + RelOptTable table, + CreateTableEntry createTableEntry, + VacuumOptions vacuumOptions) { + super(LOGICAL, cluster, traitSet, input, table, createTableEntry, vacuumOptions); + } + + @Override + public RelNode copy(RelTraitSet traitSet, List inputs) { + return new VacuumTableRel(getCluster(), traitSet, sole(inputs), getTable(), getCreateTableEntry(), getVacuumOptions()); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRule.java new file mode 100644 index 0000000000..25f929858e --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/VacuumTableRule.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.logical; + +import org.apache.calcite.plan.Convention; +import org.apache.calcite.plan.RelOptRule; +import org.apache.calcite.plan.RelOptRuleCall; +import org.apache.calcite.rel.RelNode; + +import com.dremio.exec.calcite.logical.VacuumTableCrel; + +/** + * Planner rule, Applicable for VACUUM command only. + */ +public class VacuumTableRule extends RelOptRule { + + public static final RelOptRule INSTANCE = new VacuumTableRule(); + + private VacuumTableRule() { + super(RelOptHelper.any(VacuumTableCrel.class, Convention.NONE), "VacuumTableRule"); + } + + @Override + public void onMatch(RelOptRuleCall call) { + final VacuumTableCrel vacuumTableCrel = call.rel(0); + final RelNode input = vacuumTableCrel.getInput(); + //Set traits with Logical convention, Else it won't fetch the best plan. 
+ final RelNode convertedInput = convert(input, input.getTraitSet().plus(Rel.LOGICAL).simplify()); + call.transformTo(new VacuumTableRel( + vacuumTableCrel.getCluster(), + vacuumTableCrel.getTraitSet().plus(Rel.LOGICAL), + convertedInput, + vacuumTableCrel.getTable(), + vacuumTableCrel.getCreateTableEntry(), + vacuumTableCrel.getVacuumOptions())); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindPartitionConditions.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindPartitionConditions.java index 0e07fb5e48..b5c28d2f05 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindPartitionConditions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindPartitionConditions.java @@ -215,6 +215,7 @@ private boolean isHolisticExpression(RexCall call) { return false; } + @Override public Void visitInputRef(RexInputRef inputRef) { if(dirs.get(inputRef.getIndex())){ pushStatusStack.add(PushDirFilter.PUSH); @@ -225,22 +226,26 @@ public Void visitInputRef(RexInputRef inputRef) { return null; } + @Override public Void visitLiteral(RexLiteral literal) { pushStatusStack.add(PushDirFilter.PUSH); addResult(literal); return null; } + @Override public Void visitOver(RexOver over) { // assume NO_PUSH until proven otherwise analyzeCall(over, PushDirFilter.NO_PUSH); return null; } + @Override public Void visitCorrelVariable(RexCorrelVariable correlVariable) { return pushVariable(); } + @Override public Void visitCall(RexCall call) { analyzeCall(call, PushDirFilter.PUSH); return null; @@ -326,14 +331,17 @@ private void analyzeCall(RexCall call, PushDirFilter callPushDirFilter) { pushStatusStack.add(callPushDirFilter); } + @Override public Void visitDynamicParam(RexDynamicParam dynamicParam) { return pushVariable(); } + @Override public Void visitRangeRef(RexRangeRef rangeRef) { return pushVariable(); } + @Override public Void visitFieldAccess(RexFieldAccess fieldAccess) { return pushVariable(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindSimpleFilters.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindSimpleFilters.java index 914411bab6..146801410b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindSimpleFilters.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/FindSimpleFilters.java @@ -215,10 +215,9 @@ public StateHolder visitCall(RexCall call) { } return new StateHolder(Type.CONDITION, null) .add((RexCall) builder.makeCall(call.getType(), call.getOperator(), Arrays.asList(a.node, b.node))); - } else { - // the two inputs are not literals/direct inputs. - return new StateHolder(Type.OTHER, call); } + // the two inputs are not literals/direct inputs. 
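+ // Break out of the switch; the shared return below wraps this call as Type.OTHER.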
+ break; } case AND: @@ -241,6 +240,7 @@ public StateHolder visitCall(RexCall call) { if (a.type == Type.CONDITION) { return a; } + break; } case CAST: @@ -249,14 +249,14 @@ public StateHolder visitCall(RexCall call) { (call.getOperands().size() == 1 && call.getOperands().get(0) instanceof RexLiteral)) { // If its a single literal cast return call.getOperands().get(0).accept(this); } - - // fallthrough + break; } default: - return new StateHolder(Type.OTHER, call); + break; } + return new StateHolder(Type.OTHER, call); } private RexNode composeConjunction(RexNode a, RexNode b) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PartitionStatsBasedPruner.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PartitionStatsBasedPruner.java index dc7824ec3b..2146c558a8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PartitionStatsBasedPruner.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PartitionStatsBasedPruner.java @@ -24,6 +24,7 @@ import static org.apache.calcite.sql.SqlKind.LESS_THAN; import static org.apache.calcite.sql.SqlKind.LESS_THAN_OR_EQUAL; +import java.io.IOException; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -69,6 +70,7 @@ import org.apache.iceberg.PartitionStatsReader; import org.apache.iceberg.StructLike; import org.apache.iceberg.io.CloseableIterator; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.io.InputFile; import com.dremio.common.AutoCloseables; @@ -81,21 +83,26 @@ import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.ops.OptimizerRulesContext; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; +import com.dremio.exec.planner.common.PartitionStatsHelper; import com.dremio.exec.planner.common.ScanRelBase; import com.dremio.exec.planner.logical.partition.FindSimpleFilters.StateHolder; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.TableMetadata; -import com.dremio.exec.store.dfs.FileSystemRulesFactory; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; +import com.dremio.io.file.FileSystem; +import com.dremio.sabot.exec.context.OpProfileDef; +import com.dremio.sabot.exec.context.OperatorContextImpl; +import com.dremio.sabot.exec.context.OperatorStats; import com.dremio.service.Pointer; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Implementation of {@link RecordPruner} which prunes based on Iceberg partition stats @@ -513,16 +520,52 @@ private void writePartitionValues(int row, StructLike partitionData) { } } - public static Optional> prune(OptimizerRulesContext context, ScanRelBase scan, PruneFilterCondition pruneCondition) { + public static OperatorContextImpl newOperatorContext(OptimizerRulesContext optimizerRulesContext, ScanRelBase scan) { + return new OperatorContextImpl( + optimizerRulesContext.getPlannerSettings().getSabotConfig(), + null, // DremioConfig + null, // FragmentHandle + null, // popConfig + 
optimizerRulesContext.getAllocator().newChildAllocator("p-s-r-" + scan.getTableMetadata().getDatasetConfig().getName(), 0, Long.MAX_VALUE), + null, // output allocator + null, // code compiler + new OperatorStats(new OpProfileDef(0, 0, 0, 0), optimizerRulesContext.getAllocator()), // stats + null, // execution controls + null, // fragment executor builder + null, // executor service + null, // function lookup context + null, // context information + optimizerRulesContext.getFunctionRegistry().getOptionManager(), // option manager + null, // spill service + null, // node debug context provider + 1000, // target batch size + null, + null, + null, + null, + null, + null, + null, + null + ); + } + + @WithSpan("prune-partition-stats") + public static Optional> prune(OptimizerRulesContext context, ScanRelBase scan, PruneFilterCondition pruneCondition) + throws Exception { final TableMetadata tableMetadata = scan.getTableMetadata(); final RelDataType rowType = scan.getRowType(); final List projectedColumns = scan.getProjectedColumns(); boolean specEvolTransEnabled = context.getPlannerSettings().getOptions().getOption(ENABLE_ICEBERG_SPEC_EVOL_TRANFORMATION); + Span.current().setAttribute("dremio.table.name", tableMetadata.getName().getSchemaPath()); BatchSchema batchSchema = tableMetadata.getSchema(); List partitionColumns = tableMetadata.getReadDefinition().getPartitionColumnsList(); - String partitionStatsFile = FileSystemRulesFactory.getPartitionStatsFile(scan); + ImmutableDremioFileAttrs partitionStatsFileInfo = PartitionStatsHelper.getPartitionStatsFileAttrs(scan); + String partitionStatsFile = partitionStatsFileInfo.fileName(); + Long partionStatsFileLength = partitionStatsFileInfo.fileLength(); Optional> survivingRecords = Optional.empty(); StoragePluginId storagePluginId = scan.getIcebergStatisticsPluginId(context); + StoragePluginId tablePluginId = scan.getPluginId(); if (shouldPrune(pruneCondition, partitionColumns, partitionStatsFile, projectedColumns, specEvolTransEnabled, storagePluginId)) { /* * Build a mapping between partition column name and partition column ID. @@ -550,21 +593,39 @@ public static Optional> prune(OptimizerRulesContext context, Sc } } - SupportsIcebergRootPointer icebergRootPointerPlugin = context.getCatalogService().getSource(storagePluginId); - PartitionSpec spec = IcebergUtils.getIcebergPartitionSpec(batchSchema, partitionColumns, null); - InputFile inputFile = new DremioFileIO(icebergRootPointerPlugin.getFsConfCopy(), (MutablePlugin) icebergRootPointerPlugin).newInputFile(partitionStatsFile); - PartitionStatsReader partitionStatsReader = new PartitionStatsReader(inputFile, spec); - try(RecordPruner pruner = new PartitionStatsBasedPruner(inputFile.location(), partitionStatsReader, context, spec)) { - RexNode finalPruneCondition = pruneCondition.getPartitionExpression(); - if (specEvolTransEnabled && pruneCondition.getPartitionRange() != null) { - finalPruneCondition = finalPruneCondition == null ? 
pruneCondition.getPartitionRange() - : scan.getCluster().getRexBuilder().makeCall(SqlStdOperatorTable.AND, pruneCondition.getPartitionRange(), finalPruneCondition); - } - survivingRecords = Optional.of(pruner.prune(inUseColIdToNameMap, partitionColToIdMap, getUsedIndices, projectedColumns, - tableMetadata, finalPruneCondition, batchSchema, rowType, scan.getCluster())); + MutablePlugin plugin = context.getCatalogService().getSource(storagePluginId); + if (plugin instanceof SupportsIcebergRootPointer) { + SupportsIcebergRootPointer icebergRootPointerPlugin = (SupportsIcebergRootPointer) plugin; + PartitionSpec spec = IcebergUtils.getIcebergPartitionSpec(batchSchema, partitionColumns, null); + try (OperatorContextImpl operatorContext = newOperatorContext(context, scan)) { + FileSystem fs = icebergRootPointerPlugin.createFS(partitionStatsFile, + context.getContextInformation().getQueryUser(), operatorContext); + FileIO io = icebergRootPointerPlugin.createIcebergFileIO(fs, operatorContext, scan.getTableMetadata().getDatasetConfig().getFullPathList(), tablePluginId.getName(), partionStatsFileLength); + InputFile inputFile = io.newInputFile(partitionStatsFile); + PartitionStatsReader partitionStatsReader = new PartitionStatsReader(inputFile, spec); + if(!partitionStatsReader.iterator().hasNext()){ + //handle the case where the stats are empty or not successfully generated + logger.warn(String.format("Encountered empty partition stats file for table %s during row count estimation. File %s.", + tableMetadata.getName().toString(), partitionStatsFile)); + //Returning an empty Optional indicates a problem with partition pruning + //the upper layers handle it by considering all the rows of the table as qualified + return Optional.empty(); + } + try (RecordPruner pruner = new PartitionStatsBasedPruner(inputFile.location(), partitionStatsReader, context, + spec)) { + RexNode finalPruneCondition = pruneCondition.getPartitionExpression(); + if (specEvolTransEnabled && pruneCondition.getPartitionRange() != null) { + finalPruneCondition = finalPruneCondition == null ? 
pruneCondition.getPartitionRange() + : scan.getCluster().getRexBuilder().makeCall(SqlStdOperatorTable.AND, pruneCondition.getPartitionRange(), finalPruneCondition); + } + survivingRecords = Optional.of( + pruner.prune(inUseColIdToNameMap, partitionColToIdMap, getUsedIndices, projectedColumns, + tableMetadata, finalPruneCondition, batchSchema, rowType, scan.getCluster())); - } catch (RuntimeException e) { - logger.error("Encountered exception during row count estimation: ", e); + } + } catch (RuntimeException | IOException e) { + logger.error("Encountered exception during row count estimation: ", e); + } } } @@ -586,6 +647,7 @@ private static boolean isConditionOnImplicitCol(PruneFilterCondition pruneCondit int updateColIndex = projectedColumns.indexOf(SchemaPath.getSimplePath(IncrementalUpdateUtils.UPDATE_COLUMN)); final AtomicBoolean isImplicit = new AtomicBoolean(false); pruneCondition.getPartitionExpression().accept(new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { isImplicit.set(updateColIndex==inputRef.getIndex()); return null; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneFilterCondition.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneFilterCondition.java index b952814413..91bb6df71e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneFilterCondition.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneFilterCondition.java @@ -17,28 +17,30 @@ import static com.dremio.exec.planner.common.MoreRelOptUtil.getInputRewriterFromProjectedFields; -import java.util.ArrayList; import java.util.List; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.fun.SqlStdOperatorTable; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.planner.physical.PrelUtil; import com.dremio.exec.record.BatchSchema; +import com.google.common.base.Preconditions; /** * PruneFilterCondition */ public class PruneFilterCondition { - private RexNode partitionRange; - private RexNode nonPartitionRange; - private RexNode partitionExpression; + private final RexNode partitionRange; + private final RexNode nonPartitionRange; + private final RexNode partitionExpression; public PruneFilterCondition(RexNode partitionRange, RexNode nonPartitionRange, RexNode partitionExpression) { + //TODO use alwaysTrue instead of null to follow the calcite convention + Preconditions.checkArgument(null == partitionRange || !partitionRange.isAlwaysTrue()); + Preconditions.checkArgument(null == nonPartitionRange || !nonPartitionRange.isAlwaysTrue()); + Preconditions.checkArgument(null == partitionExpression || !partitionExpression.isAlwaysTrue()); this.partitionRange = partitionRange; this.nonPartitionRange = nonPartitionRange; this.partitionExpression = partitionExpression; @@ -60,38 +62,6 @@ public boolean isEmpty() { return partitionRange == null && nonPartitionRange == null && partitionExpression == null; } - public static PruneFilterCondition mergeConditions(RexBuilder builder, List conditions) { - if (conditions.size() == 1) { - return conditions.get(0); - } - - List nonPartitionRangeList = new ArrayList<>(); - List partitionRangeList = new ArrayList<>(); - List expressionList = new ArrayList<>(); - for (PruneFilterCondition condition : conditions) { - RexNode partitionRange = 
condition.getPartitionRange();
-      if (partitionRange != null) {
-        partitionRangeList.add(partitionRange);
-      }
-      RexNode nonPartitionRange = condition.getNonPartitionRange();
-      if (nonPartitionRange != null) {
-        nonPartitionRangeList.add(nonPartitionRange);
-      }
-      RexNode partitionExpression = condition.getPartitionExpression();
-      if (partitionExpression != null) {
-        expressionList.add(partitionExpression);
-      }
-    }
-    return new PruneFilterCondition(
-      buildConditionFromList(partitionRangeList, builder),
-      buildConditionFromList(nonPartitionRangeList, builder),
-      buildConditionFromList(expressionList, builder));
-  }
-
-  private static RexNode buildConditionFromList(List<RexNode> conditions, RexBuilder builder) {
-    return conditions.size() == 0 ? null : (conditions.size() == 1 ? conditions.get(0) : builder.makeCall(SqlStdOperatorTable.AND, conditions));
-  }
-
   public PruneFilterCondition applyProjection(List<SchemaPath> projection, RelDataType rowType, RelOptCluster cluster, BatchSchema batchSchema) {
     final PrelUtil.InputRewriter inputRewriter = getInputRewriterFromProjectedFields(projection, rowType, batchSchema, cluster);
     RexNode newPartitionRange = getPartitionRange() != null ? getPartitionRange().accept(inputRewriter) : null;
@@ -103,14 +73,14 @@ public PruneFilterCondition applyProjection(List<SchemaPath> projection, RelData
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    if (partitionRange != null) {
-      builder.append("partition_range_filter:").append(partitionRange.toString()).append(";");
+    if (null != partitionRange) {
+      builder.append("partition_range_filter:").append(partitionRange).append(";");
     }
-    if (nonPartitionRange != null) {
-      builder.append("non_partition_range_filter:").append(nonPartitionRange.toString()).append(";");
+    if (null != nonPartitionRange) {
+      builder.append("non_partition_range_filter:").append(nonPartitionRange).append(";");
     }
-    if (partitionExpression != null) {
-      builder.append("other_partition_filter:").append(partitionExpression.toString()).append(";");
+    if (null != partitionExpression) {
+      builder.append("other_partition_filter:").append(partitionExpression).append(";");
     }
     return builder.toString();
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneScanRuleBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneScanRuleBase.java
index 6711f18e2c..65fadcebaa 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneScanRuleBase.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/partition/PruneScanRuleBase.java
@@ -115,7 +115,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import io.opentelemetry.extension.annotations.WithSpan;
+import io.opentelemetry.instrumentation.annotations.WithSpan;
 /**
  * Prune partitions based on partition values
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/rule/GroupSetToCrossJoinCaseStatement.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/rule/GroupSetToCrossJoinCaseStatement.java
index 44cfaa1ba5..7f5b39dc1a 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/rule/GroupSetToCrossJoinCaseStatement.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/logical/rule/GroupSetToCrossJoinCaseStatement.java
@@ -366,6 +366,18 @@ private RelDataType rowIntType() {
 );
 private static boolean canPreAggregate(Aggregate agg) {
+    // Only pre-aggregate if the overall set of grouping keys
+    // is included as a specific
grouping set. Otherwise, the
+    // pre-aggregation may be much higher cardinality than
+    // that of any of the grouping sets. For example, imagine
+    // GROUPING SETS ((a,b), (c,d), (e,f))
+    // each grouping set may itself be relatively low cardinality,
+    // but the combined grouping with all columns may end up being
+    // very high cardinality, and doing the pre-aggregate would
+    // result in much higher memory usage.
+    if (!agg.getGroupSets().contains(agg.getGroupSet())) {
+      return false;
+    }
     return agg.getAggCallList().stream()
       .allMatch(a -> PRE_AGGREGATION_FUNCTIONS.contains(a.getAggregation().getKind()));
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AbstractAttemptObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AbstractAttemptObserver.java
index 57c4b13e0d..f19e15f463 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AbstractAttemptObserver.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AbstractAttemptObserver.java
@@ -16,6 +16,7 @@
 package com.dremio.exec.planner.observer;
 import java.util.List;
+import java.util.Map;
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.rel.RelNode;
@@ -60,7 +61,8 @@ public void finalPrel(Prel prel) {
   }
   @Override
-  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) {
+  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after,
+                               long millisTaken, final Map<String, Long> timeBreakdownPerRule) {
   }
   @Override
@@ -196,4 +198,12 @@ public void resourcesScheduled(ResourceSchedulingDecisionInfo resourceScheduling
   @Override
   public void updateReflectionsWithHints(ReflectionExplanationsAndQueryDistance reflectionExplanationsAndQueryDistance) {
   }
+
+  @Override
+  public void setNumJoinsInUserQuery(Integer joins) {
+  }
+
+  @Override
+  public void setNumJoinsInFinalPrel(Integer joins) {
+  }
 }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObserver.java
index 16bf47272b..b48661d47e 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObserver.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObserver.java
@@ -16,6 +16,7 @@
 package com.dremio.exec.planner.observer;
 import java.util.List;
+import java.util.Map;
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.rel.RelNode;
@@ -145,8 +146,10 @@ public interface AttemptObserver {
    * @param before The graph before the transformation occurred.
    * @param after The graph after the planning transformation took place
    * @param millisTaken The amount of time taken to complete the planning.
+   * @param timeBreakdownPerRule Breakdown of time spent by different rules.
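+   *                             (assumed: keys identify the rule, values are the cumulative time in milliseconds)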
   */
-  void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken);
+  void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken,
+                        final Map<String, Long> timeBreakdownPerRule);
  /**
   * Called when all tables have been collected from the plan
@@ -297,6 +300,19 @@ void planSubstituted(DremioMaterialization materialization,
   */
  void activateFragmentFailed(Exception ex);
+  /**
+   * Number of joins in the user-provided query
+   * @param joins the number of joins found in the user query
+   */
+  void setNumJoinsInUserQuery(Integer joins);
+
+  /**
+   * Number of joins in the final Prel plan
+   * @param joins the number of joins in the final Prel
+   */
+  void setNumJoinsInFinalPrel(Integer joins);
+
+
  /**
   * ResourceScheduling related information
   * @param resourceSchedulingDecisionInfo
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObservers.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObservers.java
index 24cfae7d0c..6fc36e0bde 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObservers.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/AttemptObservers.java
@@ -17,6 +17,7 @@
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.rel.RelNode;
@@ -156,9 +157,10 @@ public void planExpandView(RelRoot expanded, List<String> schemaPath, int nestin
   }
   @Override
-  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) {
+  public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after,
+                               long millisTaken, final Map<String, Long> timeBreakdownPerRule) {
     for (final AttemptObserver observer : observers) {
-      observer.planRelTransform(phase, planner, before, after, millisTaken);
+      observer.planRelTransform(phase, planner, before, after, millisTaken, timeBreakdownPerRule);
     }
   }
@@ -350,6 +352,19 @@ public void tablesCollected(Iterable<DremioTable> tables) {
     observers.forEach(o -> o.tablesCollected(tables));
   }
+  @Override
+  public void setNumJoinsInUserQuery(Integer joins) {
+    for (final AttemptObserver observer : observers) {
+      observer.setNumJoinsInUserQuery(joins);
+    }
+  }
+
+  @Override
+  public void setNumJoinsInFinalPrel(Integer joins) {
+    for (final AttemptObserver observer : observers) {
+      observer.setNumJoinsInFinalPrel(joins);
+    }
+  }
 /**
  * Add to the collection of observers.
* diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/DelegatingAttemptObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/DelegatingAttemptObserver.java index 9882e6946b..7621a2e274 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/DelegatingAttemptObserver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/DelegatingAttemptObserver.java @@ -16,6 +16,7 @@ package com.dremio.exec.planner.observer; import java.util.List; +import java.util.Map; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.rel.RelNode; @@ -112,8 +113,9 @@ public void finalPrel(Prel prel) { } @Override - public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) { - observer.planRelTransform(phase, planner, before, after, millisTaken); + public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, + long millisTaken, final Map timeBreakdownPerRule) { + observer.planRelTransform(phase, planner, before, after, millisTaken, timeBreakdownPerRule); } @Override @@ -238,4 +240,14 @@ public void tablesCollected(Iterable tables) { public void updateReflectionsWithHints(ReflectionExplanationsAndQueryDistance reflectionExplanationsAndQueryDistance) { observer.updateReflectionsWithHints(reflectionExplanationsAndQueryDistance); } + + @Override + public void setNumJoinsInUserQuery(Integer joins) { + observer.setNumJoinsInUserQuery(joins); + } + + @Override + public void setNumJoinsInFinalPrel(Integer joins) { + observer.setNumJoinsInFinalPrel(joins); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandAttemptObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandAttemptObserver.java index 484e0dc57a..832ea46cc8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandAttemptObserver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandAttemptObserver.java @@ -16,8 +16,11 @@ package com.dremio.exec.planner.observer; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; +import javax.inject.Provider; + import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelRoot; @@ -26,7 +29,9 @@ import com.dremio.common.DeferredException; import com.dremio.common.SerializedExecutor; +import com.dremio.common.tracing.TracingUtils; import com.dremio.common.utils.protos.QueryWritableBatch; +import com.dremio.context.RequestContext; import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.planner.CachedAccelDetails; import com.dremio.exec.planner.PlannerPhase; @@ -45,6 +50,9 @@ import com.dremio.exec.work.protector.UserResult; import com.dremio.reflection.hints.ReflectionExplanationsAndQueryDistance; import com.dremio.resource.ResourceSchedulingDecisionInfo; +import com.dremio.telemetry.utils.TracerFacade; + +import io.opentracing.Span; /** * Does query observations in order but not in the query execution thread. 
This @@ -56,18 +64,42 @@ * {@link #attemptCompletion(UserResult)} callback */ public class OutOfBandAttemptObserver implements AttemptObserver { - private final SerializedExecutor serializedExec; private final AttemptObserver innerObserver; private final DeferredException deferred = new DeferredException(); - OutOfBandAttemptObserver(AttemptObserver innerObserver, SerializedExecutor serializedExec) { + private Span executionSpan; + private long numCallsToExecDataArrived = 0; + // log the next span event after this many calls to execDataArrived + private long recordNextEventOnNumCalls = 1; + private long eventNameSuffix = 1; + private final Provider requestContextProvider; + + OutOfBandAttemptObserver( + AttemptObserver innerObserver, + SerializedExecutor serializedExec, + Provider requestContextProvider) { this.serializedExec = serializedExec; this.innerObserver = innerObserver; + this.requestContextProvider = requestContextProvider; } @Override public void beginState(final AttemptEvent event) { + switch (event.getState()) { + case RUNNING: { + executionSpan = TracingUtils.buildChildSpan(TracerFacade.INSTANCE, "execution-started"); + break; + } + case COMPLETED: + case CANCELED: + case FAILED: { + if (executionSpan != null) { + executionSpan.finish(); + } + break; + } + } execute(() -> innerObserver.beginState(event)); } @@ -97,8 +129,9 @@ public void recordExtraInfo(final String name, final byte[] bytes) { } @Override - public void planRelTransform(final PlannerPhase phase, final RelOptPlanner planner, final RelNode before, final RelNode after, final long millisTaken) { - execute(() -> innerObserver.planRelTransform(phase, planner, before, after, millisTaken)); + public void planRelTransform(final PlannerPhase phase, final RelOptPlanner planner, final RelNode before, + final RelNode after, final long millisTaken, final Map timeBreakdownPerRule) { + execute(() -> innerObserver.planRelTransform(phase, planner, before, after, millisTaken, timeBreakdownPerRule)); } @Override @@ -145,7 +178,13 @@ public void applyAccelDetails(final CachedAccelDetails accelDetails) { @Override public void planCompleted(final ExecutionPlan plan) { - execute(() -> innerObserver.planCompleted(plan)); + // TODO(DX-61807): The catalog lookup will be avoided if we use cache. + final RequestContext requestContext = + (RequestContext.current() != RequestContext.empty() || requestContextProvider == null) + ? 
RequestContext.current() + : requestContextProvider.get(); + + execute(() -> requestContext.run(() -> innerObserver.planCompleted(plan))); } @Override @@ -155,6 +194,16 @@ public void execStarted(final QueryProfile profile) { @Override public void execDataArrived(final RpcOutcomeListener outcomeListener, final QueryWritableBatch result) { + if ((eventNameSuffix <= 10) && (executionSpan != null)) { + numCallsToExecDataArrived++; + if (numCallsToExecDataArrived == recordNextEventOnNumCalls) { + // log data-arrived event + executionSpan.log("execDataArrived-" + eventNameSuffix); + // increase the gap between events as the output data is large + recordNextEventOnNumCalls *= 2; + eventNameSuffix++; + } + } execute(() -> innerObserver.execDataArrived(outcomeListener, result)); } @@ -281,6 +330,16 @@ public void tablesCollected(Iterable tables) { execute(() -> innerObserver.tablesCollected(tables)); } + @Override + public void setNumJoinsInUserQuery(Integer joins) { + execute(() -> innerObserver.setNumJoinsInUserQuery(joins)); + } + + @Override + public void setNumJoinsInFinalPrel(Integer joins) { + execute(() -> innerObserver.setNumJoinsInFinalPrel(joins)); + } + /** * Wraps the runnable so that any exception thrown will eventually cause the attempt * to fail when handling the {@link #attemptCompletion(UserResult)} callback diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandQueryObserver.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandQueryObserver.java index c8234d6101..840556fa0d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandQueryObserver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/observer/OutOfBandQueryObserver.java @@ -17,29 +17,35 @@ import java.util.concurrent.Executor; +import javax.inject.Provider; + import com.dremio.common.SerializedExecutor; import com.dremio.common.utils.protos.AttemptId; +import com.dremio.context.RequestContext; import com.dremio.exec.planner.fragment.PlanningSet; import com.dremio.exec.work.protector.UserResult; import com.dremio.proto.model.attempts.AttemptReason; -/** - * - */ public class OutOfBandQueryObserver extends AbstractQueryObserver { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OutOfBandQueryObserver.class); private final QueryObserver observer; private final Exec serializedExec; + private final Provider requestContextProvider; - public OutOfBandQueryObserver(final QueryObserver observer, final Executor executor) { + public OutOfBandQueryObserver( + final QueryObserver observer, + final Executor executor, + final Provider requestContextProvider) { this.observer = observer; this.serializedExec = new Exec(executor); + this.requestContextProvider = requestContextProvider; } @Override public AttemptObserver newAttempt(AttemptId attemptId, AttemptReason reason) { - return new OutOfBandAttemptObserver(observer.newAttempt(attemptId, reason), serializedExec); + return new OutOfBandAttemptObserver( + observer.newAttempt(attemptId, reason), serializedExec, requestContextProvider); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/BroadcastExchangePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/BroadcastExchangePrel.java index a9fbd1b5de..1dfe771484 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/BroadcastExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/BroadcastExchangePrel.java @@ -93,6 +93,7 @@ 
public RelNode copy(RelTraitSet traitSet, List inputs) { return new BroadcastExchangePrel(getCluster(), traitSet, sole(inputs)); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { Prel child = (Prel) this.getInput(); PhysicalOperator childPOP = child.getPhysicalOperator(creator); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DistributionTrait.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DistributionTrait.java index 17508cd9ba..3fb2ecf548 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DistributionTrait.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DistributionTrait.java @@ -83,6 +83,7 @@ public boolean satisfies(RelTrait trait) { return this.equals(trait); } + @Override public RelTraitDef getTraitDef() { return DistributionTraitDef.INSTANCE; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DmlPlanGenerator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DmlPlanGenerator.java index 34cf50bd24..c5af002752 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DmlPlanGenerator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/DmlPlanGenerator.java @@ -44,7 +44,7 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.ImmutableBitSet; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.exceptions.UserException; import com.dremio.common.expression.SchemaPath; @@ -133,6 +133,7 @@ private void validateOperation(TableModify.Operation operation, List upd } } + @Override public Prel getPlan() { try { Prel dataFileAggPlan; @@ -479,7 +480,8 @@ private RelNode getCopyOnWriteDataFileScanPlan(RelNode dataFileListInput) { tableMetadata, allColumns, context, - ManifestScanFilters.empty()); + ManifestScanFilters.empty(), + null); return builder.buildWithDmlDataFileFiltering(dataFileListInput); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/FileSystemVacuumTablePrule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/FileSystemVacuumTablePrule.java new file mode 100644 index 0000000000..b0581407d0 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/FileSystemVacuumTablePrule.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.physical; + +import org.apache.calcite.plan.RelOptRuleCall; +import org.apache.calcite.rel.RelNode; + +import com.dremio.exec.catalog.DremioPrepareTable; +import com.dremio.exec.ops.OptimizerRulesContext; +import com.dremio.exec.planner.logical.Rel; +import com.dremio.exec.planner.logical.RelOptHelper; +import com.dremio.exec.planner.logical.VacuumTableRel; +import com.dremio.exec.store.dfs.FileSystemPlugin; + +/** + * Generate physical plan for VACUUM TABLE with file systems. + */ +public class FileSystemVacuumTablePrule extends VacuumTablePruleBase { + + public FileSystemVacuumTablePrule(OptimizerRulesContext context) { + super(RelOptHelper.some(VacuumTableRel.class, Rel.LOGICAL, RelOptHelper.any(RelNode.class)), + "Prel.FileSystemVacuumTablePrule", context); + } + + @Override + public boolean matches(RelOptRuleCall call) { + return call.rel(0).getCreateTableEntry().getPlugin() instanceof FileSystemPlugin; + } + + @Override + public void onMatch(RelOptRuleCall call) { + final VacuumTableRel vacuumRel = call.rel(0); + call.transformTo(getPhysicalPlan(vacuumRel, ((DremioPrepareTable) vacuumRel.getTable()).getTable().getDataset())); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashAggPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashAggPrel.java index a996a4d934..e6f9ef48cd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashAggPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashAggPrel.java @@ -183,10 +183,10 @@ private boolean initialUseSpill(PhysicalPlanCreator creator, PhysicalOperator ch * ListAggExpression is not an instance of FunctionHolderExpr so it will be skipped here. * ListAgg support spilling so no additional config options are necessary */ - if (expr != null && (expr instanceof FunctionHolderExpr)) { + if (expr instanceof FunctionHolderExpr) { final String functionName = ((FunctionHolderExpr) expr).getName(); - final boolean isMinMaxFn = (functionName.equals("min") || functionName.equals("max")); - final boolean isNDVFn = (functionName.equals("hll") || functionName.equals("hll_merge")); + final boolean isMinMaxFn = ("min".equals(functionName) || "max".equals(functionName)); + final boolean isNDVFn = ("hll".equals(functionName) || "hll_merge".equals(functionName)); if ((isNDVFn && !isNdvSpillEnabled) || (isMinMaxFn && expr.getCompleteType().isVariableWidthScalar() && !isVarLenMinMaxSpillEnabled)) { useSpill = false; @@ -275,8 +275,9 @@ private boolean initialCanVectorize(PhysicalPlanCreator creator, PhysicalOperato case INT: case DECIMAL: continue; + default: + break; } - return false; case "min": @@ -287,6 +288,7 @@ private boolean initialCanVectorize(PhysicalPlanCreator creator, PhysicalOperato if (!enabledSpillVarchar && !enabledVarcharNdv) { return false; } + // fall through case BIGINT: case FLOAT4: case FLOAT8: @@ -299,8 +301,9 @@ private boolean initialCanVectorize(PhysicalPlanCreator creator, PhysicalOperato case TIMESTAMP: case DECIMAL: continue; + default: + break; } - return false; case "hll": diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToMergeExchangePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToMergeExchangePrel.java index 7f59abbf1d..b1f25c1f3a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToMergeExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToMergeExchangePrel.java @@ -85,6 
+85,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { this.collation, numEndPoints); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { Prel child = (Prel) this.getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToRandomExchangePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToRandomExchangePrel.java index cd10891bea..87e8f128f9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToRandomExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/HashToRandomExchangePrel.java @@ -123,6 +123,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs, boolean windowPu return new HashToRandomExchangePrel(getCluster(), traitSet, sole(inputs), fields, hashFunctionName, tableFunctionCreator, windowPushedDown); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { Prel child = (Prel) this.getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/IcebergDmlMergeDuplicateCheckPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/IcebergDmlMergeDuplicateCheckPrel.java index a4e82398a8..a309957719 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/IcebergDmlMergeDuplicateCheckPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/IcebergDmlMergeDuplicateCheckPrel.java @@ -78,6 +78,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { getTableMetadata(), getTableFunctionConfig(), getRowType()); } + @Override protected double defaultEstimateRowCount(TableFunctionConfig ignored, RelMetadataQuery mq) { return mq.getRowCount(input); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/NestedLoopJoinPrule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/NestedLoopJoinPrule.java index 0f6575bb2a..55b14dd93b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/NestedLoopJoinPrule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/NestedLoopJoinPrule.java @@ -66,6 +66,8 @@ protected boolean checkPreconditions(JoinRel join, RelNode left, RelNode right, return false; } break; + case FULL: + return join.getCondition().isAlwaysTrue(); default: return false; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/OrderedPartitionExchangePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/OrderedPartitionExchangePrel.java index 717bdfd9b2..07df8c054b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/OrderedPartitionExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/OrderedPartitionExchangePrel.java @@ -68,6 +68,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { return new OrderedPartitionExchangePrel(getCluster(), traitSet, sole(inputs)); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { throw new IOException(this.getClass().getSimpleName() + " not supported yet!"); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/PlannerSettings.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/PlannerSettings.java index c4c5d2f38a..d035c1c372 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/PlannerSettings.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/PlannerSettings.java 
@@ -118,7 +118,6 @@ public class PlannerSettings implements Context{ public static final LongValidator HEP_PLANNER_MATCH_LIMIT = new PositiveLongValidator("planner.hep_match_limit", Integer.MAX_VALUE, Integer.MAX_VALUE); public static final BooleanValidator ENHANCED_FILTER_JOIN_PUSHDOWN = new BooleanValidator("planner.enhanced_filter_join_pushdown", true); public static final BooleanValidator TRANSITIVE_FILTER_JOIN_PUSHDOWN = new BooleanValidator("planner.filter.transitive_pushdown", true); - public static final BooleanValidator TRANSITIVE_FILTER_NOT_NULL_EXPR_PUSHDOWN = new BooleanValidator("planner.filter.transitive_pushdown_not_null_expr", false); // Until DX-26452 is fixes public static final BooleanValidator ENABLE_RUNTIME_FILTER = new BooleanValidator("planner.filter.runtime_filter", true); public static final BooleanValidator CSE_BEFORE_RF = new BooleanValidator("planner.cse_before_rf", true); @@ -134,9 +133,11 @@ public class PlannerSettings implements Context{ public static final DoubleValidator UNION_ALL_INPUT_ROUND_ROBIN_THRESHOLD_RATIO = new DoubleValidator("planner.input_round_robin_threshold_ratio", 0.5); public static final BooleanValidator UNIONALL_DISTRIBUTE_ALL_CHILDREN = new BooleanValidator("planner.unionall_distribute_all_children", false); public static final LongValidator PLANNING_MAX_MILLIS = new LongValidator("planner.timeout_per_phase_ms", 60_000); + public static final BooleanValidator TRIM_JOIN_BRANCH = new BooleanValidator("planner.enable_trim_join_branch", false); public static final BooleanValidator NESTED_SCHEMA_PROJECT_PUSHDOWN = new BooleanValidator("planner.enable_nested_schema_project_pushdown", true); public static final BooleanValidator SPLIT_COMPLEX_FILTER_EXPRESSION = new BooleanValidator("planner.split_complex_filter_conditions", true); public static final BooleanValidator SORT_IN_JOIN_REMOVER = new BooleanValidator("planner.enable_sort_in_join_remover", true); + public static final BooleanValidator REGEXP_LIKE_TO_LIKE = new BooleanValidator("planner.enable_regexp_like_to_like", true); public static final BooleanValidator FULL_NESTED_SCHEMA_SUPPORT = new BooleanValidator("planner.enable_full_nested_schema", true); public static final BooleanValidator COMPLEX_TYPE_FILTER_PUSHDOWN = new BooleanValidator("planner.complex_type_filter_pushdown", true); @@ -176,9 +177,16 @@ public class PlannerSettings implements Context{ public static final LongValidator MAX_DNF_NODE_COUNT = new PositiveLongValidator("planner.max_dnf_node_count", Integer.MAX_VALUE, 128); + public static final RangeLongValidator MAX_FUNCTION_DEPTH = new RangeLongValidator("planner.max_function_depth", 1, 100, 10); + public static final BooleanValidator VDS_AUTO_FIX = new BooleanValidator("validator.enable_vds_autofix", true); + public static final BooleanValidator FLATTEN_CASE_EXPRS_ENABLED = new BooleanValidator("planner.flatten_case_exprs_enabled", true); public static final BooleanValidator CONVERT_FROM_JSON_PUSHDOWN = new BooleanValidator("planner.convert_from_json_pushdown", true); + public static final BooleanValidator NESTED_FUNCTIONS_PUSHDOWN = new BooleanValidator("planner.nested_functions_pushdown", true); + + public static final BooleanValidator JOIN_CONDITIONS_VALIDATION = new BooleanValidator("planner.join_conditions_validation", true); + public static final BooleanValidator NLJ_PUSHDOWN = new BooleanValidator("planner.nlj.expression_pushdown", true); public static final BooleanValidator HASH_JOIN_PUSHDOWN = new BooleanValidator("planner.hash_join.expression_pushdown", true); 
@@ -215,6 +223,7 @@ public class PlannerSettings implements Context{ public static final BooleanValidator ENABLE_DELTALAKE = new BooleanValidator("dremio.deltalake.enabled", true); public static final LongValidator ICEBERG_MANIFEST_SCAN_RECORDS_PER_THREAD = new LongValidator("planner.iceberg.manifestscan.records_per_thread", 1000); public static final BooleanValidator UNLIMITED_SPLITS_SUPPORT = new BooleanValidator("dremio.execution.support_unlimited_splits", true); + public static final LongValidator ORPHAN_FILE_DELETE_RECORDS_PER_THREAD = new LongValidator("planner.orphanfiledelete.records_per_thread", 200); public static final DoubleValidator METADATA_REFRESH_INCREASE_FACTOR = new DoubleValidator("dremio.metadata.increase_factor", 0.1); @@ -266,6 +275,8 @@ public enum StoreQueryResultsPolicy { public static final BooleanValidator ENABLE_REDUCE_CALC = new BooleanValidator("planner.enable_reduce_calc", true); public static final BooleanValidator ENABLE_REDUCE_JOIN = new BooleanValidator("planner.enable_reduce_join", true); + public static final BooleanValidator ENABLE_DISTINCT_AGG_WITH_GROUPING_SETS = new BooleanValidator("planner.enable_distinct_agg_with_grouping_sets", false); + // Filter reduce expression rules used in conjunction with transitive filter public static final BooleanValidator ENABLE_TRANSITIVE_REDUCE_PROJECT = new BooleanValidator("planner.enable_transitive_reduce_project", false); public static final BooleanValidator ENABLE_TRANSITIVE_REDUCE_FILTER = new BooleanValidator("planner.enable_transitive_reduce_filter", false); @@ -296,7 +307,7 @@ public enum StoreQueryResultsPolicy { public static final BooleanValidator PROJECT_PULLUP = new BooleanValidator("planner.project_pullup", false); - public static final BooleanValidator EXPAND_OPERATORS = new BooleanValidator("planner.expand_operators", false); + public static final BooleanValidator PUSH_FILTER_PAST_FLATTEN = new BooleanValidator("planner.push_filter_past_flatten", true); public static final BooleanValidator VERBOSE_PROFILE = new BooleanValidator("planner.verbose_profile", false); public static final BooleanValidator USE_STATISTICS = new BooleanValidator("planner.use_statistics", false); public static final BooleanValidator USE_MIN_SELECTIVITY_ESTIMATE_FACTOR_FOR_STAT = new BooleanValidator("planner.use_selectivity_estimate_factor_for_stat", false); @@ -304,7 +315,6 @@ public enum StoreQueryResultsPolicy { public static final PositiveLongValidator STATISTICS_SAMPLING_THRESHOLD = new PositiveLongValidator("planner.statistics_sampling_threshold", Long.MAX_VALUE, 1000000000L); public static final DoubleValidator STATISTICS_SAMPLING_RATE = new DoubleValidator("planner.statistics_sampling_rate", 5.0); public static final BooleanValidator USE_ROW_COUNT_STATISTICS = new BooleanValidator("planner.use_rowcount_statistics", false); - public static final BooleanValidator VERBOSE_RULE_MATCH_LISTENER = new BooleanValidator("planner.verbose_rule_match_listener", false); public static final BooleanValidator PRETTY_PLAN_SCRAPING = new BooleanValidator("planner.pretty_plan_scraping_enabled", false); public static final BooleanValidator INCLUDE_DATASET_PROFILE = new BooleanValidator("planner.include_dataset_profile", true); @@ -364,8 +374,9 @@ public enum StoreQueryResultsPolicy { public static final BooleanValidator LEGACY_SERIALIZER_ENABLED = new BooleanValidator("planner.legacy_serializer_enabled", false); public static final BooleanValidator PLAN_SERIALIZATION = new BooleanValidator("planner.plan_serialization", true); public 
static final LongValidator PLAN_SERIALIZATION_LENGTH_LIMIT = new PositiveLongValidator("planner.plan_serialization_length_limit", Long.MAX_VALUE, 100000); - public static final BooleanValidator EXTENDED_ALIAS = new BooleanValidator("planner.extended_alias", true); public static final BooleanValidator USE_SQL_TO_REL_SUB_QUERY_EXPANSION = new BooleanValidator("planner.sql_to_rel_sub_query_expansion", true); + public static final BooleanValidator EXTENDED_ALIAS = new BooleanValidator("planner.extended_alias", true); + public static final BooleanValidator ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED = new BooleanValidator("planner.enforce_valid_json_date_format_enabled", true); private static final Set SOURCES_WITH_MIN_COST = ImmutableSet.of( "adl", @@ -403,6 +414,12 @@ public enum StoreQueryResultsPolicy { */ public static final BooleanValidator QUERY_PLAN_CACHE_ENABLED = new BooleanValidator("planner.query_plan_cache_enabled", true); + public static final BooleanValidator QUERY_PLAN_CACHE_ENABLED_SECURITY_FIX = + new BooleanValidator("planner.query_plan_cache_enabled_security_fix", false); + + public static final BooleanValidator QUERY_PLAN_CACHE_ENABLED_SECURED_USER_BASED_CACHING = + new BooleanValidator("planner.query_plan_cache_enabled_secured_user_based_caching", true); + public static final BooleanValidator REFLECTION_ROUTING_INHERITANCE_ENABLED = new BooleanValidator("planner.reflection_routing_inheritance_enabled", false); private final SabotConfig sabotConfig; @@ -556,12 +573,8 @@ public boolean isProjectPullUpEnabled(){ return options.getOption(PROJECT_PULLUP); } - public boolean isExpandOperatorsEnabled(){ - return options.getOption(EXPAND_OPERATORS); - } - - public boolean isTransitiveFilterNotNullExprPushdownEnabled() { - return options.getOption(TRANSITIVE_FILTER_NOT_NULL_EXPR_PUSHDOWN); + public boolean isPushFilterPastFlattenEnabled(){ + return options.getOption(PUSH_FILTER_PAST_FLATTEN); } public boolean isComplexTypeFilterPushdownEnabled() { @@ -620,6 +633,10 @@ public void setUseDefaultCosting(boolean defcost) { this.useDefaultCosting = defcost; } + public boolean trimJoinBranch() { + return options.getOption(TRIM_JOIN_BRANCH); + } + public boolean isNestedSchemaProjectPushdownEnabled() { return options.getOption(NESTED_SCHEMA_PROJECT_PUSHDOWN); } @@ -632,8 +649,21 @@ public boolean isSortInJoinRemoverEnabled() { return options.getOption(SORT_IN_JOIN_REMOVER); } + public boolean isRegexpLikeToLikeEnabled() { + return options.getOption(REGEXP_LIKE_TO_LIKE); + } + public boolean isPlanCacheEnabled() { - return options.getOption(QUERY_PLAN_CACHE_ENABLED); + return options.getOption(QUERY_PLAN_CACHE_ENABLED) && !options.getOption(QUERY_PLAN_CACHE_ENABLED_SECURITY_FIX); + } + + public boolean isPlanCacheEnableSecuredUserBasedCaching() { + return options.getOption(QUERY_PLAN_CACHE_ENABLED_SECURED_USER_BASED_CACHING); + } + + + public boolean isEnforceValidJsonFormatEnabled() { + return options.getOption(ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED); } public boolean isReflectionRoutingInheritanceEnabled() { @@ -682,6 +712,10 @@ public boolean isReduceFilterExpressionsEnabled() { return options.getOption(ENABLE_REDUCE_FILTER); } + public boolean isDistinctAggWithGroupingSetsEnabled() { + return options.getOption(ENABLE_DISTINCT_AGG_WITH_GROUPING_SETS); + } + public boolean isReduceCalcExpressionsEnabled() { return options.getOption(ENABLE_REDUCE_CALC); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/RoundRobinExchangePrel.java 
b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/RoundRobinExchangePrel.java index 62bd94e80a..45bc408da4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/RoundRobinExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/RoundRobinExchangePrel.java @@ -82,6 +82,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { return new RoundRobinExchangePrel(getCluster(), traitSet, sole(inputs)); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { Prel child = (Prel) this.getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/SingleMergeExchangePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/SingleMergeExchangePrel.java index de58ed6162..2fb8b5a1d6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/SingleMergeExchangePrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/SingleMergeExchangePrel.java @@ -88,6 +88,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { return new SingleMergeExchangePrel(getCluster(), traitSet, sole(inputs), collation); } + @Override public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException { Prel child = (Prel) this.getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionPrel.java index c185802474..fd85c150f4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionPrel.java @@ -238,6 +238,7 @@ public boolean hasFilter() { return functionConfig.getFunctionContext().getScanFilter() != null; } + @Override public TableMetadata getTableMetadata() { return tableMetadata; } @@ -293,6 +294,7 @@ private void validateSchema(BatchSchema inputSchema, BatchSchema tableFunctionSc } } + @Override public Function getEstimateRowCountFn() { return estimateRowCountFn; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionUtil.java index 9adc50d0ba..ec3a89fd30 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableFunctionUtil.java @@ -16,6 +16,7 @@ package com.dremio.exec.planner.physical; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; @@ -33,9 +34,11 @@ import com.dremio.common.expression.CompleteType; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.physical.config.DeletedFilesMetadataTableFunctionContext; import com.dremio.exec.physical.config.EasyScanTableFunctionContext; import com.dremio.exec.physical.config.ExtendedFormatOptions; import com.dremio.exec.physical.config.FooterReaderTableFunctionContext; +import com.dremio.exec.physical.config.ManifestListScanTableFunctionContext; import com.dremio.exec.physical.config.ManifestScanFilters; import com.dremio.exec.physical.config.ManifestScanTableFunctionContext; import com.dremio.exec.physical.config.PartitionTransformTableFunctionContext; @@ -43,6 +46,7 @@ import com.dremio.exec.physical.config.TableFunctionContext; import 
com.dremio.exec.planner.sql.CalciteArrowHelper; import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.OperationType; import com.dremio.exec.store.RecordReader; import com.dremio.exec.store.ScanFilter; import com.dremio.exec.store.SplitIdentity; @@ -70,13 +74,28 @@ private static StoragePluginId getInternalTablePluginId(TableMetadata tableMetad return null; } + public static TableFunctionContext getTableFunctionContext(TableMetadata tableMetadata, BatchSchema schema, List columns) { + return new TableFunctionContext(tableMetadata.getFormatSettings(), + schema, + tableMetadata.getSchema(), + ImmutableList.of(tableMetadata.getName().getPathComponents()), + null, + tableMetadata.getStoragePluginId(), + getInternalTablePluginId(tableMetadata), + columns, + tableMetadata.getReadDefinition().getPartitionColumnsList(), null, + tableMetadata.getReadDefinition().getExtendedProperty(), false, false, true, + tableMetadata.getDatasetConfig().getPhysicalDataset().getInternalSchemaSettings()); + } + public static TableFunctionContext getManifestScanTableFunctionContext( final TableMetadata tableMetadata, List columns, BatchSchema schema, ScanFilter scanFilter, ManifestContent manifestContent, - ManifestScanFilters manifestScanFilters) { + ManifestScanFilters manifestScanFilters, + boolean isCarryForwardEnabled) { ByteString partitionSpecMap = null; ByteString jsonPartitionSpecMap = null; String icebergSchema = null; @@ -98,7 +117,29 @@ public static TableFunctionContext getManifestScanTableFunctionContext( tableMetadata.getReadDefinition().getExtendedProperty(), false, false, true, tableMetadata.getDatasetConfig().getPhysicalDataset().getInternalSchemaSettings(), manifestContent, - manifestScanFilters); + manifestScanFilters, + isCarryForwardEnabled); + } + + public static TableFunctionContext getManifestListScanTableFunctionContext(TableMetadata tableMetadata, + ScanFilter scanFilter, + BatchSchema schema, + List projectedCols, + boolean isCarryForwardEnabled) { + return new ManifestListScanTableFunctionContext( + tableMetadata.getFormatSettings(), + schema, + schema, + ImmutableList.of(tableMetadata.getName().getPathComponents()), + scanFilter, + tableMetadata.getStoragePluginId(), + getInternalTablePluginId(tableMetadata), + projectedCols, + tableMetadata.getReadDefinition().getPartitionColumnsList(), null, + tableMetadata.getReadDefinition().getExtendedProperty(), false, false, true, + tableMetadata.getDatasetConfig().getPhysicalDataset().getInternalSchemaSettings(), + isCarryForwardEnabled + ); } public static TableFunctionContext getEasyScanTableFunctionContext( @@ -152,21 +193,33 @@ public static TableFunctionContext getDataFileScanTableFunctionContext( ScanFilter scanFilter, List columns, boolean arrowCachingEnabled, - boolean isConvertedIcebergDataset) { + boolean isConvertedIcebergDataset, + List implicitPartitionCols) { final BatchSchema schema = tableMetadata.getSchema().maskAndReorder(columns); + return new TableFunctionContext( tableMetadata.getFormatSettings(), tableMetadata.getSchema(), schema, ImmutableList.of(tableMetadata.getName().getPathComponents()), scanFilter, tableMetadata.getStoragePluginId(), getInternalTablePluginId(tableMetadata), columns, - tableMetadata.getReadDefinition().getPartitionColumnsList(), null, + mergeSafely(tableMetadata.getReadDefinition().getPartitionColumnsList(), implicitPartitionCols), null, tableMetadata.getReadDefinition().getExtendedProperty(), arrowCachingEnabled, isConvertedIcebergDataset, false, 
tableMetadata.getDatasetConfig().getPhysicalDataset().getInternalSchemaSettings() ); } + private static List mergeSafely(List... additions) { + List source = new ArrayList<>(); + for (List addition : additions) { + if (addition != null) { + source.addAll(addition); + } + } + return source; + } + public static TableFunctionContext getSplitProducerTableFunctionContext( final TableMetadata tableMetadata, ScanFilter scanFilter, @@ -189,8 +242,9 @@ public static TableFunctionConfig getDataFileScanTableFunctionConfig( boolean arrowCachingEnabled, boolean isConvertedIcebergDataset, boolean limitDataScanParallelism, - long survivingFileCount) { - TableFunctionContext tableFunctionContext = getDataFileScanTableFunctionContext(tableMetadata, scanFilter, columns, arrowCachingEnabled, isConvertedIcebergDataset); + long survivingFileCount, + List implicitPartitionCols) { + TableFunctionContext tableFunctionContext = getDataFileScanTableFunctionContext(tableMetadata, scanFilter, columns, arrowCachingEnabled, isConvertedIcebergDataset, implicitPartitionCols); TableFunctionConfig config = new TableFunctionConfig(TableFunctionConfig.FunctionType.DATA_FILE_SCAN, false, tableFunctionContext); if(limitDataScanParallelism) { config.setMinWidth(1); @@ -218,7 +272,7 @@ public static TableFunctionConfig getSplitGenManifestScanTableFunctionConfig( ScanFilter scanFilter, ManifestScanFilters manifestScanFilters) { TableFunctionContext tableFunctionContext = getManifestScanTableFunctionContext(tableMetadata, columns, schema, - scanFilter, ManifestContent.DATA, manifestScanFilters); + scanFilter, ManifestContent.DATA, manifestScanFilters, false); return new TableFunctionConfig(TableFunctionConfig.FunctionType.SPLIT_GEN_MANIFEST_SCAN, true, tableFunctionContext); } @@ -228,13 +282,24 @@ public static TableFunctionConfig getManifestScanTableFunctionConfig( BatchSchema schema, ScanFilter scanFilter, ManifestContent manifestContent, - ManifestScanFilters manifestScanFilters) { + ManifestScanFilters manifestScanFilters, + boolean isCarryForwardEnabled) { TableFunctionContext tableFunctionContext = getManifestScanTableFunctionContext(tableMetadata, columns, schema, - scanFilter, manifestContent, manifestScanFilters); + scanFilter, manifestContent, manifestScanFilters, isCarryForwardEnabled); return new TableFunctionConfig(TableFunctionConfig.FunctionType.ICEBERG_MANIFEST_SCAN, true, tableFunctionContext); } + public static TableFunctionConfig getManifestListScanTableFunctionConfig( + TableMetadata tableMetadata, + ScanFilter scanFilter, + BatchSchema schema, + List projectedCols) { + TableFunctionContext tableFunctionContext = getManifestListScanTableFunctionContext(tableMetadata, scanFilter, schema, projectedCols, true); + return new TableFunctionConfig(TableFunctionConfig.FunctionType.ICEBERG_MANIFEST_LIST_SCAN, true, + tableFunctionContext); + } + public static TableFunctionConfig getSplitGenFunctionConfig( final TableMetadata tableMetadata, ScanFilter scanFilter) { @@ -282,7 +347,7 @@ public static TableFunctionConfig getMetadataManifestScanTableFunctionConfig( BatchSchema schema, ScanFilter scanFilter) { TableFunctionContext tableFunctionContext = getManifestScanTableFunctionContext(tableMetadata, columns, schema, - scanFilter, ManifestContent.DATA, ManifestScanFilters.empty()); + scanFilter, ManifestContent.DATA, ManifestScanFilters.empty(), false); return new TableFunctionConfig(TableFunctionConfig.FunctionType.METADATA_MANIFEST_FILE_SCAN, true, tableFunctionContext); } @@ -334,6 +399,65 @@ public static 
TableFunctionConfig getIcebergDeleteFileAggTableFunctionConfig(Bat return new TableFunctionConfig(TableFunctionConfig.FunctionType.ICEBERG_DELETE_FILE_AGG, true, context); } + public static TableFunctionConfig getIcebergOrphanFileDeleteFunctionConfig(BatchSchema outputSchema, TableMetadata tableMetadata) { + TableFunctionContext context = new TableFunctionContext( + tableMetadata.getFormatSettings(), + outputSchema, + null, + null, + null, + tableMetadata.getStoragePluginId(), + null, + outputSchema.getFields().stream().map(f -> SchemaPath.getSimplePath(f.getName())).collect(Collectors.toList()), + null, + null, + null, + false, + false, + false, + null); + return new TableFunctionConfig(TableFunctionConfig.FunctionType.ICEBERG_ORPHAN_FILE_DELETE, true, context); + } + + public static TableFunctionConfig getIcebergPartitionStatsFunctionConfig(BatchSchema outputSchema, TableMetadata tableMetadata) { + TableFunctionContext context = new TableFunctionContext( + tableMetadata.getFormatSettings(), + outputSchema, + null, + null, + null, + tableMetadata.getStoragePluginId(), + null, + outputSchema.getFields().stream().map(f -> SchemaPath.getSimplePath(f.getName())).collect(Collectors.toList()), + null, + null, + null, + false, + false, + false, + null); + return new TableFunctionConfig(TableFunctionConfig.FunctionType.ICEBERG_PARTITION_STATS_SCAN, true, context); + } + + public static TableFunctionContext getDeletedFilesMetadataTableFunctionContext(OperationType operationType, BatchSchema schema, List columns, boolean isIcebergMetadata) { + return new DeletedFilesMetadataTableFunctionContext(operationType, + null, + schema, + null, + null, + null, + null, + null, + columns, + null, + null, + null, + false, + false, + isIcebergMetadata, + null); + } + public static Function getHashExchangeTableFunctionCreator(final TableMetadata tableMetadata, boolean isIcebergMetadata) { return input -> getSplitAssignTableFunction(input, tableMetadata, isIcebergMetadata, null); } @@ -361,4 +485,10 @@ private static TableFunctionPrel getSplitAssignTableFunction(Prel input, TableMe TableFunctionConfig tableFunctionConfig = new TableFunctionConfig(TableFunctionConfig.FunctionType.SPLIT_ASSIGNMENT, true, tableFunctionContext); return new TableFunctionPrel(input.getCluster(), input.getTraitSet(), null, input, tableMetadata, tableFunctionConfig, output); } + + public static List getDataset(TableFunctionConfig functionConfig) { + TableFunctionContext functionContext = functionConfig.getFunctionContext(); + Collection> referencedTables = functionContext.getReferencedTables(); + return referencedTables != null ? 
referencedTables.iterator().next() : null; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableOptimizePruleBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableOptimizePruleBase.java index f7615aa715..f0d3c09114 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableOptimizePruleBase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/TableOptimizePruleBase.java @@ -44,7 +44,8 @@ public Prel getPhysicalPlan(TableOptimizeRel optimizeRel, RelNode input, TableMe tableMetadata, optimizeRel.getCreateTableEntry(), context, - optimizeRel.getOptimizeOptions()); + optimizeRel.getOptimizeOptions(), + optimizeRel.getPartitionFilter()); return planGenerator.getPlan(); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/VacuumTablePruleBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/VacuumTablePruleBase.java new file mode 100644 index 0000000000..40684ea95c --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/VacuumTablePruleBase.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.physical; + +import org.apache.calcite.plan.RelOptRuleOperand; + +import com.dremio.exec.ops.OptimizerRulesContext; +import com.dremio.exec.planner.VacuumPlanGenerator; +import com.dremio.exec.planner.logical.VacuumTableRel; +import com.dremio.exec.store.TableMetadata; + +/** + * A base physical plan generator for VACUUM + */ +public abstract class VacuumTablePruleBase extends Prule { + + private final OptimizerRulesContext context; + + public VacuumTablePruleBase(RelOptRuleOperand operand, String description, OptimizerRulesContext context) { + super(operand, description); + this.context = context; + } + + public Prel getPhysicalPlan(VacuumTableRel vacuumTableRel, TableMetadata tableMetadata) { + VacuumPlanGenerator planBuilder = new VacuumPlanGenerator( + vacuumTableRel.getTable(), + vacuumTableRel.getCluster(), + vacuumTableRel.getTraitSet().plus(Prel.PHYSICAL), + tableMetadata, + vacuumTableRel.getCreateTableEntry(), + vacuumTableRel.getVacuumOptions()); + + return planBuilder.buildPlan(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/filter/RuntimeFilteredRel.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/filter/RuntimeFilteredRel.java index bb53ae62b7..f449895042 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/filter/RuntimeFilteredRel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/filter/RuntimeFilteredRel.java @@ -29,10 +29,12 @@ public interface RuntimeFilteredRel extends RelNode { enum ColumnType { PARTITION { + @Override protected String alias() { return "p"; } }, RANDOM{ + @Override protected String alias() { return "r"; } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/rule/computation/SinCosPlusMinusRewriteRule.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/rule/computation/SinCosPlusMinusRewriteRule.java index dd78e04054..781d9c58c9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/rule/computation/SinCosPlusMinusRewriteRule.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/rule/computation/SinCosPlusMinusRewriteRule.java @@ -56,7 +56,10 @@ public RexNode rewrite(RexCall call) { childCall.getOperands().get(0), childCall.getOperands().get(1)); } + default: + break; } + break; } case "COS": { RexNode child = call.getOperands().get(0); @@ -75,11 +78,15 @@ public RexNode rewrite(RexCall call) { childCall.getOperands().get(0), childCall.getOperands().get(1)); } + default: + break; } + break; } default: - return null; + break; } + return null; } public static final SqlFunction SIN_PLUS = diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/ExpandNestedFunctionVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/ExpandNestedFunctionVisitor.java new file mode 100644 index 0000000000..7507cda40d --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/ExpandNestedFunctionVisitor.java @@ -0,0 +1,211 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.physical.visitor; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rel.type.RelDataTypeField; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.sql.SqlFunction; + +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.planner.physical.Prel; +import com.dremio.exec.planner.physical.ProjectPrel; +import com.dremio.options.OptionManager; + +/** + *
        + * Expand nested functions and push them down to break the nested structure.
        + * e.g. an expression like:
        + *
        + * replace(replace(replace(replace(r_name, 'A', 'B'), 'B', 'C'), 'C', 'D'), 'D', 'E') as foo
        + *
        + * will have input node:
        + * ProjectPrel(foo=[REPLACE(REPLACE(REPLACE(REPLACE($1, 'A':VARCHAR(1), 'B':VARCHAR(1)), 'B':VARCHAR(1), 'C':VARCHAR(1)), 'C':VARCHAR(1), 'D':VARCHAR(1)), 'D':VARCHAR(1), 'E':VARCHAR(1))])
        + *
        + * And after applying this visitor: (if the depth is set to 1)
        + *
        + * ProjectPrel($f0=[REPLACE($3, 'D':VARCHAR(1), 'E':VARCHAR(1))])
        + *   ProjectPrel(r_regionkey=[$0], r_name=[$1], r_comment=[$2], $f3=[REPLACE($3, 'C':VARCHAR(1), 'D':VARCHAR(1))])
        + *     ProjectPrel(r_regionkey=[$0], r_name=[$1], r_comment=[$2], $f3=[REPLACE($3, 'B':VARCHAR(1), 'C':VARCHAR(1))])
        + *       ProjectPrel(r_regionkey=[$0], r_name=[$1], r_comment=[$2], $f3=[REPLACE($1, 'A':VARCHAR(1), 'B':VARCHAR(1))])
        + * 
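        + * The visitor is applied repeatedly (see pushdownNestedFunctionsRecursive below) until a
        + * pass pushes nothing down, so nesting deeper than maxFunctionDepth is unwound one
        + * layer per pass.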
        + */ +public class ExpandNestedFunctionVisitor extends BasePrelVisitor { + private static final String NESTED_EXPRS_PREFIX = "NESTED_EXPRS_"; + private final int maxFunctionDepth; + + private ExpandNestedFunctionVisitor(int maxFunctionDepth) { + this.maxFunctionDepth = maxFunctionDepth; + } + + public static Prel pushdownNestedFunctions(Prel prel, OptionManager options) { + return options.getOption(PlannerSettings.NESTED_FUNCTIONS_PUSHDOWN) ? + pushdownNestedFunctionsRecursive(prel, (int) options.getOption(PlannerSettings.MAX_FUNCTION_DEPTH)) : + prel; + } + + private static Prel pushdownNestedFunctionsRecursive(Prel prel, int maxFunctionDepth) { + ExpandNestedFunctionVisitor visitor = new ExpandNestedFunctionVisitor(maxFunctionDepth); + AtomicBoolean pushedDown = new AtomicBoolean(false); + do { + pushedDown.set(false); + prel = prel.accept(visitor, pushedDown); + } while (pushedDown.get()); // Recursively push down + return prel; + } + + @Override + public Prel visitPrel(Prel prel, AtomicBoolean value) throws RuntimeException { + List children = new ArrayList<>(); + + for (Prel child : prel) { + children.add(child.accept(this, value)); + } + return (Prel) prel.copy(prel.getTraitSet(), children); + } + + @Override + public Prel visitProject(ProjectPrel prel, AtomicBoolean value) throws RuntimeException { + final RelNode inputRel = prel.getInput(); + final RexBuilder rexBuilder = prel.getCluster().getRexBuilder(); + final RelOptCluster cluster = prel.getCluster(); + final RelDataTypeFactory factory = cluster.getTypeFactory(); + final RelDataType inputRowType = inputRel.getRowType(); + final List projects = new ArrayList<>(); + final List nestedNodes = new ArrayList<>(); + final NestedFunctionFinder finder = new NestedFunctionFinder( + rexBuilder, + maxFunctionDepth, + inputRowType.getFieldCount()); + + for (RexNode rexNode : prel.getProjects()) { + projects.add(rexNode.accept(finder)); + if (finder.isNested()) { + nestedNodes.addAll(finder.getPushedDownExprs()); + } + finder.reset(); // Reset so we can reuse the same instance which is keeping track of input count when an expression is pushed down. 
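A rough, self-contained sketch of the same depth-limited split on a toy expression tree may help; Node and Splitter are hypothetical illustrative types, not Dremio or Calcite classes (the real code above works on RexCall/RexInputRef and additionally de-duplicates repeated subtrees):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    /** Toy expression node: either a function call or an input reference. */
    final class Node {
      final String fn;           // null means this node is an input reference
      final int ref;             // input index, used when fn == null
      final List<Node> args;
      Node(int ref) { this.fn = null; this.ref = ref; this.args = List.of(); }
      Node(String fn, Node... args) { this.fn = fn; this.ref = -1; this.args = Arrays.asList(args); }
      @Override public String toString() { return fn == null ? "$" + ref : fn + args; }
    }

    /** Mirrors NestedFunctionFinder.visitCall: subtrees nested deeper than maxDepth become refs. */
    final class Splitter {
      final int maxDepth;
      int nextRef;                                     // next free input index
      final List<Node> pushedDown = new ArrayList<>(); // subtrees collected for the lower project

      Splitter(int maxDepth, int inputSize) { this.maxDepth = maxDepth; this.nextRef = inputSize; }

      Node split(Node e, int depth) {
        if (e.fn == null) { return e; }                // input reference: keep as-is
        if (depth > maxDepth) {                        // too deep: push the whole subtree down
          pushedDown.add(e);
          return new Node(nextRef++);                  // and refer to it from above
        }
        Node[] newArgs = e.args.stream().map(a -> split(a, depth + 1)).toArray(Node[]::new);
        return new Node(e.fn, newArgs);
      }
    }

With maxDepth = 1 and a three-column input, splitting a toy version of REPLACE(REPLACE($1, 'A', 'B'), 'B', 'C') starting at depth 1 keeps one REPLACE on top as REPLACE($3, 'B', 'C') and records the inner REPLACE($1, 'A', 'B') in pushedDown, matching the ProjectPrel stack shown in the class javadoc.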
+ } + if (nestedNodes.isEmpty()) { + return super.visitProject(prel, value); + } + + value.set(true); + + final List<RexNode> pushedDownNodes = new ArrayList<>(); + final List<RelDataType> bottomProjectType = new ArrayList<>(); + final List<String> fieldNameList = new ArrayList<>(); + for (int i = 0; i < inputRowType.getFieldCount(); i++) { + final RelDataTypeField field = inputRowType.getFieldList().get(i); + final RelDataType type = field.getType(); + fieldNameList.add(field.getName()); + bottomProjectType.add(type); + pushedDownNodes.add(rexBuilder.makeInputRef(inputRowType.getFieldList().get(i).getType(), i)); + } + + for (int i = 0; i < nestedNodes.size(); i++) { + RexNode rexNode = nestedNodes.get(i); + pushedDownNodes.add(rexNode); + fieldNameList.add(NESTED_EXPRS_PREFIX + i); + bottomProjectType.add(rexNode.getType()); + } + + final RelDataType bottomType = factory.createStructType(bottomProjectType, fieldNameList); + final ProjectPrel bottomProject = ProjectPrel.create(cluster, prel.getTraitSet(), inputRel, pushedDownNodes, bottomType); + + final List<RelDataType> topProjectType = projects.stream().map(RexNode::getType).collect(Collectors.toList()); + final RelDataType topType = factory.createStructType(topProjectType, prel.getRowType().getFieldNames()); + return ProjectPrel.create(cluster, prel.getTraitSet(), bottomProject, projects, topType); + } + + public static final class NestedFunctionFinder extends RexShuttle { + private final RexBuilder rexBuilder; + private final int maxFunctionDepth; + + // Input size is needed because we add the pushed-down expressions at the end of the input node + private int inputSize; + + // Whether we found nesting. + private boolean isNested; + + // Keep a list of nested functions. There can be more than one, e.g. replace(replace(..)) and replace(replace(..)) + private List<RexNode> pushedDownExprs; + + // Use a map to keep track of duplicate expressions like: replace(replace(..)) or replace(replace(..)) + private Map<RexCall, RexNode> dupExprMap; + + private int functionDepth; + + public NestedFunctionFinder(RexBuilder rexBuilder, int maxFunctionDepth, int inputSize) { + this.rexBuilder = rexBuilder; + this.maxFunctionDepth = maxFunctionDepth; + this.inputSize = inputSize; + this.functionDepth = 0; + this.reset(); + } + + @Override + public RexNode visitCall(RexCall call) { + if (call.getOperator() instanceof SqlFunction) { + functionDepth++; + final RexNode rexNode; + if (functionDepth > maxFunctionDepth) { + // If we have reached the max function depth, break here and push the nested expression down + if (dupExprMap.containsKey(call)) { + rexNode = dupExprMap.get(call); + } else { + rexNode = rexBuilder.makeInputRef(call.getType(), inputSize++); // Make a reference to the pushed-down expression + isNested = true; + pushedDownExprs.add(call); // This expression is pushed down now + dupExprMap.put(call, rexNode); + } + } else { + rexNode = super.visitCall(call); + } + functionDepth--; + return rexNode; + } + return super.visitCall(call); + } + + public void reset() { + this.isNested = false; + this.pushedDownExprs = new ArrayList<>(); + this.dupExprMap = new HashMap<>(); + this.functionDepth = 0; + } + + public boolean isNested() { + return isNested; + } + + public List<RexNode> getPushedDownExprs() { + return pushedDownExprs; + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/FragmentStatVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/FragmentStatVisitor.java index d7c058d93d..d3d8880556 100644 ---
a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/FragmentStatVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/FragmentStatVisitor.java @@ -41,6 +41,7 @@ public FragmentStatVisitor(long targetSliceSize) { this.targetSliceSize = targetSliceSize; } + @Override public abstract Prel visitExchange(ExchangePrel prel, MajorFragmentStat s); @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/JoinConditionValidatorVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/JoinConditionValidatorVisitor.java new file mode 100644 index 0000000000..b4d9edc690 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/JoinConditionValidatorVisitor.java @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.physical.visitor; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexInputRef; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexVisitorImpl; +import org.apache.calcite.sql.SqlKind; + +import com.dremio.exec.planner.physical.JoinPrel; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.planner.physical.Prel; +import com.dremio.options.OptionManager; +import com.google.common.base.Preconditions; + +/* +* Validates that no join condition compares columns coming from the same side of the join. +* Such a condition is really a filter predicate that can be pushed below the join so that the join +* operator receives fewer input rows; this validation should therefore run only after all push-downs have finished. +* */ +public final class JoinConditionValidatorVisitor extends BasePrelVisitor { + private static final JoinConditionValidatorVisitor INSTANCE = new JoinConditionValidatorVisitor(); + + public static Prel validate(Prel prel, OptionManager options) { + return options.getOption(PlannerSettings.JOIN_CONDITIONS_VALIDATION) ?
+ prel.accept(INSTANCE, null) : + prel; + } + + public void validateJoinCondition(int leftTableColumnCount, int rightTableColumnCount, RexNode condition){ + + JoinConditionValidator joinConditionValidator = new JoinConditionValidator(leftTableColumnCount, rightTableColumnCount); + condition.accept(joinConditionValidator); + + boolean isValid = joinConditionValidator.isValid(); + + String message = String.format("Join condition %s is invalid.", condition); + + Preconditions.checkArgument(isValid, + message); + } + @Override + public Prel visitPrel(Prel prel, Void value) throws RuntimeException { + List children = new ArrayList<>(); + + for (Prel child : prel) { + children.add(child.accept(this, null)); + } + return (Prel) prel.copy(prel.getTraitSet(), children); + } + + @Override + public Prel visitJoin(JoinPrel prel, Void voidValue) throws RuntimeException { + RexNode condition = prel.getCondition(); + int leftTableColumnCount= prel.getLeft().getRowType().getFieldCount(); + int rightTableColumnCount= prel.getRight().getRowType().getFieldCount(); + + validateJoinCondition(leftTableColumnCount,rightTableColumnCount,condition); + return prel; + } + + private static class JoinConditionValidator extends RexVisitorImpl { + + private final int leftTableColumnCount; + private final int rightTableColumnCount; + + private boolean isValid; + + public boolean isValid() { + return isValid; + } + + public JoinConditionValidator(int leftTableColumnCount, int rightTableColumnCount) { + super(true); + this.leftTableColumnCount = leftTableColumnCount; + this.rightTableColumnCount = rightTableColumnCount; + this.isValid = true; + } + + @Override + public Void visitCall(RexCall call){ + if(call.op.kind== SqlKind.EQUALS + || call.op.kind== SqlKind.NOT_EQUALS + || call.op.kind== SqlKind.LESS_THAN + || call.op.kind== SqlKind.LESS_THAN_OR_EQUAL + || call.op.kind== SqlKind.GREATER_THAN + || call.op.kind== SqlKind.GREATER_THAN_OR_EQUAL + ) { + if (call.operands.get(0) instanceof RexInputRef && call.operands.get(1) instanceof RexInputRef) { + + int in1 = ((RexInputRef) call.operands.get(0)).getIndex(); + int in2 = ((RexInputRef) call.operands.get(1)).getIndex(); + if ((in1 < leftTableColumnCount && in2 < leftTableColumnCount) // If both expressions are on left side + || (in1 >= leftTableColumnCount && in2 >= leftTableColumnCount) // If both expressions are on right side + || (in1 >= leftTableColumnCount + rightTableColumnCount || in2 >= leftTableColumnCount + rightTableColumnCount)) // If either of them is beyond upper bound + { + isValid = false; + } + } + } + return super.visitCall(call); + } + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/RuntimeFilterDecorator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/RuntimeFilterDecorator.java index 1798415a44..22ca85ffeb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/RuntimeFilterDecorator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/RuntimeFilterDecorator.java @@ -65,6 +65,7 @@ private JoinVisitor(boolean nonParitionRuntimeFiltersEnabled) { this.nonParitionRuntimeFiltersEnabled = nonParitionRuntimeFiltersEnabled; } + @Override public Prel visitPrel(Prel prel, Void value) throws RuntimeException { List children = new ArrayList<>(); for (Prel child : prel) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/UnionAllExpander.java 
b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/UnionAllExpander.java index 53dccbef6b..e276829a98 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/UnionAllExpander.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/physical/visitor/UnionAllExpander.java @@ -124,7 +124,7 @@ public Prel visitPrel(Prel prel, Void value) throws IOException { try { UnionChild newInput = pq.poll(); Prel newRight = addRoundRobin(newInput.relNode); - currentRel = new UnionAllPrel(prel.getCluster(), prel.getTraitSet(), ImmutableList.of(currentRel, newRight), false); + currentRel = new UnionAllPrel(prel.getCluster(), prel.getTraitSet().replace(DistributionTrait.ANY), ImmutableList.of(currentRel, newRight), false); } catch (InvalidRelException ex) { // This exception should not be thrown as we already checked compatibility logger.warn("Failed to expand unionAll as inputs are not compatible", ex); @@ -148,7 +148,8 @@ public Prel visitPrel(Prel prel, Void value) throws IOException { } else { currentWidth = Math.min(currentWidth, newWidth); } - currentRel = new UnionAllPrel(prel.getCluster(), prel.getTraitSet(), ImmutableList.of(currentRel, newRight), false); + // DistributionTrait.ANY doesn't solve the single-thread problem; needs revisiting (see DX-64339) + currentRel = new UnionAllPrel(prel.getCluster(), prel.getTraitSet().replace(DistributionTrait.ANY), ImmutableList.of(currentRel, newRight), false); } catch (InvalidRelException ex) { // This exception should not be thrown as we already checked compatibility logger.warn("Failed to expand unionAll as inputs are not compatible", ex); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/CalciteArrowHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/CalciteArrowHelper.java index f7847021aa..7fa2baa3ee 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/CalciteArrowHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/CalciteArrowHelper.java @@ -81,7 +81,9 @@ public RelDataType toCalciteRecordType(RelDataTypeFactory factory, Function { if (inclusionPredicate.apply(f)) { - builder.add(f.getName(), toCalciteType(f, factory, withComplexTypeSupport));}}); + builder.add(f.getName(), toCalciteType(f, factory, withComplexTypeSupport)); + } + }); } RelDataType rowType = builder.build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/ConvertletTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/ConvertletTable.java index 3b183565de..751037d138 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/ConvertletTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/ConvertletTable.java @@ -107,14 +107,15 @@ public RexNode convertCall(SqlRexContext cx, SqlCall call) { final RexBuilder rexBuilder = cx.getRexBuilder(); return rexBuilder.makeCall( e.getType(), SqlStdOperatorTable.MINUS_DATE, e.getOperands()); - default: - return e; + break; } } + break; default: - return e; + break; } + return e; } }); // For normalizing TimestampAdd to Datetime_Plus @@ -130,9 +131,12 @@ public RexNode convertCall(SqlRexContext cx, SqlCall call) { // TODO(DX-11268): Support sub-second intervals with TIMESTAMPADD.
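// What the convertlet below produces: TIMESTAMPADD(unit, count, datetime) is normalized to
// DATETIME_PLUS(datetime, count * <interval of one unit>); e.g., TIMESTAMPADD(MONTH, 3, ts)
// behaves like ts + 3 * INTERVAL '1' MONTH. Sub-second units have no such interval
// representation here yet, hence the guard that follows.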
case MILLISECOND: case MICROSECOND: + case NANOSECOND: throw UserException.unsupportedError() .message("TIMESTAMPADD function supports the following time units: YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND") .build(); + default: + break; } final RexNode timestampNode = cx.convertExpression(call.operand(2)); final RexNode multiplyNode = rexBuilder.makeCall(SqlStdOperatorTable.MULTIPLY, @@ -168,12 +172,10 @@ public RexNode convertCall(SqlRexContext cx, SqlCall call) { */ @Override public SqlRexConvertlet get(SqlCall call) { - SqlRexConvertlet convertlet; - - if ((convertlet = super.get(call)) != null) { + SqlRexConvertlet convertlet = super.get(call); + if (convertlet != null) { return convertlet; } - return StandardConvertletTable.INSTANCE.get(call); } @@ -245,6 +247,9 @@ private static RelDataType consistentType(SqlRexContext cx, case NUMERIC: nonCharacterTypes.add( cx.getTypeFactory().createSqlType(SqlTypeName.BIGINT)); + break; + default: + break; } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioCompositeSqlOperatorTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioCompositeSqlOperatorTable.java index f2c9b0ce60..360fcda03c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioCompositeSqlOperatorTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioCompositeSqlOperatorTable.java @@ -15,7 +15,9 @@ */ package com.dremio.exec.planner.sql; +import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlIdentifier; @@ -26,28 +28,35 @@ import org.apache.calcite.sql.fun.SqlLibraryOperatorTableFactory; import org.apache.calcite.sql.fun.SqlLibraryOperators; import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.calcite.sql.util.ListSqlOperatorTable; import org.apache.calcite.sql.validate.SqlNameMatcher; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; public class DremioCompositeSqlOperatorTable implements SqlOperatorTable { private static final DremioCompositeSqlOperatorTable INSTANCE = new DremioCompositeSqlOperatorTable(); private static final SqlOperatorTable DREMIO_OT = DremioSqlOperatorTable.instance(); - private static final SqlOperatorTable STD_OT = SqlStdOperatorTable.instance(); - private static final SqlOperatorTable ORACLE_OT = - new ListSqlOperatorTable(SqlLibraryOperatorTableFactory.INSTANCE - .getOperatorTable(SqlLibrary.ORACLE).getOperatorList().stream() - .filter(op -> op != SqlLibraryOperators.LTRIM) - .filter(op -> op != SqlLibraryOperators.RTRIM) - .filter(op -> op != SqlLibraryOperators.SUBSTR) // calcite does not support oracles substring CALCITE-4408 - .filter(op -> op != SqlLibraryOperators.DECODE) // Dremio currently uses hive decode - .collect(ImmutableList.toImmutableList())); + + private static final SqlOperatorTable STD_OT = FilteredSqlOperatorTable.create( + SqlStdOperatorTable.instance(), + // REPLACE just uses the precision of its first argument, which is problematic if the string increases in length after replacement. + SqlStdOperatorTable.REPLACE, + // CARDINALITY in Calcite accepts MAP, LIST and STRUCT. In Dremio, we plan to support only MAP and LIST.
+ SqlStdOperatorTable.CARDINALITY + ); + private static final SqlOperatorTable ORACLE_OT = FilteredSqlOperatorTable.create( + SqlLibraryOperatorTableFactory.INSTANCE.getOperatorTable(SqlLibrary.ORACLE), + SqlLibraryOperators.LTRIM, + SqlLibraryOperators.RTRIM, + SqlLibraryOperators.SUBSTR, // Calcite does not support Oracle's SUBSTR (CALCITE-4408) + SqlLibraryOperators.DECODE // Dremio currently uses Hive's DECODE + ); private static final List<SqlOperator> operators = ImmutableList.<SqlOperator>builder() .addAll(DREMIO_OT.getOperatorList()) .addAll(STD_OT.getOperatorList().stream() .filter(op -> op != SqlStdOperatorTable.ROUND) .filter(op -> op != SqlStdOperatorTable.TRUNCATE) + .filter(op -> op != SqlStdOperatorTable.CARDINALITY) .collect(ImmutableList.toImmutableList())) .addAll(ORACLE_OT.getOperatorList()) .build(); @@ -79,4 +88,58 @@ public List<SqlOperator> getOperatorList() { public static DremioCompositeSqlOperatorTable getInstance() { return INSTANCE; } + + /** + * Takes a SqlOperatorTable, but ignores lookups for a select number of functions. + * This is needed, since we don't want the default behavior of Calcite's operator table, + * but we also don't want to create a whole new operator table which may have different behavior. + */ + private static final class FilteredSqlOperatorTable implements SqlOperatorTable { + private final SqlOperatorTable sqlOperatorTable; + private final ImmutableSet<String> functionsToFilter; + + private FilteredSqlOperatorTable( + SqlOperatorTable sqlOperatorTable, + ImmutableSet<String> functionsToFilter) { + this.functionsToFilter = functionsToFilter; + this.sqlOperatorTable = sqlOperatorTable; + } + + @Override + public void lookupOperatorOverloads( + SqlIdentifier opName, + SqlFunctionCategory category, + SqlSyntax syntax, + List<SqlOperator> operatorList, + SqlNameMatcher nameMatcher) { + if (opName.isSimple() && functionsToFilter.contains(opName.getSimple().toUpperCase())) { + return; + } + + sqlOperatorTable.lookupOperatorOverloads( + opName, + category, + syntax, + operatorList, + nameMatcher); + } + + @Override + public List<SqlOperator> getOperatorList() { + return sqlOperatorTable + .getOperatorList() + .stream() + .filter(operator -> !functionsToFilter.contains(operator.getName().toUpperCase())) + .collect(Collectors.toList()); + } + + public static SqlOperatorTable create(SqlOperatorTable sqlOperatorTable, SqlOperator... operatorsToFilter) { + ImmutableSet<String> functionsToFilter = Arrays + .stream(operatorsToFilter) + .map(operator -> operator.getName().toUpperCase()) + .collect(ImmutableSet.toImmutableSet()); + + return new FilteredSqlOperatorTable(sqlOperatorTable, functionsToFilter); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioOperandTypes.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioOperandTypes.java new file mode 100644 index 0000000000..ba95e7890b --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioOperandTypes.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.exec.planner.sql; + +import org.apache.calcite.sql.type.OperandTypes; +import org.apache.calcite.sql.type.SqlSingleOperandTypeChecker; +import org.apache.calcite.sql.type.SqlTypeFamily; + +public final class DremioOperandTypes { + + private DremioOperandTypes() {} + + /** + * Operand type that allows either ARRAY or MAP as a + * parameter to the function + */ + public static final SqlSingleOperandTypeChecker MAP_OR_LIST = + OperandTypes.or(OperandTypes.family(SqlTypeFamily.ARRAY), + OperandTypes.family(SqlTypeFamily.MAP)); +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlOperatorTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlOperatorTable.java index 9b40719756..a0e3047ce5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlOperatorTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlOperatorTable.java @@ -126,6 +126,18 @@ public class DremioSqlOperatorTable extends ReflectiveSqlOperatorTable { OperandTypes.NUMERIC_OPTIONAL_INTEGER, SqlFunctionCategory.NUMERIC); + // SqlStdOperatorTable.CARDINALITY is overridden here because + // it supports LIST, MAP as well as STRUCT. In Dremio, we want to + // allow only LIST and MAP. Not STRUCT. + public static final SqlFunction CARDINALITY = + new SqlFunction( + "CARDINALITY", + SqlKind.OTHER_FUNCTION, + ReturnTypes.INTEGER_NULLABLE, + null, + DremioOperandTypes.MAP_OR_LIST, + SqlFunctionCategory.SYSTEM); + // ----------------------- // Dremio Custom Functions diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlToRelConverter.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlToRelConverter.java index 87d02dd819..e258918980 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlToRelConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioSqlToRelConverter.java @@ -58,16 +58,18 @@ import org.apache.calcite.sql2rel.SqlToRelConverter; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.util.Util; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.exec.calcite.logical.CopyIntoTableCrel; import com.dremio.exec.calcite.logical.TableModifyCrel; import com.dremio.exec.calcite.logical.TableOptimizeCrel; +import com.dremio.exec.calcite.logical.VacuumTableCrel; import com.dremio.exec.catalog.CatalogIdentity; import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioCatalogReader; import com.dremio.exec.catalog.DremioPrepareTable; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.ops.ViewExpansionContext.ViewExpansionToken; import com.dremio.exec.planner.StatelessRelShuttleImpl; @@ -79,9 +81,11 @@ import com.dremio.exec.planner.sql.parser.SqlCopyIntoTable; import com.dremio.exec.planner.sql.parser.SqlDmlOperator; import com.dremio.exec.planner.sql.parser.SqlOptimize; +import com.dremio.exec.planner.sql.parser.SqlVacuumTable; import com.dremio.exec.planner.types.JavaTypeFactoryImpl; import com.dremio.exec.record.BatchSchema; import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.users.UserNotFoundException; 
import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -227,15 +231,25 @@ protected RelRoot convertQueryRecursive(SqlNode query, boolean top, RelDataType */ private RelRoot convertOther(SqlNode query, boolean top, RelDataType targetRowType) { if (query instanceof SqlOptimize) { - NamespaceKey path = ((SqlOptimize)query).getPath(); + SqlOptimize optimizeNode = ((SqlOptimize)query); + NamespaceKey path = optimizeNode.getPath(); Prepare.PreparingTable nsTable = catalogReader.getTable(path.getPathComponents()); - return RelRoot.of(new TableOptimizeCrel(cluster, cluster.traitSetOf(Convention.NONE), nsTable.toRel(createToRelContext()), nsTable, null, OptimizeOptions.DEFAULT), SqlKind.OTHER); + //Get rel node with filters. + RelNode rel = convertQueryRecursive(((SqlOptimize) query).getSourceSelect(), top, targetRowType).rel; + return RelRoot.of(new TableOptimizeCrel(cluster, cluster.traitSetOf(Convention.NONE),rel.getInput(0), nsTable, null, OptimizeOptions.createInstance(optimizeNode)), + SqlKind.OTHER); } else if (query instanceof SqlCopyIntoTable) { NamespaceKey path = ((SqlCopyIntoTable)query).getPath(); Prepare.PreparingTable nsTable = catalogReader.getTable(path.getPathComponents()); CopyIntoTableContext copyIntoTableContext= new CopyIntoTableContext((SqlCopyIntoTable)query); return RelRoot.of(new CopyIntoTableCrel(cluster, cluster.traitSetOf(Convention.NONE), nsTable, copyIntoTableContext), SqlKind.OTHER); - } else { + } else if (query instanceof SqlVacuumTable) { + NamespaceKey path = ((SqlVacuumTable) query).getPath(); + Prepare.PreparingTable nsTable = catalogReader.getTable(path.getPathComponents()); + return RelRoot.of( + new VacuumTableCrel(cluster, cluster.traitSetOf(Convention.NONE), nsTable.toRel(createToRelContext()), nsTable, + null, ((SqlVacuumTable) query).getVacuumOptions()), SqlKind.OTHER); + } else { return super.convertQueryRecursive(query, top, targetRowType); } } @@ -430,38 +444,26 @@ private static RelRoot getExpandedRelNode(NamespaceKey path, SqlValidatorAndToRelContext.Builder builder = SqlValidatorAndToRelContext.builder(sqlConverter) .withSchemaPath(context) .withSystemDefaultParserConfig(); - if(viewOwner != null) { + if (viewOwner != null) { builder = builder.withUser(viewOwner); } - // versionContext is the version specified within the View Definition itself - // (i.e for a view whose SQL is : select * from AT ver1 - // versionContext would be ver1 - // viewExpansionVersionContext is the "outer" version specified at the parent level where this view is being expanded. - // For example : - // SELECT * FROM V2 AT TAG tag1 ==> tag1 is the viewExpansionVersionContext - // Definition of V2 : SELECT * FROM V1 AT tag0 ==> tag0 is the versionContext - // The version specified in the view definition (versionContex) should always override the outer version(viewExpansionVersionContext). 
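+ // An explicitly specified version still wins; otherwise nested views/tables now inherit
+ // the version through the TableVersionContext recorded on the ExpansionNode instead of
+ // through a converter-wide viewExpansionVersionContext (removed from SqlConverter below).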
+ TableVersionContext tableVersionContext = null; if (versionContext != null && versionContext.isSpecified()) { + // Nested views/tables should inherit this version context (unless explicitly overridden by the nested view/table) builder = builder.withVersionContext(path.getRoot(), versionContext); - } else if (sqlConverter.viewExpansionVersionContext != null && sqlConverter.viewExpansionVersionContext.isSpecified()) { - builder = builder.withVersionContext(path.getRoot(), sqlConverter.viewExpansionVersionContext); + tableVersionContext = TableVersionContext.of(versionContext); } SqlValidatorAndToRelContext newConverter = builder.build(); final SqlNode parsedNode = newConverter.getSqlConverter().parse(queryString); final SqlNode validatedNode = newConverter.validate(parsedNode); if (path != null && sqlConverter.getSubstitutionProvider().isDefaultRawReflectionEnabled()) { final RelRootPlus unflattenedRoot = newConverter.toConvertibleRelRoot(validatedNode, true, false, false); - ExpansionNode expansionNode = (ExpansionNode) wrapExpansionNode( - sqlConverter, - batchSchema, - path, - unflattenedRoot.rel, - unflattenedRoot.validatedRowType, - unflattenedRoot.isContextSensitive() || ExpansionNode.isContextSensitive(unflattenedRoot.rel)); + final RelRootPlus rootWithCast = adjustIcebergSchema(path, sqlConverter, unflattenedRoot); + ExpansionNode expansionNode = (ExpansionNode) wrapExpansionNode(sqlConverter, batchSchema, path, rootWithCast, tableVersionContext); if (expansionNode.isDefault()) { + sqlConverter.getViewExpansionContext().setSubstitutedWithDRR(); sqlConverter.getFunctionContext().getContextInformation().setPlanCacheable(unflattenedRoot.isPlanCacheable()); - return new RelRoot(expansionNode, unflattenedRoot.validatedRowType, unflattenedRoot.kind, - unflattenedRoot.fields, unflattenedRoot.collation, ImmutableList.of()); + return rootWithCast.withExpansionNode(expansionNode); // Successfully replaced with default raw reflection during ConvertToRel } } final RelRootPlus root = newConverter.toConvertibleRelRoot(validatedNode, true, true); @@ -470,24 +472,52 @@ private static RelRoot getExpandedRelNode(NamespaceKey path, return root; } - boolean versionedView = false; - String sourceName = path.getRoot(); - if (CatalogUtil.requestedPluginSupportsVersionedTables(sourceName, sqlConverter.getCatalog())) { - versionedView = true; - } - // we need to make sure that if a inner expansion is context sensitive, we consider the current - // expansion context sensitive even if it isn't locally. - final boolean contextSensitive = root.isContextSensitive() || ExpansionNode.isContextSensitive(root.rel); - if (versionedView) { - RelDataType adjustedRowType = ((JavaTypeFactoryImpl) sqlConverter.getTypeFactory()).createTypeWithMaxVarcharPrecision(root.validatedRowType); - // Need to cast the rowtype to be nullable to prevent null check conflicts. 
(DX-49215) - RelDataType nullableRowType = sqlConverter.getTypeFactory().createTypeWithNullability(adjustedRowType, true); - return new RelRoot(ExpansionNode.wrap(path, root.rel, nullableRowType, contextSensitive, false), - nullableRowType, root.kind, root.fields, root.collation, ImmutableList.of()); - } else { - return new RelRoot(ExpansionNode.wrap(path, root.rel, root.validatedRowType, contextSensitive, false), - root.validatedRowType, root.kind, root.fields, root.collation, ImmutableList.of()); + final RelRootPlus rootWithCast = adjustIcebergSchema(path, sqlConverter, root); + return rootWithCast.withExpansionNode(ExpansionNode.wrap(path, rootWithCast.rel, rootWithCast.validatedRowType, + rootWithCast.isContextSensitive(), false, tableVersionContext)); + } + + /** + * Dremio has multiple schema types that it has to convert between: + * + * - Calcite schema used for planning + * - Arrow batch schema used for query execution + * - Iceberg schema used within Iceberg tables/views + * + * The Calcite schema carries the most type precision and nullability information, whereas the Arrow batch schema + * loses some of it, and that loss is propagated down into the Iceberg schemas the Dremio engine creates. For example, Arrow does + * not include VARCHAR precision or NOT NULL on any type. As a result, when querying Iceberg tables and views + * (including reflection materializations), we need to CAST the Calcite query tree underneath the table/view into + * the table/view's lossy Arrow/Iceberg schema. + * + * Iceberg tables and views use {@link com.dremio.exec.store.iceberg.SchemaConverter} to convert between Iceberg schema + * and Arrow batch schema. + * + * Sonar Views don't have this type mismatch problem because they store both: + * - Arrow batch schema fields. See {@link com.dremio.exec.util.ViewFieldsHelper#getBatchSchemaFields(BatchSchema)} + * - Calcite fields.
See {@link com.dremio.exec.util.ViewFieldsHelper#getCalciteViewFields(DatasetConfig)} + * + * @param path View path + * @param sqlConverter + * @param root Root of View's query tree + * @return new root with CAST if needed + */ + private static RelRootPlus adjustIcebergSchema(NamespaceKey path, SqlConverter sqlConverter, RelRootPlus root) { + if (!CatalogUtil.requestedPluginSupportsVersionedTables(path.getRoot(), sqlConverter.getCatalog())) { + return root; } + final JavaTypeFactoryImpl typeFactory = (JavaTypeFactoryImpl) sqlConverter.getTypeFactory(); + + // Adjust the validated row type from the SqlNode + final RelDataType validatedRowTypeNullableMaxVarchar = typeFactory.createTypeWithNullability( + typeFactory.createTypeWithMaxVarcharPrecision(root.validatedRowType), true); + + // Apply CAST to the query tree + final RelDataType relNodeRowTypeNullableMaxVarchar = typeFactory.createTypeWithNullability( + typeFactory.createTypeWithMaxVarcharPrecision(root.rel.getRowType()), true); + final RelNode relNodeWithCast = MoreRelOptUtil.createCastRel(root.rel, relNodeRowTypeNullableMaxVarchar); + + return RelRootPlus.of(root, relNodeWithCast, validatedRowTypeNullableMaxVarchar); } public static RelRoot expandView(NamespaceKey path, @@ -516,13 +546,15 @@ public static RelRoot expandView(NamespaceKey path, } } - private static RelNode wrapExpansionNode(SqlConverter sqlConverter, BatchSchema batchSchema, NamespaceKey path, RelNode root, RelDataType rowType, boolean contextSensitive) { + private static RelNode wrapExpansionNode(SqlConverter sqlConverter, BatchSchema batchSchema, NamespaceKey path, + RelRootPlus root, TableVersionContext versionContext) { List vdsFields = batchSchema == null ? new ArrayList<>() : batchSchema.getFields().stream() .map(Field::getName) .sorted() .collect(Collectors.toList()); - return sqlConverter.getSubstitutionProvider().wrapExpansionNode(path, root, vdsFields, rowType, contextSensitive); + return sqlConverter.getSubstitutionProvider().wrapExpansionNode(path, root.rel, vdsFields, root.validatedRowType, + root.isContextSensitive(), versionContext, sqlConverter.getCatalog()); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioToRelContext.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioToRelContext.java index ca324757d0..ce8646e214 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioToRelContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/DremioToRelContext.java @@ -69,6 +69,7 @@ public RelRoot expandView(RelDataType rowType, static DremioQueryToRelContext createQueryContext(final SqlConverter sqlConverter) { return new DremioQueryToRelContext() { + @Override public SqlValidatorAndToRelContext.Builder getSqlValidatorAndToRelContext() { return SqlValidatorAndToRelContext.builder(sqlConverter); } @@ -83,6 +84,7 @@ public List getTableHints() { return ImmutableList.of(); } + @Override public RelRoot expandView(ViewTable view) { final RelRoot root; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/OperatorTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/OperatorTable.java index ff6dae0dcb..f33bcd9f85 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/OperatorTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/OperatorTable.java @@ -30,16 +30,20 @@ /** * Dremio's hand rolled ChainedOperatorTable + * + * This class composes both the DremioCompositeSqlOperatorTable and the provided FunctionImplementationRegistry */ 
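The precedence implemented below can be pictured with a minimal sketch; the maps and ChainedLookupSketch are hypothetical stand-ins (the real code goes through Calcite's SqlIdentifier/SqlNameMatcher machinery):

    import java.util.List;
    import java.util.Map;

    final class ChainedLookupSketch {
      private final Map<String, List<String>> composite; // Dremio + std + Oracle operators
      private final Map<String, List<String>> registry;  // FunctionImplementationRegistry functions

      ChainedLookupSketch(Map<String, List<String>> composite, Map<String, List<String>> registry) {
        this.composite = composite;
        this.registry = registry;
      }

      /** The composite table is consulted first and, when it matches, shadows the registry. */
      List<String> lookup(String name) {
        List<String> hits = composite.getOrDefault(name.toUpperCase(), List.of());
        return hits.isEmpty() ? registry.getOrDefault(name.toUpperCase(), List.of()) : hits;
      }
    }

This mirrors the early return added to lookupOperatorOverloads: once the composite table yields any overloads, registry functions with the same name are no longer consulted.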
public class OperatorTable implements SqlOperatorTable { - private static final DremioCompositeSqlOperatorTable dremioCompositeOperatorTable = - new DremioCompositeSqlOperatorTable(); - private List operators; - private ArrayListMultimap opMap = ArrayListMultimap.create(); + private static final DremioCompositeSqlOperatorTable DREMIO_COMPOSITE_SQL_OPERATOR_TABLE = new DremioCompositeSqlOperatorTable(); + + private final List operators; + private final ArrayListMultimap opMap; public OperatorTable(FunctionImplementationRegistry registry) { operators = Lists.newArrayList(); - operators.addAll(dremioCompositeOperatorTable.getOperatorList()); + operators.addAll(DREMIO_COMPOSITE_SQL_OPERATOR_TABLE.getOperatorList()); + + opMap = ArrayListMultimap.create(); registry.register(this); } @@ -55,17 +59,23 @@ public void lookupOperatorOverloads(SqlIdentifier opName, SqlSyntax syntax, List operatorList, SqlNameMatcher nameMatcher) { // don't try to evaluate operators that have non name. - if(opName == null || opName.names == null) { + if (opName == null || opName.names == null) { return; } - dremioCompositeOperatorTable.lookupOperatorOverloads(opName,category,syntax,operatorList, nameMatcher); + DREMIO_COMPOSITE_SQL_OPERATOR_TABLE.lookupOperatorOverloads( + opName, + category, + syntax, + operatorList, + nameMatcher); + if (!operatorList.isEmpty()) { + return; + } - if (operatorList.isEmpty() && syntax == SqlSyntax.FUNCTION && opName.isSimple()) { + if (syntax == SqlSyntax.FUNCTION && opName.isSimple()) { List ops = opMap.get(opName.getSimple().toUpperCase()); - if (ops != null) { - operatorList.addAll(ops); - } + operatorList.addAll(ops); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzer.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzer.java deleted file mode 100644 index bbd1c1a0b6..0000000000 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzer.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner.sql; - -import java.util.List; - -import org.apache.calcite.sql.advise.SqlAdvisor; -import org.apache.calcite.sql.validate.SqlMoniker; -import org.apache.calcite.sql.validate.SqlValidatorWithHints; - -import com.google.common.annotations.VisibleForTesting; - -/** - * Class responsible for setting up dependencies required for SQL validation and suggestion - * as well as execution of SQL validation and SQL error suggestion using Calcite's SqlAdvisor. - */ -public class SQLAnalyzer { - - @VisibleForTesting - protected final SqlValidatorWithHints validator; - - protected SQLAnalyzer(final SqlValidatorWithHints validator) { - this.validator = validator; - } - - /** - * Pass the SqlValidatorWithHints implementation to Calcite's SqlAdvisor - * for query completion hints. - * - * @param sql The SQL being evaluated. - * @param cursorPosition The current cursor position in the editor. 
- * @return List that represents the query completion options for the SQL editor. - */ - public List suggest(String sql, int cursorPosition) { - SqlAdvisor sqlAdvisor = new SqlAdvisor(validator); - String[] replaced = {null}; - return sqlAdvisor.getCompletionHints(sql, cursorPosition , replaced); - } - - /** - * Pass the SqlValidatorWithHints implementation to Calcite's SqlAdvisor - * for query validation. - * - * @param sql The SQL being evaluated. - * @return List that represents parser or validation errors. - */ - public List validate(String sql) { - SqlAdvisor sqlAdvisor = new SqlAdvisor(validator); - return sqlAdvisor.validate(sql); - } -} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzerFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzerFactory.java deleted file mode 100644 index 2ef9e7dafa..0000000000 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SQLAnalyzerFactory.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner.sql; - -import java.util.List; - -import org.apache.calcite.adapter.java.JavaTypeFactory; -import org.apache.calcite.sql.SqlOperatorTable; -import org.apache.calcite.sql.advise.SqlAdvisorValidator; -import org.apache.calcite.sql.util.ChainedSqlOperatorTable; -import org.apache.calcite.sql.validate.SqlValidatorUtil; -import org.apache.calcite.sql.validate.SqlValidatorWithHints; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.CatalogUser; -import com.dremio.exec.catalog.DremioCatalogReader; -import com.dremio.exec.catalog.MetadataRequestOptions; -import com.dremio.exec.expr.fn.FunctionImplementationRegistry; -import com.dremio.exec.ops.ViewExpansionContext; -import com.dremio.exec.planner.physical.PlannerSettings; -import com.dremio.exec.planner.types.JavaTypeFactoryImpl; -import com.dremio.exec.server.SabotContext; -import com.dremio.exec.server.options.DefaultOptionManager; -import com.dremio.exec.server.options.EagerCachingOptionManager; -import com.dremio.exec.server.options.OptionManagerWrapper; -import com.dremio.exec.server.options.ProjectOptionManager; -import com.dremio.exec.server.options.QueryOptionManager; -import com.dremio.exec.store.SchemaConfig; -import com.dremio.options.OptionManager; -import com.dremio.service.namespace.NamespaceKey; -import com.google.common.collect.ImmutableList; - -public class SQLAnalyzerFactory { - - private static final Logger logger = LoggerFactory.getLogger(SQLAnalyzerFactory.class); - - /** - * Factory method to create the SQLAnalyzer using the appropriate implementation of SqlValidatorWithHints. - * - * If createForSqlSuggestions is true, construct a SqlAdvisorValidator instance, - * otherwise construct a SqlValidatorImpl instance. Inject this into the constructor - * for a SQLAnalyzer object. 
- * - * @param username - * @param sabotContext - * @param context - * @param createForSqlSuggestions - * @return SQLAnalyzer instance - */ - public static SQLAnalyzer createSQLAnalyzer(final String username, - final SabotContext sabotContext, - final List context, - final boolean createForSqlSuggestions, - ProjectOptionManager projectOptionManager) { - final ViewExpansionContext viewExpansionContext = new ViewExpansionContext(new CatalogUser(username)); - final OptionManager optionManager = OptionManagerWrapper.Builder.newBuilder() - .withOptionManager(new DefaultOptionManager(sabotContext.getOptionValidatorListing())) - .withOptionManager(new EagerCachingOptionManager(projectOptionManager)) - .withOptionManager(new QueryOptionManager(sabotContext.getOptionValidatorListing())) - .build(); - final NamespaceKey defaultSchemaPath = context == null ? null : new NamespaceKey(context); - - final SchemaConfig newSchemaConfig = SchemaConfig.newBuilder(CatalogUser.from(username)) - .defaultSchema(defaultSchemaPath) - .optionManager(optionManager) - .setViewExpansionContext(viewExpansionContext) - .build(); - - Catalog catalog = sabotContext.getCatalogService() - .getCatalog(MetadataRequestOptions.of(newSchemaConfig)); - JavaTypeFactory typeFactory = JavaTypeFactoryImpl.INSTANCE; - DremioCatalogReader catalogReader = new DremioCatalogReader(catalog, typeFactory); - - FunctionImplementationRegistry functionImplementationRegistry = optionManager.getOption - (PlannerSettings.ENABLE_DECIMAL_V2_KEY).getBoolVal() ? sabotContext.getDecimalFunctionImplementationRegistry() - : sabotContext.getFunctionImplementationRegistry(); - OperatorTable opTable = new OperatorTable(functionImplementationRegistry); - SqlOperatorTable chainedOpTable = new ChainedSqlOperatorTable(ImmutableList.of(opTable, catalogReader)); - - // Create the appropriate implementation depending on intended use of the validator. - SqlValidatorWithHints validator = - createForSqlSuggestions ? 
- new SqlAdvisorValidator(chainedOpTable, catalogReader, typeFactory, - SqlAdvisorValidator.Config.DEFAULT.withSqlConformance(DremioSqlConformance.INSTANCE)) : - SqlValidatorUtil.newValidator(chainedOpTable, catalogReader, typeFactory, - SqlAdvisorValidator.Config.DEFAULT.withSqlConformance(DremioSqlConformance.INSTANCE)); - - return new SQLAnalyzer(validator); - } -} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SchemaUtilities.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SchemaUtilities.java index 56392657c9..8528da5326 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SchemaUtilities.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SchemaUtilities.java @@ -24,8 +24,16 @@ import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.SqlUtils; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.exec.planner.sql.parser.SqlTableVersionSpec; import com.dremio.exec.planner.types.JavaTypeFactoryImpl; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; import com.dremio.service.namespace.NamespaceKey; import com.google.common.base.Function; import com.google.common.base.Joiner; @@ -34,10 +42,22 @@ public class SchemaUtilities { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SchemaUtilities.class); - public static TableWithPath verify(final Catalog catalog, SqlIdentifier identifier){ + public static TableWithPath verify(final Catalog catalog, SqlIdentifier identifier, UserSession userSession, SqlTableVersionSpec sqlTableVersionSpec, OptionManager optionManager){ NamespaceKey path = catalog.resolveSingle(new NamespaceKey(identifier.names)); - DremioTable table = catalog.getTable(path); - if(table == null) { + SqlHandlerUtil.validateSupportForVersionedReflections(path.getRoot(), catalog, optionManager); + TableVersionContext tableVersionContext = null; + if (sqlTableVersionSpec != null && sqlTableVersionSpec.getTableVersionSpec().getTableVersionType() != TableVersionType.NOT_SPECIFIED) { + tableVersionContext = sqlTableVersionSpec.getTableVersionSpec().getTableVersionContext(); + } else if (CatalogUtil.requestedPluginSupportsVersionedTables(path.getRoot(), catalog)) { + tableVersionContext = TableVersionContext.of(userSession.getSessionVersionForSource(path.getRoot())); + } + CatalogEntityKey catalogEntityKey = CatalogEntityKey.newBuilder() + .keyComponents(path.getPathComponents()) + .tableVersionContext(tableVersionContext) + .build(); + + DremioTable table = CatalogUtil.getTable(catalogEntityKey, catalog); + if (table == null) { throw UserException.parseError().message("Unable to find table %s.", path).build(logger); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlConverter.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlConverter.java index 969b4e48cd..28616b3a5c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlConverter.java @@ -21,6 +21,7 @@ import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCostFactory; import org.apache.calcite.plan.RelOptPlanner; 
+import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelCollations; import org.apache.calcite.rel.RelNode; @@ -35,6 +36,7 @@ import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql2rel.SqlRexConvertletTable; import org.apache.calcite.util.ImmutableIntList; +import org.apache.calcite.util.Litmus; import org.apache.calcite.util.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,11 +46,11 @@ import com.dremio.common.scanner.persistence.ScanResult; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUser; -import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.expr.fn.FunctionImplementationRegistry; import com.dremio.exec.ops.ViewExpansionContext; import com.dremio.exec.planner.DremioRexBuilder; import com.dremio.exec.planner.DremioVolcanoPlanner; +import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.acceleration.MaterializationList; import com.dremio.exec.planner.acceleration.substitution.AccelerationAwareSubstitutionProvider; import com.dremio.exec.planner.acceleration.substitution.SubstitutionProviderFactory; @@ -92,7 +94,6 @@ public class SqlConverter { private final Catalog catalog; private final SqlRexConvertletTable convertletTable; private final ReflectionAllowedMonitoringConvertletTable.ConvertletTableNotes convertletTableNotes; - public VersionContext viewExpansionVersionContext; public SqlConverter( final PlannerSettings settings, @@ -135,7 +136,6 @@ public SqlConverter( new ConvertletTable( functionContext.getContextInformation(), settings.getOptions().getOption(PlannerSettings.IEEE_754_DIVIDE_SEMANTICS))); - this.viewExpansionVersionContext = null; } private SqlConverter(SqlConverter parent, ParserConfig parserConfig) { @@ -159,7 +159,6 @@ private SqlConverter(SqlConverter parent, ParserConfig parserConfig) { this.catalog = parent.catalog; this.convertletTable = parent.convertletTable; this.convertletTableNotes = parent.convertletTableNotes; - this.viewExpansionVersionContext = parent.viewExpansionVersionContext; } public SqlConverter withSystemDefaultParserConfig() { @@ -171,9 +170,15 @@ private static SqlNodeList parseMultipleStatementsImpl(String sql, SqlParser.Con SqlParser parser = SqlParser.create(sql, parserConfig); return parser.parseStmtList(); } catch (SqlParseException e) { - UserException.Builder builder = SqlExceptionHelper.parseError(sql, e); + UserException.Builder builder = SqlExceptionHelper + .parseError(sql, e); + + if (e.getCause() instanceof StackOverflowError) { + builder.message(SqlExceptionHelper.PLANNING_STACK_OVERFLOW_ERROR); + } else if (isInnerQuery) { + builder.message("Failure parsing a view your query is dependent upon."); + } - builder.message(isInnerQuery ? SqlExceptionHelper.INNER_QUERY_PARSING_ERROR : SqlExceptionHelper.QUERY_PARSING_ERROR); throw builder.build(logger); } } @@ -283,12 +288,12 @@ public Catalog getCatalog() { return catalog; } - public void setViewExpansionVersionContext(VersionContext versionContext) { - viewExpansionVersionContext = versionContext; - } - /** - * A RelRoot that carries additional conversion information. + * A RelRoot that carries additional conversion information such as: + * + * 1. Whether the plan contains context sensitive functions such as current_date + * which makes the plan ineligible for materializing in a reflection. + * 2. Whether the plan can be put into the query plan cache. 
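+ * + * For example, a view body that calls CURRENT_DATE is context sensitive: caching or + * materializing its plan would freeze the value that was captured at planning time.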
*/ public static class RelRootPlus extends RelRoot { @@ -302,12 +307,8 @@ private RelRootPlus(RelNode rel, RelDataType validatedRowType, SqlKind kind, Lis this.planCacheable = planCacheable; } - public static RelRootPlus of(RelRoot root, boolean contextSensitive, boolean planCacheable) { - return new RelRootPlus(root.rel,root.validatedRowType,root.kind,root.fields,root.collation,contextSensitive,planCacheable); - } - - public static RelRootPlus of(RelNode rel, RelDataType validatedRowType, SqlKind kind, boolean contextSensitive) { - return RelRootPlus.of(rel, validatedRowType,kind,contextSensitive,true); + public static RelRootPlus of(RelRootPlus root, RelNode rel, RelDataType validatedRowType) { + return new RelRootPlus(rel, validatedRowType, root.kind, root.fields, root.collation, root.contextSensitive, root.planCacheable); } public static RelRootPlus of(RelNode rel, RelDataType validatedRowType, SqlKind kind, boolean contextSensitive, boolean planCacheable) { @@ -317,12 +318,23 @@ public static RelRootPlus of(RelNode rel, RelDataType validatedRowType, SqlKind } public boolean isContextSensitive() { - return contextSensitive; + return contextSensitive || ExpansionNode.isContextSensitive(rel); } public boolean isPlanCacheable() { return planCacheable; } + + /** + * Converts the RelRootPlus to a RelRoot with an ExpansionNode at the root + * @param node + * @return + */ + public RelRoot withExpansionNode(RelNode node) { + assert node instanceof ExpansionNode; + assert RelOptUtil.equal("query", rel.getRowType(), "expansion", node.getRowType(), Litmus.THROW); + return new RelRoot(node, validatedRowType, kind, fields, collation, ImmutableList.of()); + } } /** diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlExceptionHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlExceptionHelper.java index 48989fa9c8..be70a0cf5b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlExceptionHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlExceptionHelper.java @@ -32,9 +32,12 @@ * Helper class that generates the appropriate user exception * from specific parsing exception */ -public class SqlExceptionHelper { - public static final String QUERY_PARSING_ERROR = "Failure parsing the query."; - public static final String INNER_QUERY_PARSING_ERROR = "Failure parsing a view your query is dependent upon."; +public final class SqlExceptionHelper { + public static final String PLANNING_ERROR = "Error during planning the query."; + public static final String PLANNING_STACK_OVERFLOW_ERROR = "The query planner ran out of internal resources and " + + "could not produce a query plan. This is a rare event and only expected for extremely complex queries or queries " + + "that reference a very large number of tables. Please simplify the query. 
If you believe you've received this " + + "message in error, contact Dremio Support for more information."; public static final String SQL_QUERY_CONTEXT = "SQL Query"; public static final String START_LINE_CONTEXT = "startLine"; @@ -42,78 +45,78 @@ public class SqlExceptionHelper { public static final String START_COLUMN_CONTEXT = "startColumn"; public static final String END_COLUMN_CONTEXT = "endColumn"; + private SqlExceptionHelper() {} + public static UserException.Builder validationError(String query, ValidationException ex) { Throwable cause = ex; if (ex.getCause() != null) { // ValidationException generally wraps the "real" cause that we are interested in cause = ex.getCause(); } - UserException.Builder b = UserException.validationError(cause) - .addContext(SQL_QUERY_CONTEXT, query); + + if (cause instanceof StackOverflowError) { + throw SqlExceptionHelper.planError(query, cause) + .message(PLANNING_STACK_OVERFLOW_ERROR) + .buildSilently(); + } + + String message = null; + SqlParserPos pos = null; // CalciteContextException alters the error message including the start/end positions // we need to extract the original error message and add the remaining information as context - if (cause instanceof CalciteContextException && cause.getCause() != null) { + if (cause instanceof CalciteContextException) { CalciteContextException cce = (CalciteContextException) cause; - b.message(cce.getCause().getMessage()) - .addContext(START_LINE_CONTEXT, cce.getPosLine()) - .addContext(START_COLUMN_CONTEXT, cce.getPosColumn()) - .addContext(END_LINE_CONTEXT, cce.getEndPosLine()) - .addContext(END_COLUMN_CONTEXT, cce.getEndPosColumn()); + message = cce.getCause().getMessage(); + pos = new SqlParserPos( + cce.getPosLine(), + cce.getPosColumn(), + cce.getEndPosLine(), + cce.getEndPosColumn()); } - return b; + + return sqlError(UserException.validationError(cause), query, message, pos); } - public static UserException.Builder planError(String query, Exception ex) { - UserException.Builder b = UserException.planError(ex) - .addContext(SQL_QUERY_CONTEXT, query); + public static UserException.Builder planError(String query, Throwable ex) { + String message = null; + SqlParserPos pos = null; // CalciteContextException alters the error message including the start/end positions // we need to extract the original error message and add the remaining information as context if (ex instanceof CalciteContextException) { CalciteContextException cce = (CalciteContextException) ex; - b.message(cce.getMessage()) - .addContext(START_LINE_CONTEXT, cce.getPosLine()) - .addContext(START_COLUMN_CONTEXT, cce.getPosColumn()) - .addContext(END_LINE_CONTEXT, cce.getEndPosLine()) - .addContext(END_COLUMN_CONTEXT, cce.getEndPosColumn()); + message = cce.getMessage(); + pos = new SqlParserPos( + cce.getPosLine(), + cce.getPosColumn(), + cce.getEndPosLine(), + cce.getEndPosColumn()); } - return b; - } - private static UserException.Builder addParseContext(UserException.Builder builder, String query, SqlParserPos pos){ - // Calcite convention is to return column and line numbers as 1-based inclusive positions. 
- return builder.addContext(SQL_QUERY_CONTEXT, query) - .addContext(START_LINE_CONTEXT, pos.getLineNum()) - .addContext(START_COLUMN_CONTEXT, pos.getColumnNum()) - .addContext(END_LINE_CONTEXT, pos.getEndLineNum()) - .addContext(END_COLUMN_CONTEXT, pos.getEndColumnNum()); + return sqlError(UserException.planError(ex), query, message, pos); } public static UserException.Builder parseError(String message, String query, SqlParserPos pos) { - return addParseContext(UserException.parseError().message(message), query, pos); + return sqlError(UserException.parseError(), query, message, pos); } public static UserException.Builder parseError(String query, SqlParseException ex) { - final SqlParserPos pos = ex.getPos(); - if (pos == null) { - return UserException - .parseError(ex) - .addContext(SQL_QUERY_CONTEXT, query); - } else { - // Calcite convention is to return column and line numbers as 1-based inclusive positions. - return addParseContext(UserException.parseError(ex), query, pos); - } + return sqlError( + UserException.parseError(ex), + query, + ex.getMessage(), + ex.getPos()); } - - public static Exception coerceException(Logger logger, String sql, Exception e, boolean coerceToPlan){ - if(e instanceof UserException){ + public static Exception coerceException(Logger logger, String sql, Exception e, boolean coerceToPlan) { + if (e instanceof UserException) { return e; - } else if(e instanceof ValidationException){ + } else if (e instanceof ValidationException) { throw validationError(sql, (ValidationException) e).build(logger); - } else if (e instanceof AccessControlException){ - throw UserException.permissionError(e) + } else if (e instanceof AccessControlException) { + throw UserException + .permissionError(e) .addContext(SQL_QUERY_CONTEXT, sql) .build(logger); } else if (e instanceof SqlUnsupportedException){ @@ -127,4 +130,44 @@ public static Exception coerceException(Logger logger, String sql, Exception e, } return e; } + + private static UserException.Builder sqlError( + UserException.Builder builder, + String query, + String message, + SqlParserPos pos) { + if (message != null) { + if (message.contains("Object") && message.contains("not found")){ + builder = builder.message(message + ". Please check that it exists in the selected context."); + } else { + builder = builder.message(message); + } + } + + if (pos != null) { + // Calcite convention is to return column and line numbers as 1-based inclusive positions. 
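+      // (1-based inclusive means the first character of the query is line 1, column 1, and
+      //  the end position points at the last character of the offending token, not one past it)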
+ builder + .addContext(START_LINE_CONTEXT, pos.getLineNum()) + .addContext(START_COLUMN_CONTEXT, pos.getColumnNum()) + .addContext(END_LINE_CONTEXT, pos.getEndLineNum()) + .addContext(END_COLUMN_CONTEXT, pos.getEndColumnNum()); + } + + if (query != null) { + builder.addContext(SQL_QUERY_CONTEXT, query); + } + + return builder; + } + + public static Exception coerceError(String sql, Error ex) { + if (ex instanceof StackOverflowError) { + throw SqlExceptionHelper.planError(sql, ex) + .message(PLANNING_STACK_OVERFLOW_ERROR) + .buildSilently(); + } + throw SqlExceptionHelper.planError(sql, ex) + .message(PLANNING_ERROR) + .buildSilently(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorAndToRelContext.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorAndToRelContext.java index 24f27fa60d..f5ef58e5b6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorAndToRelContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorAndToRelContext.java @@ -30,15 +30,10 @@ import org.apache.calcite.rel.hint.HintStrategies; import org.apache.calcite.rel.hint.HintStrategyTable; import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexCall; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.rex.RexShuttle; -import org.apache.calcite.schema.FunctionParameter; import org.apache.calcite.sql.SqlExplainLevel; +import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; -import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlOperatorTable; import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.parser.SqlParserPos; @@ -46,21 +41,16 @@ import org.apache.calcite.sql2rel.SqlRexConvertletTable; import org.apache.calcite.sql2rel.SqlToRelConverter; -import com.dremio.common.util.DremioCollectors; import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogIdentity; import com.dremio.exec.catalog.DremioCatalogReader; import com.dremio.exec.catalog.VersionContext; -import com.dremio.exec.catalog.udf.UserDefinedFunctionArgumentOperator; -import com.dremio.exec.planner.DremioRexBuilder; import com.dremio.exec.planner.common.MoreRelOptUtil; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.handlers.RexSubQueryUtils; -import com.dremio.exec.planner.types.SqlTypeFactoryImpl; import com.dremio.options.OptionResolver; import com.dremio.service.namespace.NamespaceKey; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; /** @@ -69,22 +59,23 @@ public class SqlValidatorAndToRelContext { private static final org.slf4j.Logger logger = getLogger(SqlValidatorAndToRelContext.class); - private static final RexBuilder REX_BUILDER = new DremioRexBuilder(SqlTypeFactoryImpl.INSTANCE); - - private final SqlConverter sqlConverter; private final DremioCatalogReader dremioCatalogReader; private final SqlValidatorImpl validator; private final boolean isInnerQuery; + private final boolean allowSubqueryExpansion; - public SqlValidatorAndToRelContext(SqlConverter sqlConverter, + public SqlValidatorAndToRelContext( + SqlConverter sqlConverter, DremioCatalogReader dremioCatalogReader, @Nullable SqlOperatorTable contextualSqlOperatorTable, - boolean isInnerQuery) { + boolean isInnerQuery, + boolean allowSubqueryExpansion) { 
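+    // When allowSubqueryExpansion is false, getPlanForFunctionExpression builds its
+    // SqlToRelConverter config with withExpand(false), so IN/EXISTS/scalar sub-queries are
+    // kept as RexSubQuery nodes for later expansion instead of being rewritten into
+    // correlates/joins during SqlNode-to-RelNode conversion.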
this.sqlConverter = sqlConverter; this.dremioCatalogReader = dremioCatalogReader; this.validator = createValidator(sqlConverter, dremioCatalogReader, contextualSqlOperatorTable); this.isInnerQuery = isInnerQuery; + this.allowSubqueryExpansion = allowSubqueryExpansion; } public SqlNode validate(final SqlNode parsedNode) { @@ -94,32 +85,23 @@ public SqlNode validate(final SqlNode parsedNode) { return node; } - public FunctionBodyAndArguments validateAndConvertScalarFunction( - final SqlNode parsedNode, - final String functionName, - final List functionParameters) { - final SqlToRelConverter.Config config = createDefaultSqlToRelConfigBuilder(sqlConverter.getSettings()) - .withExpand(false) - .build(); - SqlSelect selectNode = toSelect(parsedNode); - List argOps = - UserDefinedFunctionArgumentOperator.createArgumentOperator(functionName, functionParameters); - Map nameToCall = argOps.stream() - .map(REX_BUILDER::makeCall) - // TODO fix this generic so it works .map(RexCall.class::cast) - .collect(DremioCollectors.uniqueGrouping(call -> ((RexCall)call).getOperator().getName())); - - - SqlNode validatedNode = validate(selectNode); - SqlNode expressionNode = extractOnlyExpression(validatedNode); - final SqlToRelConverter sqlToRelConverter = - new DremioSqlToRelConverter(sqlConverter, dremioCatalogReader, validator, - sqlConverter.getConvertletTable(), config); - RexNode rexNode = sqlToRelConverter.convertExpression(expressionNode, nameToCall); - - return new FunctionBodyAndArguments(rexNode, argOps); - } + public RelNode getPlanForFunctionExpression(final SqlNode sqlNode) { + SqlToRelConverter.ConfigBuilder configBuilder = createDefaultSqlToRelConfigBuilder(sqlConverter.getSettings()); + if (!allowSubqueryExpansion) { + configBuilder = configBuilder.withExpand(false); + } + final SqlToRelConverter.Config config = configBuilder.build(); + final DremioSqlToRelConverter sqlToRelConverter = new DremioSqlToRelConverter( + sqlConverter, + dremioCatalogReader, + validator, + sqlConverter.getConvertletTable(), + config); + + final SqlNode query = toQuery(sqlNode); + return sqlToRelConverter.convertQuery(query, true, true).rel; + } /** * Returns a rel root that defers materialization of scans via {@link com.dremio.exec.planner.logical.ConvertibleScan} @@ -200,14 +182,16 @@ public static Builder builder(SqlConverter sqlConverter) { return new Builder(sqlConverter, sqlConverter.getCatalog(), null, - false); + false, + true); } public static Builder builder(SqlValidatorAndToRelContext sqlValidatorAndToRelContext) { return new Builder(sqlValidatorAndToRelContext.sqlConverter, sqlValidatorAndToRelContext.sqlConverter.getCatalog(), null, - sqlValidatorAndToRelContext.isInnerQuery); + sqlValidatorAndToRelContext.isInnerQuery, + true); } public SqlConverter getSqlConverter() { @@ -218,16 +202,20 @@ public static class Builder { final SqlConverter sqlConverter; final Catalog catalog; final boolean isSubQuery; + final boolean allowSubqueryExpansion; @Nullable final SqlOperatorTable contextualSqlOperatorTable; - public Builder(SqlConverter sqlConverter, - Catalog catalog, - SqlOperatorTable contextualSqlOperatorTable, - boolean isSubQuery) { + public Builder( + SqlConverter sqlConverter, + Catalog catalog, + SqlOperatorTable contextualSqlOperatorTable, + boolean isSubQuery, + boolean allowSubqueryExpansion) { this.sqlConverter = sqlConverter; this.catalog = catalog; this.contextualSqlOperatorTable = contextualSqlOperatorTable; this.isSubQuery = isSubQuery; + this.allowSubqueryExpansion = allowSubqueryExpansion; } 
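+    // Illustrative (hypothetical) call site, not part of this change: each with*/disallow*
+    // method below returns a copy of the builder, so allowSubqueryExpansion survives the
+    // whole chain.
+    //
+    //   SqlValidatorAndToRelContext ctx = SqlValidatorAndToRelContext
+    //       .builder(sqlConverter)
+    //       .disallowSubqueryExpansion()
+    //       .build();
+    //   RelNode functionPlan = ctx.getPlanForFunctionExpression(parsedFunctionBody);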
public Builder withSchemaPath(List schemaPath) { @@ -237,7 +225,8 @@ public Builder withSchemaPath(List schemaPath) { sqlConverter, catalog.resolveCatalog(withSchemaPath), contextualSqlOperatorTable, - isSubQuery); + isSubQuery, + allowSubqueryExpansion); } public Builder withCatalog(final Function catalogTransformer) { @@ -245,7 +234,8 @@ public Builder withCatalog(final Function catalogTransformer) sqlConverter, catalogTransformer.apply(catalog), contextualSqlOperatorTable, - isSubQuery); + isSubQuery, + allowSubqueryExpansion); } public Builder withUser(CatalogIdentity user) { @@ -253,75 +243,46 @@ public Builder withUser(CatalogIdentity user) { sqlConverter, catalog.resolveCatalog(user), contextualSqlOperatorTable, - isSubQuery); + isSubQuery, + allowSubqueryExpansion); } public Builder withVersionContext(String source, VersionContext versionContext) { - sqlConverter.setViewExpansionVersionContext(versionContext); final Map sourceVersionMapping = new HashMap<>(); sourceVersionMapping.put(source, versionContext); return new Builder( sqlConverter, catalog.resolveCatalog(sourceVersionMapping), contextualSqlOperatorTable, - isSubQuery); + isSubQuery, + allowSubqueryExpansion); } public Builder withContextualSqlOperatorTable(SqlOperatorTable contextualSqlOperatorTable) { - return new Builder(sqlConverter, catalog, contextualSqlOperatorTable, isSubQuery); + return new Builder(sqlConverter, catalog, contextualSqlOperatorTable, isSubQuery, + allowSubqueryExpansion); } public Builder withSystemDefaultParserConfig() { - return new Builder(sqlConverter.withSystemDefaultParserConfig(), catalog, contextualSqlOperatorTable, isSubQuery); + return new Builder(sqlConverter.withSystemDefaultParserConfig(), catalog, contextualSqlOperatorTable, isSubQuery, + allowSubqueryExpansion); + } + + public Builder disallowSubqueryExpansion() { + return new Builder( + sqlConverter.withSystemDefaultParserConfig(), + catalog, + contextualSqlOperatorTable, + isSubQuery, + false); } public SqlValidatorAndToRelContext build() { return new SqlValidatorAndToRelContext(sqlConverter, new DremioCatalogReader(catalog, sqlConverter.getTypeFactory()), contextualSqlOperatorTable, - isSubQuery); - } - - } - - public class FunctionBodyAndArguments { - final RexNode functionBody; - final List userDefinedFunctionArgumentOperators; - - private FunctionBodyAndArguments(RexNode functionBody, - List userDefinedFunctionArgumentOperators) { - this.functionBody = functionBody; - this.userDefinedFunctionArgumentOperators = userDefinedFunctionArgumentOperators; - } - - public RexNode getFunctionBody() { - return functionBody; - } - - public List getUserDefinedFunctionArgumentOperators() { - return userDefinedFunctionArgumentOperators; - } - } - - private static RexNode replaceArgs(RexNode rexNode, Map namesToNodeMap) { - return rexNode.accept(new RexShuttle() { - @Override public RexNode visitCall(RexCall call) { - if(call.getOperands().isEmpty()) { - return namesToNodeMap.getOrDefault(call.getOperator().getName(), call); - } - return super.visitCall(call); - } - }); - } - - private static SqlNode extractOnlyExpression(SqlNode sqlNode) { - if (sqlNode instanceof SqlSelect) { - SqlSelect sqlSelect = (SqlSelect) sqlNode; - Preconditions.checkState(null == sqlSelect.getFrom()); - Preconditions.checkState(sqlSelect.getSelectList().size() == 1); - return sqlSelect.getSelectList().get(0); - } else { - throw new RuntimeException(); + isSubQuery, + allowSubqueryExpansion); } } @@ -357,19 +318,31 @@ private static SqlOperatorTable 
createOperatorTable(SqlConverter sqlConverter, } } - private static SqlSelect toSelect(SqlNode sqlNode) { - if(sqlNode instanceof SqlSelect) { - return (SqlSelect) sqlNode; - } else { - return new SqlSelect(SqlParserPos.ZERO, null, - new SqlNodeList(ImmutableList.of(sqlNode), SqlParserPos.ZERO), - null, null, null, null, null, null, null, null, null, null); - } - } - private static SqlToRelConverter.ConfigBuilder createDefaultSqlToRelConfigBuilder(PlannerSettings settings) { return SqlToRelConverter.configBuilder() .withExpand(settings.options.getOption(PlannerSettings.USE_SQL_TO_REL_SUB_QUERY_EXPANSION)) .withTrimUnusedFields(true); } + + private static SqlNode toQuery(SqlNode node) { + final SqlKind kind = node.getKind(); + switch (kind) { + // These are the node types that we know are already a query. + case SELECT: + case UNION: + case INTERSECT: + case EXCEPT: + case WITH: + case VALUES: + return node; + default: + // We need to convert scalar values into a select statement + return new SqlSelect( + SqlParserPos.ZERO, + null, + new SqlNodeList(ImmutableList.of(node), + SqlParserPos.ZERO), + null, null, null, null, null, null, null, null, null, null); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorImpl.java index dfbf7f066c..57577e4fa7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/SqlValidatorImpl.java @@ -93,6 +93,7 @@ import com.dremio.exec.planner.sql.parser.SqlOptimize; import com.dremio.exec.planner.sql.parser.SqlPartitionTransform; import com.dremio.exec.planner.sql.parser.SqlUpdateTable; +import com.dremio.exec.planner.sql.parser.SqlVacuumTable; import com.dremio.exec.planner.sql.parser.SqlVersionedTableMacroCall; import com.dremio.options.OptionResolver; import com.dremio.options.TypeValidators; @@ -145,7 +146,7 @@ protected SqlNode performUnconditionalRewrites(SqlNode node, boolean underFrom) SqlMergeIntoTable merge = (SqlMergeIntoTable) node; rewriteMerge(merge); return node; - } else if (node instanceof SqlOptimize) { + } else if (node instanceof SqlOptimize) { SqlOptimize sqlOptimize = (SqlOptimize) node; SqlSelect select = createSourceSelectForOptimize(sqlOptimize); sqlOptimize.setSourceSelect(select); @@ -155,7 +156,13 @@ protected SqlNode performUnconditionalRewrites(SqlNode node, boolean underFrom) SqlSelect select = createSourceSelectForCopyIntoTable(sqlCopyIntoTable); sqlCopyIntoTable.setSourceSelect(select); return node; + } else if (node instanceof SqlVacuumTable) { + SqlVacuumTable sqlVacuumTable = (SqlVacuumTable) node; + SqlSelect select = createSourceSelectForVacuum(sqlVacuumTable); + sqlVacuumTable.setSourceSelect(select); + return node; } + return super.performUnconditionalRewrites(node, underFrom); } @@ -175,6 +182,14 @@ private SqlSelect createSourceSelectForCopyIntoTable(SqlCopyIntoTable call) { null, null, null, null, null, null, null, null, null); } + private SqlSelect createSourceSelectForVacuum(SqlVacuumTable call) { + final SqlNodeList selectList = new SqlNodeList(SqlParserPos.ZERO); + selectList.add(SqlIdentifier.star(SqlParserPos.ZERO)); + SqlNode sourceTable = call.getTable(); + return new SqlSelect(SqlParserPos.ZERO, null, selectList, sourceTable, + null, null, null, null, null, null, null, null, null); + } + @Override protected void registerNamespace(SqlValidatorScope usingScope, String alias, SqlValidatorNamespace ns, 
boolean forceNullable) { @@ -818,6 +833,7 @@ private static class Expander extends SqlScopedShuttle { this.validator = validator; } + @Override public SqlNode visit(SqlIdentifier id) { SqlValidator validator = getScope().getValidator(); final SqlCall call = validator.makeNullaryCall(id); @@ -943,7 +959,8 @@ public ExtendedAliasExpander( this.allowMoreThanOneAlias = allowMoreThanOneAlias; } - @Override public SqlNode visit(SqlIdentifier id) { + @Override + public SqlNode visit(SqlIdentifier id) { if (!id.isSimple()) { return super.visit(id); } @@ -1034,6 +1051,7 @@ public static SqlNode copy(SqlNode node) { return node.accept(new DeepCopier()); } + @Override public SqlNode visit(SqlNodeList list) { SqlNodeList copy = new SqlNodeList(list.getParserPosition()); for (SqlNode node : list) { @@ -1044,28 +1062,34 @@ public SqlNode visit(SqlNodeList list) { // Override to copy all arguments regardless of whether visitor changes // them. + @Override public SqlNode visit(SqlCall call) { ArgHandler argHandler = new CallCopyingArgHandler(call, true); call.getOperator().acceptCall(this, call, false, argHandler); return argHandler.result(); } + @Override public SqlNode visit(SqlLiteral literal) { return SqlNode.clone(literal); } + @Override public SqlNode visit(SqlIdentifier id) { return SqlNode.clone(id); } + @Override public SqlNode visit(SqlDataTypeSpec type) { return SqlNode.clone(type); } + @Override public SqlNode visit(SqlDynamicParam param) { return SqlNode.clone(param); } + @Override public SqlNode visit(SqlIntervalQualifier intervalQualifier) { return SqlNode.clone(intervalQualifier); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/TypeInferenceUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/TypeInferenceUtils.java index 55712372c9..68274878d2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/TypeInferenceUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/TypeInferenceUtils.java @@ -43,10 +43,11 @@ import com.dremio.exec.resolver.FunctionResolver; import com.dremio.exec.resolver.FunctionResolverFactory; import com.esotericsoftware.kryo.serializers.FieldSerializer; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -public class TypeInferenceUtils { +public final class TypeInferenceUtils { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TypeInferenceUtils.class); private static final ImmutableMap MINOR_TO_CALCITE_TYPE_MAPPING = ImmutableMap. builder() @@ -355,30 +356,24 @@ public RelDataType inferReturnType(SqlOperatorBinding opBinding) { private static class ConcatSqlReturnTypeInference implements SqlReturnTypeInference { private static final ConcatSqlReturnTypeInference INSTANCE = new ConcatSqlReturnTypeInference(); + /** + * Calculate the expected return type of CONCAT based on the types of the operands. + * For simplicity, we just return a max-width VARCHAR type since it makes no difference in resource usage. + * @param opBinding Binding to the operands used in this function call. + * @return The return type for this call to CONCAT. 
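+   *         For example, CONCAT(VARCHAR(5), VARCHAR(10) NOT NULL) is typed as a max-width
+   *         VARCHAR (VARCHAR(65536)) rather than VARCHAR(15), and the result is nullable
+   *         only when every operand is nullable.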
+ */ @Override public RelDataType inferReturnType(SqlOperatorBinding opBinding) { final RelDataTypeFactory factory = opBinding.getTypeFactory(); - - boolean isNullable = true; - int precision = 0; - for(RelDataType relDataType : opBinding.collectOperandTypes()) { - if(!relDataType.isNullable()) { - isNullable = false; - } - - // If the underlying columns cannot offer information regarding the precision (i.e., the length) of the VarChar, - // Dremio uses the largest to represent it - if(relDataType.getPrecision() == TypeHelper.VARCHAR_DEFAULT_CAST_LEN - || relDataType.getPrecision() == RelDataType.PRECISION_NOT_SPECIFIED) { - precision = TypeHelper.VARCHAR_DEFAULT_CAST_LEN; - } else { - precision += relDataType.getPrecision(); - } - } - - return factory.createTypeWithNullability( - factory.createSqlType(SqlTypeName.VARCHAR, precision), - isNullable); + final boolean isNullable = + opBinding.collectOperandTypes().stream().allMatch(relDataType -> relDataType.isNullable()); + + // Set precision to null because createCalciteTypeWithNullability always makes VARCHAR max-width. + // We do this to avoid having complex logic here to calculate the maximum width for the string + // representation of a value with a given type, and because it doesn't make a difference in + // resource usage. + final Integer precision = null; + return createCalciteTypeWithNullability(factory, SqlTypeName.VARCHAR, isNullable, precision); } } @@ -402,16 +397,15 @@ private static class ReplaceSqlReturnTypeInference implements SqlReturnTypeInfer @Override public RelDataType inferReturnType(SqlOperatorBinding opBinding) { - final RelDataTypeFactory factory = opBinding.getTypeFactory(); - final SqlTypeName sqlTypeName = SqlTypeName.VARCHAR; - - for(int i = 0; i < opBinding.getOperandCount(); ++i) { - if(opBinding.getOperandType(i).isNullable()) { - return createCalciteTypeWithNullability(factory, sqlTypeName, true, null); - } - } - - return createCalciteTypeWithNullability(factory, sqlTypeName, false, 65536); + Preconditions.checkArgument(opBinding.getOperandCount() == 3); + + boolean isNullable = opBinding.collectOperandTypes().stream().anyMatch(RelDataType::isNullable); + // TODO: calculate a more reasonable upper bound on the char length after replacement + return createCalciteTypeWithNullability( + opBinding.getTypeFactory(), + SqlTypeName.VARCHAR, + isNullable, + null /*VARCHAR just uses a default precision*/); } } @@ -570,8 +564,10 @@ public static RelDataType createCalciteTypeWithNullability(RelDataTypeFactory ty break; case VARCHAR: type = typeFactory.createSqlType(sqlTypeName, TypeHelper.VARCHAR_DEFAULT_CAST_LEN); + break; default: type = typeFactory.createSqlType(sqlTypeName); + break; } return typeFactory.createTypeWithNullability(type, isNullable); } @@ -596,10 +592,7 @@ public static FunctionCall convertSqlOperatorBindingToFunctionCall(final SqlOper return functionCall; } - /** - * This class is not intended to be instantiated - */ private TypeInferenceUtils() { - + // utility class } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ConvertedRelNode.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ConvertedRelNode.java index 8ced7880e1..ce3aa8170d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ConvertedRelNode.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ConvertedRelNode.java @@ -18,7 +18,11 @@ import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; +/** + * A 
ConvertedRelNode is the result of converting a SqlNode into a RelNode with {@link org.apache.calcite.plan.Convention#NONE}. + */ public class ConvertedRelNode { + private final RelNode relNode; private final RelDataType validatedRowType; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmer.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmer.java index c7b5adee7c..9e1b069a15 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmer.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmer.java @@ -17,11 +17,14 @@ import java.math.BigDecimal; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -32,6 +35,8 @@ import org.apache.calcite.rel.RelFieldCollation; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Aggregate; +import org.apache.calcite.rel.core.Join; +import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.SetOp; import org.apache.calcite.rel.core.Window; @@ -52,6 +57,8 @@ import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.rex.RexWindowBound; import org.apache.calcite.sql.SqlAggFunction; +import org.apache.calcite.sql.SqlExplainLevel; +import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql2rel.RelFieldTrimmer; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.util.ImmutableBitSet; @@ -62,43 +69,56 @@ import org.apache.calcite.util.mapping.MappingType; import org.apache.calcite.util.mapping.Mappings; +import com.dremio.common.collections.Tuple; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.calcite.logical.SampleCrel; import com.dremio.exec.calcite.logical.ScanCrel; import com.dremio.exec.calcite.logical.TableModifyCrel; -import com.dremio.exec.planner.logical.DremioRelFactories; +import com.dremio.exec.planner.common.FlattenRelBase; import com.dremio.exec.planner.logical.FlattenVisitors; import com.dremio.exec.planner.logical.LimitRel; import com.dremio.exec.planner.logical.WindowRel; import com.dremio.exec.planner.logical.partition.PruneFilterCondition; import com.dremio.exec.store.dfs.FilesystemScanDrel; import com.dremio.service.Pointer; +import com.google.common.base.Stopwatch; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -public class DremioFieldTrimmer extends RelFieldTrimmer { +public final class DremioFieldTrimmer extends RelFieldTrimmer { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DremioFieldTrimmer.class); + private static final Double ONE = new Double(1); + private final RelBuilder builder; - private final boolean isRelPlanning; - private final boolean trimProjectedColumn; + private final DremioFieldTrimmerParameters parameters; - public static DremioFieldTrimmer of(RelOptCluster cluster, boolean isRelPlanning, boolean trimProjectedColumn) { - RelBuilder builder = DremioRelFactories.CALCITE_LOGICAL_BUILDER.create(cluster, null); - return new DremioFieldTrimmer(builder, isRelPlanning, trimProjectedColumn); + public 
DremioFieldTrimmer(
+      RelBuilder builder,
+      DremioFieldTrimmerParameters parameters) {
+    super(null, builder);
+    this.builder = builder;
+    this.parameters = parameters;
  }

-  public static DremioFieldTrimmer of(RelBuilder builder) {
-    return new DremioFieldTrimmer(builder, false, true);
-  }
+  @Override
+  public RelNode trim(RelNode root) {
+    if (!parameters.shouldLog()) {
+      return super.trim(root);
+    }

-  private DremioFieldTrimmer(RelBuilder builder, boolean isRelPlanning, boolean trimProjectedColumn) {
-    super(null, builder);
-    this.builder = builder;
-    this.isRelPlanning = isRelPlanning;
-    this.trimProjectedColumn = trimProjectedColumn;
+    Stopwatch stopwatch = Stopwatch.createStarted();
+    RelNode trimmedNode = super.trim(root);
+    stopwatch.stop();
+
+    String plan = RelOptUtil.toString(root, SqlExplainLevel.ALL_ATTRIBUTES);
+    long duration = stopwatch.elapsed(TimeUnit.MILLISECONDS);
+    String logMessage = String.format("FieldTrimmer took %dms for the following RelNode:\n%s", duration, plan);
+    logger.debug(logMessage);
+
+    return trimmedNode;
  }

  public TrimResult trimFields(
@@ -165,7 +185,7 @@
      ImmutableBitSet fieldsUsed,
      Set<RelDataTypeField> extraFields) {

-    if(fieldsUsed.cardinality() == crel.getRowType().getFieldCount() || !trimProjectedColumn) {
+    if (fieldsUsed.cardinality() == crel.getRowType().getFieldCount() || !parameters.trimProjectedColumn()) {
      return result(crel, Mappings.createIdentity(crel.getRowType().getFieldCount()));
    }

@@ -203,7 +223,7 @@
      Set<RelDataTypeField> extraFields) {

    // if we've already pushed down projection of nested columns, we don't want to trim anymore
-    if (drel.getProjectedColumns().stream().anyMatch(c -> !c.isSimplePath()) || !trimProjectedColumn) {
+    if (drel.getProjectedColumns().stream().anyMatch(c -> !c.isSimplePath()) || !parameters.trimProjectedColumn()) {
      return result(drel, Mappings.createIdentity(drel.getRowType().getFieldCount()));
    }

@@ -291,6 +311,44 @@
    return super.trimFields(setOp, fieldsUsed, extraFields);
  }

+  /**
+   * Handler method to trim fields for a {@link FlattenRelBase} (e.g. FlattenCrel).
+   * @param flatten the flatten node being trimmed
+   * @param fieldsUsed the fields required by the consumer of this node
+   * @param extraFields extra fields the consumer wishes the node to provide
+   * @return the trimmed flatten together with the mapping from old to new fields
+   */
+  public TrimResult trimFields(FlattenRelBase flatten, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
+    final RelDataType rowType = flatten.getRowType();
+    final int fieldCount = rowType.getFieldCount();
+    final RelNode input = flatten.getInput();
+
+    // We use the fields used by the consumer, plus any fields used in the
+    // flatten.
+    final ImmutableBitSet inputFieldsUsed = fieldsUsed.union(ImmutableBitSet.of(flatten.getFlattenedIndices()));
+
+    TrimResult trimResult = trimChild(flatten, input, inputFieldsUsed, extraFields);
+    RelNode newInput = trimResult.left;
+    final Mapping inputMapping = trimResult.right;
+
+    // When the input is unchanged and every column is still used, keep the identity projection.
+    if (newInput == input
+        && fieldsUsed.cardinality() == fieldCount) {
+      return result(flatten, Mappings.createIdentity(fieldCount));
+    }
+
+    final RexVisitor<RexNode> shuttle = new RexPermuteInputsShuttle(inputMapping, newInput);
+
+    final List<RexInputRef> flattenFields = new ArrayList<>();
+
+    // Update the flatten fields in case the input mapping has changed.
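+    // (each RexInputRef is rewritten through the RexPermuteInputsShuttle created above, so
+    //  its index points at the corresponding field of the trimmed input)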
+    for (final RexInputRef flattenField : flatten.getToFlatten()) {
+      flattenFields.add((RexInputRef) flattenField.accept(shuttle));
+    }
+
+    return result(flatten.copy(Arrays.asList(newInput), flattenFields), inputMapping);
+  }
+
  @Override
  public TrimResult trimFields(Project project, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    int count = FlattenVisitors.count(project.getProjects());
@@ -300,7 +358,7 @@ public TrimResult trimFields(Project project, ImmutableBitSet fieldsUsed, Set
    List<RexInputRef> identityProject = new ArrayList<>();
    for (int i = 0; i < result.left.getRowType().getFieldCount(); i++) {
      identityProject.add(new RexInputRef(i, result.left.getRowType().getFieldList().get(i).getType()));
@@ -406,6 +464,257 @@
    return result(limit.copy(newInput.getTraitSet(), ImmutableList.of(newInput)), inputMapping);
  }

+  /**
+   * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
+   * {@link org.apache.calcite.rel.logical.LogicalJoin}.
+   * Checks that we are projecting from a single side of the JOIN and that the JOIN does not
+   * modify the rows coming from that side.
+   * If so, we remove the join and recurse into the side we are projecting from.
+   */
+  @Override
+  public TrimResult trimFields(
+      Join join,
+      ImmutableBitSet fieldsUsed,
+      Set<RelDataTypeField> extraFields) {
+    if (!parameters.trimJoinBranch()) {
+      return super.trimFields(join, fieldsUsed, extraFields);
+    }
+
+    Optional<Tuple<RelNode, Mapping>> optionalTrimResult = tryTrimJoin(join, fieldsUsed);
+    if (!optionalTrimResult.isPresent()) {
+      return super.trimFields(join, fieldsUsed, extraFields);
+    }
+
+    Tuple<RelNode, Mapping> trimResult = optionalTrimResult.get();
+    return result(trimResult.first, trimResult.second);
+  }
+
+  private static Optional<Tuple<RelNode, Mapping>> tryTrimJoin(
+      Join join,
+      ImmutableBitSet fieldsUsed) {
+    /*
+     * The internal algorithm checks to see if we have a JOIN that can be removed.
+     * It does so by asking:
+     * 1) Are we projecting from the side that is being JOINed on?
+     * 2) Is the JOIN NOT modifying the rows emitted?
+     *
+     * If we answer yes to all of these, then we can remove the JOIN.
+     */
+    if (!join.getSystemFieldList().isEmpty()) {
+      // If there are system fields, then we can't apply the optimization
+      return Optional.empty();
+    }
+
+    Optional<JoinRelType> optionalProjectType = tryGetProjectType(join, fieldsUsed);
+    if (!optionalProjectType.isPresent()) {
+      return Optional.empty();
+    }
+
+    JoinRelType projectType = optionalProjectType.get();
+    if (!joinTypeCompatibleWithProjectType(join, projectType)) {
+      return Optional.empty();
+    }
+
+    // At this point we know the JOIN will not DECREASE the cardinality.
+    // We need to also check that the JOIN will not INCREASE the cardinality.
+    // This is done by making sure the JOIN condition matches the elements from the left table
+    // with at most one element in the right table.
+    if (!isJoinConditionOneToOne(join, projectType)) {
+      return Optional.empty();
+    }
+
+    final RelNode mainBranch = projectType == JoinRelType.LEFT ? join.getLeft() : join.getRight();
+    Mapping mapping = Mappings.create(
+      MappingType.SURJECTION,
+      join.getRowType().getFieldCount(),
+      mainBranch.getRowType().getFieldCount());
+    if (projectType == JoinRelType.LEFT) {
+      for (int i = 0; i < mainBranch.getRowType().getFieldCount(); i++) {
+        mapping.set(i, i);
+      }
+    } else {
+      int leftFieldCount = join.getLeft().getRowType().getFieldCount();
+      for (int i = 0; i < mainBranch.getRowType().getFieldCount(); i++) {
+        mapping.set(i + leftFieldCount, i);
+      }
+    }
+
+    return Optional.of(Tuple.of(mainBranch, mapping));
+  }
+
+  /**
+   * Tries to return the side we are projecting from.
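+   * For example, with a left input of three fields, fieldsUsed = {0, 2} projects only from the
+   * left side (LEFT), fieldsUsed = {3, 4} only from the right side (RIGHT), and
+   * fieldsUsed = {1, 3} from both sides, in which case an empty Optional is returned.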
+   * @param join the join whose output fields are being inspected
+   * @param fieldsUsed the output fields requested by the consumer of the join
+   * @return The side we are projecting from (LEFT, RIGHT, or an empty Optional if both).
+   */
+  private static Optional<JoinRelType> tryGetProjectType(Join join, ImmutableBitSet fieldsUsed) {
+    int minFieldIndex = Integer.MAX_VALUE;
+    int maxFieldIndex = -1;
+    for (int index : fieldsUsed) {
+      minFieldIndex = Math.min(minFieldIndex, index);
+      maxFieldIndex = Math.max(maxFieldIndex, index);
+    }
+
+    int numLeftColumns = join.getLeft().getRowType().getFieldCount();
+    boolean projectsFromBothSides = (minFieldIndex < numLeftColumns) && (maxFieldIndex >= numLeftColumns);
+    if (projectsFromBothSides) {
+      return Optional.empty();
+    }
+
+    boolean isLeftOnlyJoin = (minFieldIndex < numLeftColumns) && (maxFieldIndex < numLeftColumns);
+    JoinRelType projectType = isLeftOnlyJoin ? JoinRelType.LEFT : JoinRelType.RIGHT;
+    return Optional.of(projectType);
+  }
+
+  private static boolean joinTypeCompatibleWithProjectType(Join join, JoinRelType projectType) {
+    switch (join.getJoinType()) {
+      case FULL:
+        // A full join will have all the rows from both the LEFT and RIGHT table
+        return true;
+
+      case INNER:
+        // This is the same as a FULL JOIN
+        return join.getCondition().isAlwaysTrue();
+
+      case LEFT:
+        return projectType == JoinRelType.LEFT;
+
+      case RIGHT:
+        return projectType == JoinRelType.RIGHT;
+
+      default:
+        // We don't know how to support these other join types yet.
+        return false;
+    }
+  }
+
+  private static boolean isJoinConditionOneToOne(Join join, JoinRelType projectType) {
+    final RelNode sideBranch = projectType == JoinRelType.LEFT ? join.getRight() : join.getLeft();
+    // We need the metadata query to do any row count operations,
+    // so if it's not available, then just give up.
+    if (sideBranch.getCluster() == null || sideBranch.getCluster().getMetadataQuery() == null) {
+      return false;
+    }
+
+    RexNode condition = join.getCondition();
+    boolean isOneToOne;
+    if (condition.isAlwaysTrue()) {
+      isOneToOne = ONE.equals(sideBranch.getCluster().getMetadataQuery().getMaxRowCount(sideBranch))
+        && ONE.equals(sideBranch.getCluster().getMetadataQuery().getMinRowCount(sideBranch));
+    } else if (condition instanceof RexCall) {
+      RexCall rexCall = (RexCall) condition;
+      isOneToOne = isJoinConditionOneToOneRexCall(join, rexCall, sideBranch);
+    } else {
+      isOneToOne = false;
+    }
+
+    return isOneToOne;
+  }
+
+  private static boolean isJoinConditionOneToOneRexCall(Join join, RexCall condition, RelNode sideBranch) {
+    switch (condition.getKind()) {
+      case EQUALS:
+        return isJoinConditionOneToOneRexCallEquals(join, condition, sideBranch);
+      case AND:
+        return isJoinConditionOneToOneRexCallAnd(join, condition, sideBranch);
+      default:
+        return false;
+    }
+  }
+
+  private static boolean isJoinConditionOneToOneRexCallEquals(Join join, RexCall condition, RelNode sideBranch) {
+    assert condition.getKind() == SqlKind.EQUALS;
+    // We need to have an equi join on the sideBranch with only unique values.
+    // Example:
+    //   JoinRel(condition=[=($0, $2)], joinType=[left])
+    //     ProjectRel(col1=[$0], col2=[$1])
+    //       FilesystemScanDrel(table=[cp."dx56085/t1.json"], columns=[`col1`, `col2`], splits=[1])
+    //     AggregateRel(group=[{0}])
+    // We know that =($0, $2) can match at most one value, since $2 is a group key.
+    Optional<Integer> optionalSideBranchEqualityIndex = tryGetSideBranchEqualityIndex(join, condition, sideBranch);
+    if (!optionalSideBranchEqualityIndex.isPresent()) {
+      return false;
+    }
+
+    int sideBranchEqualityIndex = optionalSideBranchEqualityIndex.get();
+    ImmutableBitSet columns = ImmutableBitSet
+      .builder()
+      .addAll(ImmutableList.of(sideBranchEqualityIndex))
+      .build();
+
+    return Boolean.TRUE.equals(sideBranch
+      .getCluster()
+      .getMetadataQuery()
+      .areColumnsUnique(sideBranch, columns));
+  }
+
+  private static boolean isJoinConditionOneToOneRexCallAnd(Join join, RexCall condition, RelNode sideBranch) {
+    assert condition.getKind() == SqlKind.AND;
+    // If the condition is the AND of equality checks whose side-branch columns together span a
+    // unique column set, then we can remove the JOIN.
+    // For example:
+    //   ProjectRel(col1=[$0])
+    //     JoinRel(condition=[AND(=($0, $2), =($1, $3))], joinType=[left])
+    //       FilesystemScanDrel(table=[cp."dx56085/t1.json"], columns=[`col1`, `col2`], splits=[1])
+    //       AggregateRel(group=[{0, 1}])
+    //         FilesystemScanDrel(table=[cp."dx56085/t2.json"], columns=[`col1`, `col2`], splits=[1])
+    // The condition checks equality on $2 and $3, which combined cover the full group=[{0, 1}],
+    // which is unique.
+    List<Integer> columns = condition
+      .getOperands()
+      .stream()
+      .map(operand -> tryGetSideBranchEqualityIndex(join, operand, sideBranch))
+      .filter(Optional::isPresent)
+      .map(Optional::get)
+      .collect(Collectors.toList());
+
+    ImmutableBitSet columnBitSet = ImmutableBitSet
+      .builder()
+      .addAll(columns)
+      .build();
+
+    return Boolean.TRUE.equals(sideBranch
+      .getCluster()
+      .getMetadataQuery()
+      .areColumnsUnique(sideBranch, columnBitSet));
+  }
+
+  private static Optional<Integer> tryGetSideBranchEqualityIndex(Join join, RexNode condition, RelNode sideBranch) {
+    if (condition.getKind() != SqlKind.EQUALS) {
+      return Optional.empty();
+    }
+
+    RexCall equalsCondition = (RexCall) condition;
+    if (equalsCondition.getOperands().size() != 2) {
+      throw new UnsupportedOperationException("Expected equals condition to have exactly 2 operands.");
+    }
+
+    for (RexNode operand : equalsCondition.getOperands()) {
+      if (!(operand instanceof RexInputRef)) {
+        return Optional.empty();
+      }
+    }
+
+    int leftIndex = ((RexInputRef) equalsCondition.getOperands().get(0)).getIndex();
+    int rightIndex = ((RexInputRef) equalsCondition.getOperands().get(1)).getIndex();
+    if (leftIndex > rightIndex) {
+      int temp = leftIndex;
+      leftIndex = rightIndex;
+      rightIndex = temp;
+    }
+
+    int numLeftColumns = join.getLeft().getRowType().getFieldCount();
+    if ((leftIndex >= numLeftColumns) || (rightIndex < numLeftColumns)) {
+      return Optional.empty();
+    }
+
+    // We want the index of the column participating in the join condition on the side branch.
+    // If the side branch is the right side of the join, we need to adjust the index to be
+    // relative to the side branch, since it's currently relative to the whole join.
+    int adjustedIndex = sideBranch.equals(join.getRight()) ? rightIndex - numLeftColumns : leftIndex;
+    return Optional.of(adjustedIndex);
+  }
+
  /**
   * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for {@link MultiJoin}.
   */
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmerParameters.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmerParameters.java
new file mode 100644
index 0000000000..f98ec69ba6
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioFieldTrimmerParameters.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import org.immutables.value.Value; + +/** + * Parameters for DremioFieldTrimmer. + * This staged builder pattern enables us to simulate "named parameters" in Java. + */ +@Value.Style(stagedBuilder = true) +@Value.Immutable +public interface DremioFieldTrimmerParameters { + boolean shouldLog(); + boolean isRelPlanning(); + boolean trimProjectedColumn(); + boolean trimJoinBranch(); + + static ImmutableDremioFieldTrimmerParameters.ShouldLogBuildStage builder() { + return ImmutableDremioFieldTrimmerParameters.builder(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioSortInJoinRemover.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioSortInJoinRemover.java deleted file mode 100644 index 5d4a9fc657..0000000000 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/DremioSortInJoinRemover.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner.sql.handlers; - -import org.apache.calcite.rel.RelNode; - -import com.dremio.exec.planner.StatelessRelShuttleImpl; -import com.dremio.exec.planner.logical.JoinRel; -import com.dremio.exec.planner.logical.LimitRel; -import com.dremio.exec.planner.logical.SortRel; - -/** - * DremioSortInJoinRemover removes a LogicalSort if it occurs in a LogicalJoin Subtree - * In an In SubQuery, after expansion, we sometimes maintain sort in a Join Subtree when it is not needed - * For example for query - - * SELECT o_custkey FROM tpch."orders.parquet" - * WHERE o_custkey in (SELECT o_custkey FROM tpch."orders.parquet" ORDER BY o_custkey) - * Order By o_custkey - * The generated plan is Join/Aggregate/Sort - * Here the sort is not needed and is removed by this remover. 
- */ - -public class DremioSortInJoinRemover { - - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DremioSortInJoinRemover.class); - - public static RelNode remove(RelNode root) { - SortRemoverShuttle sortRemoverShuttle = new SortRemoverShuttle(); - return sortRemoverShuttle.visit(root); - } - - /** - * SortRemoverShuttle is a Visitor that removes a LogicalSort if it occurs in a LogicalJoin Subtree - */ - private static class SortRemoverShuttle extends StatelessRelShuttleImpl { - private boolean inJoin = false; - - // returns true if rel Contains a sort or an offset which is not null - private boolean doesRelContainsSortOrOffset(RelNode rel){ - if(rel instanceof SortRel){ - return ( ((SortRel)rel).fetch != null || ((SortRel)rel).offset != null); - } else if(rel instanceof LimitRel){ - return ( ((LimitRel)rel).fetch != null || ((LimitRel)rel).offset != null); - } - return false; - } - - public RelNode visitSortRel(SortRel sort) { - if (this.inJoin && !doesRelContainsSortOrOffset(sort)) { - RelNode output = super.visitChildren(sort); - return visit(output.getInput(0)); - } - this.inJoin = false; - return super.visitChildren(sort); - } - - public RelNode visitJoinRel(JoinRel join) { - this.inJoin = true; - RelNode output = super.visitChildren(join); - this.inJoin = false; - return output; - } - - @Override - public RelNode visit(RelNode other) { - if(other instanceof JoinRel ){ - return visitJoinRel((JoinRel) other); - }else if (other instanceof SortRel){ - return visitSortRel((SortRel) other); - } else if (other instanceof LimitRel) { - if(doesRelContainsSortOrOffset(other)){ - // do not remove sort in this subtree unless there is another Join below - this.inJoin = false; - } - } - return super.visit(other); - } - - } - -} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/FlattenCaseExpressionsVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/FlattenCaseExpressionsVisitor.java new file mode 100644 index 0000000000..83d4899225 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/FlattenCaseExpressionsVisitor.java @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.logical.LogicalFilter; +import org.apache.calcite.rel.logical.LogicalJoin; +import org.apache.calcite.rel.logical.LogicalProject; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; + +import com.dremio.exec.planner.StatelessRelShuttleImpl; + +/** + * Flatten nested CASE expressions. 
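+ * For example, CASE WHEN a THEN 1 ELSE CASE WHEN b THEN 2 ELSE 3 END END is rewritten to the
+ * single-level CASE WHEN a THEN 1 WHEN b THEN 2 ELSE 3 END.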
+ */ +public class FlattenCaseExpressionsVisitor extends StatelessRelShuttleImpl { + private static final FlattenCaseExpressionsVisitor INSTANCE = new FlattenCaseExpressionsVisitor(); + + public static RelNode simplify(RelNode relNode) { + return relNode.accept(INSTANCE); + } + + @Override + public RelNode visit(LogicalFilter filter) { + final RelNode inputRel = filter.getInput().accept(this); + final RexBuilder rexBuilder = filter.getCluster().getRexBuilder(); + + final CaseExpressionUnwrapper visitor = new CaseExpressionUnwrapper(rexBuilder); + final RexNode condition = filter.getCondition().accept(visitor); + return filter.copy(filter.getTraitSet(), inputRel, condition); + } + + @Override + public RelNode visit(LogicalProject project) { + final RelNode inputRel = project.getInput().accept(this); + final RexBuilder rexBuilder = project.getCluster().getRexBuilder(); + final List projects = new ArrayList<>(); + + final CaseExpressionUnwrapper visitor = new CaseExpressionUnwrapper(rexBuilder); + for (RexNode rexNode : project.getProjects()) { + projects.add(rexNode.accept(visitor)); + } + return project.copy(project.getTraitSet(), inputRel, projects, project.getRowType()); + } + + @Override + public RelNode visit(LogicalJoin join) { + final RelNode left = join.getLeft().accept(this); + final RelNode right = join.getRight().accept(this); + final RexBuilder rexBuilder = join.getCluster().getRexBuilder(); + + final CaseExpressionUnwrapper visitor = new CaseExpressionUnwrapper(rexBuilder); + final RexNode condition = join.getCondition().accept(visitor); + return join.copy(join.getTraitSet(), condition, left, right, + join.getJoinType(), join.isSemiJoinDone()); + } + + public static final class CaseExpressionUnwrapper extends RexShuttle { + private final RexBuilder rexBuilder; + + public CaseExpressionUnwrapper(RexBuilder rexBuilder) { + this.rexBuilder = rexBuilder; + } + + @Override + public RexNode visitCall(RexCall call) { + boolean foundNestedCase = false; + if (call.getOperator() == SqlStdOperatorTable.CASE) { + /* + * Flatten the children to see if we have nested case expressions + * Case operands are always 2n + 1, and they are like: + * (RexNode -> When expression + * RexNode -> Then expression) repeats n times + * RexNode -> Else expression + */ + List operands = new ArrayList<>(call.getOperands()); + + /* + * Flatten all ELSE expressions. Anything nested under ELSE expression can be + * pulled up to the parent case. e.g. 
+ * + * CASE WHEN col1 = 'abc' THEN 0 + * WHEN col1 = 'def' THEN 1 + * ELSE (CASE WHEN col2 = 'ghi' THEN -1 + * ELSE (CASE WHEN col3 = 'jkl' THEN -2 + * ELSE -3)) + * + * can be rewritten as: + * CASE WHEN col1 = 'abc' THEN 0 + * WHEN col1 = 'def' THEN 1 + * WHEN col2 = 'ghi' THEN -1 + * WHEN col3 = 'jkl' THEN -2 + * ELSE -3 + */ + + boolean unwrapped = true; + while (unwrapped) { // Recursively unwrap the ELSE expression + List elseOperators = new ArrayList<>(); + RexNode elseExpr = operands.get(operands.size() - 1); + if (elseExpr instanceof RexCall) { + RexCall elseCall = ((RexCall) elseExpr); + if (elseCall.getOperator() == SqlStdOperatorTable.CASE) { + foundNestedCase = true; + elseOperators.addAll(elseCall.getOperands()); + } + } + if (elseOperators.isEmpty()) { + unwrapped = false; + } else { + operands.remove(operands.size() - 1); // Remove the ELSE expression and replace with the unwrapped one + operands.addAll(elseOperators); + } + } + if (foundNestedCase) { + return rexBuilder.makeCall(SqlStdOperatorTable.CASE, operands); + } + } + return super.visitCall(call); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/MetadataRefreshPlanBuilderFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/MetadataRefreshPlanBuilderFactory.java index ab06c59a9e..80ff26b78d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/MetadataRefreshPlanBuilderFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/MetadataRefreshPlanBuilderFactory.java @@ -38,10 +38,13 @@ import com.dremio.exec.store.iceberg.SupportsInternalIcebergTable; import com.dremio.service.namespace.NamespaceKey; +import io.opentelemetry.instrumentation.annotations.WithSpan; + public class MetadataRefreshPlanBuilderFactory { private static final Logger logger = LoggerFactory.getLogger(MetadataRefreshPlanBuilderFactory.class); + @WithSpan public static MetadataRefreshPlanBuilder getPlanBuilder(SqlHandlerConfig config, SqlRefreshDataset sqlRefreshDataset) throws IOException { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/PrelTransformer.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/PrelTransformer.java index 2a5fddfec0..b9c4c4f4e5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/PrelTransformer.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/PrelTransformer.java @@ -21,8 +21,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -53,9 +55,9 @@ import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.sql2rel.DremioRelDecorrelator; -import org.apache.calcite.sql2rel.RelFieldTrimmer; import org.apache.calcite.tools.Program; import org.apache.calcite.tools.Programs; +import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.tools.RelConversionException; import org.apache.calcite.tools.RuleSet; import org.apache.calcite.tools.ValidationException; @@ -88,12 +90,14 @@ import com.dremio.exec.planner.acceleration.substitution.SubstitutionInfo; import com.dremio.exec.planner.common.ContainerRel; import com.dremio.exec.planner.common.MoreRelOptUtil; +import com.dremio.exec.planner.common.RelNodeCounter; import 
com.dremio.exec.planner.cost.DremioCost; import com.dremio.exec.planner.logical.ConstExecutor; import com.dremio.exec.planner.logical.DremioRelFactories; import com.dremio.exec.planner.logical.InvalidViewRel; import com.dremio.exec.planner.logical.PreProcessRel; import com.dremio.exec.planner.logical.ProjectRel; +import com.dremio.exec.planner.logical.RedundantSortEliminator; import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.logical.ScreenRel; import com.dremio.exec.planner.logical.ValuesRewriteShuttle; @@ -108,10 +112,12 @@ import com.dremio.exec.planner.physical.visitor.ComplexToJsonPrelVisitor; import com.dremio.exec.planner.physical.visitor.EmptyPrelPropagator; import com.dremio.exec.planner.physical.visitor.ExcessiveExchangeIdentifier; +import com.dremio.exec.planner.physical.visitor.ExpandNestedFunctionVisitor; import com.dremio.exec.planner.physical.visitor.FinalColumnReorderer; import com.dremio.exec.planner.physical.visitor.GlobalDictionaryVisitor; import com.dremio.exec.planner.physical.visitor.InsertHashProjectVisitor; import com.dremio.exec.planner.physical.visitor.InsertLocalExchangeVisitor; +import com.dremio.exec.planner.physical.visitor.JoinConditionValidatorVisitor; import com.dremio.exec.planner.physical.visitor.JoinPrelRenameVisitor; import com.dremio.exec.planner.physical.visitor.RelUniqifier; import com.dremio.exec.planner.physical.visitor.RuntimeFilterDecorator; @@ -145,6 +151,9 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Collection of Rel, Drel and Prel transformations used in various planning cycles. */ @@ -153,6 +162,23 @@ public class PrelTransformer { @SuppressWarnings("Slf4jIllegalPassedClass") // intentionally using logger from another class private static final org.slf4j.Logger CALCITE_LOGGER = org.slf4j.LoggerFactory.getLogger(RelOptPlanner.class); + private static class TransformationContext { + private RelNode relNode; + private Map timeBreakdownPerRule; + TransformationContext(RelNode relNode, Map timeBreakdownPerRule) { + this.relNode = relNode; + this.timeBreakdownPerRule = timeBreakdownPerRule; + } + + public RelNode getRelNode() { + return relNode; + } + + public Map getTimeBreakdownPerRule() { + return timeBreakdownPerRule; + } + } + protected static void log(final PlannerType plannerType, final PlannerPhase phase, final RelNode node, final Logger logger, Stopwatch watch) { if (logger.isDebugEnabled()) { @@ -191,10 +217,24 @@ public static ConvertedRelNode validateAndConvert(SqlHandlerConfig config, SqlNo final SqlNode validated = validatedTypedSqlNode.getKey(); final RelNode rel = convertToRel(config, sqlValidatorAndToRelContext, validated, relTransformer); - final RelNode preprocessedRel = preprocessNode(config.getContext().getOperatorTable(), rel); + config.getObserver().setNumJoinsInUserQuery(countRelNodesInPlan(rel, config, new RelNodeCounter.LogicalJoinCounter())); + final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); + final RelNode redundantSortsRemoved = plannerSettings.isSortInJoinRemoverEnabled() ? 
RedundantSortEliminator.apply(rel) : rel; + final RelNode preprocessedRel = preprocessNode(config.getContext().getOperatorTable(), redundantSortsRemoved); return new ConvertedRelNode(preprocessedRel, validatedTypedSqlNode.getValue()); } + private static Integer countRelNodesInPlan(final RelNode plan, final SqlHandlerConfig config, final RelNodeCounter counter) { + final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); + final boolean verboseProfile = plannerSettings.getOptions().getOption(PlannerSettings.VERBOSE_PROFILE); + Integer ret = null; + if (verboseProfile) { + plan.accept(counter); + ret = counter.getCount(); + } + return ret; + } + private static Pair validateNode(SqlHandlerConfig config, SqlValidatorAndToRelContext sqlValidatorAndToRelContext, SqlNode sqlNode) throws ValidationException, ForemanSetupException { @@ -224,20 +264,6 @@ private static Pair validateNode(SqlHandlerConfig config, return typedSqlNode; } - public static RelNode trimFields(final RelNode relNode, boolean shouldLog, boolean isRelPlanning, boolean trimProjectedColumn) { - final Stopwatch w = Stopwatch.createStarted(); - final RelFieldTrimmer trimmer = DremioFieldTrimmer.of(relNode.getCluster(), isRelPlanning, trimProjectedColumn); - final RelNode trimmed = trimmer.trim(relNode); - if(shouldLog) { - log(PlannerType.HEP, PlannerPhase.FIELD_TRIMMING, trimmed, logger, w); - } - return trimmed; - } - - public static RelNode trimFields(final RelNode relNode, boolean shouldLog, boolean isRelPlanning) { - return trimFields(relNode, shouldLog, isRelPlanning, true); - } - /** * Given a relNode tree for SELECT statement, convert to Dremio Logical RelNode tree. * @param relNode @@ -245,17 +271,29 @@ public static RelNode trimFields(final RelNode relNode, boolean shouldLog, boole * @throws SqlUnsupportedException */ public static Rel convertToDrel(SqlHandlerConfig config, final RelNode relNode) throws SqlUnsupportedException { - try { final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); - final RelNode trimmed = trimFields(relNode, true, true); - final RelNode rangeConditionRewrite = trimmed.accept(new RangeConditionRewriteVisitor(plannerSettings)); + final RelBuilder relBuilder = DremioRelFactories.CALCITE_LOGICAL_BUILDER.create(relNode.getCluster(), null); + final RelNode trimmed = new DremioFieldTrimmer( + relBuilder, + DremioFieldTrimmerParameters + .builder() + .shouldLog(true) + .isRelPlanning(true) + .trimProjectedColumn(true) + .trimJoinBranch(plannerSettings.trimJoinBranch()) + .build()) + .trim(relNode); + final RelNode flattenCaseExprs = plannerSettings.getOptions().getOption(PlannerSettings.FLATTEN_CASE_EXPRS_ENABLED) ? 
+ FlattenCaseExpressionsVisitor.simplify(trimmed) : + trimmed; + final RelNode rangeConditionRewrite = flattenCaseExprs.accept(new RangeConditionRewriteVisitor(plannerSettings)); final RelNode projPush = transform(config, PlannerType.HEP_AC, PlannerPhase.PROJECT_PUSHDOWN, rangeConditionRewrite, rangeConditionRewrite.getTraitSet(), true); - final RelNode expandOperators = expandOperators(config,projPush,plannerSettings); - final RelNode projPull = projectPullUp(config,expandOperators,plannerSettings); - final RelNode preLog = transform(config, PlannerType.HEP_AC, PlannerPhase.PRE_LOGICAL, projPull, projPull.getTraitSet(), true); - final RelNode preLogTransitive = getPreLogicalTransitive(config, preLog, plannerSettings); - final RelNode logical = transform(config, PlannerType.VOLCANO, PlannerPhase.LOGICAL, preLogTransitive, preLogTransitive.getTraitSet().plus(Rel.LOGICAL), true); + final RelNode projPull = projectPullUp(config, projPush, plannerSettings); + final RelNode filterConstantPushdown = transform(config, PlannerType.HEP_AC, PlannerPhase.FILTER_CONSTANT_RESOLUTION_PUSHDOWN, projPull, projPull.getTraitSet(), true); + final RelNode transitiveFilterPushdown = transitiveFilterPushdown(config, filterConstantPushdown, plannerSettings); + final RelNode preLog = transform(config, PlannerType.HEP_AC, PlannerPhase.PRE_LOGICAL, transitiveFilterPushdown, transitiveFilterPushdown.getTraitSet(), true); + final RelNode logical = transform(config, PlannerType.VOLCANO, PlannerPhase.LOGICAL, preLog, preLog.getTraitSet().plus(Rel.LOGICAL), true); final RelNode rowCountAdjusted = getRowCountAdjusted(logical, plannerSettings); final RelNode postLogical = getPostLogical(config, rowCountAdjusted, plannerSettings); final RelNode nestedProjectPushdown = getNestedProjectPushdown(config, postLogical, plannerSettings); @@ -284,31 +322,24 @@ public static Rel convertToDrel(SqlHandlerConfig config, final RelNode relNode) } } - private static RelNode expandOperators(SqlHandlerConfig config, RelNode projPush, PlannerSettings plannerSettings){ - if(plannerSettings.isExpandOperatorsEnabled()){ - return transform(config, PlannerType.HEP_AC, PlannerPhase.EXPAND_OPERATORS, projPush, projPush.getTraitSet(), true); - }else{ - return projPush; - } - } - private static RelNode projectPullUp(SqlHandlerConfig config, RelNode expandOperators, PlannerSettings plannerSettings){ + private static RelNode projectPullUp(SqlHandlerConfig config, RelNode projPush, PlannerSettings plannerSettings){ if(plannerSettings.isProjectPullUpEnabled()){ - return transform(config, PlannerType.HEP_AC, PlannerPhase.PROJECT_PULLUP, expandOperators, expandOperators.getTraitSet(), true); + return transform(config, PlannerType.HEP_AC, PlannerPhase.PROJECT_PULLUP, projPush, projPush.getTraitSet(), true); }else{ - return expandOperators; + return projPush; } } - private static RelNode getPreLogicalTransitive(SqlHandlerConfig config, RelNode preLog, PlannerSettings plannerSettings) { + @WithSpan("PrelTransformer.transitiveFilterPushdown") + private static RelNode transitiveFilterPushdown(SqlHandlerConfig config, RelNode filterPushdown, PlannerSettings plannerSettings) { if (plannerSettings.isTransitiveFilterPushdownEnabled()) { Stopwatch watch = Stopwatch.createStarted(); - final RelNode joinPullFilters = preLog.accept(new JoinPullTransitiveFiltersVisitor()); + final RelNode joinPullFilters = filterPushdown.accept(new JoinPullTransitiveFiltersVisitor()); log(PlannerType.HEP, PlannerPhase.TRANSITIVE_PREDICATE_PULLUP, joinPullFilters, logger, watch); - 
config.getObserver().planRelTransform(PlannerPhase.TRANSITIVE_PREDICATE_PULLUP, null, preLog, joinPullFilters, watch.elapsed(TimeUnit.MILLISECONDS)); - return transform(config, PlannerType.HEP_AC, PlannerPhase.PRE_LOGICAL_TRANSITIVE, joinPullFilters, joinPullFilters.getTraitSet(), true); - } else { - return preLog; + config.getObserver().planRelTransform(PlannerPhase.TRANSITIVE_PREDICATE_PULLUP, null, filterPushdown, joinPullFilters, watch.elapsed(TimeUnit.MILLISECONDS), Collections.emptyMap()); + return transform(config, PlannerType.HEP_AC, PlannerPhase.FILTER_CONSTANT_RESOLUTION_PUSHDOWN, joinPullFilters, joinPullFilters.getTraitSet(), true); } + return filterPushdown; } private static RelNode getRowCountAdjusted(RelNode logical, PlannerSettings plannerSettings) { @@ -338,8 +369,7 @@ private static RelNode getPostLogical(SqlHandlerConfig config, RelNode rowCountA relWithoutMultipleConstantGroupKey = rowCountAdjusted; } final RelNode decorrelatedNode = DremioRelDecorrelator.decorrelateAndValidateQuery(relWithoutMultipleConstantGroupKey, DremioRelFactories.LOGICAL_BUILDER.create(relWithoutMultipleConstantGroupKey.getCluster(), null), true); - final RelNode sortRemoved = (plannerSettings.isSortInJoinRemoverEnabled())? DremioSortInJoinRemover.remove(decorrelatedNode): decorrelatedNode; - final RelNode jdbcPushDown = transform(config, PlannerType.HEP_AC, PlannerPhase.RELATIONAL_PLANNING, sortRemoved, sortRemoved.getTraitSet().plus(Rel.LOGICAL), true); + final RelNode jdbcPushDown = transform(config, PlannerType.HEP_AC, PlannerPhase.RELATIONAL_PLANNING, decorrelatedNode, decorrelatedNode.getTraitSet().plus(Rel.LOGICAL), true); return jdbcPushDown.accept(new ShortenJdbcColumnAliases()).accept(new ConvertJdbcLogicalToJdbcRel(DremioRelFactories.LOGICAL_BUILDER)); } @@ -399,17 +429,28 @@ public static Rel convertToDrelMaintainingNames( * @throws SqlUnsupportedException */ public static Rel convertToDrel(SqlHandlerConfig config, RelNode relNode, RelDataType validatedRowType) throws RelConversionException, SqlUnsupportedException { - + final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); Rel convertedRelNode = convertToDrel(config, relNode); - final DremioFieldTrimmer trimmer = DremioFieldTrimmer.of(DremioRelFactories.LOGICAL_BUILDER.create(convertedRelNode.getCluster(), null)); - Rel trimmedRelNode = (Rel) trimmer.trim(convertedRelNode); + RelBuilder relBuilder = DremioRelFactories.LOGICAL_BUILDER.create(convertedRelNode.getCluster(), null); + // We might have to trim again after decorrelation ... + DremioFieldTrimmer trimmer = new DremioFieldTrimmer( + relBuilder, + DremioFieldTrimmerParameters + .builder() + .shouldLog(true) + .isRelPlanning(false) + .trimProjectedColumn(true) + .trimJoinBranch(plannerSettings.trimJoinBranch()) + .build()); + // Trimming twice, since some columns weren't being trimmed + Rel trimmedRelNode = (Rel) trimmer.trim(trimmer.trim(convertedRelNode)); // Put a non-trivial topProject to ensure the final output field name is preserved, when necessary. 
trimmedRelNode = addRenamedProject(config, trimmedRelNode, validatedRowType); trimmedRelNode = SqlHandlerUtil.storeQueryResultsIfNeeded(config.getConverter().getParserConfig(), - config.getContext(), trimmedRelNode); + config.getContext(), trimmedRelNode); return new ScreenRel(trimmedRelNode.getCluster(), trimmedRelNode.getTraitSet(), trimmedRelNode); } @@ -494,7 +535,7 @@ public static RelNode transform( final RuleSet rules = config.getRules(phase); final RelTraitSet toTraits = targetTraits.simplify(); final RelOptPlanner planner; - final Supplier<RelNode> toPlan; + final Supplier<TransformationContext> toPlan; final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); CALCITE_LOGGER.trace("Starting Planning for phase {} with target traits {}.", phase, targetTraits); @@ -513,7 +554,7 @@ hepPgmBldr.addMatchLimit(matchLimit); MatchCountListener matchCountListener = new MatchCountListener(relNodeCount, rulesCount, matchLimit, - plannerSettings.getOptions().getOption(PlannerSettings.VERBOSE_RULE_MATCH_LISTENER)); + plannerSettings.getOptions().getOption(PlannerSettings.VERBOSE_PROFILE)); hepPgmBldr.addMatchOrder(plannerType.getMatchOrder()); if(plannerType.isCombineRules()) { @@ -542,11 +583,12 @@ planner = hepPlanner; toPlan = () -> { RelNode relNode = hepPlanner.findBestExp(); + Map<String, Long> timeBreakdownPerRule = matchCountListener.getRuleToTotalTime(); if (log) { logger.debug("Phase: {}", phase); logger.debug(matchCountListener.toString()); } - return relNode; + return new TransformationContext(relNode, timeBreakdownPerRule); }; } else { // as weird as it seems, the cluster's only planner is the volcano planner. @@ -577,11 +619,12 @@ toPlan = () -> { try { RelNode relNode = program.run(volcanoPlanner, input, toTraits, ImmutableList.of(), ImmutableList.of()); + Map<String, Long> timeBreakdownPerRule = volcanoPlanner.getMatchCountListener().getRuleToTotalTime(); if (log) { logger.debug("Phase: {}", phase); logger.debug(volcanoPlanner.getMatchCountListener().toString()); } - return relNode; + return new TransformationContext(relNode, timeBreakdownPerRule); } finally { substitutions.setEnabled(false); } @@ -605,11 +648,16 @@ public static RelTransformer getPostSubstitutionTransformer(SqlHandlerConfig con }; } - private static RelNode doTransform(SqlHandlerConfig config, final PlannerType plannerType, final PlannerPhase phase, final RelOptPlanner planner, final RelNode input, boolean log, Supplier<RelNode> toPlan) { + @WithSpan("transform-plan") + private static RelNode doTransform(SqlHandlerConfig config, final PlannerType plannerType, final PlannerPhase phase, + final RelOptPlanner planner, final RelNode input, boolean log, + Supplier<TransformationContext> toPlan) { + Span.current().setAttribute("dremio.planner.phase", phase.name()); final Stopwatch watch = Stopwatch.createStarted(); try { - final RelNode intermediateNode = toPlan.get(); + final TransformationContext context = toPlan.get(); + final RelNode intermediateNode = context.getRelNode(); final RelNode output; if (phase == PlannerPhase.LOGICAL) { output = processBoostedMaterializations(config, intermediateNode); @@ -619,7 +667,8 @@ private static RelNode doTransform(SqlHandlerConfig config, final PlannerType pl if (log) { log(plannerType, phase, output, logger, watch); - config.getObserver().planRelTransform(phase, planner, input, output, watch.elapsed(TimeUnit.MILLISECONDS)); + config.getObserver().planRelTransform(phase, planner, input, output, watch.elapsed(TimeUnit.MILLISECONDS),
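+ /* Illustration only (not part of this change): timeBreakdownPerRule, the extra
+  * argument passed below, is assumed to map each rule's name to the total milliseconds
+  * spent in it, per MatchCountListener.getRuleToTotalTime(). An AttemptObserver could
+  * surface the slowest rules of a phase like so:
+  *
+  *   timeBreakdownPerRule.entrySet().stream()
+  *       .sorted(Map.Entry.<String, Long>comparingByValue().reversed())
+  *       .limit(5)
+  *       .forEach(e -> logger.debug("{}: {} ms in {}", phase, e.getValue(), e.getKey()));
+  */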
context.getTimeBreakdownPerRule()); } CALCITE_LOGGER.trace("Completed Phase: {}.", phase); @@ -629,7 +678,8 @@ private static RelNode doTransform(SqlHandlerConfig config, final PlannerType pl // log our input state as output anyway so we can ensure that we have details. try { log(plannerType, phase, input, logger, watch); - config.getObserver().planRelTransform(phase, planner, input, input, watch.elapsed(TimeUnit.MILLISECONDS)); + config.getObserver().planRelTransform(phase, planner, input, input, watch.elapsed(TimeUnit.MILLISECONDS), + Collections.emptyMap()); } catch (Throwable unexpected) { t.addSuppressed(unexpected); } @@ -925,6 +975,17 @@ public static Pair applyPhysicalPrelTransformations(SqlHandlerConf } } + /* 10.0) + * Expand nested functions. Need to do that here at the end of planning + * so that we don't merge the projects back again. + */ + phyRelNode = ExpandNestedFunctionVisitor.pushdownNestedFunctions(phyRelNode, queryOptions); + + /* + * Validate the join conditions after all Prel transformations. + */ + phyRelNode = JoinConditionValidatorVisitor.validate(phyRelNode, queryOptions); + final String textPlan; if (logger.isDebugEnabled() || config.getObserver() != null) { textPlan = PrelSequencer.setPlansWithIds(phyRelNode, SqlExplainLevel.ALL_ATTRIBUTES, config.getObserver(), finalPrelTimer.elapsed(TimeUnit.MILLISECONDS)); @@ -936,6 +997,8 @@ public static Pair applyPhysicalPrelTransformations(SqlHandlerConf } config.getObserver().finalPrel(phyRelNode); + Integer joins = countRelNodesInPlan(phyRelNode, config, new RelNodeCounter.JoinPrelCounter()); + config.getObserver().setNumJoinsInFinalPrel(joins); return Pair.of(phyRelNode, textPlan); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RangeConditionRewriteVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RangeConditionRewriteVisitor.java index fe90964d32..75f1394942 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RangeConditionRewriteVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RangeConditionRewriteVisitor.java @@ -581,6 +581,7 @@ private RelNode buildIndexTable(LogicalJoin join, return logicalSort; } + @SuppressWarnings("FallThrough") // FIXME: remove suppression by properly handling switch fallthrough private RexNode getStartValueNode(RelDataType type, RexBuilder rexBuilder, double intervalWidth, RexNode zeroInputRef) { switch (type.getSqlTypeName().getFamily()) { case NUMERIC: @@ -599,7 +600,7 @@ private RexNode getStartValueNode(RelDataType type, RexBuilder rexBuilder, doubl case TIME: // TODO case TIMESTAMP: // TODO default: - throw new RuntimeException(String.format("Unsupported type: %s", type.toString())); + throw new RuntimeException("Unsupported type: " + type); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RefreshDatasetHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RefreshDatasetHandler.java index 275f8789b9..a6e94280e9 100--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RefreshDatasetHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/RefreshDatasetHandler.java @@ -24,6 +24,7 @@ import com.dremio.exec.physical.PhysicalPlan; import com.dremio.exec.physical.base.PhysicalOperator; +import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.physical.Prel; import
com.dremio.exec.planner.sql.SqlExceptionHelper; @@ -36,6 +37,8 @@ import com.dremio.exec.store.pojo.PojoDataType; import com.google.common.base.Preconditions; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Handler for internal {@link SqlRefreshDataset} command. */ @@ -48,6 +51,7 @@ public RefreshDatasetHandler() { logger.info("Initialised {}", this.getClass().getName()); } + @WithSpan @Override public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode) throws Exception { assertRefreshEnabled(config); @@ -93,4 +97,8 @@ public String getTextPlan() { return textPlan; } + @Override + public Rel getLogicalPlan() { + throw new UnsupportedOperationException(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/SqlHandlerUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/SqlHandlerUtil.java index 76f02b2905..b6ec08e202 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/SqlHandlerUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/SqlHandlerUtil.java @@ -57,12 +57,14 @@ import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.tools.ValidationException; import org.apache.calcite.util.TimestampString; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.text.StrTokenizer; import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.protos.QueryIdHelper; import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogOptions; import com.dremio.exec.catalog.CatalogUser; import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; @@ -99,6 +101,9 @@ public class SqlHandlerUtil { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SqlHandlerUtil.class); + private static final String UNKNOWN_SOURCE_TYPE = "Unknown"; + public static final String PLANNER_SOURCE_TARGET_SOURCE_TYPE_SPAN_ATTRIBUTE_NAME = "dremio.planner.source.target.source_type"; + /** * Resolve final RelNode of the new table (or view) for given table field list and new table definition. * @@ -158,7 +163,7 @@ public static void validateRowType(boolean isNewTableView, List tableFie // CTAS's query field list shouldn't have "*" when table's field list is specified. for (String field : validatedRowtype.getFieldNames()) { - if (field.equals("*")) { + if ("*".equals(field)) { final String tblType = isNewTableView ? 
"view" : "table"; throw UserException.validationError() .message("%s's query field list has a '*', which is invalid when %s's field list is specified.", @@ -582,4 +587,26 @@ private static String removeEndingZeros(String timestamp) { return timestamp.substring(0, index); } + + public static void validateSupportForVersionedReflections(String source, Catalog catalog, OptionManager optionManager) { + if (CatalogUtil.requestedPluginSupportsVersionedTables(source, catalog)) { + if (!optionManager.getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)) { + throw UserException.unsupportedError() + .message("Versioned source does not support reflection.") + .build(logger); + } + } + } + + public static String getSourceType(Catalog catalog, String sourceName) { + if (!StringUtils.isEmpty(sourceName)) { + try { + return catalog.getSource(sourceName).getClass().getSimpleName(); + } catch (UserException e) { + logger.debug("Unable to get source {} from the catalog: {}", sourceName, e.getOriginalMessage()); + } + } + + return UNKNOWN_SOURCE_TYPE; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ViewAccessEvaluator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ViewAccessEvaluator.java index 24168f1ba0..e12967eee1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ViewAccessEvaluator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/ViewAccessEvaluator.java @@ -19,16 +19,20 @@ import java.util.List; import java.util.Objects; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelShuttleImpl; +import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.CatalogIdentity; import com.dremio.exec.catalog.CatalogUser; import com.dremio.exec.catalog.DremioCatalogReader; import com.dremio.exec.catalog.DremioPrepareTable; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.planner.DremioVolcanoPlanner; +import com.dremio.exec.planner.ExceptionUtils; import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.logical.ViewTable; import com.dremio.exec.planner.sql.SqlValidatorAndToRelContext; @@ -38,104 +42,160 @@ import com.dremio.service.users.UserNotFoundException; /** - * Validates dataset access by traversing parent datasets + * Validates dataset access by traversing parent datasets. + * This validation is needed in case a default raw reflection in substituted during convertToRel. + * Since this uses the same caching catalog from convertToRel, many tables may already be cached. */ -public class ViewAccessEvaluator implements Runnable { - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewAccessEvaluator.class); +public interface ViewAccessEvaluator extends Runnable, AutoCloseable { - final CountDownLatch latch; - final RelNode rel; - final SqlHandlerConfig config; - Exception exception; - - public ViewAccessEvaluator(RelNode rel, SqlHandlerConfig config) { - this.rel = rel; - this.config = config; - latch = new CountDownLatch(1); - exception = null; + /** + * Returns a ViewAccessEvaluator that asynchronously checks view access only if a default raw reflection + * was substituted into the query during convertToRel phase. 
+ * + * @param config + * @param convertedRelNode + * @return + */ + static ViewAccessEvaluator createAsyncEvaluator(SqlHandlerConfig config, ConvertedRelNode convertedRelNode) { + if (config.getConverter().getViewExpansionContext().isSubstitutedWithDRR()) { + final RelNode convertedRelWithExpansionNodes = ((DremioVolcanoPlanner) convertedRelNode.getConvertedNode().getCluster().getPlanner()).getOriginalRoot(); + ViewAccessEvaluator vae = new ViewAccessEvaluatorImpl(convertedRelWithExpansionNodes, config); + config.getContext().getExecutorService().submit(vae); + return vae; + } else { + return new ViewAccessEvaluatorNoOp(); + } } - public CountDownLatch getLatch() { - return latch; - } + class ViewAccessEvaluatorNoOp implements ViewAccessEvaluator { + @Override + public void run() {} - public Exception getException() { - return exception; + @Override + public void close() throws Exception {} } - @Override - public void run() { - final Thread currentThread = Thread.currentThread(); - final String originalName = currentThread.getName(); - currentThread.setName(config.getContext().getQueryId() + ":foreman-access-evaluation"); - try { - final List<List<String>> topExpansionPaths = new ArrayList<>(); - rel.accept(new RelShuttleImpl() { public RelNode visit(RelNode other) { if (other instanceof ExpansionNode) { ExpansionNode expansionNode = ((ExpansionNode) other); topExpansionPaths.add(expansionNode.getPath().getPathComponents()); return other; + class ViewAccessEvaluatorImpl implements ViewAccessEvaluator { + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewAccessEvaluatorImpl.class); + + private final CountDownLatch latch; + private final RelNode rel; + private final SqlHandlerConfig config; + private Exception exception; + + ViewAccessEvaluatorImpl(RelNode rel, SqlHandlerConfig config) { + this.rel = rel; + this.config = config; + latch = new CountDownLatch(1); + exception = null; + } + + @Override + public void run() { + final Thread currentThread = Thread.currentThread(); + final String originalName = currentThread.getName(); + currentThread.setName(config.getContext().getQueryId() + ":foreman-access-evaluation"); + try { + final List<List<String>> topExpansionPaths = new ArrayList<>(); + rel.accept(new RelShuttleImpl() { + @Override + public RelNode visit(RelNode other) { + if (other instanceof ExpansionNode) { + ExpansionNode expansionNode = ((ExpansionNode) other); + topExpansionPaths.add(expansionNode.getPath().getPathComponents()); + return other; + } + return super.visit(other); } - return super.visit(other); - } - }); - if (!topExpansionPaths.isEmpty()) { - SqlValidatorAndToRelContext sqlValidatorAndToRelContext = SqlValidatorAndToRelContext.builder(config.getConverter()).build(); - final List<DremioTable> tables = new ArrayList<>(); - for (List<String> path : topExpansionPaths) { - if (!supportsVersioning(path, sqlValidatorAndToRelContext.getDremioCatalogReader())) { - DremioTable table = sqlValidatorAndToRelContext.getDremioCatalogReader().getTable(path).getTable(); - tables.add(table); - } else { - logger.warn(String.format("View access checks not enabled for versioned objects")); + }); + final String queryUser = config.getContext().getQueryUserName(); + if (!topExpansionPaths.isEmpty()) { + SqlValidatorAndToRelContext sqlValidatorAndToRelContext = SqlValidatorAndToRelContext.builder(config.getConverter()).build(); + final List<DremioTable> tables = new ArrayList<>(); + for (List<String> path : topExpansionPaths) { + if (!supportsVersioning(path, sqlValidatorAndToRelContext.getDremioCatalogReader())) { + DremioTable table = sqlValidatorAndToRelContext.getDremioCatalogReader().getTable(path).getTable(); + tables.add(table); + } else { + // RBAC not supported on versioned views, so we assume current query user can access any Arctic view + logger.trace(String.format("Access control on versioned view %s for user %s not supported", path, queryUser)); + } } + validateViewAccess(tables, sqlValidatorAndToRelContext.getDremioCatalogReader().withCheckValidity(false), queryUser); } - validateViewAccess(tables, sqlValidatorAndToRelContext.getDremioCatalogReader().withCheckValidity(false), config.getContext().getQueryUserName()); + } catch (Exception e) { + exception = e; + } finally { + latch.countDown(); + currentThread.setName(originalName); } - } catch (Exception e) { - exception = e; - } finally { - latch.countDown(); - currentThread.setName(originalName); } - } - private void validateViewAccess(List<DremioTable> tables, DremioCatalogReader catalogReader, String queryUser) { - for (DremioTable table : tables) { - DatasetConfig datasetConfig = table.getDatasetConfig(); - if (datasetConfig != null && table instanceof ViewTable) { - final CatalogIdentity viewOwner = ((ViewTable) table).getViewOwner(); - final DremioCatalogReader catalogReaderWithUser = viewOwner == null ? catalogReader : - catalogReader.withSchemaPathAndUser(table.getPath().getPathComponents(), viewOwner, false); - VirtualDataset vds = datasetConfig.getVirtualDataset(); - if (vds != null && vds.getParentsList() != null) { - validateViewAccess( - vds.getParentsList().stream() - .map(parent -> { - DremioPrepareTable dremioTable; - try { - dremioTable = catalogReaderWithUser.getTable(parent.getDatasetPathList()); - } catch (RuntimeException ex) { - if (!(ex.getCause() instanceof UserNotFoundException)) { - throw ex; + /** + * Validates view access by recursively checking access to parent datasets as the view owner. + * Views store their parent datasets in the KV store and these datasets are fully qualified so + * no context (if any on the view) needs to be applied to the catalog when looking up these datasets. + * + * @param tables + * @param catalogReader + * @param queryUser + */ + private void validateViewAccess(List<DremioTable> tables, DremioCatalogReader catalogReader, String queryUser) { + for (DremioTable table : tables) { + DatasetConfig datasetConfig = table.getDatasetConfig(); + if (datasetConfig != null && table instanceof ViewTable) { + final CatalogIdentity viewOwner = ((ViewTable) table).getViewOwner(); + final DremioCatalogReader catalogReaderWithUser = viewOwner == null ? catalogReader : + catalogReader.withSchemaPathAndUser(null, viewOwner, false); + VirtualDataset vds = datasetConfig.getVirtualDataset(); + if (vds != null && vds.getParentsList() != null) { + validateViewAccess( + vds.getParentsList().stream() + .map(parent -> { + DremioPrepareTable dremioTable; + try { + dremioTable = catalogReaderWithUser.getTable(parent.getDatasetPathList()); + } catch (RuntimeException ex) { + if (!(ex.getCause() instanceof UserNotFoundException)) { + throw ex; + } + dremioTable = catalogReader.withSchemaPathAndUser(null, new CatalogUser(queryUser), false) + .getTable(parent.getDatasetPathList()); } - dremioTable = catalogReader.withSchemaPathAndUser(table.getPath().getPathComponents(), new CatalogUser(queryUser), false) - .getTable(parent.getDatasetPathList()); - } - if (dremioTable != null) { - return dremioTable.getTable(); - } - return null; - }).filter(Objects::nonNull).collect(Collectors.toList()), - catalogReader, - queryUser); + if (dremioTable != null) { + return dremioTable.getTable(); + } + return null; + }).filter(Objects::nonNull).collect(Collectors.toList()), + catalogReader, + queryUser); + } } } } - } - private boolean supportsVersioning(List<String> expansionPath, DremioCatalogReader catalogReader) { - return catalogReader.supportsVersioning(new NamespaceKey(expansionPath)); + private boolean supportsVersioning(List<String> expansionPath, DremioCatalogReader catalogReader) { + return catalogReader.supportsVersioning(new NamespaceKey(expansionPath)); + } + + /** + * Awaits the result of view access validation. Should be called within a try-with-resources statement + * that includes physical planning. + * + * @throws Exception if SELECT permissions are missing + */ + @Override + public void close() throws Exception { + if (latch.await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS)) { + if (exception != null) { + throw exception; // Security exception + } + } else { + // Waiting on latch timed out + final long inSecs = TimeUnit.MILLISECONDS.toSeconds(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS()); + ExceptionUtils.throwUserException(String.format("Query was cancelled because view access evaluation time exceeded %d seconds", inSecs), + null, config.getContext().getPlannerSettings(), null, UserException.AttemptCompletionState.PLANNING_TIMEOUT, logger); + } + } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/CommandCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/CommandCreator.java index d7003fdb2b..204267565a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/CommandCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/CommandCreator.java @@ -25,6 +25,7 @@ import org.apache.calcite.sql.SqlSetOption; import com.dremio.common.exceptions.UserException; +import com.dremio.common.utils.protos.QueryIdHelper; import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.DremioCatalogReader; @@ -46,6 +47,7 @@ import com.dremio.exec.planner.sql.handlers.direct.AlterClearPlanCacheHandler; import com.dremio.exec.planner.sql.handlers.direct.AlterTableChangeColumnSetOptionHandler; import com.dremio.exec.planner.sql.handlers.direct.AlterTablePartitionSpecHandler; +import com.dremio.exec.planner.sql.handlers.direct.AlterTablePropertiesHandler; import com.dremio.exec.planner.sql.handlers.direct.AlterTableSetOptionHandler;
import com.dremio.exec.planner.sql.handlers.direct.AnalyzeTableStatisticsHandler; import com.dremio.exec.planner.sql.handlers.direct.ChangeColumnHandler; @@ -71,9 +73,9 @@ import com.dremio.exec.planner.sql.handlers.direct.SimpleDirectHandler; import com.dremio.exec.planner.sql.handlers.direct.SqlAlterTableToggleSchemaLearningHandler; import com.dremio.exec.planner.sql.handlers.direct.SqlDirectHandler; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; import com.dremio.exec.planner.sql.handlers.direct.TruncateTableHandler; import com.dremio.exec.planner.sql.handlers.direct.UseSchemaHandler; -import com.dremio.exec.planner.sql.handlers.direct.VacuumHandler; import com.dremio.exec.planner.sql.handlers.query.CreateTableHandler; import com.dremio.exec.planner.sql.handlers.query.DeleteHandler; import com.dremio.exec.planner.sql.handlers.query.InsertTableHandler; @@ -89,11 +91,13 @@ import com.dremio.exec.planner.sql.parser.SqlAlterTableChangeColumnSetOption; import com.dremio.exec.planner.sql.parser.SqlAlterTableDropColumn; import com.dremio.exec.planner.sql.parser.SqlAlterTablePartitionColumns; +import com.dremio.exec.planner.sql.parser.SqlAlterTableProperties; import com.dremio.exec.planner.sql.parser.SqlAlterTableSetOption; import com.dremio.exec.planner.sql.parser.SqlAlterTableToggleSchemaLearning; import com.dremio.exec.planner.sql.parser.SqlAnalyzeTableStatistics; import com.dremio.exec.planner.sql.parser.SqlCopyIntoTable; import com.dremio.exec.planner.sql.parser.SqlCreateEmptyTable; +import com.dremio.exec.planner.sql.parser.SqlCreateFolder; import com.dremio.exec.planner.sql.parser.SqlCreateFunction; import com.dremio.exec.planner.sql.parser.SqlCreateReflection; import com.dremio.exec.planner.sql.parser.SqlCreateTable; @@ -110,7 +114,6 @@ import com.dremio.exec.planner.sql.parser.SqlShowSchemas; import com.dremio.exec.planner.sql.parser.SqlTruncateTable; import com.dremio.exec.planner.sql.parser.SqlUseSchema; -import com.dremio.exec.planner.sql.parser.SqlVacuum; import com.dremio.exec.planner.sql.parser.SqlVersionBase; import com.dremio.exec.proto.ExecProtos.ServerPreparedStatementState; import com.dremio.exec.proto.UserBitShared.QueryId; @@ -136,6 +139,8 @@ import com.google.common.cache.Cache; import com.google.protobuf.InvalidProtocolBufferException; +import io.opentelemetry.api.trace.Span; + /** * Takes a request and creates the appropriate type of command. 
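 *
 * <p>New statement types plug into {@link #getSqlCommand} by mapping the parsed SqlNode
 * to a handler, for example (hypothetical node and handler names):
 * <pre>
 * } else if (sqlNode instanceof SqlMyStatement) {
 *   return direct.create(new MyStatementHandler(catalog, config));
 * }
 * </pre>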
*/ @@ -143,6 +148,10 @@ public class CommandCreator { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CommandCreator.class); private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(CommandCreator.class); + private static final String QUERY_ID_SPAN_ATTRIBUTE_NAME = "dremio.query.id"; + private static final String QUERY_KIND_SPAN_ATTRIBUTE_NAME = "dremio.query.kind"; + + private final QueryContext context; private final UserRequest request; private final AttemptObserver observer; @@ -295,6 +304,7 @@ protected ReflectionContext getReflectionContext() { return ReflectionContext.SYSTEM_USER_CONTEXT; } + @SuppressWarnings("FallThrough") // FIXME: remove suppression by properly handling switch fallthrough @VisibleForTesting CommandRunner getSqlCommand(String sql, PrepareMetadataType prepareMetadataType) { try{ @@ -317,6 +327,9 @@ CommandRunner getSqlCommand(String sql, PrepareMetadataType prepareMetadataTy final SqlNode sqlNode = parser.parse(sql); final SqlHandlerConfig config = new SqlHandlerConfig(context, parser, observer, parser.getMaterializations()); + Span.current().setAttribute(QUERY_ID_SPAN_ATTRIBUTE_NAME, QueryIdHelper.getQueryId(context.getQueryId())); + Span.current().setAttribute(QUERY_KIND_SPAN_ATTRIBUTE_NAME, SqlNodeUtil.getQueryKind(sqlNode)); + final DirectBuilder direct = new DirectBuilder(sql, sqlNode, prepareMetadataType); final AsyncBuilder async = new AsyncBuilder(sql, sqlNode, prepareMetadataType); @@ -384,6 +397,8 @@ CommandRunner getSqlCommand(String sql, PrepareMetadataType prepareMetadataTy return direct.create(new SqlAlterTableToggleSchemaLearningHandler(catalog, config)); } else if (sqlNode instanceof SqlAlterTablePartitionColumns) { return direct.create(new AlterTablePartitionSpecHandler(catalog, config)); + } else if (sqlNode instanceof SqlAlterTableProperties) { + return direct.create(new AlterTablePropertiesHandler(catalog, config)); } case INSERT: @@ -407,13 +422,13 @@ CommandRunner getSqlCommand(String sql, PrepareMetadataType prepareMetadataTy } else if (sqlNode instanceof SqlUseSchema) { return direct.create(new UseSchemaHandler(context.getSession(), catalog)); } else if (sqlNode instanceof SqlCreateReflection) { - return direct.create(new AccelCreateReflectionHandler(catalog, context.getAccelerationManager(), getReflectionContext(), context.getOptions().getOption(PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT))); + return direct.create(new AccelCreateReflectionHandler(catalog, context, getReflectionContext())); } else if (sqlNode instanceof SqlAddExternalReflection) { - return direct.create(new AccelAddExternalReflectionHandler(catalog, context.getAccelerationManager(), getReflectionContext())); + return direct.create(new AccelAddExternalReflectionHandler(catalog, context, getReflectionContext())); } else if (sqlNode instanceof SqlAccelToggle) { - return direct.create(new AccelToggleHandler(catalog, context.getAccelerationManager(), getReflectionContext())); + return direct.create(new AccelToggleHandler(catalog, context, getReflectionContext())); } else if (sqlNode instanceof SqlDropReflection) { - return direct.create(new AccelDropReflectionHandler(catalog, context.getAccelerationManager(), getReflectionContext())); + return direct.create(new AccelDropReflectionHandler(catalog, context, getReflectionContext())); } else if (sqlNode instanceof SqlForgetTable) { return direct.create(new ForgetTableHandler(catalog)); } else if (sqlNode instanceof SqlRefreshTable) { @@ -445,8 +460,8 @@ CommandRunner 
getSqlCommand(String sql, PrepareMetadataType prepareMetadataTy return direct.create(new ShowFunctionsHandler(context)); } else if (sqlNode instanceof SqlCopyIntoTable) { return async.create(new InsertTableHandler(), config); - } else if (sqlNode instanceof SqlVacuum) { - return direct.create(new VacuumHandler(catalog, config)); + } else if (sqlNode instanceof SqlCreateFolder) { + return direct.create(((SqlCreateFolder) sqlNode).toDirectHandler(context)); } // fallthrough @@ -454,6 +469,8 @@ CommandRunner getSqlCommand(String sql, PrepareMetadataType prepareMetadataTy return async.create(new NormalHandler(), config); } + } catch (UserException userException) { + throw userException; } catch(SqlUnsupportedException e) { throw UserException.unsupportedError(e) .addContext(SqlExceptionHelper.SQL_QUERY_CONTEXT, sql) diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/HandlerToPreparePlanBase.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/HandlerToPreparePlanBase.java index d64455954b..3c558bad52 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/HandlerToPreparePlanBase.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/HandlerToPreparePlanBase.java @@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.apache.calcite.plan.RelOptPlanner; @@ -179,8 +180,9 @@ public void planText(final String text, final long millisTaken) { } @Override - public void planRelTransform(final PlannerPhase phase, final RelOptPlanner planner, final RelNode before, final RelNode after, final long millisTaken) { - calls.add(observer -> observer.planRelTransform(phase, planner, before, after, millisTaken)); + public void planRelTransform(final PlannerPhase phase, final RelOptPlanner planner, final RelNode before, + final RelNode after, final long millisTaken, Map timeBreakdownPerRule) { + calls.add(observer -> observer.planRelTransform(phase, planner, before, after, millisTaken, timeBreakdownPerRule)); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/ServerMetaProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/ServerMetaProvider.java index 581f3f5b79..4b0611d32b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/ServerMetaProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/commands/ServerMetaProvider.java @@ -159,8 +159,7 @@ private static final Iterable getDrill10SupportedConvertOps() { if (TypeCastRules.isCastable(from, to)) { if (from == MinorType.DECIMAL || to == MinorType.DECIMAL) { addDrill10DecimalConvertOps(supportedConvertedOps, from, to); - } - else { + } else { supportedConvertedOps.add(ConvertSupport.newBuilder().setFrom(from).setTo(to).build()); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelAddExternalReflectionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelAddExternalReflectionHandler.java index 858b0882b3..699693509b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelAddExternalReflectionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelAddExternalReflectionHandler.java @@ -21,12 +21,19 @@ import org.apache.calcite.sql.SqlIdentifier; import 
org.apache.calcite.sql.SqlNode; +import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.ops.QueryContext; import com.dremio.exec.ops.ReflectionContext; import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.SchemaUtilities.TableWithPath; import com.dremio.exec.planner.sql.parser.SqlAddExternalReflection; +import com.dremio.exec.planner.sql.parser.SqlTableVersionSpec; import com.dremio.exec.store.sys.accel.AccelerationManager; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; public class AccelAddExternalReflectionHandler extends SimpleDirectHandler { @@ -35,19 +42,35 @@ public class AccelAddExternalReflectionHandler extends SimpleDirectHandler { private final Catalog catalog; private final AccelerationManager accel; private final ReflectionContext reflectionContext; + private UserSession userSession; + private OptionManager optionManager; - public AccelAddExternalReflectionHandler(Catalog catalog, AccelerationManager accel, ReflectionContext reflectionContext) { + public AccelAddExternalReflectionHandler(Catalog catalog, QueryContext queryContext, ReflectionContext reflectionContext) { this.catalog = catalog; - this.accel = accel; + this.accel = queryContext.getAccelerationManager(); this.reflectionContext = reflectionContext; + this.userSession = queryContext.getSession(); + this.optionManager = queryContext.getOptions(); } @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlAddExternalReflection addExternalReflection = SqlNodeUtil.unwrap(sqlNode, SqlAddExternalReflection.class); + final NamespaceKey queryPath = catalog.resolveSingle(new NamespaceKey(addExternalReflection.getTblName().names)); + if (CatalogUtil.requestedPluginSupportsVersionedTables(queryPath.getRoot(), catalog)) { + throw UserException.unsupportedError() + .message("External reflections are not supported on versioned source %s", queryPath.getRoot()) + .build(logger); + } + final NamespaceKey targetPath = catalog.resolveSingle(new NamespaceKey(addExternalReflection.getTargetTable().names)); + if (CatalogUtil.requestedPluginSupportsVersionedTables(targetPath.getRoot(), catalog)) { + throw UserException.unsupportedError() + .message("External reflections are not supported on versioned source %s", targetPath.getRoot()) + .build(logger); + } final SqlIdentifier name = addExternalReflection.getName(); - final TableWithPath table = SchemaUtilities.verify(catalog, addExternalReflection.getTblName()); - final TableWithPath targetTable = SchemaUtilities.verify(catalog, addExternalReflection.getTargetTable()); + final TableWithPath table = SchemaUtilities.verify(catalog, addExternalReflection.getTblName(), userSession, SqlTableVersionSpec.NOT_SPECIFIED, optionManager); + final TableWithPath targetTable = SchemaUtilities.verify(catalog, addExternalReflection.getTargetTable(), userSession, SqlTableVersionSpec.NOT_SPECIFIED, optionManager); accel.addExternalReflection(name.getSimple(), table.getPath(), targetTable.getPath(), reflectionContext); return Collections.singletonList(SimpleCommandResult.successful("External reflection added.")); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelCreateReflectionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelCreateReflectionHandler.java index 
e811e4e079..69daca0695 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelCreateReflectionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelCreateReflectionHandler.java @@ -32,9 +32,10 @@ import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.SqlUtils; import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.ops.QueryContext; import com.dremio.exec.ops.ReflectionContext; +import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.CalciteArrowHelper; import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.SchemaUtilities.TableWithPath; @@ -45,7 +46,8 @@ import com.dremio.exec.planner.types.JavaTypeFactoryImpl; import com.dremio.exec.store.sys.accel.AccelerationManager; import com.dremio.exec.store.sys.accel.LayoutDefinition; -import com.dremio.service.namespace.NamespaceKey; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; import com.google.common.base.Joiner; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; @@ -60,25 +62,23 @@ public class AccelCreateReflectionHandler extends SimpleDirectHandler { private final Catalog catalog; private final AccelerationManager accel; private final ReflectionContext reflectionContext; - private final boolean complexTypeSupport; + private final OptionManager optionManager; + private UserSession userSession; - public AccelCreateReflectionHandler(Catalog catalog, AccelerationManager accel, ReflectionContext reflectionContext, boolean complexTypeSupport) { + public AccelCreateReflectionHandler(Catalog catalog, + QueryContext context, + ReflectionContext reflectionContext) { this.catalog = catalog; - this.accel = accel; + this.accel = context.getAccelerationManager(); this.reflectionContext = reflectionContext; - this.complexTypeSupport = complexTypeSupport; + this.optionManager = context.getOptions(); + this.userSession = context.getSession(); } @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlCreateReflection addLayout = SqlNodeUtil.unwrap(sqlNode, SqlCreateReflection.class); - final TableWithPath table = SchemaUtilities.verify(catalog, addLayout.getTblName()); - NamespaceKey key = new NamespaceKey(table.getPath()); - if (CatalogUtil.requestedPluginSupportsVersionedTables(key.getRoot(), catalog)) { - throw UserException.unsupportedError() - .message("Source %s does not support reflection creation.", key.getRoot()) - .build(logger); - } + final TableWithPath table = SchemaUtilities.verify(catalog, addLayout.getTblName(), userSession, addLayout.getSqlTableVersionSpec(), optionManager); SqlIdentifier identifier = addLayout.getName(); String name; if(identifier != null) { @@ -91,14 +91,16 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex addLayout.isRaw() ? 
LayoutDefinition.Type.RAW : LayoutDefinition.Type.AGGREGATE, table.qualifyColumns(addLayout.getDisplayList()), qualifyColumnsWithGranularity(table.getTable(), addLayout.getDimensionList()), - qualifyColumnsWithMeasures(table.getTable(), addLayout.getMeasureList(), complexTypeSupport), + qualifyColumnsWithMeasures(table.getTable(), + addLayout.getMeasureList(), + optionManager.getOption(PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT)), table.qualifyColumns(addLayout.getSortList()), table.qualifyColumns(addLayout.getDistributionList()), table.qualifyColumns(addLayout.getPartitionList()), addLayout.getArrowCachingEnabled(), addLayout.getPartitionDistributionStrategy() ); - accel.addLayout(table.getPath(), layout, reflectionContext); + accel.addLayout(table, layout, reflectionContext); return Collections.singletonList(SimpleCommandResult.successful("Layout added.")); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelDropReflectionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelDropReflectionHandler.java index f23390e62d..c4176ddf8f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelDropReflectionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelDropReflectionHandler.java @@ -21,11 +21,14 @@ import org.apache.calcite.sql.SqlNode; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.ops.QueryContext; import com.dremio.exec.ops.ReflectionContext; import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.SchemaUtilities.TableWithPath; import com.dremio.exec.planner.sql.parser.SqlDropReflection; import com.dremio.exec.store.sys.accel.AccelerationManager; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; public class AccelDropReflectionHandler extends SimpleDirectHandler { @@ -34,18 +37,26 @@ public class AccelDropReflectionHandler extends SimpleDirectHandler { private final Catalog catalog; private final AccelerationManager accel; private final ReflectionContext reflectionContext; + private final OptionManager optionManager; + private final UserSession userSession; - public AccelDropReflectionHandler(Catalog catalog, AccelerationManager accel, ReflectionContext reflectionContext) { + public AccelDropReflectionHandler(Catalog catalog, + QueryContext queryContext, + ReflectionContext reflectionContext) { this.catalog = catalog; - this.accel = accel; + this.accel = queryContext.getAccelerationManager(); this.reflectionContext = reflectionContext; + this.userSession = queryContext.getSession(); + this.optionManager = queryContext.getOptions(); } @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlDropReflection dropReflection = SqlNodeUtil.unwrap(sqlNode, SqlDropReflection.class); - TableWithPath table = SchemaUtilities.verify(catalog, dropReflection.getTblName()); - accel.dropLayout(table.getPath(), dropReflection.getLayoutId().toString(), reflectionContext); + TableWithPath table = SchemaUtilities.verify(catalog, dropReflection.getTblName(), userSession, dropReflection.getSqlTableVersionSpec(), optionManager); + accel.dropLayout(table, + dropReflection.getLayoutId().toString(), + reflectionContext); return Collections.singletonList(SimpleCommandResult.successful("Reflection dropped.")); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelToggleHandler.java 
b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelToggleHandler.java index 6b49514cf8..ff9df2ff56 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelToggleHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AccelToggleHandler.java @@ -21,11 +21,14 @@ import org.apache.calcite.sql.SqlNode; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.ops.QueryContext; import com.dremio.exec.ops.ReflectionContext; import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.parser.SqlAccelToggle; import com.dremio.exec.store.sys.accel.AccelerationManager; import com.dremio.exec.store.sys.accel.LayoutDefinition; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; public class AccelToggleHandler extends SimpleDirectHandler { @@ -34,18 +37,25 @@ public class AccelToggleHandler extends SimpleDirectHandler { private final Catalog catalog; private final AccelerationManager accel; private final ReflectionContext reflectionContext; + private final OptionManager optionManager ; + private final UserSession userSession; - public AccelToggleHandler(Catalog catalog, AccelerationManager accel, ReflectionContext reflectionContext) { + public AccelToggleHandler(Catalog catalog, QueryContext queryContext, ReflectionContext reflectionContext) { this.catalog = catalog; - this.accel = accel; + this.accel = queryContext.getAccelerationManager(); this.reflectionContext = reflectionContext; + this.optionManager = queryContext.getOptions(); + this.userSession = queryContext.getSession(); } @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlAccelToggle toggle = SqlNodeUtil.unwrap(sqlNode, SqlAccelToggle.class); - List names = SchemaUtilities.verify(catalog, toggle.getTblName()).getPath(); - accel.toggleAcceleration(names, toggle.isRaw() ? LayoutDefinition.Type.RAW : LayoutDefinition.Type.AGGREGATE, toggle.isEnable(), reflectionContext); + SchemaUtilities.TableWithPath tableWithPath = SchemaUtilities.verify(catalog, toggle.getTblName(), userSession, toggle.getSqlTableVersionSpec(), optionManager); + + accel.toggleAcceleration(tableWithPath, + toggle.isRaw() ? LayoutDefinition.Type.RAW : LayoutDefinition.Type.AGGREGATE, toggle.isEnable(), + reflectionContext); return Collections.singletonList(SimpleCommandResult.successful(toggle.isEnable() ? "Acceleration enabled." 
: "Acceleration disabled.")); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AddColumnsHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AddColumnsHandler.java index 192153077d..06918abbca 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AddColumnsHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AddColumnsHandler.java @@ -66,7 +66,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableChangeColumnSetOptionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableChangeColumnSetOptionHandler.java index 9286edad1d..f549323ffb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableChangeColumnSetOptionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableChangeColumnSetOptionHandler.java @@ -61,7 +61,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex } final String scope = sqlColumnOption.getScope(); - if (!scope.equalsIgnoreCase("COLUMN")) { + if (!"COLUMN".equalsIgnoreCase(scope)) { throw UserException.validationError() .message("[%s] is not supported", sql) .buildSilently(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePartitionSpecHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePartitionSpecHandler.java index 72a16d4c94..656b4a9113 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePartitionSpecHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePartitionSpecHandler.java @@ -78,7 +78,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePropertiesHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePropertiesHandler.java new file mode 100644 index 0000000000..9ad6a9f86a --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTablePropertiesHandler.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2017-2019 Dremio 
Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers.direct; + +import static com.dremio.exec.planner.sql.parser.SqlAlterTableProperties.Mode.SET; +import static com.dremio.exec.planner.sql.parser.SqlAlterTableProperties.Mode.UNSET; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.apache.calcite.sql.SqlNode; + +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.SqlValidatorImpl; +import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.exec.planner.sql.handlers.query.DataAdditionCmdHandler; +import com.dremio.exec.planner.sql.parser.DmlUtils; +import com.dremio.exec.planner.sql.parser.SqlAlterTableProperties; +import com.dremio.exec.store.iceberg.IcebergUtils; +import com.dremio.options.OptionManager; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.base.Preconditions; + +public class AlterTablePropertiesHandler extends SimpleDirectHandler { + private final Catalog catalog; + private final SqlHandlerConfig config; + + public AlterTablePropertiesHandler(Catalog catalog, SqlHandlerConfig config) { + this.catalog = catalog; + this.config = config; + } + + @Override + public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode) throws Exception { + SqlAlterTableProperties sqlAlterTableProperties = SqlNodeUtil.unwrap(sqlNode, SqlAlterTableProperties.class); + QueryContext context = Preconditions.checkNotNull(config.getContext()); + OptionManager optionManager = Preconditions.checkNotNull(context.getOptions()); + SqlValidatorImpl.checkForFeatureSpecificSyntax(sqlNode, optionManager); + SqlAlterTableProperties.Mode mode = sqlAlterTableProperties.getMode(); + + IcebergUtils.validateTablePropertiesRequest(optionManager); + Map<String, String> tableProperties = IcebergUtils.convertTableProperties(sqlAlterTableProperties.getTablePropertyNameList(), sqlAlterTableProperties.getTablePropertyValueList(), mode == UNSET); + + NamespaceKey path = DmlUtils.getTablePath(catalog, sqlAlterTableProperties.getTable()); + + DremioTable table = catalog.getTableNoResolve(path); + SimpleCommandResult result = SqlHandlerUtil.validateSupportForDDLOperations(catalog, config, path, table); + + if (!result.ok) { + return Collections.singletonList(result); + } + + // TODO - add logic to actually alter the table to set/unset table properties + + DataAdditionCmdHandler.refreshDataset(catalog, path, false); + String message = ""; + if (mode == SET) { + for (Map.Entry<String, String> entry : tableProperties.entrySet()) { + message += String.format("Table Property [%s] set with value [%s]. ", entry.getKey(), entry.getValue()); + } + } else { + for (Map.Entry<String, String> entry : tableProperties.entrySet()) { + message += String.format("Table Property [%s] unset.
", entry.getKey()); + } + } + return Collections.singletonList(SimpleCommandResult.successful(message)); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableSetOptionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableSetOptionHandler.java index 36fcdf2b81..8a3f48716e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableSetOptionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/AlterTableSetOptionHandler.java @@ -60,7 +60,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex } final String scope = sqlTableOption.getScope(); - if (!scope.equalsIgnoreCase("TABLE")) { + if (!"TABLE".equalsIgnoreCase(scope)) { throw UserException.validationError() .message("[%s] is not supported", sql) .buildSilently(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ChangeColumnHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ChangeColumnHandler.java index 1958169b5d..6738c6ff1f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ChangeColumnHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ChangeColumnHandler.java @@ -85,7 +85,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CommonReflectionRoutingHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CommonReflectionRoutingHandler.java index 7d0f146534..2c4ba7f512 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CommonReflectionRoutingHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CommonReflectionRoutingHandler.java @@ -28,6 +28,7 @@ import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.parser.SqlAlterDatasetReflectionRouting; +import com.dremio.exec.planner.sql.parser.SqlTableVersionSpec; import com.dremio.resource.common.ReflectionRoutingManager; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceNotFoundException; @@ -95,7 +96,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex } public String setRoutingForTable(SqlAlterDatasetReflectionRouting reflectionRouting, String destinationName) throws Exception { - final SchemaUtilities.TableWithPath table = SchemaUtilities.verify(catalog, reflectionRouting.getName()); + final SchemaUtilities.TableWithPath table = SchemaUtilities.verify(catalog, reflectionRouting.getName(), context.getSession(), SqlTableVersionSpec.NOT_SPECIFIED, context.getOptions()); DatasetConfig datasetConfig = table.getTable().getDatasetConfig(); //set destination diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateEmptyTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateEmptyTableHandler.java index db676a054f..206da221c2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateEmptyTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateEmptyTableHandler.java @@ -20,7 +20,9 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import org.apache.calcite.sql.SqlNode; import org.apache.iceberg.PartitionSpec; @@ -71,6 +73,7 @@ public class CreateEmptyTableHandler extends SimpleDirectHandler { private final OptionManager optionManager; private final UserSession userSession; private final boolean ifNotExists; + private Map tableProperties = new HashMap<>(); public CreateEmptyTableHandler(Catalog catalog, SqlHandlerConfig config, UserSession userSession, boolean ifNotExists) { this.catalog = Preconditions.checkNotNull(catalog); @@ -156,6 +159,11 @@ protected List createEmptyTable(NamespaceKey key, String sq .buildSilently(); } + if (!(sqlCreateEmptyTable.getTablePropertyNameList() == null || sqlCreateEmptyTable.getTablePropertyNameList().isEmpty())) { + IcebergUtils.validateTablePropertiesRequest(optionManager); + tableProperties = IcebergUtils.convertTableProperties(sqlCreateEmptyTable.getTablePropertyNameList(), sqlCreateEmptyTable.getTablePropertyValueList(), false); + } + // validate if source supports providing table location DataAdditionCmdHandler.validateCreateTableLocation(this.catalog, key, sqlCreateEmptyTable); @@ -196,8 +204,7 @@ protected List createEmptyTable(NamespaceKey key, String sq if (table != null) { if(ifNotExists){ return Collections.singletonList(new SimpleCommandResult(true, String.format("Table [%s] already exists.", key))); - } - else { + } else { throw UserException.validationError() .message("A table or view with given name [%s] already exists.", key) .buildSilently(); @@ -233,7 +240,7 @@ private List createVersionedEmptyTable(NamespaceKey key, St final String sourceName = key.getRoot(); final VersionContext sessionVersion = userSession.getSessionVersionForSource(sourceName); final ResolvedVersionContext version = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(version, key.toString()); + CatalogUtil.validateResolvedVersionIsBranch(version); List columnDeclarations = SqlHandlerUtil.columnDeclarationsFromSqlNodes(sqlCreateEmptyTable.getFieldList(), sql); SqlHandlerUtil.checkForDuplicateColumns(columnDeclarations, BatchSchema.of(), sql); BatchSchema batchSchema = SqlHandlerUtil.batchSchemaFromSqlSchemaSpec(config, columnDeclarations, sql); @@ -262,8 +269,7 @@ private List createVersionedEmptyTable(NamespaceKey key, St if (table != null) { if(ifNotExists){ return Collections.singletonList(new SimpleCommandResult(true, String.format("Table [%s] already exists.", key))); - } - else { + } else { throw UserException.validationError() .message("A table with the given name already exists") .buildSilently(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateFunctionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateFunctionHandler.java index 6f45d8b0d7..87cfa6a4fd 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateFunctionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateFunctionHandler.java @@ -22,24 +22,29 @@ import java.util.Set; import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.core.Project; +import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.SqlDataTypeSpec; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.util.Pair; import com.dremio.common.exceptions.UserException; import com.dremio.common.expression.CompleteType; +import com.dremio.common.types.TypeProtos; +import com.dremio.common.types.Types; +import com.dremio.common.util.MajorTypeHelper; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.udf.CorrelatedUdfDetector; import com.dremio.exec.catalog.udf.FunctionParameterImpl; import com.dremio.exec.ops.QueryContext; -import com.dremio.exec.planner.DremioRexBuilder; import com.dremio.exec.planner.common.MoreRelOptUtil; import com.dremio.exec.planner.sql.CalciteArrowHelper; import com.dremio.exec.planner.sql.SqlConverter; @@ -47,7 +52,11 @@ import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; import com.dremio.exec.planner.sql.parser.DremioSqlColumnDeclaration; import com.dremio.exec.planner.sql.parser.SqlColumnPolicyPair; +import com.dremio.exec.planner.sql.parser.SqlComplexDataTypeSpec; +import com.dremio.exec.planner.sql.parser.SqlComplexDataTypeSpecWithDefault; import com.dremio.exec.planner.sql.parser.SqlCreateFunction; +import com.dremio.exec.planner.sql.parser.SqlFunctionReturnType; +import com.dremio.exec.planner.sql.parser.SqlReturnField; import com.dremio.exec.planner.types.SqlTypeFactoryImpl; import com.dremio.exec.store.sys.udf.FunctionOperatorTable; import com.dremio.exec.store.sys.udf.UserDefinedFunction; @@ -57,13 +66,11 @@ /** * CreateFunctionHandler */ -public class CreateFunctionHandler extends SimpleDirectHandler { - +public final class CreateFunctionHandler extends SimpleDirectHandler { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CreateFunctionHandler.class); private static final RelDataTypeFactory TYPE_FACTORY = SqlTypeFactoryImpl.INSTANCE; - private static final RexBuilder REX_BUILDER = new DremioRexBuilder(TYPE_FACTORY); - - public static final String DUPLICATE_PARAMETER_ERROR_MSG = "Parameter name %s appears more than once"; + private static final String DUPLICATE_PARAMETER_ERROR_MSG = "Parameter name %s appears more than once"; + private static final Double ONE = 1.0; private final QueryContext context; @@ -73,31 +80,151 @@ public CreateFunctionHandler(QueryContext context) { @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { - final Catalog catalog = context.getCatalog(); - final SqlCreateFunction createFunction = SqlNodeUtil.unwrap(sqlNode, SqlCreateFunction.class); - final NamespaceKey functionKey = catalog.resolveSingle(createFunction.getPath()); + 
SqlCreateFunction createFunction = SqlNodeUtil.unwrap(sqlNode, SqlCreateFunction.class); + SimpleCommandResult result = toResultImplementation(sql, createFunction); + if (!result.ok) { + throw UserException + .validationError() + .message(result.summary) + .build(); + } + + return Collections.singletonList(result); + } + + private SimpleCommandResult toResultImplementation(String sql, SqlCreateFunction createFunction) throws Exception { if (createFunction.isIfNotExists() && createFunction.shouldReplace()) { - throw UserException.validationError().message("Cannot create a user-defined function with both IF NOT EXISTS and OR REPLACE").build(logger); + return SimpleCommandResult.fail("Cannot create a user-defined function with both IF NOT EXISTS and OR REPLACE"); + } + + Catalog catalog = context.getCatalog(); + NamespaceKey functionKey = catalog.resolveSingle(createFunction.getPath()); + boolean exists = doesFunctionExist(catalog, functionKey); + if (exists && !createFunction.shouldReplace()) { + return createFunction.isIfNotExists() + ? SimpleCommandResult.successful(String.format("Function, %s, is not created as it already exists", functionKey)) + : SimpleCommandResult.fail("The function with a key, %s, already exists", functionKey); } - boolean exists = false; + UserDefinedFunction newUdf = extractUdf(context, createFunction, sql, functionKey); + + String action; + if (exists) { + action = "updated"; + catalog.updateFunction(functionKey, newUdf); + } else { + action = "created"; + catalog.createFunction(functionKey, newUdf); + } + + return SimpleCommandResult.successful(String.format("Function, %s, is %s.", functionKey, action)); + } + + private static boolean doesFunctionExist(Catalog catalog, NamespaceKey functionKey) { + boolean exists; try { exists = catalog.getFunction(functionKey) != null; - } catch (Exception ignored) {} - if (exists && !createFunction.shouldReplace()) { - if (createFunction.isIfNotExists()) { - return Collections.singletonList(SimpleCommandResult.successful(String.format("Function, %s, is not created as it already exists", functionKey))); + } catch (Exception ignored) { + exists = false; + } + + return exists; + } + + private static UserDefinedFunction extractUdf( + QueryContext context, + SqlCreateFunction createFunction, + String sql, + NamespaceKey functionKey) { + /** + * Calcite has a bug where SqlToRelConverter will mutate the SQL DOM. + * In some scenarios, such as DX-64420, + * this leads to a query that can no longer be executed. + * The solution is to serialize the query before converting it to a rel and saving it, + * so we have to keep this code at the top of the method. 
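// To make the ordering concern described just above concrete, here is a minimal,
// self-contained sketch (all names below are hypothetical stand-ins, not Dremio or
// Calcite APIs): a converter mutates its input tree as a side effect, so any
// serialized form must be captured before conversion, exactly as the handler does
// with toSqlString() before building the rel.
final class SerializeBeforeConvertSketch {
  static final class MutableSqlTree {
    String text;
    MutableSqlTree(String text) { this.text = text; }
    String toSqlString() { return text; }
    // Stands in for SqlToRelConverter's side effect: the tree is rewritten in place.
    void convertToRel() { text += " /* mutated during conversion */"; }
  }

  public static void main(String[] args) {
    MutableSqlTree expression = new MutableSqlTree("SELECT x + 1");
    String savedSql = expression.toSqlString(); // capture first, as the handler does
    expression.convertToRel();                  // mutation happens here
    System.out.println(savedSql);               // still the original, executable SQL
  }
}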
+ */ + String functionSql = createFunction.getExpression().toSqlString(CalciteSqlDialect.DEFAULT, true).getSql(); + SqlConverter sqlConverter = createConverter(context); + List arguments = extractFunctionArguments( + createFunction, + sql, + sqlConverter); + RelNode functionPlan = extractFunctionPlan( + context, + createFunction, + arguments); + + if (createFunction.isTabularFunction()) { + if (CorrelatedUdfDetector.hasCorrelatedUdf(functionPlan)) { + throw UserException + .validationError() + .message("Tabular UDFs must not be correlated.") + .build(logger); + } + } else { + RelMetadataQuery relMetadataQuery = context.getRelMetadataQuerySupplier().get(); + Double maxRowCount = relMetadataQuery.getMaxRowCount(functionPlan); + Double minRowCount = relMetadataQuery.getMinRowCount(functionPlan); + if (!ONE.equals(maxRowCount) || !ONE.equals(minRowCount)) { + throw UserException + .validationError() + .message("Scalar UDFs must return 1 row") + .build(logger); } - throw UserException.validationError().message(String.format("The function with a key, %s, already exists", functionKey)).build(logger); } - final List argList = createFunction.getFieldList().getList(); - final List convertedArgList = new ArrayList<>(); + final Pair expectedReturnRowTypeAndField = extractExpectedRowTypeAndReturnField(createFunction.getReturnType(), sql); + final RelDataType expectedReturnRowType = expectedReturnRowTypeAndField.left; + final RelDataType actualReturnRowType = functionPlan.getRowType(); + + if (expectedReturnRowType.getFieldCount() != actualReturnRowType.getFieldCount()) { + throw UserException + .validationError() + .message("Number of columns mismatched \nDefined: %s\nActual: %s", expectedReturnRowType.getFieldCount(), actualReturnRowType.getFieldCount()) + .build(logger); + } + + if (!MoreRelOptUtil.areRowTypesCompatible(expectedReturnRowType, actualReturnRowType, false, true)) { + throw UserException + .validationError() + .message("Row types are different.\nDefined: %s\nActual: %s", expectedReturnRowType, actualReturnRowType) + .build(logger); + } + + CompleteType completeReturnType = CompleteType.fromField(expectedReturnRowTypeAndField.right); + UserDefinedFunction udf = new UserDefinedFunction( + functionKey.toString(), + functionSql, + completeReturnType, + arguments, + functionKey.getPathComponents(), + new byte[]{}, + null, + null); + return udf; + } + + private static List extractFunctionArguments( + SqlCreateFunction createFunction, + String sql, + SqlConverter sqlConverter) { + List argList = createFunction.getFieldList().getList(); + Set distinctArgName = new HashSet<>(); + List convertedArgs = new ArrayList<>(); for (int i = 0 ; i < argList.size() ; i++) { List arg = ((SqlNodeList) argList.get(i)).getList(); + + // Extract the name String name = arg.get(0).toString(); - SqlDataTypeSpec dataTypeSpec = (SqlDataTypeSpec) arg.get(1); + if (!distinctArgName.add(name)) { + throw UserException.validationError() + .message(String.format(DUPLICATE_PARAMETER_ERROR_MSG, name)) + .buildSilently(); + } + + // Extract the type + SqlComplexDataTypeSpecWithDefault dataTypeSpec = (SqlComplexDataTypeSpecWithDefault) arg.get(1); Field field = SqlHandlerUtil.fieldFromSqlColDeclaration( TYPE_FACTORY, new DremioSqlColumnDeclaration( @@ -107,50 +234,134 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex null), sql); CompleteType completeType = CompleteType.fromField(field); + RelDataType relDataType = CalciteArrowHelper.toCalciteType( + field, + TYPE_FACTORY, + true); - convertedArgList.add(new 
UserDefinedFunction.FunctionArg(name, completeType)); + // Extract the default expression + SqlNode defaultExpression = null; + if (dataTypeSpec.getDefaultExpression() != null) { + defaultExpression = extractScalarExpressionFromDefaultExpression(dataTypeSpec.getDefaultExpression()); + RelDataType actualType = getTypeFromSqlNode(sqlConverter, defaultExpression); + RelDataType expectedType = relDataType; + if (!MoreRelOptUtil.checkFieldTypesCompatibility(expectedType, actualType, true, false)) { + throw UserException + .validationError() + .message( + String.format( + "Default expression type, %s, is not compatible with argument type, %s", + actualType, + expectedType)) + .build(logger); + } + } + + UserDefinedFunction.FunctionArg convertedArg = new UserDefinedFunction.FunctionArg( + name, + completeType, + defaultExpression == null ? null : defaultExpression.toSqlString(CalciteSqlDialect.DEFAULT).getSql()); + convertedArgs.add(convertedArg); } - final SqlNode expression = extractScalarExpression(createFunction.getExpression()); - RexNode parsedExpression = validate(expression, functionKey.getName(), convertedArgList); - Field returnField = SqlHandlerUtil.fieldFromSqlColDeclaration( - TYPE_FACTORY, - new DremioSqlColumnDeclaration( - SqlParserPos.ZERO, - new SqlColumnPolicyPair(SqlParserPos.ZERO, new SqlIdentifier("return", SqlParserPos.ZERO), null), - createFunction.getScalarReturnType(), - null), - sql); - final RelDataType expectedReturnType = CalciteArrowHelper.toCalciteType(returnField, TYPE_FACTORY, true); - final RelDataType returnDataType = parsedExpression.getType(); - final RelDataType returnRowType = TYPE_FACTORY.createStructType(ImmutableList.of(returnDataType), ImmutableList.of("return")); - final RelDataType expectedReturnRowType = TYPE_FACTORY.createStructType(ImmutableList.of(expectedReturnType), ImmutableList.of("return")); - if (MoreRelOptUtil.areRowTypesCompatible(returnRowType, expectedReturnRowType, false, true)) { - CompleteType completeReturnType = CompleteType.fromField(returnField); - UserDefinedFunction newUdf = new UserDefinedFunction(functionKey.toString(), createFunction.getExpression() - .toSqlString(CalciteSqlDialect.DEFAULT).getSql(), - completeReturnType, - convertedArgList, functionKey.getPathComponents()); - if (exists) { - catalog.updateFunction(functionKey, newUdf); - return Collections.singletonList(SimpleCommandResult.successful(String.format("Function, %s, is updated.", functionKey))); - } else { - catalog.createFunction(functionKey, newUdf); - return Collections.singletonList(SimpleCommandResult.successful(String.format("Function, %s, is created.", functionKey))); + return convertedArgs; + } + + private static Pair extractExpectedRowTypeAndReturnField(SqlFunctionReturnType returnType, String sql) { + final RelDataType expectedReturnRowType; + final Field returnField; + if (returnType.isTabular()) { + List returnFields = new ArrayList<>(); + List names = new ArrayList<>(); + List fields = new ArrayList<>(); + for (SqlNode columnDef : returnType.getTabularReturnType()) { + SqlReturnField sqlReturnField = (SqlReturnField) columnDef; + SqlIdentifier name = sqlReturnField.getName(); + SqlComplexDataTypeSpec type = sqlReturnField.getType(); + names.add(name.toString()); + Field field = + SqlHandlerUtil.fieldFromSqlColDeclaration( + TYPE_FACTORY, + new DremioSqlColumnDeclaration( + SqlParserPos.ZERO, + new SqlColumnPolicyPair(SqlParserPos.ZERO, name, null), + type, + null), + sql); + fields.add(field); + + 
returnFields.add(CalciteArrowHelper.toCalciteType(field, TYPE_FACTORY, true)); } + + returnField = new Field( + "return", + new FieldType(true, + MajorTypeHelper.getArrowTypeForMajorType(Types.optional(TypeProtos.MinorType.STRUCT)), null), + fields); + expectedReturnRowType = TYPE_FACTORY.createTypeWithNullability(TYPE_FACTORY.createStructType(returnFields, names), true); } else { - throw UserException.validationError() - .message("Row types are different.\nDefined: %s\nActual: %s", - expectedReturnRowType, - returnDataType) + returnField = SqlHandlerUtil.fieldFromSqlColDeclaration( + TYPE_FACTORY, + new DremioSqlColumnDeclaration( + SqlParserPos.ZERO, + new SqlColumnPolicyPair(SqlParserPos.ZERO, new SqlIdentifier("return", SqlParserPos.ZERO), null), + returnType.getScalarReturnType(), + null), + sql); + expectedReturnRowType = TYPE_FACTORY.createStructType( + ImmutableList.of(CalciteArrowHelper.toCalciteType(returnField, TYPE_FACTORY, true)), + ImmutableList.of("return")); + } + + return Pair.of(expectedReturnRowType, returnField); + } + + private static SqlNode extractScalarExpressionFromDefaultExpression(SqlNode defaultExpression) { + if (!(defaultExpression instanceof SqlSelect)) { + return defaultExpression; + } + + // We need to unwrap the default expression to extract out the single column + List selectList = ((SqlSelect) defaultExpression).getSelectList().getList(); + if (selectList.size() != 1) { + throw UserException + .unsupportedError() + .message("Returning a table is not currently supported") .build(logger); } + + return selectList.get(0); + } + + private static RelDataType getTypeFromSqlNode(SqlConverter converter, SqlNode expressionNode) { + Project project = (Project) SqlValidatorAndToRelContext + .builder(converter) + .disallowSubqueryExpansion() + .build() + .getPlanForFunctionExpression(expressionNode); + + assert project.getProjects().size() == 1; + + return project.getProjects().get(0).getType(); } - private RexNode validate(SqlNode expressionNode, - String functionName, + private static RelNode extractFunctionPlan( + QueryContext queryContext, + SqlCreateFunction createFunction, List args) { - SqlConverter converter = new SqlConverter( + return SqlValidatorAndToRelContext + .builder(createConverter(queryContext)) + .withContextualSqlOperatorTable( + new FunctionOperatorTable( + createFunction.getFullName(), + FunctionParameterImpl.createParameters(args))) + .disallowSubqueryExpansion() + .build() + .getPlanForFunctionExpression(createFunction.getExpression()); + } + + private static SqlConverter createConverter(QueryContext context) { + return new SqlConverter( context.getPlannerSettings(), context.getOperatorTable(), context, @@ -163,34 +374,5 @@ private RexNode validate(SqlNode expressionNode, context.getConfig(), context.getScanResult(), context.getRelMetadataQuerySupplier()); - - final SqlValidatorAndToRelContext sqlValidatorAndToRelContext = - SqlValidatorAndToRelContext.builder(converter) - .withContextualSqlOperatorTable( - new FunctionOperatorTable(functionName, FunctionParameterImpl.createParameters(args))) - .build(); - - Set paramNames = new HashSet<>(); - for(UserDefinedFunction.FunctionArg functionArg: args) { - if(!paramNames.add(functionArg.getName())) { - throw UserException.validationError() - .message(String.format(DUPLICATE_PARAMETER_ERROR_MSG, functionArg.getName())) - .buildSilently(); - } - } - return sqlValidatorAndToRelContext.validateAndConvertScalarFunction(expressionNode, functionName, 
FunctionParameterImpl.createParameters(args)).getFunctionBody(); - } - - private SqlNode extractScalarExpression(SqlNode expression) { - if (expression instanceof SqlSelect) { - List selectList = ((SqlSelect) expression).getSelectList().getList(); - if (selectList.size() == 1) { - return selectList.get(0); - } else { - throw UserException.unsupportedError().message("Returning a table is not currently supported").build(logger); - } - } else { - return expression; - } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateViewHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateViewHandler.java index ca870385de..114f626066 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateViewHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/CreateViewHandler.java @@ -16,6 +16,7 @@ package com.dremio.exec.planner.sql.handlers.direct; import static com.dremio.exec.ExecConstants.VERSIONED_VIEW_ENABLED; +import static com.dremio.exec.planner.sql.parser.ParserUtil.isTimeTravelQuery; import java.io.IOException; import java.lang.reflect.Constructor; @@ -29,7 +30,6 @@ import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.schema.Schema; -import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlNode; @@ -58,11 +58,10 @@ import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; import com.dremio.exec.planner.sql.parser.ParserUtil; import com.dremio.exec.planner.sql.parser.SqlCreateView; -import com.dremio.exec.planner.sql.parser.SqlVersionedTableMacroCall; +import com.dremio.exec.planner.sql.parser.SqlGrant; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.record.SchemaBuilder; import com.dremio.exec.work.foreman.ForemanSetupException; -import com.dremio.service.Pointer; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; import com.google.common.base.Throwables; @@ -90,6 +89,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final NamespaceKey path = catalog.resolveSingle(createView.getPath()); + catalog.validatePrivilege(path, SqlGrant.Privilege.ALTER); if (isVersioned(path)) { return createVersionedView(createView, sql); } else { @@ -102,6 +102,12 @@ private List createVersionedView(SqlCreateView createView, throw UserException.unsupportedError().message("Currently do not support create versioned view").buildSilently(); } + if (isTimeTravelQuery(createView)) { + throw UserException.unsupportedError() + .message("Versioned views not supported for time travel queries. 
Please use AT TAG or AT COMMIT instead") + .buildSilently(); + } + final String newViewName = createView.getFullName(); View view = getView(createView, sql); @@ -111,6 +117,7 @@ private List createVersionedView(SqlCreateView createView, boolean exists = checkViewExistence(viewPath, newViewName, isUpdate); isUpdate &= exists; final ViewOptions viewOptions = getViewOptions(viewPath, isUpdate); + CatalogUtil.validateResolvedVersionIsBranch(viewOptions.getVersion()); if (isUpdate) { catalog.updateView(viewPath, view, viewOptions); } else { @@ -332,33 +339,17 @@ protected ViewOptions getViewOptions(NamespaceKey viewPath, boolean isUpdate){ final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext version = CatalogUtil.resolveVersionContext(catalog, viewPath.getRoot(), sessionVersion); - ViewOptions viewOptions = new ViewOptions.ViewOptionsBuilder() - .version(version) - .batchSchema(viewSchema) - .viewUpdate(isUpdate) - .build(); + ViewOptions viewOptions = + new ViewOptions.ViewOptionsBuilder() + .version(version) + .batchSchema(viewSchema) + .actionType( + isUpdate ? ViewOptions.ActionType.UPDATE_VIEW : ViewOptions.ActionType.CREATE_VIEW) + .build(); return viewOptions; } - private boolean isTimeTravelQuery(SqlNode sqlNode) { - Pointer timeTravel = new Pointer<>(false); - SqlVisitor visitor = new SqlBasicVisitor() { - @Override - public Void visit(SqlCall call) { - if (call instanceof SqlVersionedTableMacroCall) { - timeTravel.value = true; - return null; - } - - return super.visit(call); - } - }; - - sqlNode.accept(visitor); - return timeTravel.value; - } - public static CreateViewHandler create(SqlHandlerConfig config) throws SqlParseException { try { final Class cl = Class.forName("com.dremio.exec.planner.sql.handlers.EnterpriseCreateViewHandler"); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropColumnHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropColumnHandler.java index 257dae8923..99f1bc1a38 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropColumnHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropColumnHandler.java @@ -79,7 +79,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropFunctionHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropFunctionHandler.java index ff5f9922ee..d2294e44c6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropFunctionHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropFunctionHandler.java @@ -39,24 +39,37 @@ public DropFunctionHandler(QueryContext context) { @Override public List toResult(String sql, SqlNode sqlNode) throws Exception { + return 
Collections.singletonList(toResultImplementation(sql, sqlNode)); + } + + private SimpleCommandResult toResultImplementation(String sql, SqlNode sqlNode) throws Exception { final SqlDropFunction dropFunction = SqlNodeUtil.unwrap(sqlNode, SqlDropFunction.class); final Catalog catalog = context.getCatalog(); - final NamespaceKey functionKey = catalog.resolveSingle(dropFunction.getPath()); + NamespaceKey functionKey = catalog.resolveSingle(dropFunction.getPath()); boolean functionExists = checkFunctionExists(functionKey, catalog); - if (functionExists) { catalog.dropFunction(functionKey); - return Collections.singletonList( - SimpleCommandResult.successful(String.format("Function, %s, is dropped.", functionKey))); - } else if(dropFunction.isIfExists()) { - return Collections.singletonList( - SimpleCommandResult.successful("Function, %s, does not exists.", functionKey)); - } else { - throw UserException.validationError() - .message("Function, %s, does not exists.", functionKey) - .buildSilently(); + return SimpleCommandResult.successful(String.format("Function, %s, is dropped.", functionKey)); } + + // Try again but from the root context: + if (functionKey.size() > 1) { + functionKey = new NamespaceKey(functionKey.getLeaf()); + functionExists = checkFunctionExists(functionKey, catalog); + if (functionExists) { + catalog.dropFunction(functionKey); + return SimpleCommandResult.successful(String.format("Function, %s, is dropped.", functionKey)); + } + } + + if (dropFunction.isIfExists()) { + return SimpleCommandResult.successful("Function, %s, does not exist.", functionKey); + } + + throw UserException.validationError() + .message("Function, %s, does not exist.", functionKey) + .buildSilently(); } private boolean checkFunctionExists (NamespaceKey functionKey, Catalog catalog) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropTableHandler.java index 6f3c644401..669d561fec 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropTableHandler.java @@ -71,7 +71,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Va try { ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropViewHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropViewHandler.java index 2c6de9598b..e376d3fff0 100--- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropViewHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/DropViewHandler.java @@ -30,6 +30,7 @@ import com.dremio.exec.physical.base.ViewOptions; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.planner.sql.parser.SqlDropView; +import com.dremio.exec.planner.sql.parser.SqlGrant; import com.dremio.service.namespace.NamespaceKey; /** Handler for Drop View [If Exists] DDL command. 
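// The fallback added to DropFunctionHandler above retries the lookup from the root
// context when the fully-qualified key finds nothing. A minimal sketch of that
// resolution order, with a plain map standing in for the catalog (hypothetical names,
// not Dremio APIs):
import java.util.Map;
import java.util.Optional;

final class LeafFallbackLookupSketch {
  // Resolve "space.folder.fn" first; if absent and the key is multi-part, retry "fn".
  static Optional<String> resolve(Map<String, String> functions, String qualifiedKey) {
    String found = functions.get(qualifiedKey);
    if (found == null) {
      int lastDot = qualifiedKey.lastIndexOf('.');
      if (lastDot >= 0) {
        found = functions.get(qualifiedKey.substring(lastDot + 1));
      }
    }
    return Optional.ofNullable(found);
  }

  public static void main(String[] args) {
    Map<String, String> functions = Map.of("fn", "body-at-root");
    System.out.println(resolve(functions, "space.folder.fn")); // Optional[body-at-root]
  }
}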
*/ @@ -49,8 +50,9 @@ public DropViewHandler(SqlHandlerConfig config) { public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlDropView dropView = SqlNodeUtil.unwrap(sqlNode, SqlDropView.class); NamespaceKey path = catalog.resolveSingle(dropView.getPath()); - + catalog.validatePrivilege(path, SqlGrant.Privilege.ALTER); DremioTable table = catalog.getTableNoColumnCount(path); + if (!dropView.checkViewExistence()) { if(table == null) { throw UserException.validationError() diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainHandler.java index ea2c81cb8e..85cf78dd28 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainHandler.java @@ -17,26 +17,24 @@ import java.util.Collections; import java.util.List; -import java.util.concurrent.TimeUnit; import org.apache.calcite.plan.RelOptUtil; -import org.apache.calcite.rel.RelNode; -import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.SqlExplain; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.util.Pair; +import org.apache.calcite.sql.dialect.CalciteSqlDialect; import com.dremio.common.logical.PlanProperties.Generator.ResultMode; -import com.dremio.exec.planner.DremioVolcanoPlanner; -import com.dremio.exec.planner.logical.Rel; -import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.planner.sql.SqlExceptionHelper; -import com.dremio.exec.planner.sql.handlers.ConvertedRelNode; -import com.dremio.exec.planner.sql.handlers.PrelTransformer; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; -import com.dremio.exec.planner.sql.handlers.ViewAccessEvaluator; +import com.dremio.exec.planner.sql.handlers.query.DeleteHandler; +import com.dremio.exec.planner.sql.handlers.query.InsertTableHandler; +import com.dremio.exec.planner.sql.handlers.query.MergeHandler; +import com.dremio.exec.planner.sql.handlers.query.NormalHandler; +import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; +import com.dremio.exec.planner.sql.handlers.query.UpdateHandler; + public class ExplainHandler implements SqlDirectHandler { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExplainHandler.class); @@ -56,74 +54,70 @@ public List toResult(String sql, SqlNode sqlNode) throws Exception { final SqlLiteral op = node.operand(2); final SqlExplain.Depth depth = (SqlExplain.Depth) op.getValue(); + final SqlExplainLevel level = node.getDetailLevel() != null + ? 
node.getDetailLevel() + : SqlExplainLevel.ALL_ATTRIBUTES; final ResultMode mode; - SqlExplainLevel level = SqlExplainLevel.ALL_ATTRIBUTES; - - if (node.getDetailLevel() != null) { - level = node.getDetailLevel(); - } - switch (depth) { - case LOGICAL: - mode = ResultMode.LOGICAL; - break; - case PHYSICAL: - mode = ResultMode.PHYSICAL; - break; - default: - throw new UnsupportedOperationException("Unknown depth " + depth); + case LOGICAL: + mode = ResultMode.LOGICAL; + break; + case PHYSICAL: + mode = ResultMode.PHYSICAL; + break; + default: + throw new UnsupportedOperationException("Unknown depth " + depth); } + // get plan final SqlNode innerNode = node.operand(0); - - Rel drel; - final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, innerNode); - final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); - final RelNode queryRelNode = convertedRelNode.getConvertedNode(); - - ViewAccessEvaluator viewAccessEvaluator = null; - if (config.getConverter().getSubstitutionProvider().isDefaultRawReflectionEnabled()) { - final RelNode convertedRelWithExpansionNodes = ((DremioVolcanoPlanner) queryRelNode.getCluster().getPlanner()).getOriginalRoot(); - viewAccessEvaluator = new ViewAccessEvaluator(convertedRelWithExpansionNodes, config); - config.getContext().getExecutorService().submit(viewAccessEvaluator); + SqlToPlanHandler innerNodeHandler; + switch (innerNode.getKind()) { + // We currently only support OrderedQueryOrExpr and Insert/Delete/Update/Merge + case INSERT: + innerNodeHandler = new InsertTableHandler(); + break; + case DELETE: + innerNodeHandler = new DeleteHandler(); + break; + case MERGE: + innerNodeHandler = new MergeHandler(); + break; + case UPDATE: + innerNodeHandler = new UpdateHandler(); + break; + // for OrderedQueryOrExpr such as select, use NormalHandler + default: + innerNodeHandler = new NormalHandler(); } - PrelTransformer.log("Calcite", queryRelNode, logger, null); - drel = PrelTransformer.convertToDrel(config, queryRelNode, validatedRowType); + innerNodeHandler + .getPlan( + config, + innerNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql(), + innerNode); + String planAsText; if (mode == ResultMode.LOGICAL) { - if (viewAccessEvaluator != null) { - viewAccessEvaluator.getLatch().await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS); - if (viewAccessEvaluator.getException() != null) { - throw viewAccessEvaluator.getException(); - } - } - return Collections.singletonList(new Explain(RelOptUtil.toString(drel, level))); + planAsText = RelOptUtil.toString(innerNodeHandler.getLogicalPlan(), level); + } else { + planAsText = innerNodeHandler.getTextPlan(); } - final Pair convertToPrel = PrelTransformer.convertToPrel(config, drel); - if (viewAccessEvaluator != null) { - viewAccessEvaluator.getLatch().await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS); - if (viewAccessEvaluator.getException() != null) { - throw viewAccessEvaluator.getException(); - } - } - final String text = convertToPrel.getValue(); - return Collections.singletonList(new Explain(text)); - } catch (Exception ex){ + Explain explain = new Explain(planAsText); + return Collections.singletonList(explain); + } catch (Exception ex) { throw SqlExceptionHelper.coerceException(logger, sql, ex, true); } } public static class Explain { - public final String text; public Explain(String text) { super(); this.text = text; } - } @Override diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainJsonHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainJsonHandler.java index 8e8565acf4..5c50e6cb75 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainJsonHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ExplainJsonHandler.java @@ -18,7 +18,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.concurrent.TimeUnit; +import java.util.Map; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.rel.RelNode; @@ -26,7 +26,6 @@ import org.apache.calcite.sql.SqlNode; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.planner.DremioVolcanoPlanner; import com.dremio.exec.planner.PlannerPhase; import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.observer.AbstractAttemptObserver; @@ -67,24 +66,12 @@ public List toResult(String sql, SqlNode sqlNode) throws Exception { Rel drel; final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, innerNode); - final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); - final RelNode queryRelNode = convertedRelNode.getConvertedNode(); - ViewAccessEvaluator viewAccessEvaluator = null; - if (config.getConverter().getSubstitutionProvider().isDefaultRawReflectionEnabled()) { - final RelNode convertedRelWithExpansionNodes = ((DremioVolcanoPlanner) queryRelNode.getCluster().getPlanner()).getOriginalRoot(); - viewAccessEvaluator = new ViewAccessEvaluator(convertedRelWithExpansionNodes, config); - config.getContext().getExecutorService().submit(viewAccessEvaluator); + try (ViewAccessEvaluator ignored = ViewAccessEvaluator.createAsyncEvaluator(config, convertedRelNode)) { + final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); + final RelNode queryRelNode = convertedRelNode.getConvertedNode(); + drel = PrelTransformer.convertToDrel(config, queryRelNode, validatedRowType); + PrelTransformer.convertToPrel(config, drel); } - drel = PrelTransformer.convertToDrel(config, queryRelNode, validatedRowType); - PrelTransformer.convertToPrel(config, drel); - - if (viewAccessEvaluator != null) { - viewAccessEvaluator.getLatch().await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS); - if (viewAccessEvaluator.getException() != null) { - throw viewAccessEvaluator.getException(); - } - } - return toResultInner(node.getPhase(), observer.nodes); } catch (Exception ex){ throw SqlExceptionHelper.coerceException(logger, sql, ex, true); @@ -151,7 +138,7 @@ private void add(String phase, RelNode node) { @Override public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, - long millisTaken) { + long millisTaken, Map timeBreakdownPerRule) { add(phase.name(), after); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ForgetTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ForgetTableHandler.java index 764e347799..56c252b185 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ForgetTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/ForgetTableHandler.java @@ -45,7 +45,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final NamespaceKey path = 
catalog.resolveSingle(sqlForgetTable.getPath()); String root = path.getRoot(); - if(root.startsWith("@") || root.equalsIgnoreCase("sys") || root.equalsIgnoreCase("INFORMATION_SCHEMA")) { + if (root.startsWith("@") || "sys".equalsIgnoreCase(root) || "INFORMATION_SCHEMA".equalsIgnoreCase(root)) { throw UserException.parseError().message("FORGET METADATA is not supported on tables in homespace, sys, or INFORMATION_SCHEMA.").build(logger); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/RefreshSourceStatusHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/RefreshSourceStatusHandler.java index 73eb1a1a5f..880fd00923 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/RefreshSourceStatusHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/RefreshSourceStatusHandler.java @@ -48,7 +48,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex sourceCatalog.validatePrivilege(path, SqlGrant.Privilege.ALTER); String root = path.getRoot(); - if(root.startsWith("@") || root.equalsIgnoreCase("sys") || root.equalsIgnoreCase("INFORMATION_SCHEMA")) { + if (root.startsWith("@") || "sys".equalsIgnoreCase(root) || "INFORMATION_SCHEMA".equalsIgnoreCase(root)) { throw UserException.parseError().message("Unable to find source %s.", path).build(logger); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SetApproxHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SetApproxHandler.java index 38a1e751f2..bbd98bef46 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SetApproxHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SetApproxHandler.java @@ -49,7 +49,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final NamespaceKey path = catalog.resolveSingle(sqlSetApprox.getPath()); String root = path.getRoot(); - if(root.equalsIgnoreCase("sys") || root.equalsIgnoreCase("INFORMATION_SCHEMA")) { + if ("sys".equalsIgnoreCase(root) || "INFORMATION_SCHEMA".equalsIgnoreCase(root)) { throw UserException.parseError().message("System and Information Schema tables cannot be modified: %s", path).build(logger); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SqlNodeUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SqlNodeUtil.java index c7ee155e0c..25e890ea7b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SqlNodeUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/SqlNodeUtil.java @@ -19,6 +19,8 @@ import org.apache.calcite.sql.SqlCharStringLiteral; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlOrderBy; +import org.apache.calcite.sql.SqlWith; import com.dremio.exec.expr.fn.impl.RegexpUtil; import com.dremio.exec.work.foreman.ForemanSetupException; @@ -52,6 +54,21 @@ public static Pattern getPattern(SqlNode node){ Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE | Pattern.DOTALL); } + public static String getQueryKind(SqlNode sqlNode) { + // A few of these need special handling! 
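+ // Calcite parses "SELECT ... ORDER BY ..." into a wrapping SqlOrderBy node (kind
+ // ORDER_BY) and "WITH ... <query>" into a SqlWith node (kind WITH), so calling
+ // getKind() on the top-level node would hide the underlying statement; unwrap to
+ // the inner query/body before reporting the kind.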
+ if (sqlNode instanceof SqlOrderBy) { + sqlNode = ((SqlOrderBy) sqlNode).query; + } else if (sqlNode instanceof SqlWith) { + sqlNode = ((SqlWith) sqlNode).body; + } + + if (sqlNode == null) { + return "unknown"; + } + + return sqlNode.getKind().lowerName; + } + // prevent instantiation private SqlNodeUtil() { } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TableManagementDirectHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TableManagementDirectHandler.java index 9087c431ea..5d53742918 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TableManagementDirectHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TableManagementDirectHandler.java @@ -29,7 +29,6 @@ import com.dremio.exec.catalog.TableMutationOptions; import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; -import com.dremio.exec.planner.sql.parser.SqlRollbackTable; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.DatasetConfig; @@ -69,7 +68,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); @@ -89,6 +88,6 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex protected void checkValidations(Catalog catalog, SqlHandlerConfig config, NamespaceKey path) throws Exception { validateFeatureEnabled(config); validatePrivileges(catalog, path, config.getContext().getQueryUserName()); - IcebergUtils.checkTableExistenceAndMutability(catalog, config, path, SqlRollbackTable.OPERATOR, false); + IcebergUtils.checkTableExistenceAndMutability(catalog, config, path, getSqlOperator(), false); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TruncateTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TruncateTableHandler.java index daf7145798..1e04ca743c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TruncateTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/TruncateTableHandler.java @@ -68,7 +68,7 @@ public List toResult(String sql, SqlNode sqlNode) throws Ex final String sourceName = path.getRoot(); final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); ResolvedVersionContext resolvedVersionContext = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); - CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext, path.toString()); + CatalogUtil.validateResolvedVersionIsBranch(resolvedVersionContext); TableMutationOptions tableMutationOptions = TableMutationOptions.newBuilder() .setResolvedVersionContext(resolvedVersionContext) .build(); diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/VacuumHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/VacuumHandler.java deleted file mode 100644 index fb8c911c20..0000000000 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/direct/VacuumHandler.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner.sql.handlers.direct; - -import java.util.Collections; -import java.util.List; - -import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.SqlOperator; - -import com.dremio.common.exceptions.UserException; -import com.dremio.exec.ExecConstants; -import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.TableMutationOptions; -import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; -import com.dremio.exec.planner.sql.parser.SqlVacuum; -import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.dataset.proto.DatasetConfig; - -/** - * Handler for vacuuming table. - */ -public class VacuumHandler extends TableManagementDirectHandler { - - public VacuumHandler(Catalog catalog, SqlHandlerConfig config) { - super(catalog, config); - } - - @Override - public NamespaceKey getTablePath(SqlNode sqlNode) throws Exception { - return SqlNodeUtil.unwrap(sqlNode, SqlVacuum.class).getPath(); - } - - @Override - protected SqlOperator getSqlOperator() { - return SqlVacuum.OPERATOR; - } - - @Override - protected void validatePrivileges(Catalog catalog, NamespaceKey path, String identityName) throws Exception { - // User must be admin,owner of the table. 
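// VacuumHandler (deleted above) followed the template-method shape shared by the
// TableManagementDirectHandler subclasses: the base class drives validation and
// execution, and subclasses supply the privilege check, the mutation, and the result
// message. A minimal sketch of that shape, with simplified hypothetical types rather
// than the Dremio base class:
import java.util.List;

abstract class TableCommandTemplate {
  // The base class fixes the control flow; subclasses fill in the steps.
  final List<String> run(String tablePath) throws Exception {
    validatePrivileges(tablePath);
    execute(tablePath);
    return commandResult(tablePath);
  }

  protected abstract void validatePrivileges(String tablePath) throws Exception;
  protected abstract void execute(String tablePath) throws Exception;
  protected abstract List<String> commandResult(String tablePath);
}

final class VacuumLikeCommand extends TableCommandTemplate {
  @Override protected void validatePrivileges(String tablePath) { /* owner check */ }
  @Override protected void execute(String tablePath) { /* vacuum the table */ }
  @Override protected List<String> commandResult(String tablePath) {
    return List.of(String.format("Table [%s] vacuumed", tablePath));
  }
}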
- catalog.validateOwnership(path); - } - - @Override - protected List getCommandResult(NamespaceKey path) { - return Collections.singletonList(SimpleCommandResult.successful("Table [%s] vacuumed", path)); - } - - @Override - protected void execute(Catalog catalog, - SqlNode sqlNode, - NamespaceKey path, - DatasetConfig datasetConfig, - TableMutationOptions tableMutationOptions) throws Exception { - final SqlVacuum sqlVacuum = SqlNodeUtil.unwrap(sqlNode, SqlVacuum.class); - catalog.vacuumTable(path, datasetConfig, sqlVacuum.getVacuumOption(), tableMutationOptions); - } - - @Override - protected void validateFeatureEnabled(SqlHandlerConfig config) { - if (!config.getContext().getOptions().getOption(ExecConstants.ENABLE_ICEBERG_VACUUM)) { - throw UserException.unsupportedError().message("VACUUM command is not supported.").buildSilently(); - } - } -} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CopyIntoTableContext.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CopyIntoTableContext.java index 2b184305e6..b12621d989 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CopyIntoTableContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CopyIntoTableContext.java @@ -206,7 +206,7 @@ private static Object convertStringFormatOptionValue(FormatOption option, String case TRIM_SPACE: case EMPTY_AS_NULL: String upperValue = value.toUpperCase(); - if (!upperValue.equals("FALSE") && !upperValue.equals("TRUE") ) { + if (!"FALSE".equals(upperValue) && !"TRUE".equals(upperValue) ) { break; } return Boolean.valueOf(upperValue); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CreateTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CreateTableHandler.java index 72a182a515..230718e21b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CreateTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/CreateTableHandler.java @@ -36,7 +36,8 @@ import com.dremio.exec.planner.sql.parser.SqlCreateTable; import com.dremio.exec.planner.sql.parser.SqlGrant.Privilege; import com.dremio.exec.store.dfs.FileSystemPlugin; -import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.io.file.FileSystem; +import com.dremio.io.file.Path; import com.dremio.options.OptionManager; import com.dremio.service.namespace.NamespaceKey; import com.google.common.annotations.VisibleForTesting; @@ -76,6 +77,7 @@ private PhysicalPlan doVersionedCtas(SqlHandlerConfig config, NamespaceKey path, final ResolvedVersionContext version = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); try { + CatalogUtil.validateResolvedVersionIsBranch(version); validateVersionedTableFormatOptions(catalog, path); checkExistenceValidity(path, getDremioTable(catalog, path)); logger.debug("Creating versioned table '{}' at version '{}' resolved version {} ", @@ -139,21 +141,16 @@ public void cleanUp(DatasetCatalog datasetCatalog, NamespaceKey key) { String tableLocation = isIcebergTable() ? 
tableEntry.getIcebergTableProps().getTableLocation(): tableEntry.getLocation(); - DremioFileIO dremioFileIO = new DremioFileIO(tableEntry.getPlugin().getFsConfCopy(), tableEntry.getPlugin()); + FileSystem fs = tableEntry.getPlugin().createFS(tableLocation, tableEntry.getUserName(), null); - cleanUpImpl(dremioFileIO, datasetCatalog, key, tableLocation); - } catch (Exception e) { - logger.warn(String.format("cleanup failed for CTAS query + %s", e.getMessage())); - } - } - - @VisibleForTesting - public static void cleanUpImpl(DremioFileIO dremioFileIO, DatasetCatalog datasetCatalog, NamespaceKey key, String tableLocation) { - try { // delete folders created by CTAS - dremioFileIO.deleteFile(tableLocation, true, dremioFileIO.getPlugin() instanceof FileSystemPlugin); + Path path = Path.of(tableLocation); + if (!fs.supportsPathsWithScheme()) { + path = Path.of(Path.getContainerSpecificRelativePath(path)); + } + fs.delete(path, true); - if(!(dremioFileIO.getPlugin() instanceof FileSystemPlugin)){ + if(!(tableEntry.getPlugin() instanceof FileSystemPlugin)){ //try deleting table from hive and glue metastore datasetCatalog.dropTable(key, null); } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DataAdditionCmdHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DataAdditionCmdHandler.java index a3275e7c57..c06f2b796c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DataAdditionCmdHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DataAdditionCmdHandler.java @@ -15,11 +15,14 @@ */ package com.dremio.exec.planner.sql.handlers.query; +import static com.dremio.exec.planner.sql.handlers.SqlHandlerUtil.PLANNER_SOURCE_TARGET_SOURCE_TYPE_SPAN_ATTRIBUTE_NAME; + +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.calcite.plan.RelOptUtil; @@ -63,7 +66,6 @@ import com.dremio.exec.physical.base.TableFormatWriterOptions.TableFormatOperation; import com.dremio.exec.physical.base.WriterOptions; import com.dremio.exec.planner.DremioRexBuilder; -import com.dremio.exec.planner.DremioVolcanoPlanner; import com.dremio.exec.planner.logical.CreateTableEntry; import com.dremio.exec.planner.logical.ProjectRel; import com.dremio.exec.planner.logical.Rel; @@ -94,6 +96,7 @@ import com.dremio.exec.store.iceberg.model.IcebergCommandType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.work.foreman.SqlUnsupportedException; +import com.dremio.io.file.FileSystem; import com.dremio.options.OptionManager; import com.dremio.service.namespace.NamespaceKey; import com.google.common.annotations.VisibleForTesting; @@ -101,6 +104,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; public abstract class DataAdditionCmdHandler implements SqlToPlanHandler { @@ -108,6 +113,7 @@ public abstract class DataAdditionCmdHandler implements SqlToPlanHandler { private String textPlan; + private Rel drel; private CreateTableEntry tableEntry = null; private BatchSchema tableSchemaFromKVStore = null; private List partitionColumns = null; @@ -138,7 +144,8 @@ public TableFormatOperation 
getIcebergWriterOperation() { protected void cleanUp(DatasetCatalog datasetCatalog, NamespaceKey key) {} - public PhysicalPlan getPlan(DatasetCatalog datasetCatalog, + @WithSpan + public PhysicalPlan getPlan(Catalog catalog, NamespaceKey path, SqlHandlerConfig config, String sql, @@ -146,7 +153,9 @@ public PhysicalPlan getPlan(DatasetCatalog datasetCatalog, ResolvedVersionContext version ) throws Exception { try { + Span.current().setAttribute(PLANNER_SOURCE_TARGET_SOURCE_TYPE_SPAN_ATTRIBUTE_NAME, SqlHandlerUtil.getSourceType(catalog, path.getRoot())); final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlCmd.getQuery()); + try (ViewAccessEvaluator ignored = ViewAccessEvaluator.createAsyncEvaluator(config, convertedRelNode)) { final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); long maxColumnCount = config.getContext().getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX); @@ -155,14 +164,8 @@ public PhysicalPlan getPlan(DatasetCatalog datasetCatalog, } // Get and cache table info (if not already retrieved) to avoid multiple retrievals - getDremioTable(datasetCatalog, path); + getDremioTable(catalog, path); final RelNode queryRelNode = convertedRelNode.getConvertedNode(); - ViewAccessEvaluator viewAccessEvaluator = null; - if (config.getConverter().getSubstitutionProvider().isDefaultRawReflectionEnabled()) { - final RelNode convertedRelWithExpansionNodes = ((DremioVolcanoPlanner) queryRelNode.getCluster().getPlanner()).getOriginalRoot(); - viewAccessEvaluator = new ViewAccessEvaluator(convertedRelWithExpansionNodes, config); - config.getContext().getExecutorService().submit(viewAccessEvaluator); - } final RelNode newTblRelNode = SqlHandlerUtil.resolveNewTableRel(false, sqlCmd.getFieldNames(), validatedRowType, queryRelNode, !isCreate()); @@ -201,10 +204,10 @@ public PhysicalPlan getPlan(DatasetCatalog datasetCatalog, ); // Convert the query to Dremio Logical plan and insert a writer operator on top. - Rel drel = this.convertToDrel( + drel = this.convertToDrel( config, newTblRelNodeWithPCol, - datasetCatalog, + catalog, path, options, newTblRelNode.getRowType(), @@ -217,21 +220,14 @@ public PhysicalPlan getPlan(DatasetCatalog datasetCatalog, PhysicalPlan plan = PrelTransformer.convertToPlan(config, pop, isIcebergTable() && !isVersionedTable() ? 
- () -> refreshDataset(datasetCatalog, path, isCreate()) + () -> refreshDataset(catalog, path, isCreate()) : null, - () -> cleanUp(datasetCatalog, path)); + () -> cleanUp(catalog, path)); PrelTransformer.log(config, "Dremio Plan", plan, logger); - - if (viewAccessEvaluator != null) { - viewAccessEvaluator.getLatch().await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS); - if (viewAccessEvaluator.getException() != null) { - throw viewAccessEvaluator.getException(); - } + return plan; } - return plan; - } catch(Exception ex){ throw SqlExceptionHelper.coerceException(logger, sql, ex, true); } @@ -422,22 +418,28 @@ public void validateIcebergSchemaForInsertCommand(List fieldNames, SqlHa BatchSchema querySchema = icebergTableProps.getFullSchema(); Preconditions.checkState(tableEntry.getPlugin() instanceof SupportsIcebergMutablePlugin, "Plugin not instance of SupportsIcebergMutablePlugin"); - IcebergModel icebergModel = ((SupportsIcebergMutablePlugin) tableEntry.getPlugin()) - .getIcebergModel(icebergTableProps, tableEntry.getUserName(), null, null); - Table table = icebergModel.getIcebergTable(icebergModel.getTableIdentifier(icebergTableProps.getTableLocation())); - SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.name()).setMapTypeEnabled(config.getContext().getOptions().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).build(); - BatchSchema icebergSchema = schemaConverter.fromIceberg(table.schema()); - - // this check can be removed once we support schema evolution in dremio. - if (!icebergSchema.equalsIgnoreCase(tableSchemaFromKVStore)) { - throw UserException.validationError().message("The schema for table %s does not match with the iceberg %s.", - tableSchemaFromKVStore, icebergSchema).buildSilently(); - } + SupportsIcebergMutablePlugin plugin = (SupportsIcebergMutablePlugin) tableEntry.getPlugin(); + try (FileSystem fs = plugin.createFS(icebergTableProps.getTableLocation(), tableEntry.getUserName(), null)) { + IcebergModel icebergModel = plugin.getIcebergModel(icebergTableProps, tableEntry.getUserName(), null, fs); + Table table = icebergModel.getIcebergTable(icebergModel.getTableIdentifier(icebergTableProps.getTableLocation())); + SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.name()) + .setMapTypeEnabled(config.getContext().getOptions().getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).build(); + BatchSchema icebergSchema = schemaConverter.fromIceberg(table.schema()); + + // this check can be removed once we support schema evolution in dremio. 
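+ // (Any drift between the KV-store schema and the live Iceberg schema is rejected outright until then.)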
+ if (!icebergSchema.equalsIgnoreCase(tableSchemaFromKVStore)) { + throw UserException.validationError().message("The schema for table %s does not match with the iceberg %s.", + tableSchemaFromKVStore, icebergSchema).buildSilently(); + } - BatchSchema partSchemaWithSelectedFields = tableSchemaFromKVStore.subset(fieldNames).orElse(tableSchemaFromKVStore); - if (!querySchema.equalsTypesWithoutPositions(partSchemaWithSelectedFields)) { - throw UserException.validationError().message("Table %s doesn't match with query %s.", - partSchemaWithSelectedFields, querySchema).buildSilently(); + BatchSchema partSchemaWithSelectedFields = tableSchemaFromKVStore.subset(fieldNames) + .orElse(tableSchemaFromKVStore); + if (!querySchema.equalsTypesWithoutPositions(partSchemaWithSelectedFields)) { + throw UserException.validationError().message("Table %s doesn't match with query %s.", + partSchemaWithSelectedFields, querySchema).buildSilently(); + } + } catch (IOException ex) { + throw new UncheckedIOException(ex); } } @@ -579,4 +581,9 @@ public void validateVersionedTableFormatOptions(Catalog catalog, NamespaceKey pa public String getTextPlan() { return textPlan; } + + @Override + public Rel getLogicalPlan() { + return drel; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DmlHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DmlHandler.java index fb7dddd191..81b1d6aa6f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DmlHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/DmlHandler.java @@ -26,6 +26,8 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.physical.PhysicalPlan; import com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.planner.logical.CreateTableEntry; @@ -37,6 +39,7 @@ import com.dremio.exec.planner.sql.handlers.PrelTransformer; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.exec.planner.sql.handlers.ViewAccessEvaluator; import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; import com.dremio.exec.planner.sql.parser.SqlDmlOperator; import com.dremio.exec.store.iceberg.IcebergUtils; @@ -56,44 +59,58 @@ public abstract class DmlHandler extends TableManagementHandler { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DmlHandler.class); + private String textPlan; + private Rel drel; + private Prel prel; + @Override void checkValidations(Catalog catalog, SqlHandlerConfig config, NamespaceKey path, SqlNode sqlNode) throws Exception { validateDmlRequest(catalog, config, path, getSqlOperator()); validatePrivileges(catalog, path, sqlNode); } + @VisibleForTesting @Override - protected PhysicalPlan getPlan(Catalog catalog, SqlHandlerConfig config, String sql, SqlNode sqlNode, NamespaceKey path) throws Exception { + public PhysicalPlan getPlan(Catalog catalog, SqlHandlerConfig config, String sql, SqlNode sqlNode, NamespaceKey path) throws Exception { try { - final Prel prel = getNonPhysicalPlan(catalog, config, sqlNode, path); - final PhysicalOperator pop = PrelTransformer.convertToPop(config, prel); - Runnable committer = !CatalogUtil.requestedPluginSupportsVersionedTables(path, catalog) - ? 
() -> refreshDataset(catalog, path, false) - : null; - // cleaner will call refreshDataset to avoid the issues like DX-49928 - Runnable cleaner = committer; - // Metadata for non-versioned plugins happens via this call back. For versioned tables (currently - // only applies to Nessie), the metadata update happens during the operation within NessieClientImpl). - return PrelTransformer.convertToPlan(config, pop, committer, cleaner); + // Extends sqlNode's DML target table with system columns (e.g., file_name and row_index) + SqlDmlOperator sqlDmlOperator = SqlNodeUtil.unwrap(sqlNode, SqlDmlOperator.class); + sqlDmlOperator.extendTableWithDataFileSystemColumns(); + + final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlNode); + try (ViewAccessEvaluator ignored = ViewAccessEvaluator.createAsyncEvaluator(config, convertedRelNode)) { + + final RelNode relNode = convertedRelNode.getConvertedNode(); + + drel = convertToDrel(config, sqlNode, path, catalog, relNode); + final Pair<Prel, String> prelAndTextPlan = PrelTransformer.convertToPrel(config, drel); + + textPlan = prelAndTextPlan.getValue(); + prel = prelAndTextPlan.getKey(); + + final PhysicalOperator pop = PrelTransformer.convertToPop(config, prel); + final String sourceName = path.getRoot(); + final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); + final ResolvedVersionContext version = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); + CatalogUtil.validateResolvedVersionIsBranch(version); + Runnable committer = !CatalogUtil.requestedPluginSupportsVersionedTables(path, catalog) + ? () -> refreshDataset(catalog, path, false) + : null; + // cleaner will call refreshDataset to avoid issues like DX-49928 + Runnable cleaner = committer; + // Metadata for non-versioned plugins happens via this callback. For versioned tables (currently + // only applies to Nessie), the metadata update happens during the operation within NessieClientImpl. + return PrelTransformer.convertToPlan(config, pop, committer, cleaner); + } } catch (Exception e) { throw SqlExceptionHelper.coerceException(logger, sql, e, true); } } @VisibleForTesting - public Prel getNonPhysicalPlan(Catalog catalog, SqlHandlerConfig config, SqlNode sqlNode, NamespaceKey path) throws Exception{ - // Extends sqlNode's DML target table with system columns (e.g., file_name and row_index) - SqlDmlOperator sqlDmlOperator = SqlNodeUtil.unwrap(sqlNode, SqlDmlOperator.class); - sqlDmlOperator.extendTableWithDataFileSystemColumns(); - - final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlNode); - - final RelNode relNode = convertedRelNode.getConvertedNode(); - - final Rel drel = convertToDrel(config, sqlNode, path, catalog, relNode); - final Pair<Prel, String> prelAndTextPlan = PrelTransformer.convertToPrel(config, drel); - - return prelAndTextPlan.getKey(); + public Prel getPrel() + { + return prel; } @VisibleForTesting @@ -112,7 +129,7 @@ public static void validateDmlRequest(Catalog catalog, SqlHandlerConfig config, protected Rel convertToDrel(SqlHandlerConfig config, SqlNode sqlNode, NamespaceKey path, Catalog catalog, RelNode relNode) throws Exception { // Allow TableModifyCrel to access CreateTableEntry that can only be created now.
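// (rewriteCrel injects this entry into the TableModifyCrel before the logical conversion below.)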
CreateTableEntry createTableEntry = IcebergUtils.getIcebergCreateTableEntry(config, catalog, - catalog.getTable(path), getSqlOperator().getKind(), null); + catalog.getTable(path), getSqlOperator(), null); Rel convertedRelNode = PrelTransformer.convertToDrel(config, rewriteCrel(relNode, createTableEntry)); // below is for results to be returned to client - delete/update/merge operation summary output @@ -122,7 +139,13 @@ protected Rel convertToDrel(SqlHandlerConfig config, SqlNode sqlNode, NamespaceK return new ScreenRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode); } + @Override + public String getTextPlan() { + return textPlan; + } - - + @Override + public Rel getLogicalPlan() { + return drel; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/InsertTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/InsertTableHandler.java index c7db9ddc4b..1a1716a887 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/InsertTableHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/InsertTableHandler.java @@ -109,6 +109,7 @@ protected PhysicalPlan doVersionedInsert(Catalog catalog, SqlHandlerConfig confi final VersionContext sessionVersion = config.getContext().getSession().getSessionVersionForSource(sourceName); final ResolvedVersionContext version = CatalogUtil.resolveVersionContext(catalog, sourceName, sessionVersion); try { + CatalogUtil.validateResolvedVersionIsBranch(version); validateVersionedTableFormatOptions(catalog, path); checkExistenceValidity(path, getDremioTable(catalog, path)); logger.debug("Insert into versioned table '{}' at version '{}' resolved version '{}' ", diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/NormalHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/NormalHandler.java index d2303cc2cc..ef69ed9f8d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/NormalHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/NormalHandler.java @@ -19,13 +19,11 @@ import java.util.Locale; import java.util.Optional; -import java.util.concurrent.TimeUnit; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.util.Pair; import com.dremio.exec.catalog.Catalog; @@ -34,7 +32,6 @@ import com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.planner.CachedAccelDetails; import com.dremio.exec.planner.CachedPlan; -import com.dremio.exec.planner.DremioVolcanoPlanner; import com.dremio.exec.planner.PlanCache; import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.physical.PlannerSettings; @@ -49,6 +46,9 @@ import com.dremio.exec.store.CatalogService; import com.dremio.options.OptionManager; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * The default handler for queries. 
*/ @@ -56,88 +56,90 @@ public class NormalHandler implements SqlToPlanHandler { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NormalHandler.class); private String textPlan; + private Rel drel; + @WithSpan @Override public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode) throws Exception { try{ + Span.current().setAttribute("dremio.planner.workload_type", config.getContext().getWorkloadType().name()); + Span.current().setAttribute("dremio.planner.current_default_schema", config.getContext().getContextInformation().getCurrentDefaultSchema()); final PlannerSettings plannerSettings = config.getContext().getPlannerSettings(); final PlanCache planCache = config.getContext().getPlanCache(); - final String cachedKey = planCache.generateCacheKey(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql(), - config.getContext()); - config.getObserver().setCacheKey(cachedKey); - final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlNode); - final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); - final RelNode queryRelNode = convertedRelNode.getConvertedNode(); - ViewAccessEvaluator viewAccessEvaluator = null; - if (config.getConverter().getSubstitutionProvider().isDefaultRawReflectionEnabled()) { - final RelNode convertedRelWithExpansionNodes = ((DremioVolcanoPlanner) queryRelNode.getCluster().getPlanner()).getOriginalRoot(); - viewAccessEvaluator = new ViewAccessEvaluator(convertedRelWithExpansionNodes, config); - config.getContext().getExecutorService().submit(viewAccessEvaluator); - } final Catalog catalog = config.getContext().getCatalog(); final CatalogService catalogService = config.getContext().getCatalogService(); - CachedPlan cachedPlan = (planCache != null) ? planCache.getIfPresentAndValid(catalog, catalogService, cachedKey) : null; - Prel prel; - if (!plannerSettings.isPlanCacheEnabled() || cachedPlan == null) { - final Rel drel = PrelTransformer.convertToDrel(config, queryRelNode, validatedRowType); - - if (!plannerSettings.ignoreScannedColumnsLimit()) { - long maxScannedColumns = config.getContext().getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_SCANNED_MAX); - ScanLimitValidator.ensureLimit(drel, maxScannedColumns); - } - final Pair convertToPrel = PrelTransformer.convertToPrel(config, drel); - prel = convertToPrel.getKey(); - textPlan = convertToPrel.getValue(); + final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlNode); + try (ViewAccessEvaluator ignored = ViewAccessEvaluator.createAsyncEvaluator(config, convertedRelNode)) { + final RelDataType validatedRowType = convertedRelNode.getValidatedRowType(); + final RelNode queryRelNode = convertedRelNode.getConvertedNode(); - //after we generate a physical plan, save it in the plan cache if plan cache is present - if (PlanCache.supportPlanCache(planCache, config, sqlNode)) { - planCache.createNewCachedPlan(catalog, cachedKey, sql, prel, textPlan, config); - } - } else { - prel = cachedPlan.getPrel(); - // After the plan has been cached during planning, the job could be canceled during execution. - // Reset the cancel flag in cached plan, otherwise the job will always be canceled. - prel.getCluster().getPlanner().getContext().unwrap(org.apache.calcite.util.CancelFlag.class).clearCancel(); + final String cachedKey = PlanCache.generateCacheKey(sqlNode, queryRelNode, config.getContext()); + config.getObserver().setCacheKey(cachedKey); + CachedPlan cachedPlan = (planCache != null) ? 
planCache.getIfPresentAndValid(catalog, catalogService, cachedKey) : null; + Prel prel; - CachedAccelDetails accelDetails = cachedPlan.getAccelDetails(); - if (accelDetails != null && accelDetails.getSubstitutionInfo() != null) { - config.getObserver().applyAccelDetails(accelDetails); - } - config.getObserver().planCacheUsed(cachedPlan.updateUseCount()); - //update writer if needed - final OptionManager options = config.getContext().getOptions(); - final PlannerSettings.StoreQueryResultsPolicy storeQueryResultsPolicy = Optional - .ofNullable(options.getOption(STORE_QUERY_RESULTS.getOptionName())) - .map(o -> PlannerSettings.StoreQueryResultsPolicy.valueOf(o.getStringVal().toUpperCase(Locale.ROOT))) - .orElse(PlannerSettings.StoreQueryResultsPolicy.NO); - if (storeQueryResultsPolicy == PlannerSettings.StoreQueryResultsPolicy.PATH_AND_ATTEMPT_ID) { - //update writing path for this case only - prel = WriterPathUpdater.update(prel, config); - } - if (logger.isDebugEnabled() || config.getObserver() != null) { - textPlan = PrelSequencer.setPlansWithIds(prel, SqlExplainLevel.ALL_ATTRIBUTES, config.getObserver(), 0); - if (logger.isDebugEnabled()) { - logger.debug(String.format("%s:\n%s", "Final Physical Transformation", textPlan)); + Span.current().setAttribute("dremio.planner.cache.enabled", plannerSettings.isPlanCacheEnabled()); + Span.current().setAttribute("dremio.planner.cache.plan_cache_present_and_valid", (cachedPlan != null)); + + if (!plannerSettings.isPlanCacheEnabled() || cachedPlan == null) { + drel = PrelTransformer.convertToDrel(config, queryRelNode, validatedRowType); + + if (!plannerSettings.ignoreScannedColumnsLimit()) { + long maxScannedColumns = config.getContext().getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_SCANNED_MAX); + ScanLimitValidator.ensureLimit(drel, maxScannedColumns); + } + + final Pair convertToPrel = PrelTransformer.convertToPrel(config, drel); + prel = convertToPrel.getKey(); + textPlan = convertToPrel.getValue(); + + //after we generate a physical plan, save it in the plan cache if plan cache is present + if (PlanCache.supportPlanCache(planCache, config, sqlNode, catalog)) { + planCache.createNewCachedPlan(catalog, cachedKey, sql, prel, textPlan, config); } } else { - textPlan = ""; - } - } - final PhysicalOperator pop = PrelTransformer.convertToPop(config, prel); - PhysicalPlan plan = PrelTransformer.convertToPlan(config, pop); - logger.debug("Final Physical Plan {}", textPlan); - PrelTransformer.log(config, "Dremio Plan", plan, logger); - - if (viewAccessEvaluator != null) { - viewAccessEvaluator.getLatch().await(config.getContext().getPlannerSettings().getMaxPlanningPerPhaseMS(), TimeUnit.MILLISECONDS); - if (viewAccessEvaluator.getException() != null) { - throw viewAccessEvaluator.getException(); + prel = cachedPlan.getPrel(); + + // After the plan has been cached during planning, the job could be canceled during execution. + // Reset the cancel flag in cached plan, otherwise the job will always be canceled. 
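+ // (The CancelFlag is part of the planner context that is reused along with the cached Prel.)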
+ prel.getCluster().getPlanner().getContext().unwrap(org.apache.calcite.util.CancelFlag.class).clearCancel(); + + CachedAccelDetails accelDetails = cachedPlan.getAccelDetails(); + if (accelDetails != null && accelDetails.getSubstitutionInfo() != null) { + config.getObserver().applyAccelDetails(accelDetails); + } + config.getObserver().planCacheUsed(cachedPlan.updateUseCount()); + Span.current().setAttribute("dremio.planner.cache.plan_used_count", cachedPlan.getUseCount()); + //update writer if needed + final OptionManager options = config.getContext().getOptions(); + final PlannerSettings.StoreQueryResultsPolicy storeQueryResultsPolicy = Optional + .ofNullable(options.getOption(STORE_QUERY_RESULTS.getOptionName())) + .map(o -> PlannerSettings.StoreQueryResultsPolicy.valueOf(o.getStringVal().toUpperCase(Locale.ROOT))) + .orElse(PlannerSettings.StoreQueryResultsPolicy.NO); + Span.current().setAttribute("dremio.planner.store_query_results_policy", storeQueryResultsPolicy.name()); + if (storeQueryResultsPolicy == PlannerSettings.StoreQueryResultsPolicy.PATH_AND_ATTEMPT_ID) { + //update writing path for this case only + prel = WriterPathUpdater.update(prel, config); + } + if (logger.isDebugEnabled() || config.getObserver() != null) { + textPlan = PrelSequencer.setPlansWithIds(prel, SqlExplainLevel.ALL_ATTRIBUTES, config.getObserver(), 0); + if (logger.isDebugEnabled()) { + logger.debug(String.format("%s:\n%s", "Final Physical Transformation", textPlan)); + } + } else { + textPlan = ""; + } } + final PhysicalOperator pop = PrelTransformer.convertToPop(config, prel); + PhysicalPlan plan = PrelTransformer.convertToPlan(config, pop); + logger.debug("Final Physical Plan {}", textPlan); + PrelTransformer.log(config, "Dremio Plan", plan, logger); + return plan; } - - return plan; + }catch(Error ex){ + throw SqlExceptionHelper.coerceError(sql, ex); }catch(Exception ex){ throw SqlExceptionHelper.coerceException(logger, sql, ex, true); } @@ -148,4 +150,8 @@ public String getTextPlan() { return textPlan; } + @Override + public Rel getLogicalPlan() { + return drel; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeHandler.java index 6afe88ca47..8877ce2a7e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeHandler.java @@ -23,12 +23,11 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.util.Pair; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.ExecConstants; import com.dremio.exec.calcite.logical.TableOptimizeCrel; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUtil; @@ -60,6 +59,7 @@ public class OptimizeHandler extends TableManagementHandler { private static final Logger logger = LoggerFactory.getLogger(OptimizeHandler.class); private String textPlan; + private Rel drel; @Override public NamespaceKey getTargetTablePath(SqlNode sqlNode) throws Exception { @@ -80,9 +80,7 @@ protected void validatePrivileges(Catalog catalog, NamespaceKey path, SqlNode sq @VisibleForTesting @Override void checkValidations(Catalog catalog, SqlHandlerConfig config, 
NamespaceKey path, SqlNode sqlNode) throws Exception { - validateOptimizeEnabled(config); validatePrivileges(catalog, path, sqlNode); - validateWhereClause(((SqlOptimize)sqlNode).getCondition()); validateCompatibleTableFormat(catalog, config, path, getSqlOperator()); } @@ -91,10 +89,11 @@ protected Rel convertToDrel(SqlHandlerConfig config, SqlNode sqlNode, NamespaceK DremioTable table = catalog.getTable(path); List partitionColumnsList = table.getDatasetConfig().getReadDefinition().getPartitionColumnsList(); - OptimizeOptions optimizeOptions = new OptimizeOptions(config.getContext().getOptions(), (SqlOptimize) sqlNode, CollectionUtils.isEmpty(partitionColumnsList)); + OptimizeOptions optimizeOptions = OptimizeOptions.createInstance(config.getContext().getOptions(), + (SqlOptimize) sqlNode, CollectionUtils.isEmpty(partitionColumnsList)); CreateTableEntry createTableEntry = IcebergUtils.getIcebergCreateTableEntry(config, catalog, - table, getSqlOperator().getKind(), optimizeOptions); + table, getSqlOperator(), optimizeOptions); Rel convertedRelNode = PrelTransformer.convertToDrel(config, rewriteCrel(relNode, createTableEntry)); convertedRelNode = SqlHandlerUtil.storeQueryResultsIfNeeded(config.getConverter().getParserConfig(), @@ -128,33 +127,22 @@ public Prel getNonPhysicalPlan(Catalog catalog, SqlHandlerConfig config, SqlNode DremioTable table = catalog.getTable(path); List partitionColumnsList = table.getDatasetConfig().getReadDefinition().getPartitionColumnsList(); - final RelNode optimizeRelNode = ((TableOptimizeCrel) relNode).createWith(new OptimizeOptions(config.getContext().getOptions(), (SqlOptimize) sqlNode, CollectionUtils.isEmpty(partitionColumnsList))); - final Rel drel = convertToDrel(config, sqlNode, path, catalog, optimizeRelNode); + final RelNode optimizeRelNode = ((TableOptimizeCrel) relNode).createWith( + OptimizeOptions.createInstance(config.getContext().getOptions(), (SqlOptimize) sqlNode, CollectionUtils.isEmpty(partitionColumnsList))); + drel = convertToDrel(config, sqlNode, path, catalog, optimizeRelNode); final Pair prelAndTextPlan = PrelTransformer.convertToPrel(config, drel); textPlan = prelAndTextPlan.getValue(); return prelAndTextPlan.getKey(); } - @VisibleForTesting - void validateWhereClause(SqlNode condition) { - if (condition != null) { - throw UserException.unsupportedError().message("OPTIMIZE TABLE does not support WHERE conditions.").buildSilently(); - } - } - - private void validateOptimizeEnabled(SqlHandlerConfig config) { - if (!config.getContext().getOptions().getOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE)) { - throw UserException.unsupportedError().message("OPTIMIZE TABLE command is not supported.").buildSilently(); - } - } - private void validateCompatibleTableFormat(Catalog catalog, SqlHandlerConfig config, NamespaceKey namespaceKey, SqlOperator sqlOperator) { // Validate table exists and is Iceberg table IcebergUtils.checkTableExistenceAndMutability(catalog, config, namespaceKey, sqlOperator, false); - // Validate table has no delete files + // Validate V2 tables are supported (if yes - verify table has no equality delete files) IcebergMetadata icebergMetadata = catalog.getTableNoResolve(namespaceKey).getDatasetConfig().getPhysicalDataset().getIcebergMetadata(); - if (icebergMetadata.getDeleteManifestStats().getRecordCount() > 0) { - throw UserException.unsupportedError().message("OPTIMIZE TABLE command does not support tables with delete files.").buildSilently(); + Long deleteStat = icebergMetadata.getEqualityDeleteStats().getRecordCount(); + 
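// Positional delete files no longer disqualify a table from OPTIMIZE; only equality deletes do. +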
if (deleteStat > 0) { + throw UserException.unsupportedError().message("OPTIMIZE TABLE command does not support tables with equality delete files.").buildSilently(); } } @@ -162,4 +150,9 @@ private void validateCompatibleTableFormat(Catalog catalog, SqlHandlerConfig con public String getTextPlan() { return textPlan; } + + @Override + public Rel getLogicalPlan() { + return drel; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeOptions.java index 53583ba6fc..16b172050e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/OptimizeOptions.java @@ -16,9 +16,12 @@ package com.dremio.exec.planner.sql.handlers.query; +import java.util.Optional; + import com.dremio.exec.ExecConstants; import com.dremio.exec.planner.sql.parser.SqlOptimize; import com.dremio.options.OptionManager; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.base.Preconditions; /** @@ -30,7 +33,7 @@ * OptimizeOptions#targetFileSizeBytes}. */ public final class OptimizeOptions { - + public static OptimizeOptions DEFAULT = new Builder().build(); private final Long targetFileSizeBytes; private final Long maxFileSizeBytes; private final Long minFileSizeBytes; @@ -42,35 +45,52 @@ public final class OptimizeOptions { * This avoids creation of small files when execution planning over parallelisms. */ private final boolean isSingleDataWriter; + private final boolean optimizeDataFiles; + private final boolean optimizeManifestFiles; + - public static OptimizeOptions DEFAULT = new OptimizeOptions(); + private OptimizeOptions(Long targetFileSizeBytes, Long maxFileSizeBytes, Long minFileSizeBytes, Long minInputFiles, + boolean isSingleDataWriter, boolean optimizeDataFiles, boolean optimizeManifestFiles) { + this.targetFileSizeBytes = targetFileSizeBytes; + this.maxFileSizeBytes = maxFileSizeBytes; + this.minFileSizeBytes = minFileSizeBytes; + this.minInputFiles = minInputFiles; + this.isSingleDataWriter = isSingleDataWriter; + this.optimizeDataFiles = optimizeDataFiles; + this.optimizeManifestFiles = optimizeManifestFiles; + } + public static OptimizeOptions createInstance(OptionManager optionManager, SqlOptimize call, boolean isSingleDataWriter) { + Builder instanceBuilder = new Builder(); - private OptimizeOptions() { - this.targetFileSizeBytes = ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB.getDefault().getNumVal(); - this.maxFileSizeBytes = (long) (ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal() * targetFileSizeBytes); - this.minFileSizeBytes = (long) (ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal() * targetFileSizeBytes); - this.minInputFiles = ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES.getDefault().getNumVal(); - this.isSingleDataWriter = false; + instanceBuilder.setSingleWriter(isSingleDataWriter); + instanceBuilder.setTargetFileSizeMB(call.getTargetFileSize().orElse(optionManager.getOption(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB))); + instanceBuilder.setMaxFileSizeRatio(optionManager.getOption(ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO)); + instanceBuilder.setMinFileSizeRatio(optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO)); + + instanceBuilder.setMaxFileSizeMB(call.getMaxFileSize()); // computed from ratio if not set + 
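// Sizes given explicitly in the SQL call take precedence; the ratio-derived defaults apply only when MAX/MIN are unset (see Builder.build()). +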
instanceBuilder.setMinFileSizeMB(call.getMinFileSize());// computed from ratio if not set + Long minInputFiles = call.getMinInputFiles().orElse(optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES)); + instanceBuilder.setMinInputFiles(Optional.of(minInputFiles)); + + instanceBuilder.setOptimizeDataFiles(call.getRewriteDataFiles().booleanValue()); + instanceBuilder.setOptimizeManifestFiles(call.getRewriteManifests().booleanValue()); + + return instanceBuilder.build(); } - public OptimizeOptions(OptionManager optionManager, SqlOptimize call, boolean isSingleDataWriter) { - this.isSingleDataWriter = isSingleDataWriter; - Long targetFileSizeMB, maxFileSizeMB, minFileSizeMB; - targetFileSizeMB = call.getTargetFileSize().orElse(optionManager.getOption(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB)); - maxFileSizeMB = call - .getMaxFileSize() - .orElse( - (long) (optionManager.getOption(ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO)*targetFileSizeMB)); - minFileSizeMB = call - .getMinFileSize() - .orElse( - (long) (optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO)*targetFileSizeMB)); - minInputFiles = call.getMinInputFiles().orElse(optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES)); - validateOptions(targetFileSizeMB, minFileSizeMB, maxFileSizeMB, minInputFiles); - targetFileSizeBytes = targetFileSizeMB * 1024L * 1024L; - maxFileSizeBytes = maxFileSizeMB * 1024L * 1024L; - minFileSizeBytes = minFileSizeMB * 1024L * 1024L; + public static OptimizeOptions createInstance(SqlOptimize call) { + Builder instanceBuilder = new Builder(); + + call.getTargetFileSize().ifPresent(instanceBuilder::setTargetFileSizeMB); + instanceBuilder.setMaxFileSizeMB(call.getMaxFileSize()); + instanceBuilder.setMinFileSizeMB(call.getMinFileSize()); + instanceBuilder.setMinInputFiles(call.getMinInputFiles()); + + instanceBuilder.setOptimizeDataFiles(call.getRewriteDataFiles().booleanValue()); + instanceBuilder.setOptimizeManifestFiles(call.getRewriteManifests().booleanValue()); + + return instanceBuilder.build(); } public static void validateOptions(Long targetFileSizeMB, Long minFileSizeMB, Long maxFileSizeMB, Long minInputFiles) { @@ -102,7 +122,7 @@ public static void validateOptions(Long targetFileSizeMB, Long minFileSizeMB, Lo minFileSizeMB); Preconditions.checkArgument( - maxFileSizeMB >= targetFileSizeMB, + maxFileSizeMB >= targetFileSizeMB, "Value of TARGET_FILE_SIZE_MB [%s] cannot be greater than MAX_FILE_SIZE_MB [%s].", targetFileSizeMB, maxFileSizeMB); @@ -132,4 +152,84 @@ public Long getMinInputFiles() { public boolean isSingleDataWriter() { return isSingleDataWriter; } + + public boolean isOptimizeDataFiles() { + return optimizeDataFiles; + } + + public boolean isOptimizeManifestFiles() { + return optimizeManifestFiles; + } + + @JsonIgnore + public boolean isOptimizeManifestsOnly() { + return isOptimizeManifestFiles() && !isOptimizeDataFiles(); + } + + private static class Builder { + private Long targetFileSizeMB = ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB.getDefault().getNumVal(); + private Double maxFileSizeRatio = ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal(); + private Double minFileSizeRatio = ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal(); + private Optional maxFileSizeMB = Optional.empty(); + private Optional minFileSizeMB = Optional.empty(); + private long minInputFiles = ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES.getDefault().getNumVal(); + private boolean 
optimizeDataFiles = true; + private boolean optimizeManifestFiles = true; + private boolean isSingleWriter = false; + + private Builder() {} + + private void setMaxFileSizeRatio(Double maxFileSizeRatio) { + this.maxFileSizeRatio = maxFileSizeRatio; + } + + private void setMinFileSizeRatio(Double minFileSizeRatio) { + this.minFileSizeRatio = minFileSizeRatio; + } + + private void setMinInputFiles(Optional minInputFiles) { + minInputFiles.ifPresent(val -> this.minInputFiles = val); + } + + private void setMaxFileSizeMB(Optional maxFileSizeMB) { + this.maxFileSizeMB = maxFileSizeMB; + } + + private void setMinFileSizeMB(Optional minFileSizeMB) { + this.minFileSizeMB = minFileSizeMB; + } + + private void setTargetFileSizeMB(Long targetFileSizeMB) { + this.targetFileSizeMB = targetFileSizeMB; + } + + private void setOptimizeDataFiles(boolean optimizeDataFiles) { + this.optimizeDataFiles = optimizeDataFiles; + } + + private void setOptimizeManifestFiles(boolean optimizeManifestFiles) { + this.optimizeManifestFiles = optimizeManifestFiles; + } + + public void setSingleWriter(boolean singleWriter) { + isSingleWriter = singleWriter; + } + + private long mbToBytes(long sizeMB) { + return sizeMB * 1024 * 1024; + } + + private OptimizeOptions build() { + long maxFileSizeMbVal = this.maxFileSizeMB.orElse((long) (this.targetFileSizeMB * maxFileSizeRatio)); + long minFileSizeMbVal = this.minFileSizeMB.orElse((long) (this.targetFileSizeMB * minFileSizeRatio)); + + validateOptions(targetFileSizeMB, minFileSizeMbVal, maxFileSizeMbVal, minInputFiles); + + long targetFileSizeBytes = mbToBytes(targetFileSizeMB); + long maxFileSizeBytes = mbToBytes(maxFileSizeMbVal); + long minFileSizeBytes = mbToBytes(minFileSizeMbVal); + + return new OptimizeOptions(targetFileSizeBytes, maxFileSizeBytes, minFileSizeBytes, minInputFiles, isSingleWriter, optimizeDataFiles, optimizeManifestFiles); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/SqlToPlanHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/SqlToPlanHandler.java index 9b9bb8f664..8de3981edd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/SqlToPlanHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/SqlToPlanHandler.java @@ -18,6 +18,7 @@ import org.apache.calcite.sql.SqlNode; import com.dremio.exec.physical.PhysicalPlan; +import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; public interface SqlToPlanHandler { @@ -26,6 +27,10 @@ public interface SqlToPlanHandler { String getTextPlan(); + default Rel getLogicalPlan() { + return null; + } + public interface Creator { SqlToPlanHandler toPlanHandler(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/TableManagementHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/TableManagementHandler.java index 56c42703b9..a9d963cb8c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/TableManagementHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/TableManagementHandler.java @@ -15,6 +15,8 @@ */ package com.dremio.exec.planner.sql.handlers.query; +import static com.dremio.exec.planner.sql.handlers.SqlHandlerUtil.PLANNER_SOURCE_TARGET_SOURCE_TYPE_SPAN_ATTRIBUTE_NAME; + import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.TableScan; import 
org.apache.calcite.rel.type.RelDataType; @@ -24,6 +26,7 @@ import com.dremio.exec.calcite.logical.ScanCrel; import com.dremio.exec.calcite.logical.TableModifyCrel; import com.dremio.exec.calcite.logical.TableOptimizeCrel; +import com.dremio.exec.calcite.logical.VacuumTableCrel; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.DremioPrepareTable; import com.dremio.exec.physical.PhysicalPlan; @@ -32,10 +35,15 @@ import com.dremio.exec.planner.logical.CreateTableEntry; import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; import com.dremio.exec.planner.sql.parser.DmlUtils; +import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.service.namespace.NamespaceKey; import com.google.common.annotations.VisibleForTesting; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Abstraction of the plan building components, within table modification operations, such as DML and OPTIMIZE. */ @@ -48,10 +56,12 @@ public abstract class TableManagementHandler implements SqlToPlanHandler { * Run {@link #checkValidations(Catalog, SqlHandlerConfig, NamespaceKey, SqlNode)} * and return Plan {@link #getPlan(Catalog, SqlHandlerConfig, String, SqlNode, NamespaceKey)} */ + @WithSpan @Override public final PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode) throws Exception { final Catalog catalog = config.getContext().getCatalog(); final NamespaceKey path = DmlUtils.getTablePath(catalog, getTargetTablePath(sqlNode)); + Span.current().setAttribute(PLANNER_SOURCE_TARGET_SOURCE_TYPE_SPAN_ATTRIBUTE_NAME, SqlHandlerUtil.getSourceType(catalog, path.getRoot())); checkValidations(catalog, config, path, sqlNode); return getPlan(catalog, config, sql, sqlNode, path); } @@ -118,6 +128,10 @@ public RelNode visit(RelNode other) { other = ((TableOptimizeCrel) other).createWith(createTableEntry); } + if (other instanceof VacuumTableCrel) { + other = ((VacuumTableCrel) other).createWith(createTableEntry); + } + return super.visit(other); } } @@ -201,4 +215,8 @@ public RelNode visit(RelNode other) { } } + protected void validateTableExistenceAndMutability(Catalog catalog, SqlHandlerConfig config, NamespaceKey namespaceKey) { + // Validate table exists and is Iceberg table + IcebergUtils.checkTableExistenceAndMutability(catalog, config, namespaceKey, getSqlOperator(), false); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumCatalogHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumCatalogHandler.java new file mode 100644 index 0000000000..83920a9382 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumCatalogHandler.java @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers.query; + +import org.apache.calcite.sql.SqlNode; +import org.apache.commons.lang3.NotImplementedException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.ExecConstants; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.physical.PhysicalPlan; +import com.dremio.exec.planner.logical.Rel; +import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.SqlVacuumCatalog; +import com.dremio.service.namespace.NamespaceKey; + +/** + * Handler for {@link com.dremio.exec.planner.sql.parser.SqlVacuumCatalog} command. + */ +public class VacuumCatalogHandler implements SqlToPlanHandler { + private static final Logger LOGGER = LoggerFactory.getLogger(VacuumCatalogHandler.class); + + @Override + public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode) throws Exception { + final Catalog catalog = config.getContext().getCatalog(); + NamespaceKey path = SqlNodeUtil.unwrap(sqlNode, SqlVacuumCatalog.class).getPath(); + + validate(catalog, config, path); + + throw new NotImplementedException(); + } + + private void validate(Catalog catalog, SqlHandlerConfig config, NamespaceKey path) { + validateFeatureEnabled(config); + validatePath(path); + validateCompatibleCatalog(catalog, path); + } + + private void validateFeatureEnabled(SqlHandlerConfig config) { + if (!config.getContext().getOptions().getOption(ExecConstants.ENABLE_ICEBERG_VACUUM_CATALOG)) { + throw UserException.unsupportedError().message("VACUUM CATALOG command is not supported.").buildSilently(); + } + } + + private void validatePath(NamespaceKey path) { + if (path.size() != 1) { + throw UserException.parseError() + .message("Catalog name cannot have multiple path components.") + .buildSilently(); + } + } + + private void validateCompatibleCatalog(Catalog catalog, NamespaceKey path) { + if (!CatalogUtil.requestedPluginSupportsVersionedTables(path, catalog)) { + throw UserException.unsupportedError() + .message("VACUUM CATALOG is supported only on versioned sources.") + .buildSilently(); + } + } + + @Override + public String getTextPlan() { + throw new NotImplementedException(); + } + + @Override + public Rel getLogicalPlan() { + throw new NotImplementedException(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumTableHandler.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumTableHandler.java new file mode 100644 index 0000000000..7fd12106ab --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/query/VacuumTableHandler.java @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers.query; + +import static com.dremio.exec.planner.sql.handlers.query.DataAdditionCmdHandler.refreshDataset; + +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.util.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.ExecConstants; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.physical.PhysicalPlan; +import com.dremio.exec.physical.base.PhysicalOperator; +import com.dremio.exec.planner.logical.CreateTableEntry; +import com.dremio.exec.planner.logical.Rel; +import com.dremio.exec.planner.logical.ScreenRel; +import com.dremio.exec.planner.physical.Prel; +import com.dremio.exec.planner.sql.SqlExceptionHelper; +import com.dremio.exec.planner.sql.handlers.ConvertedRelNode; +import com.dremio.exec.planner.sql.handlers.PrelTransformer; +import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.exec.planner.sql.handlers.direct.SqlNodeUtil; +import com.dremio.exec.planner.sql.parser.SqlVacuumTable; +import com.dremio.exec.store.iceberg.IcebergUtils; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.annotations.VisibleForTesting; + +/** + * Handler for {@link SqlVacuumTable} command. + */ +public class VacuumTableHandler extends TableManagementHandler { + private static final Logger logger = LoggerFactory.getLogger(VacuumTableHandler.class); + + private String textPlan; + private Rel drel; + + @Override + public NamespaceKey getTargetTablePath(SqlNode sqlNode) throws Exception { + return SqlNodeUtil.unwrap(sqlNode, SqlVacuumTable.class).getPath(); + } + + @Override + public SqlOperator getSqlOperator() { + return SqlVacuumTable.OPERATOR; + } + + @Override + protected void validatePrivileges(Catalog catalog, NamespaceKey path, SqlNode sqlNode) throws Exception { + // User must be admin or owner of the table.
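+ // (validateOwnership throws for callers that are neither, so no further privilege check is needed here.)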
+ catalog.validateOwnership(path); + } + + private void validateFeatureEnabled(SqlHandlerConfig config) { + if (!config.getContext().getOptions().getOption(ExecConstants.ENABLE_ICEBERG_VACUUM)) { + throw UserException.unsupportedError().message("VACUUM TABLE command is not supported.").buildSilently(); + } + } + + @VisibleForTesting + @Override + public void checkValidations(Catalog catalog, SqlHandlerConfig config, NamespaceKey path, SqlNode sqlNode) throws Exception { + validateFeatureEnabled(config); + validatePrivileges(catalog, path, sqlNode); + validateTableExistenceAndMutability(catalog, config, path); + } + + @Override + protected Rel convertToDrel(SqlHandlerConfig config, SqlNode sqlNode, NamespaceKey path, Catalog catalog, RelNode relNode) throws Exception { + CreateTableEntry createTableEntry = IcebergUtils.getIcebergCreateTableEntry(config, catalog, + catalog.getTable(path), getSqlOperator(), null); + Rel convertedRelNode = PrelTransformer.convertToDrel(config, rewriteCrel(relNode, createTableEntry)); + convertedRelNode = SqlHandlerUtil.storeQueryResultsIfNeeded(config.getConverter().getParserConfig(), + config.getContext(), convertedRelNode); + + return new ScreenRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode); + } + + @Override + protected PhysicalPlan getPlan(Catalog catalog, SqlHandlerConfig config, String sql, SqlNode sqlNode, NamespaceKey path) throws Exception { + try { + Runnable refresh = null; + if (!CatalogUtil.requestedPluginSupportsVersionedTables(path, catalog)) { + refresh = () -> refreshDataset(catalog, path, false); + //Always use the latest snapshot before vacuum. + refresh.run(); + } else { + throw UserException.unsupportedError() + .message("VACUUM TABLE command is not supported for this source") + .buildSilently(); + } + + final Prel prel = getNonPhysicalPlan(catalog, config, sqlNode, path); + final PhysicalOperator pop = PrelTransformer.convertToPop(config, prel); + + return PrelTransformer.convertToPlan(config, pop, refresh, refresh); + } catch (Exception e) { + throw SqlExceptionHelper.coerceException(logger, sql, e, true); + } + } + + public Prel getNonPhysicalPlan(Catalog catalog, SqlHandlerConfig config, SqlNode sqlNode, NamespaceKey path) throws Exception { + final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, sqlNode); + final RelNode relNode = convertedRelNode.getConvertedNode(); + + drel = convertToDrel(config, sqlNode, path, catalog, relNode); + final Pair prelAndTextPlan = PrelTransformer.convertToPrel(config, drel); + textPlan = prelAndTextPlan.getValue(); + return prelAndTextPlan.getKey(); + } + + @Override + public String getTextPlan() { + return textPlan; + } + + @Override + public Rel getLogicalPlan() { + return drel; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/AbstractRefreshPlanBuilder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/AbstractRefreshPlanBuilder.java index b391a917ec..c7340fb3ea 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/AbstractRefreshPlanBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/AbstractRefreshPlanBuilder.java @@ -123,6 +123,8 @@ import com.google.common.collect.Lists; import com.google.protobuf.InvalidProtocolBufferException; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Base plan builder class. Should be extended by all other plan builders. 
*/ @@ -207,6 +209,7 @@ public static DatasetConfig setupDatasetConfig(DatasetConfig datasetConf, Namesp */ public abstract boolean updateDatasetConfigWithIcebergMetadataIfNecessary(); + @WithSpan protected boolean repairAndSaveDatasetConfigIfNecessary() { RepairKvstoreFromIcebergMetadata repairOperation = new RepairKvstoreFromIcebergMetadata(datasetConfig, metaStoragePlugin, config.getContext().getNamespaceService(SystemUser.SYSTEM_USERNAME), @@ -220,6 +223,7 @@ protected boolean repairAndSaveDatasetConfigIfNecessary() { * * @return */ + @Override public Prel buildPlan() { final Prel dirListingPrel = getDataFileListingPrel(); final Prel roundRobinExchange = getDirListToFooterReadExchange(dirListingPrel); @@ -229,8 +233,10 @@ public Prel buildPlan() { return new ScreenPrel(cluster, traitSet, writerCommitterPrel); } + @Override public abstract PartitionChunkListing listPartitionChunks(DatasetRetrievalOptions datasetRetrievalOptions) throws ConnectorException; + @Override public abstract void setupMetadataForPlanning(PartitionChunkListing partitionChunkListing, DatasetRetrievalOptions retrievalOptions) throws ConnectorException, InvalidProtocolBufferException; // Extended to calculate diff for incremental use-cases @@ -494,8 +500,7 @@ public List getPartitionPaths(Iterable splits) { if (!partitionValues.isEmpty()) { partitionPaths.add(dirListInputSplit.getOperatingPath()); } - } - catch (InvalidProtocolBufferException e) { + } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/FileSystemRefreshIncrementalPlanBuilder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/FileSystemRefreshIncrementalPlanBuilder.java index d5901f8bba..3591f0b65b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/FileSystemRefreshIncrementalPlanBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/handlers/refresh/FileSystemRefreshIncrementalPlanBuilder.java @@ -47,6 +47,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Builds plan for filesystems in case of incremental and partial refresh. 
*/ @@ -110,6 +112,7 @@ public Prel getDataFileListingPrel() { ImmutableList.of()); } + @Override public Prel getDirListToFooterReadExchange(Prel child) { return getHashToRandomExchangePrel(child); } @@ -119,6 +122,7 @@ protected void checkAndUpdateIsFileDataset() { isFileDataset = DatasetType.PHYSICAL_DATASET_SOURCE_FILE.equals(metadataProvider.getDatasetConfig().getType()); } + @Override public List generatePathsForPartialRefresh() { RefreshDatasetValidator validator = new FileSystemPartitionValidator(metadataProvider); validator.validate(sqlNode); @@ -172,6 +176,7 @@ public double getRowCountEstimates(String type) { return Math.max(baseRowCount, 1); } + @WithSpan @Override public boolean updateDatasetConfigWithIcebergMetadataIfNecessary() { return repairAndSaveDatasetConfigIfNecessary(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/CompoundIdentifierConverter.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/CompoundIdentifierConverter.java index 182366f720..843d35044d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/CompoundIdentifierConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/CompoundIdentifierConverter.java @@ -134,6 +134,9 @@ public SqlNode visitChild( break; case ENABLE: enableComplex = true; + break; + default: + break; } } SqlNode newOperand = operand.accept(CompoundIdentifierConverter.this); @@ -153,8 +156,11 @@ enum RewriteType { } static { + @SuppressWarnings("checkstyle:LocalFinalVariableName") final RewriteType E = RewriteType.ENABLE; + @SuppressWarnings("checkstyle:LocalFinalVariableName") final RewriteType D = RewriteType.DISABLE; + @SuppressWarnings("checkstyle:LocalFinalVariableName") final RewriteType U = RewriteType.UNCHANGED; /* @@ -173,8 +179,8 @@ RewriteType[] should be R(D, E, D, D). rules.put(SqlSelect.class, R(D, E, D, E, E, E, E, E, E, D, D, D)); rules.put(SqlInsertTable.class, R(D, E, D)); - rules.put(SqlCreateTable.class, R(D, D, D, D, D, D, D, E, D, D, D, D)); - rules.put(SqlCreateEmptyTable.class, R(D, D, D, D, D, D, D, D, D, D, D)); + rules.put(SqlCreateTable.class, R(D, D, D, D, D, D, D, E, D, D, D, D, D, D)); + rules.put(SqlCreateEmptyTable.class, R(D, D, D, D, D, D, D, D, D, D, D, D, D)); rules.put(SqlCreateView.class, R(D, E, E, D, D)); rules.put(SqlDescribeTable.class, R(D, D, E)); rules.put(SqlDropView.class, R(D, D)); @@ -186,9 +192,9 @@ RewriteType[] should be R(D, E, D, D). rules.put(SqlDropTable.class, R(D, D)); rules.put(SqlTruncateTable.class, R(D, D, D)); rules.put(SqlSetOption.class, R(D, D, D)); - rules.put(SqlCreateReflection.class, R(D,D,D,D,D,D,D,D,D,D,D)); - rules.put(SqlDropReflection.class, R(D,D)); - rules.put(SqlAccelToggle.class, R(D,D, D)); + rules.put(SqlCreateReflection.class, R(D,D,D,D,D,D,D,D,D,D,D,D)); + rules.put(SqlDropReflection.class, R(D,D,D)); + rules.put(SqlAccelToggle.class, R(D,D,D,D)); rules.put(SqlForgetTable.class, R(D)); rules.put(SqlRefreshDataset.class, R(D,D,D,D,D,D,D,D,D,D)); rules.put(SqlRefreshTable.class, R(D,D,D,D,D,D,D,D,D,D)); @@ -213,6 +219,7 @@ RewriteType[] should be R(D, E, D, D). // Each type in the input arguments refers to // each data field in the class + @SuppressWarnings("checkstyle:MethodName") private static RewriteType[] R(RewriteType... 
types){ return types; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/DremioSqlColumnDeclaration.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/DremioSqlColumnDeclaration.java index 92708563ac..d4f9a0a23d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/DremioSqlColumnDeclaration.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/DremioSqlColumnDeclaration.java @@ -38,6 +38,7 @@ public class DremioSqlColumnDeclaration extends SqlCall { private static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("COLUMN_DECL", SqlKind.COLUMN_DECL) { + @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { if (operands.length == 2) { return new DremioSqlColumnDeclaration(pos, (SqlColumnPolicyPair) operands[0], (SqlDataTypeSpec) operands[1], null); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/ParserUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/ParserUtil.java index baab846238..44e4d3df35 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/ParserUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/ParserUtil.java @@ -19,6 +19,7 @@ import java.util.List; import org.apache.calcite.avatica.util.Quoting; +import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlNode; @@ -30,10 +31,14 @@ import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserUtil; import org.apache.calcite.sql.pretty.SqlPrettyWriter; +import org.apache.calcite.sql.util.SqlBasicVisitor; +import org.apache.calcite.sql.util.SqlVisitor; import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.TableVersionType; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.ParserConfig; +import com.dremio.service.Pointer; import com.google.common.collect.Lists; /** @@ -94,4 +99,44 @@ public static void validateViewQuery(String viewQuery) throws UserException { validateParsedViewQuery(sqlNode); } + public static boolean checkTimeTravelOnView(String viewQuery) throws UserException { + ParserConfig PARSER_CONFIG = new ParserConfig( + Quoting.DOUBLE_QUOTE, + 1000, + true, + PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT.getDefault().getBoolVal()); + SqlParser parser = SqlParser.create(viewQuery, PARSER_CONFIG); + try { + SqlNode sqlNode = parser.parseStmt(); + return isTimeTravelQuery(sqlNode); + } catch (SqlParseException parseException) { + // Don't catch real parser exceptions here. The purpose of this method is only to catch unsupported + query types in successfully parsed statements. If there is a real parser error, the existing flow + to run the query and get back job results will catch and handle the parse exception correctly with the + error handling that's already in place.
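+ // Illustrative example (hypothetical SQL, not part of this change): a view body such as
+ // SELECT * FROM orders AT SNAPSHOT '5393090506354317772' parses cleanly and is reported as
+ // time travel by isTimeTravelQuery(...) below; view text that fails to parse simply lands here.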
+ return false; + } + + } + + public static boolean isTimeTravelQuery(SqlNode sqlNode) { + Pointer timeTravel = new Pointer<>(false); + SqlVisitor visitor = new SqlBasicVisitor() { + @Override + public Void visit(SqlCall call) { + if ((call instanceof SqlVersionedTableMacroCall) && + ((((SqlVersionedTableMacroCall) call).getTableVersionSpec().getTableVersionType() == TableVersionType.TIMESTAMP) || + (((SqlVersionedTableMacroCall) call).getTableVersionSpec().getTableVersionType() == TableVersionType.SNAPSHOT_ID))) { + timeTravel.value = true; + return null; + } + + return super.visit(call); + } + }; + + sqlNode.accept(visitor); + return timeTravel.value; + } + } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAccelToggle.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAccelToggle.java index 06fa6fcaca..0d48d079a1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAccelToggle.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAccelToggle.java @@ -34,20 +34,26 @@ public class SqlAccelToggle extends SqlSystemCall { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("ACCEL_TOGGLE", SqlKind.OTHER_DDL) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { - Preconditions.checkArgument(operands.length == 3, "SqlAccelToggle.createCall() has to get 3 operands!"); - return new SqlAccelToggle(pos, (SqlIdentifier) operands[0], (SqlLiteral) operands[1], (SqlLiteral) operands[2]); + Preconditions.checkArgument(operands.length == 4, "SqlAccelToggle.createCall() has to get 4 operands!"); + return new SqlAccelToggle(pos, + (SqlIdentifier) operands[0], + (SqlLiteral) operands[1], + (SqlLiteral) operands[2], + (SqlTableVersionSpec) operands[3]); } }; private final SqlIdentifier tblName; private final SqlLiteral raw; private final SqlLiteral enable; + private final SqlTableVersionSpec tableVersionSpec; - public SqlAccelToggle(SqlParserPos pos, SqlIdentifier tblName, SqlLiteral raw, SqlLiteral enable) { + public SqlAccelToggle(SqlParserPos pos, SqlIdentifier tblName, SqlLiteral raw, SqlLiteral enable, SqlTableVersionSpec tableVersionSpec) { super(pos); this.tblName = tblName; this.raw = raw; this.enable = enable; + this.tableVersionSpec = tableVersionSpec; } public SqlIdentifier getTblName() { @@ -62,9 +68,13 @@ public boolean isEnable() { return enable.booleanValue(); } + public SqlTableVersionSpec getSqlTableVersionSpec() { + return tableVersionSpec; + } + @Override public List getOperandList() { - return ImmutableList.of(tblName, raw, enable); + return ImmutableList.of(tblName, raw, enable, tableVersionSpec); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAlterTableProperties.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAlterTableProperties.java new file mode 100644 index 0000000000..69dc60cad8 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlAlterTableProperties.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.parser; + +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; + +/** + * Parse tree node for an ALTER TABLE ... SET|UNSET TBLPROPERTIES statement, + * e.g. (illustrative) {@code ALTER TABLE t SET TBLPROPERTIES ('commit.retry.num-retries' = '5')}. + */ +public class SqlAlterTableProperties extends SqlAlterTable { + + public enum Mode { + SET, + UNSET + } + + public static final SqlSpecialOperator ALTER_TABLE_PROPERTIES_OPERATOR = + new SqlSpecialOperator("ALTER_TABLE_PROPERTIES_OPERATOR", SqlKind.ALTER_TABLE) { + + @Override + public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { + Preconditions.checkArgument(operands.length == 4, "SqlAlterTableProperties.createCall() " + + "has to get 4 operands!"); + + return new SqlAlterTableProperties( + pos, + (SqlIdentifier) operands[0], + (SqlLiteral) operands[1], + (SqlNodeList) operands[2], + (SqlNodeList) operands[3]); + } + }; + + private final SqlLiteral mode; + private final SqlNodeList tablePropertyNameList; + private final SqlNodeList tablePropertyValueList; + + public SqlAlterTableProperties(SqlParserPos pos, SqlIdentifier tableName, SqlLiteral mode, + SqlNodeList tablePropertyNameList, SqlNodeList tablePropertyValueList) { + super(pos, tableName); + this.mode = Preconditions.checkNotNull(mode); + this.tablePropertyNameList = tablePropertyNameList; + this.tablePropertyValueList = tablePropertyValueList; + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + super.unparse(writer, leftPrec, rightPrec); + writer.keyword(mode.toValue()); + writer.keyword("TBLPROPERTIES"); + if(tablePropertyNameList != null) { + writer.keyword("("); + for (int i = 0; i < tablePropertyNameList.size(); i++) { + if (i > 0) { + writer.keyword(","); + } + tablePropertyNameList.get(i).unparse(writer, leftPrec, rightPrec); + if (tablePropertyValueList != null && tablePropertyValueList.size() > i) { + writer.keyword("="); + tablePropertyValueList.get(i).unparse(writer, leftPrec, rightPrec); + } + } + writer.keyword(")"); + } + } + + @Override + public SqlOperator getOperator() { + return ALTER_TABLE_PROPERTIES_OPERATOR; + } + + @Override + public List getOperandList() { + return Lists.newArrayList(tblName, mode, tablePropertyNameList, tablePropertyValueList); + } + + public Mode getMode() { + return mode.symbolValue(Mode.class); + } + + public List getTablePropertyNameList() { + return tablePropertyNameList.getList().stream().map(x -> ((SqlLiteral)x).toValue()).collect(Collectors.toList()); + } + + public List getTablePropertyValueList() { + return tablePropertyValueList.getList().stream().map(x -> 
((SqlLiteral)x).toValue()).collect(Collectors.toList()); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlColumnPolicyPair.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlColumnPolicyPair.java index a954666991..2d68e3eb5c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlColumnPolicyPair.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlColumnPolicyPair.java @@ -35,6 +35,7 @@ public class SqlColumnPolicyPair extends SqlCall { private static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("COLUMN_POLICY_PAIR", SqlKind.OTHER) { + @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { if(operands.length == 1) { return new SqlColumnPolicyPair(pos, (SqlIdentifier) operands[0], null); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCopyIntoTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCopyIntoTable.java index dab0ed49fe..3831ea38f7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCopyIntoTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCopyIntoTable.java @@ -40,6 +40,7 @@ import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.planner.sql.PartitionTransform; import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.service.namespace.NamespaceKey; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -66,7 +67,11 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod @Override public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) { - SqlValidatorTable nsTable = validator.getCatalogReader().getTable(DmlUtils.getPath(((SqlCopyIntoTable)call).getTargetTable()).getPathComponents()); + NamespaceKey path = DmlUtils.getPath(((SqlCopyIntoTable) call).getTargetTable()); + SqlValidatorTable nsTable = validator.getCatalogReader().getTable(path.getPathComponents()); + if (nsTable == null) { + throw UserException.invalidMetadataError().message("Table with path %s cannot be found", path).buildSilently(); + } return nsTable.getRowType(); } }; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateEmptyTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateEmptyTable.java index b01b8809d7..8deccd6071 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateEmptyTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateEmptyTable.java @@ -44,8 +44,9 @@ public class SqlCreateEmptyTable extends SqlCall implements DataAdditionCmdCall public static final SqlSpecialOperator CREATE_EMPTY_TABLE_OPERATOR = new SqlSpecialOperator("CREATE_EMPTY_TABLE", SqlKind.OTHER_DDL) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... 
operands) { - Preconditions.checkArgument(operands.length == 11, "SqlCreateEmptyTable.createCall() " + - "has to get 11 operands!"); + Preconditions.checkArgument(operands.length == 13, "SqlCreateEmptyTable.createCall() " + + "has to get 13 operands!"); + if (((SqlNodeList) operands[1]).getList().size() == 0) { throw UserException.parseError().message("Columns/Fields not specified for table.").buildSilently(); @@ -69,7 +70,9 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod (SqlLiteral) operands[6], (SqlNodeList) operands[7], (SqlNodeList) operands[8], - (SqlPolicy) operands[10]); + (SqlPolicy) operands[10], + (SqlNodeList) operands[11], + (SqlNodeList) operands[12]); } }; @@ -84,6 +87,8 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod protected final SqlPolicy policy; protected final SqlLiteral singleWriter; protected final boolean ifNotExists; + protected final SqlNodeList tablePropertyNameList; + protected final SqlNodeList tablePropertyValueList; public SqlCreateEmptyTable( SqlParserPos pos, @@ -97,7 +102,9 @@ public SqlCreateEmptyTable( SqlLiteral singleWriter, SqlNodeList sortFieldList, SqlNodeList distributionColumns, - SqlPolicy policy) { + SqlPolicy policy, + SqlNodeList tablePropertyNameList, + SqlNodeList tablePropertyValueList) { super(pos); this.tblName = tblName; this.fieldList = fieldList; @@ -110,6 +117,8 @@ public SqlCreateEmptyTable( this.distributionColumns = distributionColumns; this.ifNotExists = ifNotExists; this.policy = policy; + this.tablePropertyNameList = tablePropertyNameList; + this.tablePropertyValueList = tablePropertyValueList; } @Override @@ -131,6 +140,8 @@ public List getOperandList() { ops.add(distributionColumns); ops.add(SqlLiteral.createBoolean(ifNotExists, SqlParserPos.ZERO)); ops.add(policy); + ops.add(tablePropertyNameList); + ops.add(tablePropertyValueList); return ops; } @@ -190,12 +201,25 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("POLICY"); policy.unparse(writer, leftPrec, rightPrec); } + if(tablePropertyNameList != null) { + writer.keyword("TBLPROPERTIES"); + writer.keyword("("); + for (int i = 0; i < tablePropertyNameList.size(); i++) { + if (i > 0) { + writer.keyword(","); + } + tablePropertyNameList.get(i).unparse(writer, leftPrec, rightPrec); + tablePropertyValueList.get(i).unparse(writer, leftPrec, rightPrec); + } + writer.keyword(")"); + } } public NamespaceKey getPath() { return new NamespaceKey(tblName.names); } + @Override public List getFieldNames() { List columnNames = Lists.newArrayList(); for (SqlNode node : fieldList.getList()) { @@ -213,6 +237,7 @@ public SqlNode getQuery() { return null; } + @Override public List getSortColumns() { List columnNames = Lists.newArrayList(); for(SqlNode node : sortColumns.getList()) { @@ -221,6 +246,7 @@ public List getSortColumns() { return columnNames; } + @Override public List getDistributionColumns() { List columnNames = Lists.newArrayList(); for(SqlNode node : distributionColumns.getList()) { @@ -255,6 +281,14 @@ public List getPartitionTransforms(DremioTable dremioTable) .collect(Collectors.toList()); } + public List getTablePropertyNameList() { + return tablePropertyNameList.getList().stream().map(x -> ((SqlLiteral)x).toValue()).collect(Collectors.toList()); + } + + public List getTablePropertyValueList() { + return tablePropertyValueList.getList().stream().map(x -> ((SqlLiteral)x).toValue()).collect(Collectors.toList()); + } + public SqlNodeList getFieldList() { return 
fieldList; } @@ -275,10 +309,12 @@ public boolean getIfNotExists(){ return ifNotExists; } + @Override public boolean isSingleWriter() { return singleWriter.booleanValue(); } + @Override public PartitionDistributionStrategy getPartitionDistributionStrategy( SqlHandlerConfig config, List partitionFieldNames, Set fieldNames) { return partitionDistributionStrategy; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFolder.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFolder.java new file mode 100644 index 0000000000..a2c29037f9 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFolder.java @@ -0,0 +1,155 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.parser; + +import java.lang.reflect.Constructor; +import java.util.List; + +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.sql.handlers.direct.SqlDirectHandler; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; + +/** + * Implements SQL CREATE FOLDER to create a folder under a Nessie repository. Represents + * statements like: + * CREATE FOLDER [ IF NOT EXISTS ] [source.]parentFolderName[.childFolder] + * [ AT ( REF[ERENCE] | BRANCH | TAG | COMMIT ) refValue ] + */ + +public class SqlCreateFolder extends SqlCall { + private static final SqlLiteral sqlLiteralNull = SqlLiteral.createNull(SqlParserPos.ZERO); + + public static final SqlSpecialOperator OPERATOR = + new SqlSpecialOperator("CREATE_FOLDER", SqlKind.OTHER) { + @Override + public SqlCall createCall( + SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... 
operands) { + Preconditions.checkArgument( + operands.length == 4, + "SqlCreateFolder.createCall() has to get 4 operands!"); + return new SqlCreateFolder( + pos, + (SqlLiteral) operands[0], + (SqlIdentifier) operands[1], + ((SqlLiteral) operands[2]).symbolValue(ReferenceType.class), + (SqlIdentifier) operands[3]); + } + }; + private final SqlLiteral ifNotExists; + private final SqlIdentifier folderName; + private final ReferenceType refType; + private final SqlIdentifier refValue; + + public SqlCreateFolder( + SqlParserPos pos, + SqlLiteral ifNotExists, + SqlIdentifier folderName, + ReferenceType refType, + SqlIdentifier refValue) { + super(pos); + this.ifNotExists = ifNotExists; + this.folderName = Preconditions.checkNotNull(folderName); + this.refType = refType; + this.refValue = refValue; + } + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + @Override + public List getOperandList() { + List ops = Lists.newArrayList(); + ops.add(ifNotExists); + ops.add(folderName); + if (refType == null) { + ops.add(sqlLiteralNull); + } else { + ops.add(SqlLiteral.createSymbol(getRefType(), SqlParserPos.ZERO)); + } + ops.add(refValue); + return ops; + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + writer.keyword("CREATE"); + writer.keyword("FOLDER"); + + if (ifNotExists.booleanValue()) { + writer.keyword("IF"); + writer.keyword("NOT"); + writer.keyword("EXISTS"); + } + + folderName.unparse(writer, leftPrec, rightPrec); + + if (refType != null && refValue != null) { + writer.keyword("AT"); + writer.keyword(refType.toString()); + refValue.unparse(writer, leftPrec, rightPrec); + } + } + + public NamespaceKey getPath() { + return new NamespaceKey(folderName.names); + } + + public ReferenceType getRefType() { + return refType; + } + + public SqlIdentifier getRefValue() { + return refValue; + } + + public SqlLiteral getIfNotExists() { + return ifNotExists; + } + + public SqlDirectHandler toDirectHandler(QueryContext context) { + try { + final Class cl = Class.forName("com.dremio.exec.planner.sql.handlers.CreateFolderHandler"); + final Constructor ctor = cl.getConstructor(Catalog.class, UserSession.class); + + return (SqlDirectHandler) ctor.newInstance( + context.getCatalog(), + context.getSession()); + } catch (ClassNotFoundException e) { + throw UserException.unsupportedError(e) + .message("CREATE FOLDER action is not supported.") + .buildSilently(); + } catch (ReflectiveOperationException e) { + throw new RuntimeException(e); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFunction.java index 9ca7ccacd4..9acdd43455 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateFunction.java @@ -19,7 +19,6 @@ import java.util.stream.Collectors; import org.apache.calcite.sql.SqlCall; -import org.apache.calcite.sql.SqlDataTypeSpec; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; @@ -45,24 +44,22 @@ public class SqlCreateFunction extends SqlCall { private final SqlIdentifier name; private final SqlNodeList fieldList; private final SqlNode expression; - private final SqlDataTypeSpec scalarReturnType; - private final SqlNodeList tabularReturnType; + private final SqlFunctionReturnType returnType; private boolean shouldReplace; 
private boolean ifNotExists; public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("CREATE_FUNCTION", SqlKind.OTHER) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { - Preconditions.checkArgument(operands.length == 7, "SqlCreateFunction.createCall() has to get 7 operands!"); + Preconditions.checkArgument(operands.length == 6, "SqlCreateFunction.createCall() has to get 6 operands!"); return new SqlCreateFunction( pos, (SqlLiteral) operands[0], (SqlIdentifier) operands[1], (SqlNodeList) operands[2], - (SqlDataTypeSpec) operands[3], - operands[4], - (SqlLiteral) operands[5], - (SqlNodeList) operands[6] + operands[3], + (SqlLiteral) operands[4], + (SqlFunctionReturnType) operands[5] ); } }; @@ -72,18 +69,16 @@ public SqlCreateFunction( SqlLiteral shouldReplace, SqlIdentifier name, SqlNodeList fieldList, - SqlDataTypeSpec scalarReturnType, SqlNode expression, SqlLiteral ifNotExists, - SqlNodeList tabularReturnType) { + SqlFunctionReturnType returnType) { super(pos); this.shouldReplace = shouldReplace.booleanValue(); this.name = name; this.fieldList = fieldList; - this.scalarReturnType = scalarReturnType; this.expression = expression; this.ifNotExists = ifNotExists.booleanValue(); - this.tabularReturnType = tabularReturnType; + this.returnType = returnType; } public SqlIdentifier getName() { @@ -105,12 +100,12 @@ public SqlNodeList getFieldList() { return fieldList; } - public SqlDataTypeSpec getScalarReturnType() { - return scalarReturnType; + public SqlFunctionReturnType getReturnType() { + return returnType; } - public SqlNodeList getTabularReturnType() { - return tabularReturnType; + public boolean isTabularFunction() { + return returnType.isTabular(); } public SqlNode getExpression() { @@ -136,10 +131,9 @@ public List getOperandList() { SqlLiteral.createBoolean(shouldReplace, SqlParserPos.ZERO), name, fieldList, - scalarReturnType, expression, SqlLiteral.createBoolean(ifNotExists, SqlParserPos.ZERO), - tabularReturnType); + returnType); } @Override @@ -161,11 +155,8 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { } writer.keyword("RETURNS"); - if (scalarReturnType != null) { - scalarReturnType.unparse(writer, leftPrec, rightPrec); - } else if (tabularReturnType.size() > 0) { - SqlHandlerUtil.unparseSqlNodeList(writer, leftPrec, rightPrec, tabularReturnType); - } + returnType.unparse(writer, leftPrec, rightPrec); + writer.keyword("RETURN"); expression.unparse(writer, leftPrec, rightPrec); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateReflection.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateReflection.java index a8f6470f43..e3914eba91 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateReflection.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateReflection.java @@ -38,7 +38,7 @@ public class SqlCreateReflection extends SqlSystemCall { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("ADD_LAYOUT", SqlKind.OTHER_DDL) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... 
operands) { - Preconditions.checkArgument(operands.length == 11, "SqlCreateReflection.createCall() has to get 11 operands!"); + Preconditions.checkArgument(operands.length == 12, "SqlCreateReflection.createCall() has to get 12 operands!"); return new SqlCreateReflection( pos, (SqlIdentifier) operands[0], @@ -51,7 +51,8 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod (SqlNodeList) operands[7], (SqlLiteral) operands[8], ((SqlLiteral) operands[9]).symbolValue(PartitionDistributionStrategy.class), - (SqlIdentifier) operands[10] + (SqlIdentifier) operands[10], + (SqlTableVersionSpec) operands[11] ); } }; @@ -67,10 +68,22 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod private final SqlLiteral arrowCachingEnabled; private final PartitionDistributionStrategy partitionDistributionStrategy; private final SqlIdentifier name; - - private SqlCreateReflection(SqlParserPos pos, SqlIdentifier tblName, SqlNode isRaw, SqlNodeList displayList, - SqlNodeList dimensionList, SqlNodeList measureList, SqlNodeList distributionList, SqlNodeList partitionList, - SqlNodeList sortList, SqlLiteral arrowCachingEnabled, PartitionDistributionStrategy partitionDistributionStrategy, SqlIdentifier name) { + private final SqlTableVersionSpec tableVersionSpec; + + private SqlCreateReflection(SqlParserPos pos, + SqlIdentifier tblName, + SqlNode isRaw, + SqlNodeList displayList, + SqlNodeList dimensionList, + SqlNodeList measureList, + SqlNodeList distributionList, + SqlNodeList partitionList, + SqlNodeList sortList, + SqlLiteral arrowCachingEnabled, + PartitionDistributionStrategy partitionDistributionStrategy, + SqlIdentifier name, + SqlTableVersionSpec tableVersionSpec + ) { super(pos); this.tblName = tblName; this.isRaw = isRaw; @@ -83,6 +96,7 @@ private SqlCreateReflection(SqlParserPos pos, SqlIdentifier tblName, SqlNode isR this.arrowCachingEnabled = arrowCachingEnabled; this.partitionDistributionStrategy = partitionDistributionStrategy; this.name = name; + this.tableVersionSpec = tableVersionSpec; } @Override @@ -98,6 +112,7 @@ public List getOperandList() { sortList, arrowCachingEnabled, SqlLiteral.createSymbol(partitionDistributionStrategy, SqlParserPos.ZERO))); operands.add(name); + operands.add(tableVersionSpec); return operands; } @@ -143,6 +158,10 @@ public PartitionDistributionStrategy getPartitionDistributionStrategy() { return partitionDistributionStrategy; } + public SqlTableVersionSpec getSqlTableVersionSpec() { + return tableVersionSpec; + } + private List toNameAndMeasures(SqlNodeList list){ if(list == null){ return ImmutableList.of(); @@ -184,18 +203,20 @@ private List toStrings(SqlNodeList list){ public static SqlCreateReflection createAggregation(SqlParserPos pos, SqlIdentifier tblName, SqlNodeList dimensionList, SqlNodeList measureList, SqlNodeList distributionList, SqlNodeList partitionList, SqlNodeList sortList, SqlLiteral arrowCachingEnabled, - PartitionDistributionStrategy partitionDistributionStrategy, SqlIdentifier name) { + PartitionDistributionStrategy partitionDistributionStrategy, SqlIdentifier name, + SqlTableVersionSpec tableVersionSpec) { return new SqlCreateReflection(pos, tblName, SqlLiteral.createBoolean(false, SqlParserPos.ZERO), null, dimensionList, - measureList, distributionList, partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name); + measureList, distributionList, partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name, tableVersionSpec); } public static 
SqlCreateReflection createRaw(SqlParserPos pos, SqlIdentifier tblName, SqlNodeList displayList, SqlNodeList distributionList, SqlNodeList partitionList, SqlNodeList sortList, SqlLiteral arrowCachingEnabled, PartitionDistributionStrategy partitionDistributionStrategy, - SqlIdentifier name) { + SqlIdentifier name, + SqlTableVersionSpec tableVersionSpec) { return new SqlCreateReflection(pos, tblName, SqlLiteral.createBoolean(true, SqlParserPos.ZERO), displayList, null, null, - distributionList, partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name); + distributionList, partitionList, sortList, arrowCachingEnabled, partitionDistributionStrategy, name, tableVersionSpec); } public static class NameAndGranularity { @@ -251,6 +272,7 @@ public static enum MeasureType { this.sqlName = sqlName; } + @Override public String toString() { return sqlName; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateTable.java index cbf13c954e..e3382724e3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlCreateTable.java @@ -36,7 +36,7 @@ public class SqlCreateTable extends SqlCreateEmptyTable { public static final SqlSpecialOperator CREATE_TABLE_OPERATOR = new SqlSpecialOperator("CREATE_TABLE", SqlKind.CREATE_TABLE) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { - Preconditions.checkArgument(operands.length == 12, "SqlCreateTable.createCall() has to get 12 operands!"); + Preconditions.checkArgument(operands.length == 14, "SqlCreateTable.createCall() has to get 14 operands!"); return new SqlCreateTable( pos, (SqlIdentifier) operands[0], @@ -50,7 +50,9 @@ public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNod (SqlNodeList) operands[7], (SqlNodeList) operands[8], (SqlPolicy) operands[10], - operands[11]); + operands[13], + (SqlNodeList) operands[11], + (SqlNodeList) operands[12]); } }; @@ -69,9 +71,11 @@ public SqlCreateTable( SqlNodeList sortFieldList, SqlNodeList distributionColumns, SqlPolicy policy, - SqlNode query) { + SqlNode query, + SqlNodeList tablePropertyNameList, + SqlNodeList tablePropertyValueList) { super(pos, tblName, fieldList, ifNotExists, partitionDistributionStrategy, partitionColumns, formatOptions, location, singleWriter, - sortFieldList, distributionColumns, policy); + sortFieldList, distributionColumns, policy, tablePropertyNameList, tablePropertyValueList); this.query = query; } @@ -95,6 +99,7 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { query.unparse(writer, leftPrec, rightPrec); } + @Override public SqlNode getQuery() { return query; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDeleteFromTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDeleteFromTable.java index 8884370ede..34e2cca9ae 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDeleteFromTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDeleteFromTable.java @@ -55,6 +55,7 @@ public SqlDeleteFromTable(SqlParserPos pos, SqlNode targetTable, SqlNode conditi this.sourceOperand = source; } + @Override public void extendTableWithDataFileSystemColumns() { if (getTargetTable().getKind() == SqlKind.IDENTIFIER) { setOperand(0, 
DmlUtils.extendTableWithDataFileSystemColumns(getTargetTable())); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDropReflection.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDropReflection.java index 1b15e92e1d..3c169b5433 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDropReflection.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlDropReflection.java @@ -34,18 +34,20 @@ public class SqlDropReflection extends SqlSystemCall { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("DROP_REFLECTION", SqlKind.OTHER_DDL) { @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { - Preconditions.checkArgument(operands.length == 2, "SqlDropReflection.createCall() has to get 2 operands!"); - return new SqlDropReflection(pos, (SqlIdentifier) operands[0], (SqlIdentifier) operands[1]); + Preconditions.checkArgument(operands.length == 3, "SqlDropReflection.createCall() has to get 3 operands!"); + return new SqlDropReflection(pos, (SqlIdentifier) operands[0], (SqlIdentifier) operands[1], (SqlTableVersionSpec) operands[2]); } }; private final SqlIdentifier tblName; private final SqlIdentifier layoutId; + private final SqlTableVersionSpec tableVersionSpec; - public SqlDropReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier layoutId) { + public SqlDropReflection(SqlParserPos pos, SqlIdentifier tblName, SqlIdentifier layoutId, SqlTableVersionSpec tableVersionSpec) { super(pos); this.tblName = tblName; this.layoutId = layoutId; + this.tableVersionSpec = tableVersionSpec; } public SqlIdentifier getTblName() { @@ -56,6 +58,10 @@ public SqlIdentifier getLayoutId() { return layoutId; } + public SqlTableVersionSpec getSqlTableVersionSpec() { + return tableVersionSpec; + } + @Override public SqlOperator getOperator() { return OPERATOR; @@ -63,7 +69,7 @@ public SqlOperator getOperator() { @Override public List getOperandList() { - return ImmutableList.of(tblName, layoutId); + return ImmutableList.of(tblName, layoutId, tableVersionSpec); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlFunctionReturnType.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlFunctionReturnType.java new file mode 100644 index 0000000000..0ac89bd085 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlFunctionReturnType.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.parser; + +import java.util.List; + +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlDataTypeSpec; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; + +import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; + +/** + * This class holds information on the return type of a UDF. + * It is implemented as an Either monad over tabular and scalar function return types, + * e.g. (illustrative) {@code RETURNS INT} carries a scalar type, while + * {@code RETURNS TABLE(id INT, name VARCHAR)} carries a tabular one. + */ + public final class SqlFunctionReturnType extends SqlCall { + private static final SqlSpecialOperator OPERATOR = + new SqlSpecialOperator("FUNCTION_RETURN_TYPE", SqlKind.OTHER) { + @Override + public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { + if (operands.length == 2) { + return new SqlFunctionReturnType(pos, (SqlDataTypeSpec) operands[0], (SqlNodeList) operands[1]); + } + throw new IllegalArgumentException("SqlFunctionReturnType.createCall() has to get 2 operands!"); + } + }; + + private SqlNodeList tabularReturnType; + private SqlDataTypeSpec scalarReturnType; + + public SqlFunctionReturnType(SqlParserPos pos, SqlDataTypeSpec scalarReturnType, SqlNodeList tabularReturnType) { + super(pos); + final boolean isTabularEmpty = tabularReturnType == null || tabularReturnType.size() == 0; + Preconditions.checkArgument((!isTabularEmpty && scalarReturnType == null) || (isTabularEmpty && scalarReturnType != null), + "Function return type should be either a scalar type or a tabular type"); + this.tabularReturnType = tabularReturnType; + this.scalarReturnType = scalarReturnType; + } + + public boolean isTabular() { + return tabularReturnType != null && tabularReturnType.size() != 0; + } + + public SqlDataTypeSpec getScalarReturnType() { + return scalarReturnType; + } + + public SqlNodeList getTabularReturnType() { + return tabularReturnType; + } + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + @Override + public List getOperandList() { + if (scalarReturnType != null) { + return ImmutableList.of(scalarReturnType); + } else { + return ImmutableList.of(tabularReturnType); + } + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + if (scalarReturnType != null) { + scalarReturnType.unparse(writer, leftPrec, rightPrec); + } else { + SqlHandlerUtil.unparseSqlNodeList(writer, leftPrec, rightPrec, tabularReturnType); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlGrantOnProjectEntities.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlGrantOnProjectEntities.java index f698c538ef..8f7655564f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlGrantOnProjectEntities.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlGrantOnProjectEntities.java @@ -59,11 +59,8 @@ public enum Privilege { MANAGE_GRANTS, CREATE_CLOUD, CREATE_PROJECT, - CREATE_OAUTH_APPLICATION, - CREATE_EXTERNAL_TOKENS_PROVIDER, - CREATE_IDENTITY_PROVIDER, CONFIGURE_SECURITY, - CREATE_ARCTIC_CATALOG, + CREATE_CATALOG, ALL } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlInsertTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlInsertTable.java index f42276f147..2be8a1a4ac 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlInsertTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlInsertTable.java @@ -72,6 +72,7 @@ public SqlInsertTable( this.insertFields = insertFields; } + @Override public void extendTableWithDataFileSystemColumns() { if (extendedTargetTable == null) { extendedTargetTable = DmlUtils.extendTableWithDataFileSystemColumns(getTargetTable()); @@ -104,6 +105,7 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { query.unparse(writer, leftPrec, rightPrec); } + @Override public NamespaceKey getPath() { return new NamespaceKey(tblName.names); } @@ -139,6 +141,7 @@ public List getFieldNames() { return insertFields.getList().stream().map(SqlNode::toString).collect(Collectors.toList()); } + @Override public SqlNode getQuery() { return query; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlMergeIntoTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlMergeIntoTable.java index a766a90780..3e26c3f227 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlMergeIntoTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlMergeIntoTable.java @@ -61,6 +61,7 @@ public SqlMergeIntoTable(SqlParserPos pos, super(pos, targetTable, condition, source, updateCall, insertCall, null, alias); } + @Override public void extendTableWithDataFileSystemColumns() { if (extendedTargetTable == null) { extendedTargetTable = DmlUtils.extendTableWithDataFileSystemColumns(getTargetTable()); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlOptimize.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlOptimize.java index af1360638f..01467824f7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlOptimize.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlOptimize.java @@ -41,7 +41,6 @@ import org.apache.calcite.sql.validate.SqlValidator; import org.apache.calcite.sql.validate.SqlValidatorScope; -import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.planner.sql.handlers.query.OptimizeHandler; import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; import com.dremio.service.namespace.NamespaceKey; @@ -53,23 +52,28 @@ public class SqlOptimize extends SqlCall implements SqlToPlanHandler.Creator { public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("OPTIMIZE", SqlKind.OTHER) { - @Override - public SqlCall createCall(SqlLiteral functionQualifier, - SqlParserPos pos, SqlNode... operands) { - return new SqlOptimize(pos, - (SqlIdentifier) operands[0], - (SqlLiteral) operands[1], - ((SqlLiteral) operands[2]).symbolValue(CompactionType.class), - (SqlNode) operands[3], - (SqlNodeList) operands[4], - (SqlNodeList) operands[5]); - } - @Override - public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) { - final RelDataTypeFactory typeFactory = validator.getTypeFactory(); - return getRelDataType(typeFactory); - } + @Override + public SqlCall createCall(SqlLiteral functionQualifier, + SqlParserPos pos, SqlNode... 
operands) { + return new SqlOptimize(pos, + (SqlIdentifier) operands[0], + (SqlLiteral) operands[1], + (SqlLiteral) operands[2], + ((SqlLiteral) operands[3]).symbolValue(CompactionType.class), + (SqlNode) operands[4], + (SqlNodeList) operands[5], + (SqlNodeList) operands[6]); + } + + @Override + public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) { + final RelDataTypeFactory typeFactory = validator.getTypeFactory(); + final SqlOptimize sqlOptimize = (SqlOptimize) call; + final boolean onlyOptimizeManifests = sqlOptimize.getRewriteManifests().booleanValue() && + !sqlOptimize.getRewriteDataFiles().booleanValue(); + return getRelDataType(typeFactory, onlyOptimizeManifests); + } }; private static final List OPTION_KEYS = ImmutableList.of( @@ -81,6 +85,7 @@ public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, S private SqlIdentifier table; private SqlLiteral rewriteManifests; + private SqlLiteral rewriteDataFiles; private CompactionType compactionType; private SqlNode condition; private SqlNodeList optionsList; @@ -96,13 +101,13 @@ public SqlSelect getSourceSelect() { return sourceSelect; } - /** * Creates a SqlOptimize. */ public SqlOptimize(SqlParserPos pos, SqlIdentifier table, SqlLiteral rewriteManifests, + SqlLiteral rewriteDataFiles, CompactionType compactionType, SqlNode condition, SqlNodeList optionsList, @@ -110,6 +115,7 @@ public SqlOptimize(SqlParserPos pos, super(pos); this.table = table; this.rewriteManifests = rewriteManifests; + this.rewriteDataFiles = rewriteDataFiles; this.compactionType = compactionType; this.condition = condition; this.optionsList = optionsList; @@ -123,9 +129,16 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("OPTIMIZE"); writer.keyword("TABLE"); getTable().unparse(writer, leftPrec, rightPrec); + if (!rewriteManifests.booleanValue()) { writer.keyword("REWRITE"); writer.keyword("DATA"); + } else if (!rewriteDataFiles.booleanValue()) { + writer.keyword("REWRITE"); + writer.keyword("MANIFESTS"); + } + + if (rewriteDataFiles.booleanValue()) { writer.keyword("USING"); if (compactionType != CompactionType.SORT) { writer.keyword("BIN_PACK"); @@ -133,10 +146,10 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { writer.keyword("SORT"); } if (condition != null) { - writer.keyword("WHERE"); + writer.keyword("FOR PARTITIONS"); condition.unparse(writer, leftPrec, rightPrec); } - if(optionsList != null) { + if (optionsList != null) { writer.keyword("("); for (int i = 0; i < optionsList.size() - 1; i++) { optionsList.get(i).unparse(writer, leftPrec, rightPrec); @@ -144,14 +157,11 @@ public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { optionsValueList.get(i).unparse(writer, leftPrec, rightPrec); writer.keyword(","); } - optionsList.get(optionsList.size()-1).unparse(writer, leftPrec, rightPrec); + optionsList.get(optionsList.size() - 1).unparse(writer, leftPrec, rightPrec); writer.keyword("="); - optionsValueList.get(optionsList.size()-1).unparse(writer, leftPrec, rightPrec); + optionsValueList.get(optionsList.size() - 1).unparse(writer, leftPrec, rightPrec); writer.keyword(")"); } - } else { - writer.keyword("REWRITE"); - writer.keyword("MANIFESTS"); } } @@ -165,16 +175,20 @@ public void setOperand(int i, SqlNode operand) { rewriteManifests = (SqlLiteral) operand; break; case 2: - compactionType = ((SqlLiteral) operand).symbolValue(CompactionType.class); + rewriteDataFiles = (SqlLiteral) operand; break; case 3: - condition = 
operand; + compactionType = ((SqlLiteral) operand).symbolValue(CompactionType.class); break; case 4: - optionsList = (SqlNodeList) operand; + condition = operand; break; case 5: + optionsList = (SqlNodeList) operand; + break; + case 6: optionsValueList = (SqlNodeList) operand; + break; default: throw new AssertionError(i); } @@ -192,7 +206,7 @@ public void setSourceSelect(SqlSelect select) { @Override public List getOperandList() { SqlLiteral compactionTypeSqlLiteral = SqlLiteral.createSymbol(compactionType, SqlParserPos.ZERO); - return Collections.unmodifiableList(Arrays.asList(getTable(), rewriteManifests, compactionTypeSqlLiteral, condition, optionsList, optionsValueList)); + return Collections.unmodifiableList(Arrays.asList(getTable(), rewriteManifests, rewriteDataFiles, compactionTypeSqlLiteral, condition, optionsList, optionsValueList)); } @Override @@ -216,6 +230,10 @@ public SqlLiteral getRewriteManifests() { return rewriteManifests; } + public SqlLiteral getRewriteDataFiles() { + return rewriteDataFiles; + } + public CompactionType getCompactionType() { return compactionType; } @@ -244,10 +262,6 @@ public Optional getMinInputFiles() { return Optional.ofNullable(minInputFiles); } - public List getPartitionCol(DremioTable dremioTable) { - return dremioTable.getDatasetConfig().getReadDefinition().getPartitionColumnsList(); - } - private void populateOptions(SqlNodeList optionsList, SqlNodeList optionsValueList) { if (optionsList == null) { return; @@ -295,4 +309,6 @@ private void populateOptions(SqlNodeList optionsList, SqlNodeList optionsValueLi public void validate(SqlValidator validator, SqlValidatorScope scope) { validator.validate(this.sourceSelect); } + + } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlPolicy.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlPolicy.java index f7ee4b8b40..d52832c4df 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlPolicy.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlPolicy.java @@ -41,6 +41,7 @@ public class SqlPolicy extends SqlCall { private static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("POLICY", SqlKind.OTHER) { + @Override public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { Preconditions.checkArgument(operands.length == 2, "SqlPolicy.createCall() has to get 2 operand!"); return new SqlPolicy(pos, (SqlIdentifier) operands[0], (SqlNodeList) operands[1]); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlReturnField.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlReturnField.java new file mode 100644 index 0000000000..6d6c8f8e33 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlReturnField.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.parser; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; + +/** + * Defines a column in a tabular function return spec. + */ +public final class SqlReturnField extends SqlNodeList { + private SqlIdentifier name; + private SqlComplexDataTypeSpec spec; + + public SqlReturnField(SqlParserPos pos, SqlIdentifier name, SqlComplexDataTypeSpec spec) { + super(pos); + this.name = name; + this.spec = spec; + } + + public SqlIdentifier getName() { + return name; + } + + public SqlComplexDataTypeSpec getType() { + return spec; + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + name.unparse(writer, leftPrec, rightPrec); + spec.unparse(writer, leftPrec, rightPrec); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlTableVersionSpec.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlTableVersionSpec.java new file mode 100644 index 0000000000..39d03b6b87 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlTableVersionSpec.java @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.parser; + +import java.util.List; + +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; + +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.google.common.collect.Lists; + +/** + * Implementation of SqlCall which serves as a conduit for passing the parsed + * version specification, kept in a {@link TableVersionSpec} instance. 
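+ * For example (illustrative), an {@code AT BRANCH main} or {@code AT SNAPSHOT '5393090506354317772'}
+ * clause parses into one of these nodes; {@link #NOT_SPECIFIED} stands in when no AT clause is given.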
+ */ +public class SqlTableVersionSpec extends SqlCall { + public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("AT_VERSION", SqlKind.OTHER); + public static final SqlTableVersionSpec NOT_SPECIFIED = new SqlTableVersionSpec(SqlParserPos.ZERO, TableVersionType.NOT_SPECIFIED, SqlLiteral.createCharString("MAIN", SqlParserPos.ZERO)); + private final TableVersionSpec tableVersionSpec; + + public SqlTableVersionSpec(SqlParserPos pos, TableVersionType tableVersionType, + SqlNode versionSpecifier ) { + super( pos); + this.tableVersionSpec = new TableVersionSpec(tableVersionType, versionSpecifier); + } + + public TableVersionSpec getTableVersionSpec() { + return tableVersionSpec; + } + + public TableVersionContext getResolvedTableVersionContext() { + return tableVersionSpec.getResolvedTableVersionContext(); + } + + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + @Override + public List getOperandList() { + List operandList = Lists.newArrayList(); + operandList.add(new SqlIdentifier(getTableVersionSpec().getTableVersionType().toSqlRepresentation(), SqlParserPos.ZERO)); + operandList.add(getTableVersionSpec().getVersionSpecifier()); + return operandList; + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + writer.keyword("AT"); + getTableVersionSpec().unparseVersionSpec(writer, leftPrec, rightPrec); + } + + @Override + public String toString() { + return tableVersionSpec.toString(); + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlUpdateTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlUpdateTable.java index 120907062a..c167b02fa4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlUpdateTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlUpdateTable.java @@ -69,6 +69,7 @@ public SqlUpdateTable(SqlParserPos pos, this.sourceOperand = source; } + @Override public void extendTableWithDataFileSystemColumns() { if (extendedTargetTable == null) { extendedTargetTable = DmlUtils.extendTableWithDataFileSystemColumns(getTargetTable()); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuum.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuum.java index 016a08e4f3..a4e82090e0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuum.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuum.java @@ -15,109 +15,68 @@ */ package com.dremio.exec.planner.sql.parser; +import static org.apache.iceberg.TableProperties.MAX_SNAPSHOT_AGE_MS_DEFAULT; +import static org.apache.iceberg.TableProperties.MIN_SNAPSHOTS_TO_KEEP_DEFAULT; + import java.util.List; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlIdentifier; -import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlNumericLiteral; -import org.apache.calcite.sql.SqlOperator; -import org.apache.calcite.sql.SqlSpecialOperator; -import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.validate.SqlValidator; +import org.apache.calcite.sql.validate.SqlValidatorScope; import com.dremio.common.exceptions.UserException; -import 
com.dremio.exec.catalog.VacuumOption; +import com.dremio.exec.catalog.VacuumOptions; import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; +import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; import com.dremio.service.namespace.NamespaceKey; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; - -public class SqlVacuum extends SqlCall { - public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("VACUUM", SqlKind.OTHER) { - @Override - public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { - Preconditions.checkArgument(operands.length == 3, "SqlVacuum.createCall() " + - "has 3 operands!"); - return new SqlVacuum( - pos, - (SqlIdentifier) operands[0], - (SqlNodeList) operands[1], - (SqlNodeList) operands[2]); - } - }; - - private static final long MAX_SNAPSHOT_AGE_MS_DEFAULT = 5 * 24 * 60 * 60 * 1000; // 5 days - private static final int MIN_SNAPSHOTS_TO_KEEP_DEFAULT = 1; - private static final List OPTION_KEYS = ImmutableList.of( +public abstract class SqlVacuum extends SqlCall implements SqlToPlanHandler.Creator { + protected static final List OPTION_KEYS = ImmutableList.of( "older_than", "retain_last" ); - private final SqlIdentifier table; - private final SqlNodeList optionsList; - private final SqlNodeList optionsValueList; - private String oldThanTimestamp; - private Integer retainLastValue; + protected final SqlNodeList optionsList; + protected final SqlNodeList optionsValueList; + protected String oldThanTimestamp; + protected Integer retainLastValue; + + private SqlSelect sourceSelect; + + public SqlSelect getSourceSelect() { + return sourceSelect; + } /** * Creates a SqlVacuum. */ public SqlVacuum( SqlParserPos pos, - SqlIdentifier table, SqlNodeList optionsList, SqlNodeList optionsValueList) { super(pos); - this.table = table; this.optionsList = optionsList; this.optionsValueList = optionsValueList; populateOptions(optionsList, optionsValueList); } - @Override - public SqlOperator getOperator() { - return OPERATOR; + public void setSourceSelect(SqlSelect select) { + this.sourceSelect = select; } - @Override - public List getOperandList() { - final List ops = - ImmutableList.of( - table, - optionsList, - optionsValueList); - return ops; - } + public abstract NamespaceKey getPath(); - public NamespaceKey getPath() { - return new NamespaceKey(table.names); - } - - @Override - public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { - writer.keyword("VACUUM"); - writer.keyword("TABLE"); - table.unparse(writer, leftPrec, rightPrec); - - writer.keyword("EXPIRE"); - writer.keyword("SNAPSHOTS"); - if(optionsList != null) { - for (int i = 0; i < optionsList.size(); i++) { - optionsList.get(i).unparse(writer, leftPrec, rightPrec); - writer.keyword("="); - optionsValueList.get(i).unparse(writer, leftPrec, rightPrec); - } - } - } - - public VacuumOption getVacuumOption() { - Long olderThanInMillis = null; + public VacuumOptions getVacuumOptions() { + long olderThanInMillis; if (oldThanTimestamp != null) { olderThanInMillis = SqlHandlerUtil.convertToTimeInMillis(oldThanTimestamp, pos); } else { @@ -125,7 +84,7 @@ public VacuumOption getVacuumOption() { olderThanInMillis = currentTime - MAX_SNAPSHOT_AGE_MS_DEFAULT; } int retainLast = retainLastValue != null ? 
retainLastValue : MIN_SNAPSHOTS_TO_KEEP_DEFAULT; - return new VacuumOption(VacuumOption.Type.TABLE, olderThanInMillis, retainLast); + return new VacuumOptions(VacuumOptions.Type.TABLE, olderThanInMillis, retainLast); } private void populateOptions(SqlNodeList optionsList, SqlNodeList optionsValueList) { @@ -161,4 +120,9 @@ private void populateOptions(SqlNodeList optionsList, SqlNodeList optionsValueLi idx++; } } + + @Override + public void validate(SqlValidator validator, SqlValidatorScope scope) { + validator.validate(this.sourceSelect); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumCatalog.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumCatalog.java new file mode 100644 index 0000000000..b1b01a74c8 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumCatalog.java @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.parser; + +import static com.dremio.exec.planner.VacuumOutputSchema.getRelDataType; + +import java.util.List; + +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.validate.SqlValidator; +import org.apache.calcite.sql.validate.SqlValidatorScope; + +import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; +import com.dremio.exec.planner.sql.handlers.query.VacuumCatalogHandler; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; + +public class SqlVacuumCatalog extends SqlVacuum implements SqlToPlanHandler.Creator { + public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("VACUUM", SqlKind.OTHER) { + @Override + public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) { + Preconditions.checkArgument(operands.length == 3, "SqlVacuumCatalog.createCall() " + + "has 3 operands!"); + return new SqlVacuumCatalog( + pos, + (SqlIdentifier) operands[0], + (SqlNodeList) operands[1], + (SqlNodeList) operands[2]); + } + + @Override + public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) { + final RelDataTypeFactory typeFactory = validator.getTypeFactory(); + return getRelDataType(typeFactory); + } + }; + + private final SqlIdentifier catalogSource; + + /** + * Creates a SqlVacuumCatalog. 
+ */ + public SqlVacuumCatalog( + SqlParserPos pos, + SqlIdentifier catalogSource, + SqlNodeList optionsList, + SqlNodeList optionsValueList) { + super(pos, optionsList, optionsValueList); + this.catalogSource = catalogSource; + } + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + public SqlIdentifier getCatalogSource() { + return catalogSource; + } + + @Override + public List getOperandList() { + final List ops = + ImmutableList.of( + catalogSource, + optionsList, + optionsValueList); + return ops; + } + + @Override + public NamespaceKey getPath() { + return new NamespaceKey(catalogSource.names); + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + writer.keyword("VACUUM"); + writer.keyword("CATALOG"); + catalogSource.unparse(writer, leftPrec, rightPrec); + + if(optionsList != null) { + for (int i = 0; i < optionsList.size(); i++) { + optionsList.get(i).unparse(writer, leftPrec, rightPrec); + optionsValueList.get(i).unparse(writer, leftPrec, rightPrec); + } + } + } + + @Override + public SqlToPlanHandler toPlanHandler() { + return new VacuumCatalogHandler(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumTable.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumTable.java new file mode 100644 index 0000000000..50a5b44438 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/SqlVacuumTable.java @@ -0,0 +1,120 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.parser; + +import static com.dremio.exec.planner.VacuumOutputSchema.getRelDataType; + +import java.util.List; + +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.validate.SqlValidator; +import org.apache.calcite.sql.validate.SqlValidatorScope; + +import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; +import com.dremio.exec.planner.sql.handlers.query.VacuumTableHandler; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; + + +public class SqlVacuumTable extends SqlVacuum implements SqlToPlanHandler.Creator { + public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("VACUUM", SqlKind.OTHER) { + @Override + public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... 
operands) { + Preconditions.checkArgument(operands.length == 3, "SqlVacuumTable.createCall() " + + "has 3 operands!"); + return new SqlVacuumTable( + pos, + (SqlIdentifier) operands[0], + (SqlNodeList) operands[1], + (SqlNodeList) operands[2]); + } + + @Override + public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) { + final RelDataTypeFactory typeFactory = validator.getTypeFactory(); + return getRelDataType(typeFactory); + } + }; + + private final SqlIdentifier table; + + /** + * Creates a SqlVacuumTable. + */ + public SqlVacuumTable( + SqlParserPos pos, + SqlIdentifier table, + SqlNodeList optionsList, + SqlNodeList optionsValueList) { + super(pos, optionsList, optionsValueList); + this.table = table; + } + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + public SqlIdentifier getTable() { + return table; + } + + @Override + public List getOperandList() { + return ImmutableList.of( + table, + optionsList, + optionsValueList); + } + + @Override + public NamespaceKey getPath() { + return new NamespaceKey(table.names); + } + + @Override + public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { + writer.keyword("VACUUM"); + writer.keyword("TABLE"); + table.unparse(writer, leftPrec, rightPrec); + + writer.keyword("EXPIRE"); + writer.keyword("SNAPSHOTS"); + if(optionsList != null) { + for (int i = 0; i < optionsList.size(); i++) { + optionsList.get(i).unparse(writer, leftPrec, rightPrec); + optionsValueList.get(i).unparse(writer, leftPrec, rightPrec); + } + } + } + + @Override + public SqlToPlanHandler toPlanHandler() { + return new VacuumTableHandler(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/TableVersionSpec.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/TableVersionSpec.java index 7115e25f79..089438a789 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/TableVersionSpec.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/TableVersionSpec.java @@ -83,6 +83,29 @@ public TableVersionContext getResolvedTableVersionContext() { return new TableVersionContext(tableVersionType, value); } + + public TableVersionContext getTableVersionContext() { + Preconditions.checkNotNull(versionSpecifier); + Object value = null; + switch (tableVersionType) { + case BRANCH: + case TAG: + case COMMIT_HASH_ONLY: + case REFERENCE: + case SNAPSHOT_ID: + Preconditions.checkState(versionSpecifier instanceof SqlCharStringLiteral); + value = ((SqlCharStringLiteral)versionSpecifier).getValueAs(String.class); + break; + case TIMESTAMP: + Preconditions.checkState(versionSpecifier instanceof SqlTimestampLiteral); + value = ((SqlTimestampLiteral)versionSpecifier).getValueAs(Calendar.class).getTimeInMillis(); + break; + } + + Preconditions.checkNotNull(value); + return new TableVersionContext(tableVersionType, value); + } + /** * Resolves a TableVersionSpec by performing constant folding on the versionSpecifier. An error will be reported * if the expression provided is not resolvable to a constant value of the appropriate type. 
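Note on the VACUUM refactoring above: option defaulting stays in the shared SqlVacuum base class, with the old hard-coded five-day/one-snapshot constants replaced by Iceberg's TableProperties defaults. The sketch below restates that rule in plain Java so it compiles on its own; the class and method names are illustrative stand-ins, not Dremio APIs, and java.time replaces SqlHandlerUtil's timestamp conversion.

import java.time.Instant;

// Illustrative restatement of SqlVacuum.getVacuumOptions() defaulting; not part of the patch.
public final class VacuumOptionDefaultsSketch {
  // Same values as Iceberg's TableProperties defaults imported in the diff above.
  private static final long MAX_SNAPSHOT_AGE_MS_DEFAULT = 5L * 24 * 60 * 60 * 1000; // 5 days
  private static final int MIN_SNAPSHOTS_TO_KEEP_DEFAULT = 1;

  // older_than: an explicit timestamp wins; otherwise "now minus five days".
  static long resolveOlderThanMillis(Long olderThanMillisFromSql) {
    return olderThanMillisFromSql != null
        ? olderThanMillisFromSql
        : Instant.now().toEpochMilli() - MAX_SNAPSHOT_AGE_MS_DEFAULT;
  }

  // retain_last: an explicit count wins; otherwise keep at least one snapshot.
  static int resolveRetainLast(Integer retainLastFromSql) {
    return retainLastFromSql != null ? retainLastFromSql : MIN_SNAPSHOTS_TO_KEEP_DEFAULT;
  }

  public static void main(String[] args) {
    // VACUUM TABLE t EXPIRE SNAPSHOTS with no options: both defaults apply.
    System.out.println(resolveOlderThanMillis(null) + ", " + resolveRetainLast(null));
  }
}

So a bare VACUUM TABLE ... EXPIRE SNAPSHOTS expires snapshots older than five days while always retaining the most recent snapshot.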
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/UnsupportedOperatorsVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/UnsupportedOperatorsVisitor.java index 6fb36f3bd5..17adf7d5e6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/UnsupportedOperatorsVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/planner/sql/parser/UnsupportedOperatorsVisitor.java @@ -52,7 +52,6 @@ public class UnsupportedOperatorsVisitor extends SqlShuttle { disabledType.add(SqlTypeName.TINYINT.name()); disabledType.add(SqlTypeName.SMALLINT.name()); disabledType.add(SqlTypeName.REAL.name()); - disabledOperators.add("CARDINALITY"); dirExplorers.add("MAXDIR"); dirExplorers.add("IMAXDIR"); dirExplorers.add("MINDIR"); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/resolver/TypeCastRules.java b/sabot/kernel/src/main/java/com/dremio/exec/resolver/TypeCastRules.java index 304af63f11..614b7d7d6d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/resolver/TypeCastRules.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/resolver/TypeCastRules.java @@ -910,18 +910,16 @@ public static int getCost(List argumentTypes, AbstractFunctionHold /* Precedence rules do not allow an implicit cast; however, check * if the secondary rules allow us to cast */ - Set rules; - if ((rules = (ResolverTypePrecedence.SECONDARY_IMPLICIT_CAST_RULES.get(parmType.toMinorType()))) != null && - rules.contains(argType.toMinorType())) { + Set rules = ResolverTypePrecedence.SECONDARY_IMPLICIT_CAST_RULES.get(parmType.toMinorType()); + if (rules != null && rules.contains(argType.toMinorType())) { secondaryCast = true; } else { return -1; } } - int castCost; - - if ((castCost = (parmVal - argVal)) >= 0) { + int castCost = parmVal - argVal; + if (castCost >= 0) { nCasts++; cost += castCost; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/server/MaterializationDescriptorProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/server/MaterializationDescriptorProvider.java index 07b08b34fe..3adb946900 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/server/MaterializationDescriptorProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/server/MaterializationDescriptorProvider.java @@ -18,6 +18,8 @@ import java.util.List; import java.util.Optional; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.MaterializationDescriptor; import com.dremio.service.namespace.NamespaceKey; import com.google.common.collect.ImmutableList; @@ -38,7 +40,9 @@ public interface MaterializationDescriptorProvider { * for the VDS with the given path * @return The default reflection for the VDS */ - Optional getDefaultRawMaterialization(NamespaceKey path, List vdsFields); + Optional getDefaultRawMaterialization(NamespaceKey path, + TableVersionContext versionContext, + List vdsFields, Catalog catalog); /** * Empty materialization provider. 
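Note on the interface change above: getDefaultRawMaterialization now receives a TableVersionContext and a Catalog, so the default raw reflection for a view can be resolved per branch, tag, or snapshot rather than per path alone. A toy sketch of that widened shape, assuming simplified stand-ins (DescriptorLookup and the String version token are hypothetical, not Dremio types):

import java.util.List;
import java.util.Optional;

// Hypothetical stand-in mirroring only the parameter shape of the widened lookup.
interface DescriptorLookup<D> {
  Optional<D> defaultRawMaterialization(List<String> path, String versionContext, List<String> vdsFields);

  // Counterpart of MaterializationDescriptorProvider.EMPTY: every lookup answers "no default reflection".
  static <D> DescriptorLookup<D> empty() {
    return (path, versionContext, vdsFields) -> Optional.empty();
  }
}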
@@ -51,7 +55,9 @@ public List get() { } @Override - public Optional getDefaultRawMaterialization(NamespaceKey path, List vdsFields) { + public Optional getDefaultRawMaterialization(NamespaceKey path, + TableVersionContext versionContext, + List vdsFields, Catalog catalog) { return Optional.empty(); } }; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/server/ResultsCleanupService.java b/sabot/kernel/src/main/java/com/dremio/exec/server/ResultsCleanupService.java index dbd801c814..bbed64b989 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/server/ResultsCleanupService.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/server/ResultsCleanupService.java @@ -62,9 +62,15 @@ public void start() throws Exception { logger.info("JobResultsStoreConfig is not available exiting..."); return; } + final OptionManager optionManager = optionManagerProvider.get(); + if (!optionManager.getOption(ExecConstants.RESULTS_CLEANUP_SERVICE_ENABLED)) { + logger.info("Results cleanup service is disabled, quitting..."); + return; + } + logger.info("Starting ResultsCleanupService.."); - final OptionManager optionManager = optionManagerProvider.get(); + final long maxJobResultsAgeInDays = optionManager.getOption(ExecConstants.RESULTS_MAX_AGE_IN_DAYS); if (maxJobResultsAgeInDays != DISABLE_CLEANUP_VALUE) { final long jobResultsCleanupStartHour = optionManager.getOption(ExecConstants.JOB_RESULTS_CLEANUP_START_HOUR); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/server/SabotContext.java b/sabot/kernel/src/main/java/com/dremio/exec/server/SabotContext.java index 37cb16d0ab..57dbb8c45d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/server/SabotContext.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/server/SabotContext.java @@ -51,6 +51,7 @@ import com.dremio.exec.server.options.SystemOptionManager; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.FileSystemWrapper; +import com.dremio.exec.store.dfs.LoggedFileSystemWrapper; import com.dremio.exec.store.sys.accel.AccelerationListManager; import com.dremio.exec.store.sys.accel.AccelerationManager; import com.dremio.exec.store.sys.accesscontrol.AccessControlListingManager; @@ -224,15 +225,17 @@ public SabotContext( this.viewCreatorFactory = viewCreatorFactory; this.queryPlanningAllocator = queryPlanningAllocator; this.spillService = spillService; - this.fileSystemWrapper = config.getInstance( - FileSystemWrapper.FILE_SYSTEM_WRAPPER_CLASS, - FileSystemWrapper.class, - (fs, storageId, conf, operatorContext, enableAsync, isMetadataEnabled) -> fs, - dremioConfig, - this.optionManager, - allocator, - new ServiceSetDecorator(coord.getServiceSet(Role.EXECUTOR)), - endpoint); + this.fileSystemWrapper = new LoggedFileSystemWrapper( + config.getInstance( + FileSystemWrapper.FILE_SYSTEM_WRAPPER_CLASS, + FileSystemWrapper.class, + (fs, storageId, conf, operatorContext, enableAsync, isMetadataEnabled) -> fs, + dremioConfig, + this.optionManager, + allocator, + new ServiceSetDecorator(coord.getServiceSet(Role.EXECUTOR)), + endpoint), + this.optionManager); this.credentialsService = credentialsService; this.jobResultInfoProvider = jobResultInfoProvider; this.rules = getRulesFactories(scan); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/server/options/FragmentOptionManager.java b/sabot/kernel/src/main/java/com/dremio/exec/server/options/FragmentOptionManager.java index dec68d663a..c8eb8655fd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/server/options/FragmentOptionManager.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/server/options/FragmentOptionManager.java @@ -47,6 +47,7 @@ private static TreeMultimap getMapFromOptionList(final Opti return optionMap; } + @Override protected boolean supportsOptionType(OptionType type) { throw new UnsupportedOperationException("FragmentOptionManager does not support the given option value."); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/server/options/OptionManagerWrapper.java b/sabot/kernel/src/main/java/com/dremio/exec/server/options/OptionManagerWrapper.java index a30112f3ca..5cdc4e9963 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/server/options/OptionManagerWrapper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/server/options/OptionManagerWrapper.java @@ -46,6 +46,7 @@ public final class OptionManagerWrapper extends BaseOptionManager { this.optionValidatorListing = optionValidatorListing; } + @Override public OptionValidatorListing getOptionValidatorListing() { return optionValidatorListing; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceImpl.java index 89ab3e9a06..642d7459d4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceImpl.java @@ -116,8 +116,8 @@ public static CoordExecRPC.NodeStats getNodeStatsFromContext(SabotContext contex final WorkStats stats = context.getWorkStatsProvider().get(); final CoordinationProtos.NodeEndpoint ep = context.getEndpoint(); final double load = stats.getClusterLoad(); - final int configured_max_width = (int)context.getClusterResourceInformation().getAverageExecutorCores(context.getOptionManager()); - final int actual_max_width = (int) Math.max(1, configured_max_width * stats.getMaxWidthFactor()); + final int configuredMaxWidth = (int) context.getClusterResourceInformation().getAverageExecutorCores(context.getOptionManager()); + final int actualMaxWidth = (int) Math.max(1, configuredMaxWidth * stats.getMaxWidthFactor()); double memory = 0; double cpu = 0; @@ -152,8 +152,8 @@ public static CoordExecRPC.NodeStats getNodeStatsFromContext(SabotContext contex .setIp(ip) .setStatus("green") .setLoad(load) - .setConfiguredMaxWidth(configured_max_width) - .setActualMaxWith(actual_max_width) + .setConfiguredMaxWidth(configuredMaxWidth) + .setActualMaxWith(actualMaxWidth) .setCurrent(false) .build(); } @@ -165,6 +165,7 @@ public static CoordExecRPC.NodeStats getNodeStatsFromContext(SabotContext contex */ public static final class NoExecutorService extends ExecutorService { + @Override @SuppressWarnings("DremioGRPCStreamObserverOnError") public void startFragments(com.dremio.exec.proto.CoordExecRPC.InitializeFragments request, io.grpc.stub.StreamObserver responseObserver) { @@ -172,6 +173,7 @@ public void startFragments(com.dremio.exec.proto.CoordExecRPC.InitializeFragment "operations.")); } + @Override @SuppressWarnings("DremioGRPCStreamObserverOnError") public void activateFragment(com.dremio.exec.proto.CoordExecRPC.ActivateFragments request, io.grpc.stub.StreamObserver responseObserver) { @@ -179,6 +181,7 @@ public void activateFragment(com.dremio.exec.proto.CoordExecRPC.ActivateFragment "operations.")); } + @Override @SuppressWarnings("DremioGRPCStreamObserverOnError") public void cancelFragments(com.dremio.exec.proto.CoordExecRPC.CancelFragments request, io.grpc.stub.StreamObserver responseObserver) { @@ 
-202,6 +205,7 @@ public void propagatePluginChange(com.dremio.exec.proto.CoordExecRPC.SourceWrapp "operations.")); } + @Override @SuppressWarnings("DremioGRPCStreamObserverOnError") public void getNodeStats(com.google.protobuf.Empty request, io.grpc.stub.StreamObserver responseObserver) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceProductClientFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceProductClientFactory.java index f61d726781..40d282528d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceProductClientFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/service/executor/ExecutorServiceProductClientFactory.java @@ -40,6 +40,7 @@ public void close() throws Exception { } + @Override public ExecutorServiceProductClient getClientForEndpoint(CoordinationProtos.NodeEndpoint endpoint) { return new ExecutorServiceProductClient(tunnelCreator, endpoint); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroGrpcServerFacade.java b/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroGrpcServerFacade.java index 418945eaf2..cdae9ac910 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroGrpcServerFacade.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroGrpcServerFacade.java @@ -45,6 +45,7 @@ public MaestroGrpcServerFacade(Provider execToCoordSta /** * Handles screen completion events from executors. */ + @Override public void screenComplete(com.dremio.exec.proto.CoordExecRPC.NodeQueryScreenCompletion request, io.grpc.stub.StreamObserver responseObserver) { handleMessage(request, new Consumer() { @@ -74,6 +75,7 @@ void handleMessage(T request, Consumer consumer, StreamObserver respon /** * Handles node query complete events from Executors. */ + @Override public void nodeQueryComplete(com.dremio.exec.proto.CoordExecRPC.NodeQueryCompletion request, io.grpc.stub.StreamObserver responseObserver) { handleMessage(request, new Consumer() { @@ -91,6 +93,7 @@ public void accept(com.dremio.exec.proto.CoordExecRPC.NodeQueryCompletion messag /** * Handles first error while processing a query from executors. */ + @Override public void nodeFirstError(com.dremio.exec.proto.CoordExecRPC.NodeQueryFirstError request, io.grpc.stub.StreamObserver responseObserver) { handleMessage(request, new Consumer() { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroSoftwareClient.java b/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroSoftwareClient.java index 08d0d6bcea..a6ded74bb9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroSoftwareClient.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/service/maestro/MaestroSoftwareClient.java @@ -66,11 +66,13 @@ public void addCallback(StreamObserver responseObserver, RpcFuture() { // we want this handler to run immediately after we push the big red button! 
+ @Override public void onSuccess(GeneralRPCProtos.Ack explosion) { responseObserver.onNext(Empty.getDefaultInstance()); responseObserver.onCompleted(); } + @Override public void onFailure(Throwable thrown) { responseObserver.onError(thrown); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/ConnectionRefusedException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/ConnectionRefusedException.java new file mode 100644 index 0000000000..b592408bca --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/ConnectionRefusedException.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +import com.google.errorprone.annotations.FormatMethod; + +public class ConnectionRefusedException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public ConnectionRefusedException() { + super(); + } + + public ConnectionRefusedException(Throwable cause) { + super(cause); + } + + @FormatMethod + public ConnectionRefusedException(Throwable cause, String message, Object... args) { + super(String.format(message, args), cause); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/DataplanePluginOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/store/DataplanePluginOptions.java new file mode 100644 index 0000000000..d6c47717b8 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/DataplanePluginOptions.java @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store; + +import com.dremio.options.Options; +import com.dremio.options.TypeValidators; + +/** + * System options for Dataplane Plugin + */ +@Options +public final class DataplanePluginOptions { + public static final TypeValidators.BooleanValidator ARCTIC_PLUGIN_ENABLED + = new TypeValidators.BooleanValidator("plugins.arctic.enabled", false); + + public static final TypeValidators.BooleanValidator DATAPLANE_PLUGIN_ENABLED + = new TypeValidators.BooleanValidator("plugins.dataplane.enabled", false); + + public static final TypeValidators.BooleanValidator NESSIE_PLUGIN_ENABLED + = new TypeValidators.BooleanValidator("plugins.nessie.enabled", true); + + // Utility class does not need public constructor + private DataplanePluginOptions() {} +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalFilesListOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalFilesListOptions.java index 33aeb3ffa7..62c8843ac4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalFilesListOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalFilesListOptions.java @@ -33,6 +33,7 @@ public List getFilesList() { return filesList; } + @Override protected void addCustomOptions(List options) { options.add(new RefreshTableFilterOption(filesList)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalPartitionOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalPartitionOptions.java index 7080615636..101df11295 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalPartitionOptions.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/DatasetRetrievalPartitionOptions.java @@ -34,6 +34,7 @@ public Map getPartition() { return partition; } + @Override protected void addCustomOptions(List options) { options.add(new RefreshTableFilterOption(partition)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/HttpClientRequestException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/HttpClientRequestException.java new file mode 100644 index 0000000000..69474a1124 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/HttpClientRequestException.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +import com.google.errorprone.annotations.FormatMethod; + +public class HttpClientRequestException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public HttpClientRequestException() { + super(); + } + + public HttpClientRequestException(Throwable cause) { + super(cause); + } + + @FormatMethod + public HttpClientRequestException(Throwable cause, String message, Object... 
args) { + super(String.format(message, args), cause); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidSpecificationVersionException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidSpecificationVersionException.java new file mode 100644 index 0000000000..f8af0cdc62 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidSpecificationVersionException.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +import com.google.errorprone.annotations.FormatMethod; + +public class InvalidSpecificationVersionException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public InvalidSpecificationVersionException() { + super(); + } + + public InvalidSpecificationVersionException(Throwable cause) { + super(cause); + } + + @FormatMethod + public InvalidSpecificationVersionException(Throwable cause, String message, Object... args) { + super(String.format(message, args), cause); + } + + @FormatMethod + public InvalidSpecificationVersionException(String message, Object... args) { + super(String.format(message, args)); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidURLException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidURLException.java new file mode 100644 index 0000000000..81614efcea --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/InvalidURLException.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +import com.google.errorprone.annotations.FormatMethod; + +public class InvalidURLException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public InvalidURLException() { + super(); + } + + public InvalidURLException(Throwable cause) { + super(cause); + } + + @FormatMethod + public InvalidURLException(Throwable cause, String message, Object... 
args) { + super(String.format(message, args), cause); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/MFunctionCatalogMetadata.java b/sabot/kernel/src/main/java/com/dremio/exec/store/MFunctionCatalogMetadata.java index 04d766716c..8c19194cab 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/MFunctionCatalogMetadata.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/MFunctionCatalogMetadata.java @@ -45,7 +45,7 @@ public class MFunctionCatalogMetadata { private final FileType underlyingTable; /** * table function name such as - * TABLE_HISTORY,TABLE_MANIFESTS, TABLE_SNAPSHOT,TABLE_FILES + * TABLE_HISTORY,TABLE_MANIFESTS, TABLE_SNAPSHOT,TABLE_FILES, TABLE_PARTITIONS */ private final MetadataFunctionsMacro.MacroName mFunctionName; @@ -62,6 +62,7 @@ private FileType getMetadataFileType() { switch (mFunctionName) { case TABLE_HISTORY: case TABLE_SNAPSHOT: + case TABLE_PARTITIONS: return FileType.JSON; case TABLE_FILES: case TABLE_MANIFESTS: diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/MinMaxRewriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/MinMaxRewriter.java index 5027ad5f1e..322d9fb1b3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/MinMaxRewriter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/MinMaxRewriter.java @@ -47,6 +47,7 @@ public RexNode visitCall(RexCall call) { case LESS_THAN: case LESS_THAN_OR_EQUAL: isLessThan = true; + // fall through case GREATER_THAN: case GREATER_THAN_OR_EQUAL: final RexNode arg1 = call.getOperands().get(0); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/NamespaceTable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/NamespaceTable.java index e4c95495a6..665a47d79d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/NamespaceTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/NamespaceTable.java @@ -38,7 +38,7 @@ import org.apache.calcite.schema.Statistic; import org.apache.calcite.schema.Table; import org.apache.calcite.util.ImmutableBitSet; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.exceptions.UserException; import com.dremio.exec.calcite.logical.ScanCrel; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/NessieApiProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/store/NessieApiProvider.java new file mode 100644 index 0000000000..f35bc09ef0 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/NessieApiProvider.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store; + +import org.projectnessie.client.api.NessieApi; + +public interface NessieApiProvider { + NessieApi getNessieApi(); +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/NessieNamespaceAlreadyExistsException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/NessieNamespaceAlreadyExistsException.java index d06c1940c9..faadf8188a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/NessieNamespaceAlreadyExistsException.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/NessieNamespaceAlreadyExistsException.java @@ -28,4 +28,8 @@ public NessieNamespaceAlreadyExistsException() { public NessieNamespaceAlreadyExistsException(Throwable cause) { super(cause); } + + public NessieNamespaceAlreadyExistsException(String message) { + super(message); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/NewValueFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/NewValueFunction.java index 2e6840f0c6..f68e301459 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/NewValueFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/NewValueFunction.java @@ -47,12 +47,14 @@ public static class NewValueVarCharNullable implements SimpleFunction { @Output NullableBitHolder out; @Inject ArrowBuf buf; + @Override public void setup() { initialized = false; previous.buffer = buf; previous.start = 0; } + @Override public void eval() { out.isSet = 1; @@ -65,7 +67,8 @@ public void eval() { } else { out.value = 1; // it's a new partition if (in.isSet == 1) { // copy the partition's value in previous holder - previous.buffer = buf = buf.reallocIfNeeded(in.end - in.start); + buf = buf.reallocIfNeeded(in.end - in.start); + previous.buffer = buf; previous.buffer.setBytes(0, in.buffer, in.start, in.end - in.start); previous.end = in.end - in.start; } @@ -87,12 +90,14 @@ public static class NewValueVarBinaryNullable implements SimpleFunction { @Output NullableBitHolder out; @Inject ArrowBuf buf; + @Override public void setup() { initialized = false; previous.buffer = buf; previous.start = 0; } + @Override public void eval() { out.isSet = 1; @@ -105,7 +110,8 @@ public void eval() { } else { out.value = 1; // it's a new partition if (in.isSet == 1) { // copy the partition's value in previous holder - previous.buffer = buf = buf.reallocIfNeeded(in.end - in.start); + buf = buf.reallocIfNeeded(in.end - in.start); + previous.buffer = buf; previous.buffer.setBytes(0, in.buffer, in.start, in.end - in.start); previous.end = in.end - in.start; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/OperationType.java b/sabot/kernel/src/main/java/com/dremio/exec/store/OperationType.java index b092bcbdcf..13c557c7fe 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/OperationType.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/OperationType.java @@ -24,7 +24,8 @@ public enum OperationType { ADD_DATAFILE(0), DELETE_DATAFILE(1), - ADD_MANIFESTFILE(2); + ADD_MANIFESTFILE(2), + DELETE_DELETEFILE(3); public final Integer value; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/ReferenceInfo.java b/sabot/kernel/src/main/java/com/dremio/exec/store/ReferenceInfo.java index 343b981947..6332e3eadb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/ReferenceInfo.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/ReferenceInfo.java @@ -16,6 +16,8 @@ package com.dremio.exec.store; +import java.util.Objects; + /** * Reference info used to support versioning. 
*/ @@ -29,4 +31,21 @@ public ReferenceInfo(String type, String refName, String commitHash) { this.refName = refName; this.commitHash = commitHash; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReferenceInfo that = (ReferenceInfo) o; + return Objects.equals(type, that.type) && Objects.equals(refName, that.refName) && Objects.equals(commitHash, that.commitHash); + } + + @Override + public int hashCode() { + return Objects.hash(type, refName, commitHash); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/ResourceInputStream.java b/sabot/kernel/src/main/java/com/dremio/exec/store/ResourceInputStream.java index 808b705317..5c4525f869 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/ResourceInputStream.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/ResourceInputStream.java @@ -37,6 +37,7 @@ public void readFully(long position, byte[] buffer) throws IOException { } } + @Override public int read(long position, byte[] b, int off, int len) { int start = (int) position; if (b == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/RuntimeFilterEvaluator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/RuntimeFilterEvaluator.java index 81a6c595c6..d27127c42d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/RuntimeFilterEvaluator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/RuntimeFilterEvaluator.java @@ -27,7 +27,7 @@ import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.BufferAllocator; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/SampleMutator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/SampleMutator.java index 98e5b1277d..02d1cb7bec 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/SampleMutator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/SampleMutator.java @@ -55,6 +55,7 @@ public SampleMutator(BufferAllocator allocator) { this.bufferManager = new BufferManagerImpl(allocator); } + @Override public void removeField(Field field) throws SchemaChangeException { ValueVector vector = fieldVectorMap.remove(field.getName().toLowerCase()); if (vector == null) { @@ -65,6 +66,7 @@ public void removeField(Field field) throws SchemaChangeException { } } + @Override public T addField(Field field, Class clazz) throws SchemaChangeException { ValueVector v = fieldVectorMap.get(field.getName().toLowerCase()); if (v == null || v.getClass() != clazz) { @@ -150,6 +152,7 @@ public boolean getSchemaChanged() { * the mutator might not get cleaned up elsewhere. 
TextRecordReader will call * this method to clear any allocations */ + @Override public void close() { logger.debug("closing mutator"); for (final ValueVector v : fieldVectorMap.values()) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/SemanticVersionParserException.java b/sabot/kernel/src/main/java/com/dremio/exec/store/SemanticVersionParserException.java new file mode 100644 index 0000000000..236304acfa --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/SemanticVersionParserException.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +import com.google.errorprone.annotations.FormatMethod; + +public class SemanticVersionParserException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public SemanticVersionParserException() { + super(); + } + + public SemanticVersionParserException(Throwable cause) { + super(cause); + } + + @FormatMethod + public SemanticVersionParserException(Throwable cause, String message, Object... args) { + super(String.format(message, args), cause); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/SystemSchemas.java b/sabot/kernel/src/main/java/com/dremio/exec/store/SystemSchemas.java index 18435e7622..27a672f765 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/SystemSchemas.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/SystemSchemas.java @@ -15,18 +15,23 @@ */ package com.dremio.exec.store; +import java.util.List; + import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.iceberg.MetadataColumns; import com.dremio.exec.record.BatchSchema; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; public final class SystemSchemas { public static final String SPLIT_IDENTITY = RecordReader.SPLIT_IDENTITY; public static final String SPLIT_INFORMATION = RecordReader.SPLIT_INFORMATION; public static final String COL_IDS = RecordReader.COL_IDS; + public static final String RECORDS = RecordWriter.RECORDS_COLUMN; public static final BatchSchema SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA = RecordReader.SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA; @@ -38,13 +43,22 @@ public final class SystemSchemas { public static final String FILE_CONTENT = "fileContent"; public static final String RECORD_COUNT = "recordCount"; public static final String SEQUENCE_NUMBER = "sequenceNumber"; + public static final String IMPLICIT_SEQUENCE_NUMBER = "$_dremio_$_sequence_no_$"; public static final String PARTITION_SPEC_ID = "partitionSpecId"; public static final String PARTITION_KEY = "partitionKey"; public static final String PARTITION_INFO = "partitionInfo"; public static final String EQUALITY_IDS = "equalityIds"; public static final String ICEBERG_METADATA = "icebergMetadata"; + public static final 
String DELETE_FILE_PATH = MetadataColumns.DELETE_FILE_PATH.name(); + public static final String POS = MetadataColumns.DELETE_FILE_POS.name(); + public static final String METADATA_FILE_PATH = "metadataFilePath"; + public static final String MANIFEST_LIST_PATH = "manifestListPath"; + public static final String SNAPSHOT_ID = "snapshotId"; + public static final String FILE_PATH = "filePath"; + public static final String FILE_TYPE = "fileType"; public static final Field ICEBERG_METADATA_FIELD = Field.nullable(ICEBERG_METADATA, Types.MinorType.VARBINARY.getType()); + public static final List CARRY_FORWARD_FILE_PATH_TYPE_COLS = Lists.newArrayList(FILE_PATH, FILE_TYPE); public static final BatchSchema ICEBERG_MANIFEST_SCAN_SCHEMA = BatchSchema.newBuilder() .addField(Field.nullable(DATAFILE_PATH, Types.MinorType.VARCHAR.getType())) @@ -54,17 +68,34 @@ public final class SystemSchemas { .addField(Field.nullable(PARTITION_KEY, Types.MinorType.VARBINARY.getType())) .addField(Field.nullable(PARTITION_INFO, Types.MinorType.VARBINARY.getType())) .addField(Field.nullable(COL_IDS, Types.MinorType.VARBINARY.getType())) + .addField(Field.nullable(FILE_CONTENT, Types.MinorType.VARCHAR.getType())) .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) .build(); + /* + * Delete file path, File size, Partition Info and Column IDs are being projected for delete manifest scans + * to follow SPLIT_GEN schema and allow delete files to be read using DATA_FILE_SCAN table function. + */ public static final BatchSchema ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA = BatchSchema.newBuilder() .addField(buildDeleteFileStruct(DELETE_FILE)) + .addField(Field.nullable(DATAFILE_PATH, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(FILE_SIZE, Types.MinorType.BIGINT.getType())) .addField(Field.nullable(SEQUENCE_NUMBER, Types.MinorType.BIGINT.getType())) .addField(Field.nullable(PARTITION_SPEC_ID, Types.MinorType.INT.getType())) .addField(Field.nullable(PARTITION_KEY, Types.MinorType.VARBINARY.getType())) + .addField(Field.nullable(PARTITION_INFO, Types.MinorType.VARBINARY.getType())) + .addField(Field.nullable(COL_IDS, Types.MinorType.VARBINARY.getType())) + .addField(Field.nullable(FILE_CONTENT, Types.MinorType.VARCHAR.getType())) .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) .build(); + public static final BatchSchema ICEBERG_POS_DELETE_FILE_SCHEMA = BatchSchema.newBuilder() + .addField(Field.nullable(DELETE_FILE_PATH, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(IMPLICIT_SEQUENCE_NUMBER, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(POS, Types.MinorType.BIGINT.getType())) + .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) + .build(); + public static final BatchSchema ICEBERG_DELETE_FILE_AGG_SCHEMA = BatchSchema.newBuilder() .addField(Field.nullable(DATAFILE_PATH, Types.MinorType.VARCHAR.getType())) .addField(Field.nullable(FILE_SIZE, Types.MinorType.BIGINT.getType())) @@ -75,6 +106,26 @@ public final class SystemSchemas { .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) .build(); + public static final BatchSchema ICEBERG_SNAPSHOTS_SCAN_SCHEMA = BatchSchema.newBuilder() + .addField(Field.nullable(METADATA_FILE_PATH, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(SNAPSHOT_ID, Types.MinorType.BIGINT.getType())) + .addField(Field.nullable(MANIFEST_LIST_PATH, Types.MinorType.VARCHAR.getType())) + .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) + .build(); + + public static final BatchSchema 
CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA = BatchSchema.newBuilder() + .addField(Field.nullable(FILE_PATH, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(FILE_TYPE, Types.MinorType.VARCHAR.getType())) + .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) + .build(); + + public static final BatchSchema ICEBERG_ORPHAN_FILE_DELETE_SCHEMA = BatchSchema.newBuilder() + .addField(Field.nullable(FILE_PATH, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(FILE_TYPE, Types.MinorType.VARCHAR.getType())) + .addField(Field.nullable(RECORDS, Types.MinorType.BIGINT.getType())) + .setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE) + .build(); + public static final BatchSchema ICEBERG_SPLIT_GEN_WITH_DELETES_SCHEMA = SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA .addColumn(new Field(DELETE_FILES, FieldType.nullable(Types.MinorType.LIST.getType()), ImmutableList.of(buildDeleteFileStruct(ListVector.DATA_VECTOR_NAME)))); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/TableMetadata.java b/sabot/kernel/src/main/java/com/dremio/exec/store/TableMetadata.java index d6bf01544b..7a27eda435 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/TableMetadata.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/TableMetadata.java @@ -15,12 +15,22 @@ */ package com.dremio.exec.store; +import static com.dremio.exec.store.iceberg.IcebergSerDe.deserializedJsonAsSchema; + import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.iceberg.PartitionSpec; import com.dremio.datastore.SearchTypes; import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.iceberg.IcebergSerDe; +import com.dremio.exec.store.iceberg.IcebergUtils; +import com.dremio.service.namespace.DatasetHelper; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.PartitionChunkMetadata; @@ -29,6 +39,7 @@ import com.dremio.service.namespace.dataset.proto.ReadDefinition; import com.dremio.service.namespace.file.proto.FileConfig; import com.google.common.base.Predicate; +import com.google.common.collect.ImmutableSet; /** * TableMetadata interface. This is how a table is exposed to the planning environment. 
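Note on the schemas above: the snapshot-scan, carry-forward, and orphan-file-delete schemas all share the (filePath, fileType) column pair, which is how VACUUM CATALOG hands file references from one table function to the next. As a standalone illustration, the same two-column layout can be built with Arrow's pojo classes directly; a plain Arrow Schema stands in for Dremio's BatchSchema so the snippet compiles on its own.

import java.util.Arrays;

import org.apache.arrow.vector.types.Types;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;

public final class CarryForwardSchemaSketch {
  public static void main(String[] args) {
    // Mirrors CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA: one nullable VARCHAR field per column name.
    Schema carryForward = new Schema(Arrays.asList(
        Field.nullable("filePath", Types.MinorType.VARCHAR.getType()),
        Field.nullable("fileType", Types.MinorType.VARCHAR.getType())));
    System.out.println(carryForward);
  }
}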
@@ -92,4 +103,37 @@ public interface TableMetadata { * @return Primary key */ List getPrimaryKey(); + + + default TableVersionContext getVersionContext() { return null; } + + default Set getInvalidPartitionColumns() { + if (null == getDatasetConfig().getPhysicalDataset().getIcebergMetadata()) { + return ImmutableSet.of(); + } else if (null != getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getPartitionSpecsJsonMap()) { + Map partitionSpecMap = + IcebergSerDe.deserializeJsonPartitionSpecMap( + deserializedJsonAsSchema(getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getJsonSchema()), + getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getPartitionSpecsJsonMap().toByteArray()); + return IcebergUtils.getInvalidColumnsForPruning(partitionSpecMap); + } else if (null != getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getPartitionSpecs()) { + Map partitionSpecMap = IcebergSerDe.deserializePartitionSpecMap(getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getPartitionSpecs().toByteArray()); + return IcebergUtils.getInvalidColumnsForPruning(partitionSpecMap); + } else { + return ImmutableSet.of(); + } + } + + default PartitionFilterGranularity getPartitionFilterGranularity() { + if(getDatasetConfig().getPhysicalDataset().getIcebergMetadata() != null + || DatasetHelper.isIcebergDataset(getDatasetConfig())) { + return PartitionFilterGranularity.FINE_GRAIN; + } else { + return PartitionFilterGranularity.RANGE; + } + } + + enum PartitionFilterGranularity { + FINE_GRAIN, RANGE + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/TimedRunnable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/TimedRunnable.java index de4c37c12c..fc1f1e7dee 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/TimedRunnable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/TimedRunnable.java @@ -190,7 +190,7 @@ public static List run(final String activity, final Logger logger, final max = Math.max(max, reader.getTimeSpentNanos()); earliestStart=Math.min(earliestStart, reader.getThreadStart() - timedRunnableStart); latestStart=Math.max(latestStart, reader.getThreadStart()-timedRunnableStart); - totalStart+=latestStart=Math.max(latestStart, reader.getThreadStart()-timedRunnableStart); + totalStart+=latestStart; }catch(IOException e){ if(excep == null){ excep = e; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/VarcharTruncationReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/VarcharTruncationReader.java index 9a1e4b9238..497ccd8cc8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/VarcharTruncationReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/VarcharTruncationReader.java @@ -85,8 +85,7 @@ public void setupProjector(OperatorContext context, if (!transfers.isEmpty()) { this.transferPair = transfers.get(0); } - } - catch (Exception e) { + } catch (Exception e) { throw Throwables.propagate(e); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/VersionedStoragePluginConfig.java b/sabot/kernel/src/main/java/com/dremio/exec/store/VersionedStoragePluginConfig.java new file mode 100644 index 0000000000..bf4795b281 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/VersionedStoragePluginConfig.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store; + +/** + * Marker interface for plugin configurations that support versioning + */ +public interface VersionedStoragePluginConfig { +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaCheckpointParquetSplitReaderCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaCheckpointParquetSplitReaderCreator.java index 80658be7ce..578c80ecb0 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaCheckpointParquetSplitReaderCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaCheckpointParquetSplitReaderCreator.java @@ -51,6 +51,7 @@ import com.dremio.exec.store.parquet.InputStreamProviderFactory; import com.dremio.exec.store.parquet.MutableParquetMetadata; import com.dremio.exec.store.parquet.ParquetDictionaryConvertor; +import com.dremio.exec.store.parquet.ParquetFilterCreator; import com.dremio.exec.store.parquet.ParquetFilters; import com.dremio.exec.store.parquet.ParquetReaderFactory; import com.dremio.exec.store.parquet.ParquetReaderUtility; @@ -135,7 +136,7 @@ public RecordReader getParquetRecordReader(FileSystem fs, EasyScanOperatorCreato dataset, parquetXAttr.getLastModificationTime(), isArrowCachingEnabled, - false); + false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT); rollbackCloseable.add(inputStreamProvider); lastFooter = inputStreamProvider.getFooter(); lastPath = inputStreamProvider.getStreamPath().toString(); @@ -328,7 +329,7 @@ private ParquetProtobuf.ParquetDatasetSplitScanXAttr toParquetXAttrFromNoRowgrou private MutableParquetMetadata readFooter(String filePath, long fileSize) throws IOException { try (SingleStreamProvider singleStreamProvider = new SingleStreamProvider(fs, Path.of(filePath), fileSize, - maxFooterLen, false, null, null, false)) { + maxFooterLen, false, null, null, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT)) { final MutableParquetMetadata footer = singleStreamProvider.getFooter(); return footer; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatDatasetAccessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatDatasetAccessor.java index 4f7b9c5957..39afb05826 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatDatasetAccessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatDatasetAccessor.java @@ -189,12 +189,15 @@ public BytesOutput provideSignature(DatasetMetadata metadata) throws ConnectorEx @Override public boolean metadataValid(BytesOutput readSignature, DatasetHandle datasetHandle, DatasetMetadata metadata, FileSystem fileSystem) { + // if the readSignature does not exist yet, treat the metadata as stale + if (readSignature == BytesOutput.NONE) { + return false; + } try { final DeltaLakeProtobuf.DeltaLakeReadSignature deltaLakeReadSignature = LegacyProtobufSerializer.parseFrom(DeltaLakeProtobuf.DeltaLakeReadSignature.PARSER, MetadataProtoUtils.toProtobuf(readSignature)); initializeDeltaTableWrapper(); return 
!deltaTable.checkMetadataStale(deltaLakeReadSignature);
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       //Do a refresh in case of exception
       logger.error("Exception occurred while trying to determine delta dataset metadata validity. Dataset path {}. ", fileSelection.toString(), e);
       return false;
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatPlugin.java
index adafc2266b..ebe8de4aec 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatPlugin.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeFormatPlugin.java
@@ -33,7 +33,7 @@ import java.util.stream.Stream;
 import org.apache.arrow.vector.types.pojo.Field;
-import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.NotImplementedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -199,6 +199,7 @@ static boolean isParquet(FileSystem fs, FileAttributes attributes) {
     }
   }
 
+  @Override
   public RecordReader getRecordReader(
     OperatorContext context,
     FileSystem dfs,
@@ -273,13 +274,11 @@ protected RecordReaderIterator getRecordReaderIterator(FileSystem fs, OperatorCo
       }
       //Wrap the record reader to have the version column as additional columns
       return new AdditionalColumnsRecordReader(opCtx, deltaRecordReader, Arrays.asList(new ConstantColumnPopulators.BigIntNameValuePair(VERSION, version)), context.getAllocator());
-    }
-    catch (com.google.protobuf.InvalidProtocolBufferException e) {
+    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
       throw UserException.dataReadError()
         .addContext("Unable to retrieve version info of commit ", input.getExtended().getPath())
         .build(logger);
-    }
-    catch (ExecutionSetupException e) {
+    } catch (ExecutionSetupException e) {
       if (e.getCause() instanceof FileNotFoundException) {
         throw UserException.invalidMetadataError(e.getCause())
           .addContext("File not found")
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeScanPrel.java
index fe2dabe3d3..5f85df53bd 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeScanPrel.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLakeScanPrel.java
@@ -260,7 +260,8 @@ public Prel finalizeRel() {
     // Parquet scan phase
     TableFunctionConfig parquetScanTableFunctionConfig = TableFunctionUtil.getDataFileScanTableFunctionConfig(
-      tableMetadata, filter, getProjectedColumns(), arrowCachingEnabled, false, false, tableMetadata.getApproximateRecordCount());
+      tableMetadata, filter, getProjectedColumns(), arrowCachingEnabled, false,
+      false, tableMetadata.getApproximateRecordCount(), Collections.EMPTY_LIST);
     return new TableFunctionPrel(getCluster(), getTraitSet().plus(DistributionTrait.ANY), table, parquetSplitsExchange, tableMetadata,
       parquetScanTableFunctionConfig, getRowType(), rm -> (double) tableMetadata.getApproximateRecordCount());
@@ -382,8 +383,7 @@ private RelNode createDeltaLakeCommitLogScan(RexBuilder rexBuilder, boolean scan
     if(scanForAddedPaths) {
       return creteAddSideScan(deltaLakeCommitLogScanPrel, rexBuilder);
-    }
-    else {
+    } else {
       return createRemoveSideScan(deltaLakeCommitLogScanPrel, rexBuilder);
     }
   }
@@ -486,8 +486,7 @@ private RelNode createRemoveSideScan(RelNode deltaLakeCommitLogScanPrel, RexBuil
       ImmutableList.of(groupSet),
       ImmutableList.of(aggByMaxVersion),
       null);
-    }
-    catch (InvalidRelException e) {
+    } catch (InvalidRelException e) {
       throw new RuntimeException("Failed to create HashAggPrel during Deltalake scan expansion.");
     }
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLastCheckPointReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLastCheckPointReader.java
index 2cc8a5dc05..0745c33cfb 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLastCheckPointReader.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLastCheckPointReader.java
@@ -21,7 +21,10 @@ import java.util.Optional;
 import org.apache.calcite.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import com.dremio.common.exceptions.UserException;
 import com.dremio.io.FSInputStream;
 import com.dremio.io.file.FileSystem;
 import com.dremio.io.file.Path;
@@ -35,6 +38,7 @@
  */
 public class DeltaLastCheckPointReader {
 
+  private static final Logger logger = LoggerFactory.getLogger(DeltaLastCheckPointReader.class);
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
   private DeltaLastCheckPointReader() {
@@ -47,6 +51,10 @@ public static Pair<Optional<Long>, Optional<Long>> getLastCheckPoint(FileSystem
          final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(lastCheckPointFs))) {
       final LastCheckpoint checkpoint = OBJECT_MAPPER.readValue(bufferedReader.readLine(), LastCheckpoint.class);
       return new Pair<>(Optional.of(checkpoint.version), Optional.ofNullable(checkpoint.parts));
+    } catch (Exception e) {
+      throw UserException.dataReadError(e)
+        .message("Failed to read _last_checkpoint file %s. Error %s", versionFilePath, e.getMessage())
+        .build(logger);
+    }
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCheckpointParquetReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCheckpointParquetReader.java
index 7cb72598cd..a7e4b95ab9 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCheckpointParquetReader.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCheckpointParquetReader.java
@@ -69,6 +69,8 @@ import com.dremio.exec.store.file.proto.FileProtobuf;
 import com.dremio.exec.store.parquet.InputStreamProvider;
 import com.dremio.exec.store.parquet.MutableParquetMetadata;
+import com.dremio.exec.store.parquet.ParquetFilterCreator;
+import com.dremio.exec.store.parquet.ParquetFilters;
 import com.dremio.exec.store.parquet.ParquetReaderUtility;
 import com.dremio.exec.store.parquet.ParquetScanProjectedColumns;
 import com.dremio.exec.store.parquet.SchemaDerivationHelper;
@@ -95,9 +97,9 @@ public class DeltaLogCheckpointParquetReader implements DeltaLogReader {
   private static final long NUM_ROWS_IN_DATA_FILE = 200_000L;
   private static final String BUFFER_ALLOCATOR_NAME = "deltalake-checkpoint-alloc";
   private static final List<SchemaPath> META_PROJECTED_COLS = ImmutableList.of(
-      SchemaPath.getSimplePath(DELTA_FIELD_ADD),
-      SchemaPath.getSimplePath(DELTA_FIELD_METADATA),
-      SchemaPath.getSimplePath(DELTA_FIELD_PROTOCOL));
+    SchemaPath.getSimplePath(DELTA_FIELD_ADD),
+    SchemaPath.getSimplePath(DELTA_FIELD_METADATA),
+    SchemaPath.getSimplePath(DELTA_FIELD_PROTOCOL));
   private static final int BATCH_SIZE = 500;
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
   long netBytesAdded = 0, netRecordsAdded = 0, numFilesModified = 0;
@@ -137,9 +139,10 @@ public int
      compare(FileAttributes o1, FileAttributes o2) {
        return o1.getPath().toString().compareTo(o2.getPath().toString());
      }
    });
-    FileAttributes fileAttributes = fileAttributesList.get(0);
     List<DatasetSplit> snapSplitsList = new ArrayList<>();
     long totalBlocks = 0L;
+    int numRowsRead = 0;
+    int numFilesReadToEstimateRowCount = 0;
     for (FileAttributes fileAttributesCurrent : fileAttributesList) {
       logger.debug("Reading footer and generating splits for checkpoint parquet file {}", fileAttributesCurrent.getPath());
       MutableParquetMetadata currentMetadata = readCheckpointParquetFooter(fs, fileAttributesCurrent.getPath(), fileAttributesCurrent.size());
@@ -149,83 +152,86 @@ public int compare(FileAttributes o1, FileAttributes o2) {
       snapSplitsList.addAll(generateSplits(fileAttributesCurrent, version, currentMetadata));
       if (checkParquetContainsMetadata(currentMetadata.getBlocks())) {
-        fileAttributes = fileAttributesCurrent;
         this.parquetMetadata = currentMetadata;
       }
-    }
-    Preconditions.checkState(totalBlocks > 0, "Illegal Deltalake checkpoint parquet file(s) with no row groups");
-    final CompressionCodecFactory codec = CodecFactory.createDirectCodecFactory(
-      new Configuration(), new ParquetDirectByteBufferAllocator(operatorContext.getAllocator()), 0);
-
-    // we know that these are deltalake files and do not have the date corruption bug in Drill
-    final SchemaDerivationHelper schemaHelper = SchemaDerivationHelper.builder()
-      .readInt96AsTimeStamp(operatorContext.getOptions().getOption(PARQUET_READER_INT96_AS_TIMESTAMP).getBoolVal())
-      .dateCorruptionStatus(ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_NO_CORRUPTION)
-      .mapDataTypeEnabled(operatorContext.getOptions().getOption(ENABLE_MAP_DATA_TYPE))
-      .build();
-    int numRowsRead = 0;
-    int numFilesReadToEstimateRowCount = 0;
-    long prevRecordCntEstimate = 0;
-    boolean isRowCountEstimateConverged = false;
-    try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, fileAttributes.getPath(), fileAttributes.size(),
-      maxFooterLen, false, parquetMetadata, operatorContext, false)) {
+      // calculate row estimates for each checkpoint file if this is a multi-part checkpoint
+      final CompressionCodecFactory codec = CodecFactory.createDirectCodecFactory(
+        new Configuration(), new ParquetDirectByteBufferAllocator(operatorContext.getAllocator()), 0);
 
-    for (int rowGroupIdx = 0; rowGroupIdx < parquetMetadata.getBlocks().size() && !protocolVersionFound && !schemaFound; ++rowGroupIdx) {
-      long rowCount = parquetMetadata.getBlocks().get(rowGroupIdx).getRowCount();
-      long noOfBatches = rowCount / BATCH_SIZE + 1;
+      // we know that these are deltalake files and do not have the date corruption bug in Drill
+      final SchemaDerivationHelper schemaHelper = SchemaDerivationHelper.builder()
+        .readInt96AsTimeStamp(operatorContext.getOptions().getOption(PARQUET_READER_INT96_AS_TIMESTAMP).getBoolVal())
+        .dateCorruptionStatus(ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_NO_CORRUPTION)
+        .mapDataTypeEnabled(operatorContext.getOptions().getOption(ENABLE_MAP_DATA_TYPE))
+        .build();
 
-      if (protocolVersionFound && schemaFound && (isRowCountEstimateConverged || numFilesReadToEstimateRowCount > numAddedFilesReadLimit)) {
-        break;
-      }
+      long prevRecordCntEstimate = 0;
+      boolean isRowCountEstimateConverged = false;
+      try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, fileAttributesCurrent.getPath(), fileAttributesCurrent.size(),
+        maxFooterLen, false, currentMetadata, operatorContext, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT)) {
 
-      try (RecordReader reader = new ParquetRowiseReader(operatorContext, parquetMetadata, rowGroupIdx,
-        fileAttributes.getPath().toString(), ParquetScanProjectedColumns.fromSchemaPaths(META_PROJECTED_COLS),
-        fs, schemaHelper, streamProvider, codec, true)) {
-        reader.setup(mutator);
-        mutator.allocate(BATCH_SIZE);
-        // Read the parquet file to populate inner list types
-        mutator.getContainer().buildSchema(BatchSchema.SelectionVectorMode.NONE);
-        long batchesRead = 0;
-        while (batchesRead < noOfBatches) {
-          int recordCount = reader.next();
-          numRowsRead += recordCount;
-          batchesRead++;
-          prepareValueVectors(mutator);
-          numFilesReadToEstimateRowCount += estimateStats(fs, rootFolder, recordCount);
-          protocolVersionFound = protocolVersionFound || assertMinReaderVersion();
-          schemaFound = schemaFound || findSchemaAndPartitionCols();
-          long newRecordCountEstimate = numFilesReadToEstimateRowCount == 0 ? 0 : Math.round((netRecordsAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
-          isRowCountEstimateConverged = prevRecordCntEstimate == 0 ? false :
-            isRowCountEstimateConverged || isRecordEstimateConverged(prevRecordCntEstimate, newRecordCountEstimate);
-          if (protocolVersionFound && schemaFound && (isRowCountEstimateConverged || numFilesReadToEstimateRowCount > numAddedFilesReadLimit)) {
-            break;
-          }
-          prevRecordCntEstimate = newRecordCountEstimate;
-          // reset vectors as they'll be reused in next batch
-          resetVectors();
-        }
-      } catch (Exception e) {
-        logger.error("IOException occurred while reading deltalake table", e);
-        throw new IOException(e);
-      }
-    }
-  }
+        for (int rowGroupIdx = 0; rowGroupIdx < currentMetadata.getBlocks().size(); ++rowGroupIdx) {
+          long rowCount = currentMetadata.getBlocks().get(rowGroupIdx).getRowCount();
+          long noOfBatches = rowCount / BATCH_SIZE + 1;
+
+          if (protocolVersionFound && schemaFound && (isRowCountEstimateConverged || numFilesReadToEstimateRowCount > numAddedFilesReadLimit)) {
+            break;
+          }
+
+          try (RecordReader reader = new ParquetRowiseReader(operatorContext, currentMetadata, rowGroupIdx,
+            fileAttributesCurrent.getPath().toString(), ParquetScanProjectedColumns.fromSchemaPaths(META_PROJECTED_COLS),
+            fs, schemaHelper, streamProvider, codec, true)) {
+            reader.setup(mutator);
+            mutator.allocate(BATCH_SIZE);
+            // Read the parquet file to populate inner list types
+            mutator.getContainer().buildSchema(BatchSchema.SelectionVectorMode.NONE);
+            long batchesRead = 0;
+            while (batchesRead < noOfBatches) {
+              int recordCount = reader.next();
+              numRowsRead += recordCount;
+              batchesRead++;
+              prepareValueVectors(mutator);
+              numFilesReadToEstimateRowCount += estimateStats(fs, rootFolder, recordCount);
+              protocolVersionFound = protocolVersionFound || assertMinReaderVersion();
+              schemaFound = schemaFound || findSchemaAndPartitionCols();
+              long newRecordCountEstimate = numFilesReadToEstimateRowCount == 0 ? 0 : Math.round((netRecordsAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
+              isRowCountEstimateConverged = prevRecordCntEstimate == 0 ?
+                false :
+                isRowCountEstimateConverged || isRecordEstimateConverged(prevRecordCntEstimate, newRecordCountEstimate);
+              if (protocolVersionFound && schemaFound && (isRowCountEstimateConverged || numFilesReadToEstimateRowCount > numAddedFilesReadLimit)) {
+                break;
+              }
+              prevRecordCntEstimate = newRecordCountEstimate;
+              // reset vectors as they'll be reused in next batch
+              resetVectors();
+            }
+          } catch (Exception e) {
+            logger.error("Exception occurred while reading deltalake table", e);
+            throw new IOException(e);
+          } finally {
+            codec.release();
+          }
+        }
+      }
+      Preconditions.checkState(totalBlocks > 0, "Illegal Deltalake checkpoint parquet file(s) with no row groups");
+
+      if (!protocolVersionFound || !schemaFound) {
+        throw UserException.invalidMetadataError()
+          .message("Metadata read failed. Malformed checkpoint parquet %s", fileAttributesCurrent.getPath())
+          .build(logger);
+      }
-    if (!protocolVersionFound || !schemaFound) {
-      UserException.invalidMetadataError()
-        .message("Metadata read Failed. Malformed checkpoint parquet %s", fileAttributes.getPath())
-        .build(logger);
+      logger.debug("Checkpoint parquet file {}, numRowsRead {}, numFilesReadToEstimateRowCount {}",
+        fileAttributesCurrent.getPath(), numRowsRead, numFilesReadToEstimateRowCount);
     }
-    logger.debug("Total rows read: {}", numRowsRead);
-    estimatedNetBytesAdded = Math.round((netBytesAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
-    estimatedNetRecordsAdded = Math.round((netRecordsAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
+    logger.debug("Total rows read for combined multi-part checkpoint files: {}", numRowsRead);
+    estimatedNetBytesAdded = Math.round((netBytesAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
+    estimatedNetRecordsAdded = Math.round((netRecordsAdded * netFilesAdded * 1.0) / numFilesReadToEstimateRowCount);
 
-    logger.debug("Checkpoint parquet file {}, netFilesAdded {}, estimatedNetBytesAdded {}, estimatedNetRecordsAdded {}",
-      fileAttributes.getPath(), netFilesAdded, estimatedNetBytesAdded, estimatedNetRecordsAdded);
+    logger.debug("Stat Estimations: netFilesAdded {}, estimatedNetBytesAdded {}, estimatedNetRecordsAdded {}", netFilesAdded, estimatedNetBytesAdded, estimatedNetRecordsAdded);
     final DeltaLogSnapshot snap = new DeltaLogSnapshot("UNKNOWN", netFilesAdded,
-      estimatedNetBytesAdded, estimatedNetRecordsAdded, netFilesAdded, System.currentTimeMillis(), true);
+      estimatedNetBytesAdded, estimatedNetRecordsAdded, netFilesAdded, System.currentTimeMillis(), true);
     snap.setSchema(schemaString, partitionCols);
     snap.setSplits(snapSplitsList);
     return snap;
@@ -242,7 +248,7 @@ private void resetVectors() {
   }
 
   private MutableParquetMetadata readCheckpointParquetFooter(FileSystem fs, Path filePath, long fileSize) throws Exception {
-    try (SingleStreamProvider singleStreamProvider = new SingleStreamProvider(fs, filePath, fileSize, maxFooterLen, false, null, null, false)) {
+    try (SingleStreamProvider singleStreamProvider = new SingleStreamProvider(fs, filePath, fileSize, maxFooterLen, false, null, null, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT)) {
       final MutableParquetMetadata footer = singleStreamProvider.getFooter();
       return footer;
     }
@@ -261,7 +267,7 @@ private boolean assertMinReaderVersion() {
     if
(!protocolVector.isNull(i)) { int minReaderVersion = ((IntVector) protocolVector.getChild(PROTOCOL_MIN_READER_VERSION)).get(i); Preconditions.checkState(minReaderVersion <= 1, - "Protocol version %s is incompatible for Dremio plugin", minReaderVersion); + "Protocol version %s is incompatible for Dremio plugin", minReaderVersion); return true; } } @@ -291,9 +297,9 @@ private void populateAddedFiles(List blockMetaDataList) throws IO long numFilesAdded, numFilesRemoved; try { ColumnChunkMetaData addColChunk = blockMetaData.getColumns().stream().filter( - colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_ADD, "path"))).findFirst().get(); + colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_ADD, "path"))).findFirst().get(); ColumnChunkMetaData removeColChunk = blockMetaData.getColumns().stream().filter( - colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_REMOVE, "path"))).findFirst().get(); + colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_REMOVE, "path"))).findFirst().get(); numFilesAdded = numRows - addColChunk.getStatistics().getNumNulls(); numFilesRemoved = numRows - removeColChunk.getStatistics().getNumNulls(); netFilesAdded += numFilesAdded; @@ -311,7 +317,7 @@ private boolean checkParquetContainsMetadata(List blockMetaDataLi long numRows = blockMetaData.getRowCount(); try { ColumnChunkMetaData metadataColChunk = blockMetaData.getColumns().stream().filter( - colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_METADATA, DELTA_FIELD_METADATA_SCHEMA_STRING))).findFirst().get(); + colChunk -> colChunk.getPath().equals(ColumnPath.get(DELTA_FIELD_METADATA, DELTA_FIELD_METADATA_SCHEMA_STRING))).findFirst().get(); numMetadataEntries = numRows - metadataColChunk.getStatistics().getNumNulls(); if (numMetadataEntries > 0) { break; @@ -417,36 +423,36 @@ private long estimateRecordsAdded(FileSystem fs, Path filePath, long fileSize) t private List generateSplits(FileAttributes fileAttributes, long version, MutableParquetMetadata currentMetadata) { FileProtobuf.FileSystemCachedEntity fileProto = FileProtobuf.FileSystemCachedEntity - .newBuilder() - .setPath(fileAttributes.getPath().toString()) - .setLength(fileAttributes.size()) - .setLastModificationTime(0) // using 0 as the mtime to signify that these splits are immutable - .build(); + .newBuilder() + .setPath(fileAttributes.getPath().toString()) + .setLength(fileAttributes.size()) + .setLastModificationTime(0) // using 0 as the mtime to signify that these splits are immutable + .build(); int rowGrpIdx = 0; final List datasetSplits = new ArrayList<>(currentMetadata.getBlocks().size()); for (BlockMetaData blockMetaData : currentMetadata.getBlocks()) { final DeltaLakeProtobuf.DeltaCommitLogSplitXAttr deltaExtended = DeltaLakeProtobuf.DeltaCommitLogSplitXAttr - .newBuilder().setRowGroupIndex(rowGrpIdx).setVersion(version).build(); + .newBuilder().setRowGroupIndex(rowGrpIdx).setVersion(version).build(); rowGrpIdx++; final EasyProtobuf.EasyDatasetSplitXAttr splitExtended = EasyProtobuf.EasyDatasetSplitXAttr.newBuilder() - .setPath(fileAttributes.getPath().toString()) - .setStart(blockMetaData.getStartingPos()) - .setLength(blockMetaData.getCompressedSize()) - .setUpdateKey(fileProto) - .setExtendedProperty(deltaExtended.toByteString()) - .build(); + .setPath(fileAttributes.getPath().toString()) + .setStart(blockMetaData.getStartingPos()) + .setLength(blockMetaData.getCompressedSize()) + .setUpdateKey(fileProto) + .setExtendedProperty(deltaExtended.toByteString()) + .build(); 
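+      // Note: one DatasetSplit is emitted per checkpoint row group below; the extended
+      // attributes built above carry the commit version and row-group index so the
+      // commit-log scan can address each row group directly.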
datasetSplits.add(DatasetSplit.of(Collections.EMPTY_LIST, blockMetaData.getCompressedSize(), - blockMetaData.getRowCount(), splitExtended::writeTo)); + blockMetaData.getRowCount(), splitExtended::writeTo)); } return datasetSplits; } private static JsonNode findNode(JsonNode node, String... paths) { for (String path : paths) { - if (node==null) { + if (node == null) { return node; } node = node.get(path); @@ -457,7 +463,7 @@ private static JsonNode findNode(JsonNode node, String... paths) { // get an optional value private static T get(JsonNode node, T defaultVal, Function typeFunc, String... paths) { node = findNode(node, paths); - return (node==null) ? defaultVal:typeFunc.apply(node); + return (node == null) ? defaultVal : typeFunc.apply(node); } private static boolean isRecordEstimateConverged(long prevRecordCntEstimate, long newRecordCntEstimate) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCommitJsonReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCommitJsonReader.java index b4a46c2d7c..ec3a5ddc12 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCommitJsonReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogCommitJsonReader.java @@ -120,7 +120,7 @@ public DeltaLogSnapshot parseMetadata(Path rootFolder, SabotContext context, Fil public void populateSchema(DeltaLogSnapshot snapshot, JsonNode metadata) throws IOException { // Check data file format final String format = get(metadata, "parquet", JsonNode::asText, "format", "provider"); - Preconditions.checkState(format.equalsIgnoreCase("parquet"), "Non-parquet delta lake tables aren't supported."); + Preconditions.checkState("parquet".equalsIgnoreCase(format), "Non-parquet delta lake tables aren't supported."); final List partitionCols = new ArrayList<>(); // Fetch partitions diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogSnapshot.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogSnapshot.java index 9a73f9ae37..14ffb047c4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogSnapshot.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaLogSnapshot.java @@ -154,6 +154,7 @@ public synchronized void merge(DeltaLogSnapshot that) { this.versionId = Math.max(this.versionId, that.versionId); } + @Override @VisibleForTesting public DeltaLogSnapshot clone() { DeltaLogSnapshot clone = new DeltaLogSnapshot(this.operationType, this.netFilesAdded, this.netBytesAdded, diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJob.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJob.java index 52c6b3ba4b..cc949c7d45 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJob.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJob.java @@ -61,7 +61,7 @@ public class DeltaMetadataFetchJob implements Supplier { public long version; public long subparts; - private static Retryer retryer = new Retryer.Builder() + private static Retryer retryer = Retryer.newBuilder() .retryIfExceptionOfType(RuntimeException.class) .setWaitStrategy(Retryer.WaitStrategy.EXPONENTIAL, 250, 1500) .setMaxRetries(5).retryIfExceptionOfType(IOException.class).build(); @@ -153,7 +153,7 @@ private boolean checkFileExistsAndValid(Optional attrs) { private Optional getFileAttrs(Path p) throws IOException { FileAttributes 
attr;
-    Callable retryBlock = () -> {
+    Callable<FileAttributes> retryBlock = () -> {
       try {
         return fs.getFileAttributes(p);
       } catch (FileNotFoundException f) {
@@ -164,9 +164,8 @@ private Optional<FileAttributes> getFileAttrs(Path p) throws IOException {
     };
 
     try {
-      attr = (FileAttributes) retryer.call(retryBlock);
-    }
-    catch (Retryer.OperationFailedAfterRetriesException e) {
+      attr = retryer.call(retryBlock);
+    } catch (Retryer.OperationFailedAfterRetriesException e) {
       throw e.getWrappedCause(IOException.class, ex -> new IOException(ex));
     }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJobManager.java b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJobManager.java
index ee2f22ec37..7307ac63c8 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJobManager.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/deltalake/DeltaMetadataFetchJobManager.java
@@ -143,8 +143,7 @@ private Pair<Optional<Long>, Optional<Long>> getStartVersion(Path metaDir) {
     Pair<Optional<Long>, Optional<Long>> lastCheckpointVersionSubpartsPair;
     try {
       lastCheckpointVersionSubpartsPair = DeltaLastCheckPointReader.getLastCheckPoint(fs, lastCheckpoint);
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw UserException.dataReadError()
         .message("Failed to read _last_checkpoint file for delta dataset %s. Error %s", selectionRoot, e.getMessage())
         .build(logger);
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/AddPrimaryKey.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/AddPrimaryKey.java
index 3bea4634ad..7eb0f09231 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/AddPrimaryKey.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/AddPrimaryKey.java
@@ -42,6 +42,7 @@ public AddPrimaryKey(NamespaceKey key,
     super(datasetConfig, context, key, schemaConfig, model, path, storagePlugin);
   }
 
+  @Override
   public void performOperation(List<Field> columns) {
     super.performOperation(columns);
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/CreateParquetTableEntry.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/CreateParquetTableEntry.java
index a2d87a9ea3..03b443602f 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/CreateParquetTableEntry.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/CreateParquetTableEntry.java
@@ -138,6 +138,7 @@ public CreateParquetTableEntry cloneWithFields(WriterOptions writerOptions){
     return new CreateParquetTableEntry(userName, plugin, location, icebergTableProps, writerOptions, datasetPath);
   }
 
+  @Override
   public String getUserName() {
     return userName;
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/DremioFileSystemCache.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/DremioFileSystemCache.java
index 919cd4ad45..248f916610 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/DremioFileSystemCache.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/DremioFileSystemCache.java
@@ -122,8 +122,7 @@ public synchronized void closeAll(boolean onlyAutomatic) throws IOException {
       if (fs != null) {
         try {
           fs.close();
-        }
-        catch(IOException ioe) {
+        } catch (IOException ioe) {
           exceptions.add(ioe);
         }
       }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/EasyFileSystemCreateTableEntry.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/EasyFileSystemCreateTableEntry.java
index 5fbf748513..26cbb3fa52 100644
---
a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/EasyFileSystemCreateTableEntry.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/EasyFileSystemCreateTableEntry.java
@@ -126,16 +126,19 @@ public FormatPluginConfig getFormatConfig() {
     return formatPlugin.getConfig();
   }
 
+  @Override
   @JsonProperty("location")
   public String getLocation() {
     return location;
   }
 
+  @Override
   @JsonIgnore
   public FileSystemPlugin getPlugin() {
     return plugin;
   }
 
+  @Override
   public EasyFileSystemCreateTableEntry cloneWithNewLocation(String newLocation){
     return new EasyFileSystemCreateTableEntry(userName, plugin, formatPlugin, newLocation, icebergTableProps, options, datasetPath);
   }
@@ -165,14 +168,17 @@ public Writer getWriter(
     return formatPlugin.getWriter(child, location, plugin, options, props);
   }
 
+  @Override
   public WriterOptions getOptions() {
     return options;
   }
 
+  @Override
   public IcebergTableProps getIcebergTableProps() {
     return icebergTableProps;
   }
 
+  @Override
   public NamespaceKey getDatasetPath() {
     return datasetPath;
   }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemConfigurationAdapter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemConfigurationAdapter.java
new file mode 100644
index 0000000000..43c5fa7351
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemConfigurationAdapter.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.dfs;
+
+/**
+ * An abstraction providing a read-only property-bag interface on top of some concrete filesystem configuration.
+ * This provides a way to expose filesystem configuration without introducing direct dependencies on Hadoop
+ * Configuration objects.
+ */ +public interface FileSystemConfigurationAdapter { + String get(String name); + String get(String name, String defaultValue); +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemPlugin.java index f014e0cbf7..8dcf9bf735 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemPlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemPlugin.java @@ -39,7 +39,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; import javax.annotation.Nullable; import javax.inject.Provider; @@ -52,11 +51,11 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.TableOperations; +import org.apache.iceberg.io.FileIO; import com.dremio.cache.AuthorizationCacheException; import com.dremio.cache.AuthorizationCacheService; import com.dremio.common.config.LogicalPlanPersistence; -import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.exceptions.InvalidMetadataErrorContext; import com.dremio.common.exceptions.UserException; import com.dremio.common.logical.FormatPluginConfig; @@ -90,7 +89,6 @@ import com.dremio.exec.catalog.SortColumnsOption; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.TableMutationOptions; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.catalog.conf.Property; import com.dremio.exec.dotfile.DotFile; import com.dremio.exec.dotfile.DotFileType; @@ -99,6 +97,7 @@ import com.dremio.exec.exception.NoSupportedUpPromotionOrCoercionException; import com.dremio.exec.hadoop.HadoopCompressionCodecFactory; import com.dremio.exec.hadoop.HadoopFileSystem; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.physical.base.ViewOptions; @@ -131,6 +130,7 @@ import com.dremio.exec.store.dfs.SchemaMutability.MutationType; import com.dremio.exec.store.file.proto.FileProtobuf.FileSystemCachedEntity; import com.dremio.exec.store.file.proto.FileProtobuf.FileUpdateKey; +import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergModelCreator; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; @@ -407,6 +407,7 @@ protected Configuration getFsConf() { return fsConf; } + @Override public Configuration getFsConfCopy() { return new Configuration(fsConf); } @@ -440,17 +441,6 @@ public AbstractRefreshPlanBuilder createRefreshDatasetPlanBuilder(SqlHandlerConf } } - @Override - public Supplier getHadoopFsSupplier(String path, Iterable> conf, String queryUser) { - return () -> { - try { - return hadoopFS.get(getFSUser(queryUser)); - } catch (ExecutionException e) { - throw new RuntimeException(String.format("Failed to get file system for path: %s", path.toString()), e); - } - }; - } - /** * Create a new {@link FileSystemWrapper} for a given user. 
* @@ -512,6 +502,7 @@ public String getFSUser(String userName) { return userName; } + @Override public void clear(String userOrGroup) throws AuthorizationCacheException { try { hadoopFS.invalidate(userOrGroup); @@ -521,6 +512,7 @@ public void clear(String userOrGroup) throws AuthorizationCacheException { } } + @Override public void clear() throws AuthorizationCacheException { try { hadoopFS.invalidateAll(); @@ -711,6 +703,7 @@ protected FileDatasetHandle getDatasetWithFormat(NamespaceKey datasetPath, Previ if (datasetAccessor == null && retrievalOptions.autoPromote()) { + boolean formatFound = false; for (final FormatMatcher matcher : matchers) { try { final FileSelectionProcessor fileSelectionProcessor = matcher.getFormatPlugin().getFileSelectionProcessor(fs, fileSelection); @@ -719,6 +712,7 @@ protected FileDatasetHandle getDatasetWithFormat(NamespaceKey datasetPath, Previ return null; } if (matcher.matches(fs, fileSelection, codecFactory)) { + formatFound = true; final DatasetType type = fs.isDirectory(Path.of(fileSelection.getSelectionRoot())) ? DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER : DatasetType.PHYSICAL_DATASET_SOURCE_FILE; @@ -737,6 +731,13 @@ protected FileDatasetHandle getDatasetWithFormat(NamespaceKey datasetPath, Previ logger.debug("File read failed.", e); } } + + if (!formatFound) { + String errorMessage = String.format("The file format for '%s' could not be identified. In order for automatic format detection to succeed, " + + "files must include a file extension. Alternatively, manual promotion can be used to explicitly specify the format.", datasetPath.getSchemaPath()); + throw UserException.unsupportedError() + .message(errorMessage).buildSilently(); + } } return datasetAccessor; @@ -852,6 +853,7 @@ public FormatPlugin getFormatPlugin(String name) { return formatCreator.getFormatPluginByName(name); } + @Override public FormatPlugin getFormatPlugin(FormatPluginConfig config) { FormatPlugin plugin = formatCreator.getFormatPluginByConfig(config); if (plugin == null) { @@ -1009,6 +1011,7 @@ protected Boolean runInner() throws Exception { * * @return */ + @Override public FileSystem getSystemUserFS() { return systemUserFS; } @@ -1174,7 +1177,8 @@ public void dropTable(NamespaceKey tableSchemaPath, SchemaConfig schemaConfig, T boolean isLayeredTable = tableMutationOptions != null && tableMutationOptions.isLayered(); fileSelection = isLayeredTable ? FileSelection.createNotExpanded(fs, fullPath) : FileSelection.create(fs, fullPath); } catch (IOException e) { - throw new RuntimeException(e); + throw new RuntimeException( + String.format("Unable to drop table [%s]. 
%s", SqlUtils.quotedCompound(tableSchemaPath.getPathComponents()), e.getMessage()), e);
     }
 
     if (fileSelection == null) {
@@ -1246,23 +1250,12 @@ public void rollbackTable(NamespaceKey tableSchemaPath,
       icebergModel.getTableIdentifier(validateAndGetPath(tableSchemaPath, schemaConfig.getUserName()).toString()), rollbackOption);
   }
 
-  @Override
-  public void vacuumTable(NamespaceKey tableSchemaPath,
-                          DatasetConfig datasetConfig,
-                          SchemaConfig schemaConfig,
-                          VacuumOption vacuumOption,
-                          TableMutationOptions tableMutationOptions) {
-    IcebergModel icebergModel = getIcebergModel();
-    icebergModel.vacuumTable(
-      icebergModel.getTableIdentifier(validateAndGetPath(tableSchemaPath, schemaConfig.getUserName()).toString()), vacuumOption);
-  }
-
   public void deleteIcebergTableRootPointer(String userName, Path icebergTablePath) {
     FileSystem fs;
     try {
       fs = createFS(userName);
-      IcebergModel icebergModel = getIcebergModel(fs);
+      IcebergModel icebergModel = getIcebergModel(fs, null, null);
       icebergModel.deleteTableRootPointer(icebergModel.getTableIdentifier(String.valueOf(icebergTablePath)));
     } catch (IOException e) {
@@ -1278,32 +1271,6 @@ public void deleteIcebergTableRootPointer(String userName, Path icebergTablePath
   }
 
-  private FileSystem getMetadataFS() throws ExecutionSetupException {
-    FileSystem metadataFs = null;
-    if (metadataFs == null) {
-      try {
-        metadataFs = getSystemUserFS();
-      } catch (Exception e) {
-        logger.debug("Could not get FS ", e);
-      }
-    }
-    return metadataFs;
-  }
-
-  public void deleteMetadataIcebergTable(String icebergTableUuid) {
-
-    Path icebergTablePath = Path.of(getConfig().getPath().toString()).resolve(icebergTableUuid);
-    IcebergModel icebergModel = null;
-    try {
-      icebergModel = getIcebergModel(getMetadataFS());
-      icebergModel.deleteTable(icebergModel.getTableIdentifier(String.valueOf(icebergTablePath)));
-    } catch (ExecutionSetupException e) {
-      String message = String.format("The dataset is now forgotten by dremio, but there was an error while cleaning up respective metadata files residing at %s.", icebergTablePath);
-      logger.error(message);
-      throw new RuntimeException(e);
-    }
-  }
-
   @Override
   public void addColumns(NamespaceKey key,
                          DatasetConfig datasetConfig,
@@ -1379,7 +1346,7 @@ public List<String> getPrimaryKeyFromMetadata(NamespaceKey table,
     final Path path;
     if (DatasetHelper.isInternalIcebergTable(datasetConfig)) {
       final FileSystemPlugin metaStoragePlugin = context.getCatalogService().getSource(METADATA_STORAGE_PLUGIN_NAME);
-      icebergModel = metaStoragePlugin.getIcebergModel(metaStoragePlugin.getSystemUserFS());
+      icebergModel = metaStoragePlugin.getIcebergModel();
       String metadataTableName = datasetConfig.getPhysicalDataset().getIcebergMetadata().getTableUuid();
       path = metaStoragePlugin.resolveTablePathToValidPath(metadataTableName);
     } else if (DatasetHelper.isIcebergDataset(datasetConfig)) {
@@ -1741,7 +1708,17 @@ public Optional getDatasetHandle(EntityPath datasetPath, GetDatas
     }
 
     if(handle.isPresent()) {
-      return handle;
+      // handle is an UnlimitedSplitsDatasetHandle, so the dataset is parquet
+      if (DatasetRetrievalOptions.of(options).autoPromote()) {
+        // autoPromote allows this handle to work regardless of whether the dataset is promoted
+        return handle;
+      } else if (fileConfig != null) {
+        // dataset has already been promoted
+        return handle;
+      } else {
+        // dataset not promoted; the handle cannot be used without incorrectly triggering auto-promote
+        return Optional.empty();
+      }
     }
 
     final PreviousDatasetInfo pdi = new PreviousDatasetInfo(fileConfig, currentSchema, sortColumns,
droppedColumns, updatedColumns, isSchemaLearningEnabled); @@ -1787,16 +1764,9 @@ public boolean containerExists(EntityPath containerPath) { return false; } } - public IcebergModel getIcebergModel() { - return getIcebergModel(null, null, null); - } - public IcebergModel getIcebergModel(FileSystem fs) { - return getIcebergModel(fs, null, null); - } - - public IcebergModel getIcebergModel(OperatorContext operatorContext) { - return getIcebergModel(null, operatorContext, null); + public IcebergModel getIcebergModel() { + return getIcebergModel(getSystemUserFS(), null, null); } @Override @@ -1808,10 +1778,9 @@ public String getRootLocation() { return basePath.toString(); } - /* if fs is null it will use iceberg HadoopFileIO class instead of DremioFileIO class */ - public IcebergModel getIcebergModel(FileSystem fs, OperatorContext operatorContext, List dataset) { + private IcebergModel getIcebergModel(FileSystem fs, OperatorContext operatorContext, List dataset) { return IcebergModelCreator.createIcebergModel( - getFsConfCopy(), context, fs, operatorContext, dataset, this); + getFsConfCopy(), context, fs, operatorContext, dataset, this); } @Override @@ -1842,7 +1811,16 @@ public boolean isSupportUserDefinedSchema(DatasetConfig dataset) { @Override public TableOperations createIcebergTableOperations(FileSystem fs, String queryUserName, IcebergTableIdentifier tableIdentifier) { - return new IcebergHadoopTableOperations(new org.apache.hadoop.fs.Path(((IcebergHadoopTableIdentifier)tableIdentifier).getTableFolder()), getFsConfCopy(), fs, null, this); + return new IcebergHadoopTableOperations( + new org.apache.hadoop.fs.Path(((IcebergHadoopTableIdentifier)tableIdentifier).getTableFolder()), + getFsConfCopy(), fs, createIcebergFileIO(fs, null, null, null, null)); + } + + @Override + public FileIO createIcebergFileIO(FileSystem fs, OperatorContext context, List dataset, + String datasourcePluginUID, Long fileLength) { + return new DremioFileIO(fs, context, dataset, datasourcePluginUID, fileLength, + new HadoopFileSystemConfigurationAdapter(fsConf)); } @Override @@ -1856,7 +1834,7 @@ public Optional getDatasetHandleForNewRefresh(NamespaceKey datase } Optional selection = generateFileSelectionForPathComponents(datasetPath, SystemUser.SYSTEM_USERNAME); - if(!retrievalOptions.autoPromote() || !selection.isPresent()) { + if(!selection.isPresent()) { return Optional.empty(); } @@ -1893,6 +1871,7 @@ public Optional generateFileSelectionForPathComponents(NamespaceK return Optional.of(fileSelection); } + @Override public String getDefaultCtasFormat() { return config.getDefaultCtasFormat(); } @@ -1901,6 +1880,7 @@ public boolean isPartitionInferenceEnabled() { return config.isPartitionInferenceEnabled(); } + @Override public DirListingRecordReader createDirListRecordReader(OperatorContext context, FileSystem fs, DirListInputSplitProto.DirListInputSplit dirListInputSplit, diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemRulesFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemRulesFactory.java index de9fa16755..847d0d43b9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemRulesFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FileSystemRulesFactory.java @@ -30,20 +30,19 @@ import org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; -import com.dremio.datastore.LegacyProtobufSerializer; import com.dremio.exec.ExecConstants; import com.dremio.exec.calcite.logical.ScanCrel; import 
com.dremio.exec.catalog.conf.SourceType; import com.dremio.exec.ops.OptimizerRulesContext; import com.dremio.exec.physical.config.ManifestScanFilters; import com.dremio.exec.planner.PlannerPhase; -import com.dremio.exec.planner.common.ScanRelBase; import com.dremio.exec.planner.logical.Rel; import com.dremio.exec.planner.logical.partition.PruneScanRuleBase.PruneScanRuleFilterOnProject; import com.dremio.exec.planner.logical.partition.PruneScanRuleBase.PruneScanRuleFilterOnSampleScan; import com.dremio.exec.planner.logical.partition.PruneScanRuleBase.PruneScanRuleFilterOnScan; import com.dremio.exec.planner.physical.DistributionTrait; import com.dremio.exec.planner.physical.FileSystemTableOptimizePrule; +import com.dremio.exec.planner.physical.FileSystemVacuumTablePrule; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.store.StoragePlugin; @@ -59,14 +58,12 @@ import com.dremio.exec.store.mfunctions.TableFilesFunctionTableMetadata; import com.dremio.exec.store.parquet.ParquetScanPrel; import com.dremio.options.OptionResolver; -import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf; import com.dremio.service.namespace.DatasetHelper; import com.dremio.service.namespace.capabilities.SourceCapabilities; import com.dremio.service.namespace.dataset.proto.IcebergMetadata; import com.dremio.service.namespace.file.proto.FileType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.protobuf.InvalidProtocolBufferException; /** * Rules for file system sources. @@ -232,7 +229,8 @@ public Set getRules(OptimizerRulesContext optimizerContext, PlannerP ConvertCountToDirectScan.getAggOnScan(pluginType), ConvertCountToDirectScan.getAggProjOnScan(pluginType), new TableFilesFunctionScanPrule(pluginType), - new FileSystemTableOptimizePrule(optimizerContext) + new FileSystemTableOptimizePrule(optimizerContext), + new FileSystemVacuumTablePrule(optimizerContext) ); default: @@ -329,20 +327,4 @@ public static boolean supportsConvertedIcebergDataset(OptimizerRulesContext cont return true; } } - - public static String getPartitionStatsFile(ScanRelBase drel) { - if(DatasetHelper.isInternalIcebergTable(drel.getTableMetadata().getDatasetConfig())) { - return drel.getTableMetadata().getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getPartitionStatsFile(); - } else { - byte[] byteBuffer = drel.getTableMetadata().getReadDefinition().getExtendedProperty().toByteArray(); - - IcebergProtobuf.IcebergDatasetXAttr icebergDatasetXAttr; - try { - icebergDatasetXAttr = LegacyProtobufSerializer.parseFrom(IcebergProtobuf.IcebergDatasetXAttr.PARSER, byteBuffer); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - return icebergDatasetXAttr.getPartitionStatsFile(); - } - } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FilesystemScanDrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FilesystemScanDrel.java index 25cd3f4596..3d031480df 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FilesystemScanDrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FilesystemScanDrel.java @@ -164,6 +164,7 @@ public FilesystemScanDrel applyArrowCachingEnabled(boolean arrowCachingEnabled) return new FilesystemScanDrel(this, arrowCachingEnabled); } + @Override public ParquetScanFilter getFilter() { return filter; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FormatCreator.java 
b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FormatCreator.java index 8fd0cc4bfd..a75a11eb32 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FormatCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/FormatCreator.java @@ -96,12 +96,12 @@ private static Map, Constructor> initConfigConstructors(Collection getDefaultFormats() { Map defaultFormats = new TreeMap<>(); - defaultFormats.put("csv", createTextFormatPlugin(false, ',', Lists.newArrayList("csv"))); - defaultFormats.put("csvh", createTextFormatPlugin(true, ',', Lists.newArrayList("csvh"))); - defaultFormats.put("tsv", createTextFormatPlugin(false, '\t', Lists.newArrayList("tsv"))); - defaultFormats.put("psv", createTextFormatPlugin(false, '|', Lists.newArrayList("psv", "tbl"))); - defaultFormats.put("txt", createTextFormatPlugin(false, '\u0000', Lists.newArrayList("txt"))); - TextFormatConfig psva = createTextFormatPlugin(false, '|', Lists.newArrayList("psva", "tbla")); + defaultFormats.put("csv", createTextFormatPlugin(false, ",", Lists.newArrayList("csv"))); + defaultFormats.put("csvh", createTextFormatPlugin(true, ",", Lists.newArrayList("csvh"))); + defaultFormats.put("tsv", createTextFormatPlugin(false, "\t", Lists.newArrayList("tsv"))); + defaultFormats.put("psv", createTextFormatPlugin(false, "|", Lists.newArrayList("psv", "tbl"))); + defaultFormats.put("txt", createTextFormatPlugin(false, "\u0000", Lists.newArrayList("txt"))); + TextFormatConfig psva = createTextFormatPlugin(false, "|", Lists.newArrayList("psva", "tbla")); psva.autoGenerateColumnNames = true; defaultFormats.put("psva", psva); @@ -124,7 +124,7 @@ public static Map getDefaultFormats() { * @return - a new TextFormatConfig */ public static TextFormatPlugin.TextFormatConfig createTextFormatPlugin(boolean extractHeader, - char fieldDelimiter, + String fieldDelimiter, List extensions) { TextFormatPlugin.TextFormatConfig newText = new TextFormatPlugin.TextFormatConfig(); newText.extractHeader = extractHeader; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/InternalFileConf.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/InternalFileConf.java index 62ac9cace8..43273c0456 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/InternalFileConf.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/InternalFileConf.java @@ -260,6 +260,7 @@ public boolean isPdfsBased() { return uri.get().getScheme().equals("pdfs"); } + @Override public String getDefaultCtasFormat(){ return defaultCtasFormat.getDefaultCtasFormat(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystem.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystem.java new file mode 100644 index 0000000000..4a6d8637e9 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystem.java @@ -0,0 +1,470 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dremio.exec.store.dfs; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.file.AccessMode; +import java.nio.file.DirectoryStream; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.attribute.PosixFilePermission; +import java.security.AccessControlException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.exec.ExecConstants; +import com.dremio.io.AsyncByteReader; +import com.dremio.io.FSInputStream; +import com.dremio.io.FSOutputStream; +import com.dremio.io.FilterFSInputStream; +import com.dremio.io.FilterFSOutputStream; +import com.dremio.io.file.FileAttributes; +import com.dremio.io.file.FileBlockLocation; +import com.dremio.io.file.FileSystem; +import com.dremio.io.file.FilterFileSystem; +import com.dremio.io.file.Path; +import com.dremio.options.OptionResolver; +import com.google.common.base.Stopwatch; + +import io.netty.buffer.ByteBuf; + + +/** + * A {@link FileSystem} implementation which logs calls to a wrapped FileSystem. + * + *
+ * <p>
+ * Logging is done at three levels:
+ * <ul>
+ *   <li>WARN - Calls that are >= the configured duration in the
+ *   {@code filesystem.logger.warn.io_threshold_ms} support key.</li>
+ *   <li>DEBUG - Calls that are >= the configured duration in the
+ *   {@code filesystem.logger.debug.io_threshold_ms} support key.</li>
+ *   <li>TRACE - All other calls are logged at TRACE level. WARNING: This can be very verbose.</li>
+ * </ul>
+ * <p>
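+ * For example, a slow metadata call would be reported in the form (values here are hypothetical):
+ * {@code getFileAttributes elapsed=2150ms scheme=s3a path=/bucket/table/part-00000.parquet}
+ * <p>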
        Logging can be disabled entirely by setting the log level for {@code com.dremio.exec.store.dfs.LoggedFileSystem} + * to ERROR. + */ +public class LoggedFileSystem extends FilterFileSystem { + + private static final Logger LOG = LoggerFactory.getLogger(LoggedFileSystem.class); + + private final FileSystem fs; + private final long warnThresholdMs; + private final long debugThresholdMs; + + public LoggedFileSystem(FileSystem fs, OptionResolver options) { + super(fs); + this.fs = fs; + this.warnThresholdMs = options.getOption(ExecConstants.FS_LOGGER_WARN_THRESHOLD_MS); + this.debugThresholdMs = options.getOption(ExecConstants.FS_LOGGER_DEBUG_THRESHOLD_MS); + } + + public static boolean isLoggingEnabled() { + return LOG.isWarnEnabled(); + } + + @Override + public FSInputStream open(Path f) throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("open", f)) { + return new LoggedFSInputStream(super.open(f), f); + } + } + + @Override + public FSOutputStream create(Path f) throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("create", f)) { + return new LoggedFSOutputStream(super.create(f), f); + } + } + + @Override + public FSOutputStream create(Path f, boolean overwrite) throws FileAlreadyExistsException, IOException { + try (AutoLogger ignored = logDuration("create", f)) { + return new LoggedFSOutputStream(super.create(f, overwrite), f); + } + } + + @Override + public FileAttributes getFileAttributes(Path f) throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("getFileAttributes", f)) { + return super.getFileAttributes(f); + } + } + + @Override + public void setPermission(Path p, Set permissions) throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("setPermission", p)) { + super.setPermission(p, permissions); + } + } + + @Override + public boolean mkdirs(Path f, Set permissions) throws IOException { + try (AutoLogger ignored = logDuration("mkdirs", f)) { + return super.mkdirs(f, permissions); + } + } + + @Override + public boolean mkdirs(Path f) throws IOException { + try (AutoLogger ignored = logDuration("mkdirs", f)) { + return super.mkdirs(f); + } + } + + @Override + public AsyncByteReader getAsyncByteReader(AsyncByteReader.FileKey fileKey, Map options) throws IOException { + return new LoggedAsyncByteReader(super.getAsyncByteReader(fileKey,options), fileKey.getPath()); + } + + @Override + public DirectoryStream list(Path f) throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("list", f)) { + return new LoggedDirectoryStream<>(super.list(f), "list", f); + } + } + + @Override + public DirectoryStream list(Path f, Predicate filter) + throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("list", f)) { + return new LoggedDirectoryStream<>(super.list(f, filter), "list", f); + } + } + + @Override + public DirectoryStream listFiles(Path f, boolean recursive) + throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("listFiles", f)) { + return new LoggedDirectoryStream<>(super.listFiles(f, recursive), "listFiles", f); + } + } + + @Override + public DirectoryStream glob(Path pattern, Predicate filter) + throws FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("glob", pattern)) { + return new LoggedDirectoryStream<>(super.glob(pattern, filter), "glob", pattern); + } + } + + @Override + public boolean rename(Path src, Path dst) throws IOException { + try 
(AutoLogger ignored = logDuration("rename", src)) { + return super.rename(src, dst); + } + } + + @Override + public boolean delete(Path f, boolean recursive) throws IOException { + try (AutoLogger ignored = logDuration("delete", f)) { + return super.delete(f, recursive); + } + } + + @Override + public boolean exists(Path f) throws IOException { + try (AutoLogger ignored = logDuration("exists", f)) { + return super.exists(f); + } + } + + @Override + public boolean isDirectory(Path f) throws IOException { + try (AutoLogger ignored = logDuration("isDirectory", f)) { + return super.isDirectory(f); + } + } + + @Override + public boolean isFile(Path f) throws IOException { + try (AutoLogger ignored = logDuration("isFile", f)) { + return super.isFile(f); + } + } + + @Override + public Iterable getFileBlockLocations(FileAttributes file, long start, long len) + throws IOException { + try (AutoLogger ignored = logDuration("getFileBlockLocations", file.getPath())) { + return super.getFileBlockLocations(file, start, len); + } + } + + @Override + public Iterable getFileBlockLocations(Path p, long start, long len) throws IOException { + try (AutoLogger ignored = logDuration("getFileBlockLocations", p)) { + return super.getFileBlockLocations(p, start, len); + } + } + + @Override + public void access(Path path, Set mode) + throws AccessControlException, FileNotFoundException, IOException { + try (AutoLogger ignored = logDuration("access", path)) { + super.access(path, mode); + } + } + + private AutoLogger logDuration(String op, Path path) { + return new AutoLogger(op, path); + } + + private class LoggedFSInputStream extends FilterFSInputStream { + + private final Path path; + + public LoggedFSInputStream(FSInputStream stream, Path path) { + super(stream); + this.path = path; + } + + @Override + public int read(ByteBuffer dst) throws IOException { + int nbytes = -1; + Stopwatch stopwatch = Stopwatch.createStarted(); + try { + nbytes = super.read(dst); + } finally { + logRead(stopwatch.elapsed(TimeUnit.MILLISECONDS), path, nbytes); + } + + return nbytes; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int nbytes = -1; + Stopwatch stopwatch = Stopwatch.createStarted(); + try { + nbytes = super.read(b, off, len); + } finally { + logRead(stopwatch.elapsed(TimeUnit.MILLISECONDS), path, nbytes); + } + + return nbytes; + } + + @Override + public void close() throws IOException { + try (AutoLogger ignored = logDuration("close", path)) { + super.close(); + } + } + + private void logRead(long elapsed, Path path, int nbytes) { + if (elapsed >= warnThresholdMs) { + LOG.warn("read elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } else if (elapsed >= debugThresholdMs) { + LOG.debug("read elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } else if (nbytes > 0) { + LOG.trace("read elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } + } + } + + private class LoggedFSOutputStream extends FilterFSOutputStream { + + private final Path path; + + public LoggedFSOutputStream(FSOutputStream stream, Path path) { + super(stream); + this.path = path; + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + Stopwatch stopwatch = Stopwatch.createStarted(); + try { + super.write(b, off, len); + } finally { + logWrite(stopwatch.elapsed(TimeUnit.MILLISECONDS), path, len); + } + } + + @Override + public void flush() throws IOException { + try (AutoLogger ignored = 
logDuration("flush", path)) { + super.flush(); + } + } + + @Override + public void close() throws IOException { + try (AutoLogger ignored = logDuration("close", path)) { + super.close(); + } + } + + private void logWrite(long elapsed, Path path, int nbytes) { + if (elapsed >= warnThresholdMs) { + LOG.warn("write elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } else if (elapsed >= debugThresholdMs) { + LOG.debug("write elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } else if (nbytes > 0) { + LOG.trace("write elapsed={}ms scheme={} path={} nbytes={}", elapsed, fs.getScheme(), path, nbytes); + } + } + } + + private class LoggedAsyncByteReader implements AsyncByteReader { + + private final AsyncByteReader reader; + private final Path path; + + public LoggedAsyncByteReader(AsyncByteReader reader, Path path) { + this.reader = reader; + this.path = path; + } + + @Override + public CompletableFuture readFully(long offset, ByteBuf dst, int dstOffset, int len) { + Stopwatch stopwatch = Stopwatch.createStarted(); + LOG.trace("asyncRead.start scheme={} path={} offset={} nbytes={}", fs.getScheme(), path, offset, len); + return reader.readFully(offset, dst, dstOffset, len) + .whenComplete((result, throwable) -> logAsyncRead(throwable, stopwatch, offset, len)); + } + + @Override + public CompletableFuture checkVersion(String version) { + // for most implementations this is a no-op - skip logging for now + return reader.checkVersion(version); + } + + @Override + public CompletableFuture versionedReadFully(String version, long offset, ByteBuf dst, int dstOffset, + int len) { + // no logging here as default implementation calls checkVersion and readFully + return reader.versionedReadFully(version, offset, dst, dstOffset, len); + } + + @Override + public CompletableFuture readFully(long offset, int len) { + // no logging here as default implementation calls ByteBuf-based readFully + return reader.readFully(offset, len); + } + + @Override + public List getStats() { + return reader.getStats(); + } + + @Override + public void close() throws Exception { + try (AutoLogger ignored = logDuration("close", path)) { + reader.close(); + } + } + + private void logAsyncRead(Throwable throwable, Stopwatch stopwatch, long offset, int nbytes) { + long elapsed = stopwatch.elapsed(TimeUnit.MILLISECONDS); + String state = throwable == null ? 
"complete" : "failed"; + if (elapsed >= warnThresholdMs) { + LOG.warn("asyncRead.{} elapsed={}ms scheme={} path={} offset={} nbytes={}", state, elapsed, fs.getScheme(), + path, offset, nbytes); + } else if (elapsed >= debugThresholdMs) { + LOG.debug("asyncRead.{} elapsed={}ms scheme={} path={} offset={} nbytes={}", state, elapsed, fs.getScheme(), + path, offset, nbytes); + } else if (nbytes > 0) { + LOG.trace("asyncRead.{} elapsed={}ms scheme={} path={} offset={} nbytes={}", state, elapsed, fs.getScheme(), + path, offset, nbytes); + } + } + } + + private class LoggedDirectoryStream implements DirectoryStream { + + private final DirectoryStream stream; + private final String parentOp; + private final Path path; + + public LoggedDirectoryStream(DirectoryStream stream, String parentOp, Path path) { + this.stream = stream; + this.parentOp = parentOp; + this.path = path; + } + + @Override + public Iterator iterator() { + Iterator iterator = stream.iterator(); + return new Iterator() { + + @Override + public boolean hasNext() { + try (AutoLogger ignored = logDuration(".hasNext")) { + return iterator.hasNext(); + } + } + + @Override + public T next() { + try (AutoLogger ignored = logDuration(".next")) { + return iterator.next(); + } + } + }; + } + + @Override + public void close() throws IOException { + stream.close(); + } + + private AutoLogger logDuration(String op) { + return new AutoLogger(parentOp + op, path); + } + } + + private class AutoLogger implements AutoCloseable { + + private final String op; + private final Path path; + private final Stopwatch stopwatch; + + + public AutoLogger(String op, Path path) { + this.op = op; + this.path = path; + this.stopwatch = Stopwatch.createStarted(); + } + + @Override + public void close() { + long elapsed = stopwatch.elapsed(TimeUnit.MILLISECONDS); + log(op, elapsed, path); + } + } + + private void log(String op, long elapsed, Path path) { + if (elapsed >= warnThresholdMs) { + LOG.warn("{} elapsed={}ms scheme={} path={}", op, elapsed, fs.getScheme(), path); + } else if (elapsed >= debugThresholdMs) { + LOG.debug("{} elapsed={}ms scheme={} path={}", op, elapsed, fs.getScheme(), path); + } else { + LOG.trace("{} elapsed={}ms scheme={} path={}", op, elapsed, fs.getScheme(), path); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystemWrapper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystemWrapper.java new file mode 100644 index 0000000000..2731bf633a --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/LoggedFileSystemWrapper.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dremio.exec.store.dfs; + +import java.io.IOException; + +import com.dremio.io.file.FileSystem; +import com.dremio.options.OptionResolver; +import com.dremio.sabot.exec.context.OperatorContext; + +/** + * A {@link FileSystemWrapper} implementation which wraps a {@link FileSystem} to enable logging of calls. + * The wrapping is only done if WARN level logging is enabled for {@link LoggedFileSystem}. See + * {@link LoggedFileSystem} for details on tuning the amount of logging produced. + */ +public class LoggedFileSystemWrapper implements FileSystemWrapper { + + private final FileSystemWrapper defaultWrapper; + private final OptionResolver globalOptions; + + public LoggedFileSystemWrapper(FileSystemWrapper defaultWrapper, OptionResolver globalOptions) { + this.defaultWrapper = defaultWrapper; + this.globalOptions = globalOptions; + } + + @Override + public FileSystem wrap(FileSystem fs, String storageId, AsyncStreamConf conf, OperatorContext context, + boolean enableAsync, boolean isMetadataRefresh) throws IOException { + FileSystem wrappedFs = defaultWrapper.wrap(fs, storageId, conf, context, enableAsync, isMetadataRefresh); + if (LoggedFileSystem.isLoggingEnabled()) { + // use options from the OperatorContext if available, otherwise fall back to global options + OptionResolver options = context != null && context.getOptions() != null ? context.getOptions() : globalOptions; + wrappedFs = new LoggedFileSystem(wrappedFs, options); + } + return wrappedFs; + } + + @Override + public void close() throws IOException { + defaultWrapper.close(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MayBeDistFileSystemPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MayBeDistFileSystemPlugin.java index c2783e4f5b..e179794b8b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MayBeDistFileSystemPlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MayBeDistFileSystemPlugin.java @@ -42,6 +42,7 @@ public MayBeDistFileSystemPlugin(C config, SabotContext context, String name, Pr super(config, context, name, idProvider); } + @Override protected List getProperties() { List props = new ArrayList<>(super.getProperties()); @@ -68,6 +69,7 @@ protected List getProperties() { return props; } + @Override protected FileSystem newFileSystem(String userName, OperatorContext operatorContext) throws IOException { if (!Strings.isNullOrEmpty(getConfig().getSecretKey())) { getFsConf().set("fs.dremioS3.impl", "com.dremio.plugins.s3.store.S3FileSystem"); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataOperations.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataOperations.java index 6f24bc0dbd..f62cfeded7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataOperations.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataOperations.java @@ -36,6 +36,7 @@ import com.dremio.service.namespace.dataset.proto.IcebergMetadata; import com.dremio.service.users.SystemUser; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -79,6 +80,7 @@ protected String getMetadataTableName() { return datasetConfig.getPhysicalDataset().getIcebergMetadata().getTableUuid(); } + @WithSpan protected void checkAndRepair() { RepairKvstoreFromIcebergMetadata repairOperation = new RepairKvstoreFromIcebergMetadata( datasetConfig, context.getCatalogService().getSource(METADATA_STORAGE_PLUGIN_NAME), diff --git 
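LoggedFileSystemWrapper only decorates when LoggedFileSystem.isLoggingEnabled() reports the logger is active, so a disabled logger costs nothing on the I/O path. A hedged sketch of that conditional-decorator shape with stand-in types (Fs and maybeWrap are illustrative, not Dremio's API):

    import java.util.function.UnaryOperator;

    interface Fs {
      byte[] read(String path);
    }

    final class ConditionalDecorator {
      // wrap only when the feature is active; otherwise return the delegate untouched
      static Fs maybeWrap(Fs delegate, boolean loggingEnabled, UnaryOperator<Fs> decorator) {
        return loggingEnabled ? decorator.apply(delegate) : delegate;
      }

      public static void main(String[] args) {
        Fs raw = path -> new byte[0];
        Fs fs = maybeWrap(raw, true, d -> path -> {
          System.out.println("read " + path);
          return d.read(path);
        });
        fs.read("/tmp/example");
      }
    }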
a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataUtils.java index 8b88ee239b..40c6c6f043 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/MetadataUtils.java @@ -180,8 +180,10 @@ private static class RangeQueryInput { boolean includeMax; public RangeQueryInput(Object value, SqlKind type) { - this.min = this.max = value; - this.includeMax = this.includeMin = false; + this.min = value; + this.max = value; + this.includeMin = false; + this.includeMax = false; switch (type) { case GREATER_THAN: this.max = null; @@ -198,7 +200,8 @@ public RangeQueryInput(Object value, SqlKind type) { this.includeMax = true; break; case EQUALS: - this.includeMax = this.includeMin = true; + this.includeMax = true; + this.includeMin = true; break; default: throw new UnsupportedOperationException("Invalid kind " + type); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/PhysicalDatasetUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/PhysicalDatasetUtils.java index bde7f8e8af..d29912bee2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/PhysicalDatasetUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/PhysicalDatasetUtils.java @@ -116,13 +116,13 @@ public static FormatPluginConfig toFormatPlugin(final FileConfig fileConfig, fin final TextFileConfig textFileConfig = (TextFileConfig)TextFileConfig.getForFile(fileConfig); final TextFormatConfig textFormatConfig = new TextFormatConfig(); - textFormatConfig.comment = textFileConfig.getComment().charAt(0); - textFormatConfig.escape = textFileConfig.getEscape().charAt(0); + textFormatConfig.comment = textFileConfig.getComment(); + textFormatConfig.escape = textFileConfig.getEscape(); textFormatConfig.extractHeader = textFileConfig.getExtractHeader(); textFormatConfig.skipFirstLine = textFileConfig.getSkipFirstLine(); - textFormatConfig.fieldDelimiter = textFileConfig.getFieldDelimiter().charAt(0); + textFormatConfig.fieldDelimiter = textFileConfig.getFieldDelimiter(); textFormatConfig.lineDelimiter = textFileConfig.getLineDelimiter(); - textFormatConfig.quote = textFileConfig.getQuote().charAt(0); + textFormatConfig.quote = textFileConfig.getQuote(); textFormatConfig.extensions = extensions; textFormatConfig.autoGenerateColumnNames = textFileConfig.getAutoGenerateColumnNames(); textFormatConfig.trimHeader = textFileConfig.getTrimHeader(); @@ -216,10 +216,10 @@ public static FileFormat toFileFormat(FormatPlugin formatPlugin) { final TextFileConfig textFileConfig = new TextFileConfig(); TextParsingSettings settings = new TextParsingSettings(); settings.set((TextFormatConfig) formatPlugin.getConfig()); - textFileConfig.setComment(new Character((char) settings.getComment()).toString()); - textFileConfig.setEscape(new Character((char) settings.getQuoteEscape()).toString()); - textFileConfig.setFieldDelimiter(new Character((char) settings.getDelimiter()).toString()); - textFileConfig.setQuote(new Character((char) settings.getQuote()).toString()); + textFileConfig.setComment(new String(settings.getComment())); + textFileConfig.setEscape(new String(settings.getQuoteEscape())); + textFileConfig.setFieldDelimiter(new String(settings.getDelimiter())); + textFileConfig.setQuote(new String(settings.getQuote())); textFileConfig.setExtractHeader(settings.isHeaderExtractionEnabled()); 
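The RangeQueryInput change above splits the chained assignments so that each comparison kind explicitly produces an open or closed [min, max] bound. A compact sketch of that mapping with a stand-in enum in place of Calcite's SqlKind; the GREATER_THAN_OR_EQUAL, LESS_THAN, and LESS_THAN_OR_EQUAL arms are filled in by analogy with the hunks shown:

    final class Range {
      enum Kind { GREATER_THAN, GREATER_THAN_OR_EQUAL, LESS_THAN, LESS_THAN_OR_EQUAL, EQUALS }

      Object min;
      Object max;
      boolean includeMin;
      boolean includeMax;

      Range(Object value, Kind kind) {
        this.min = value;
        this.max = value;
        this.includeMin = false;
        this.includeMax = false;
        switch (kind) {
          case GREATER_THAN:
            this.max = null;         // (value, +inf)
            break;
          case GREATER_THAN_OR_EQUAL:
            this.max = null;         // [value, +inf)
            this.includeMin = true;
            break;
          case LESS_THAN:
            this.min = null;         // (-inf, value)
            break;
          case LESS_THAN_OR_EQUAL:
            this.min = null;         // (-inf, value]
            this.includeMax = true;
            break;
          case EQUALS:
            this.includeMin = true;  // [value, value]
            this.includeMax = true;
            break;
          default:
            throw new UnsupportedOperationException("Invalid kind " + kind);
        }
      }

      public static void main(String[] args) {
        Range r = new Range(5, Kind.GREATER_THAN_OR_EQUAL);
        System.out.println(r.min + " " + r.max + " " + r.includeMin); // 5 null true
      }
    }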
textFileConfig.setSkipFirstLine(settings.isSkipFirstLine()); textFileConfig.setLineDelimiter(new String(settings.getNewLineDelimiter())); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/RepairKvstoreFromIcebergMetadata.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/RepairKvstoreFromIcebergMetadata.java index 8329e4a607..c097c9536d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/RepairKvstoreFromIcebergMetadata.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/RepairKvstoreFromIcebergMetadata.java @@ -33,6 +33,7 @@ import org.apache.iceberg.PartitionStatsReader; import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.io.InputFile; import com.dremio.common.exceptions.UserException; @@ -47,11 +48,11 @@ import com.dremio.connector.metadata.SourceMetadata; import com.dremio.datastore.LegacyProtobufSerializer; import com.dremio.exec.catalog.MetadataObjectsUtils; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.DatasetRetrievalOptions; import com.dremio.exec.store.StoragePlugin; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergPartitionData; import com.dremio.exec.store.iceberg.IcebergSerDe; import com.dremio.exec.store.iceberg.IcebergUtils; @@ -76,6 +77,7 @@ import com.google.common.collect.Streams; import com.google.protobuf.InvalidProtocolBufferException; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteStringUtil; /** @@ -112,16 +114,19 @@ public RepairKvstoreFromIcebergMetadata(DatasetConfig datasetConfig, this.isMapDataTypeEnabled = isMapDataTypeEnabled; } + @WithSpan public boolean checkAndRepairDatasetWithQueryRetry() { return performCheckAndRepairDataset(true); } + @WithSpan public boolean checkAndRepairDatasetWithoutQueryRetry() { return performCheckAndRepairDataset(false); } + @WithSpan private boolean performCheckAndRepairDataset(boolean retryQuery) { - Retryer retryer = new Retryer.Builder() + Retryer retryer = Retryer.newBuilder() .setMaxRetries(MAX_REPAIR_ATTEMPTS) .retryOnExceptionFunc( ex -> @@ -140,13 +145,14 @@ private boolean performCheckAndRepairDataset(boolean retryQuery) { }); } + @WithSpan private boolean isRepairNeeded() { if(!DatasetHelper.isInternalIcebergTable(datasetConfig)) { return false; } oldIcebergMetadata = datasetConfig.getPhysicalDataset().getIcebergMetadata(); - icebergModel = metaStoragePlugin.getIcebergModel(metaStoragePlugin.getSystemUserFS()); + icebergModel = metaStoragePlugin.getIcebergModel(); Path icebergTableRootFolder = Path.of(metaStoragePlugin.getConfig().getPath().toString()).resolve(oldIcebergMetadata.getTableUuid()); final IcebergTableLoader icebergTableLoader = icebergModel.getIcebergTableLoader(icebergModel.getTableIdentifier(icebergTableRootFolder.toString())); currentIcebergTable = icebergTableLoader.getIcebergTable(); @@ -161,6 +167,7 @@ private boolean isRepairNeeded() { return true; } + @WithSpan private void performRepair(boolean retryQuery) throws NamespaceException, ConnectorException, InvalidProtocolBufferException { logger.info("DatasetConfig of table {} in catalog is not up to date with Iceberg metadata." + "Current iceberg table version [Snapshot ID: {}, RootMetadataFile: {}], version in catalog [Snapshot ID: {}, RootMetadataFile: {}]. 
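The @WithSpan annotations added above come from io.opentelemetry.instrumentation.annotations and mark each repair entry point for tracing; when an OpenTelemetry agent (or equivalent instrumentation) is active, each annotated call becomes a span named ClassName.methodName. A minimal sketch of the annotation in use; RepairService and the attribute key are hypothetical:

    import io.opentelemetry.instrumentation.annotations.SpanAttribute;
    import io.opentelemetry.instrumentation.annotations.WithSpan;

    class RepairService {
      // with an agent attached, this records a span named "RepairService.checkAndRepair"
      // and captures the tableId argument as a span attribute
      @WithSpan
      boolean checkAndRepair(@SpanAttribute("table.id") String tableId) {
        // ... repair work would go here ...
        return true;
      }
    }

Without an agent the annotation is inert, which is why it can be added liberally to hot repair paths.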
" + @@ -168,15 +175,15 @@ private void performRepair(boolean retryQuery) throws NamespaceException, Connec oldIcebergMetadata.getSnapshotId(), oldIcebergMetadata.getMetadataFileLocation()); String oldPartitionStatsFile = oldIcebergMetadata.getPartitionStatsFile(); - String newPartitionStatsFile = null; + ImmutableDremioFileAttrs newPartitionStatsFileAttrs = null; if (oldPartitionStatsFile != null) { - newPartitionStatsFile = IcebergUtils.getPartitionStatsFile(currentRootPointerFileLocation, currentIcebergSnapshot.snapshotId(), metaStoragePlugin.getFsConfCopy(), metaStoragePlugin); + newPartitionStatsFileAttrs = IcebergUtils.getPartitionStatsFileAttrs(currentRootPointerFileLocation, currentIcebergSnapshot.snapshotId(), currentIcebergTable.io()); } repairSchema(); repairStats(); repairDroppedAndModifiedColumns(); - repairReadSignature(newPartitionStatsFile); + repairReadSignature(newPartitionStatsFileAttrs.fileName()); repairPrimaryKeys(); // update iceberg metadata @@ -189,8 +196,9 @@ private void performRepair(boolean retryQuery) throws NamespaceException, Connec newIcebergMetadata.setPartitionSpecsJsonMap(ByteStringUtil.wrap(specs)); newIcebergMetadata.setJsonSchema(serializedSchemaAsJson(currentIcebergTable.schema())); - if (newPartitionStatsFile != null) { - newIcebergMetadata.setPartitionStatsFile(newPartitionStatsFile); + if (newPartitionStatsFileAttrs.fileName() != null) { + newIcebergMetadata.setPartitionStatsFile(newPartitionStatsFileAttrs.fileName()); + newIcebergMetadata.setPartitionStatsFileSize(newPartitionStatsFileAttrs.fileLength()); } datasetConfig.getPhysicalDataset().setIcebergMetadata(newIcebergMetadata); @@ -293,7 +301,8 @@ private byte[] createReadSignatureFromPartitionStatsFiles(String dataTableRootFo logger.info("Restoring read signature of table {} from partition stats file {} of snapshot {}", datasetConfig.getFullPathList(), newPartitionStatsFile, currentIcebergSnapshot.snapshotId()); - final InputFile partitionStatsInputFile = new DremioFileIO(metaStoragePlugin.getFsConfCopy(), metaStoragePlugin).newInputFile(newPartitionStatsFile); + FileIO io = metaStoragePlugin.createIcebergFileIO(metaStoragePlugin.getSystemUserFS(), null, null, null, null); + final InputFile partitionStatsInputFile = io.newInputFile(newPartitionStatsFile); PartitionStatsReader partitionStatsReader = new PartitionStatsReader(partitionStatsInputFile, spec); Streams.stream(partitionStatsReader) diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitAssignmentTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitAssignmentTableFunction.java index a8f81bfa46..078f63d633 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitAssignmentTableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitAssignmentTableFunction.java @@ -111,8 +111,7 @@ public SplitAssignmentTableFunction(FragmentExecutionContext fec, OperatorContex if (functionConfig.getFunctionContext().isIcebergMetadata() && functionConfig.getFunctionContext().getInternalTablePluginId() != null) { this.pluginId = functionConfig.getFunctionContext().getInternalTablePluginId(); this.plugin = fec.getStoragePlugin(pluginId); - } - else { + } else { this.pluginId = functionConfig.getFunctionContext().getPluginId(); this.plugin = IcebergUtils.getSupportsInternalIcebergTablePlugin(fec, pluginId); } @@ -280,15 +279,14 @@ private static void logJudiciously(Exception e) { } private FileSystem getFs(String filePath) { - if (fs != null) { - return fs; - } - - try { - return 
fs = plugin.createFS(filePath, props.getUserName(), context); - } catch (IOException e) { - throw new UncheckedIOException(e); + if (fs == null) { + try { + fs = plugin.createFS(filePath, props.getUserName(), context); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } + return fs; } private class SplitWork implements CompleteWork { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitGenTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitGenTableFunction.java index 923bb9a437..ebd4fe0172 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitGenTableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/SplitGenTableFunction.java @@ -21,9 +21,8 @@ import java.io.IOException; import java.io.ObjectOutput; import java.io.ObjectOutputStream; -import java.io.UnsupportedEncodingException; import java.math.BigDecimal; -import java.net.URLDecoder; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; @@ -129,11 +128,10 @@ public void startRow(int row) throws Exception { this.row = row; } - protected void setCurrentPath(int row) throws UnsupportedEncodingException { - final Path currentPathResolved = Path.of(functionConfig.getFunctionContext().getFormatSettings().getLocation()) - .resolve(new String(pathVector.get(row), StandardCharsets.UTF_8)); - currentPath = currentPathResolved.toString(); - currentPath = URLDecoder.decode(currentPath, "UTF-8"); + protected void setCurrentPath(int row) throws Exception { + final String tableLocation = functionConfig.getFunctionContext().getFormatSettings().getLocation(); + final String rowPath = new String(pathVector.get(row), StandardCharsets.UTF_8); + currentPath = Path.of(tableLocation).resolve(Path.of(new URI(rowPath))).toString(); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/DecimalTools.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/DecimalTools.java index 124f84ec8d..5994793b4d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/DecimalTools.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/DecimalTools.java @@ -67,9 +67,9 @@ public static String toBinary(byte[] bytes) { public byte[] fromBinary(String s) { int sLen = s.length(); byte[] toReturn = new byte[(sLen + Byte.SIZE - 1) / Byte.SIZE]; - char c; for (int i = 0; i < sLen; i++) { - if ((c = s.charAt(i)) == '1') { + char c = s.charAt(i); + if (c == '1') { toReturn[i / Byte.SIZE] = (byte) (toReturn[i / Byte.SIZE] | (0x80 >>> (i % Byte.SIZE))); } else if (c != '0') { throw new IllegalArgumentException(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/TwosComplementValuePair.java b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/TwosComplementValuePair.java index 03f72d7bfa..a28a580a04 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/TwosComplementValuePair.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/dfs/implicit/TwosComplementValuePair.java @@ -87,6 +87,7 @@ public void close() throws Exception { private final class BigDecimalPopulator implements Populator, AutoCloseable { private DecimalVector vector; + @Override public void setup(OutputMutator output){ vector = (DecimalVector)output.getVector(name); if (vector == null) { @@ -95,6 +96,7 @@ public void setup(OutputMutator output){ } } + @Override public void populate(final int count){ final byte[] 
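getFs above is now a plain memoized lazy initializer: create the FileSystem on first use, cache it in a field, and wrap the checked IOException in an UncheckedIOException so the caller's signature stays clean. A sketch of the pattern with stand-in types (LazyResource and Conn are illustrative):

    import java.io.IOException;
    import java.io.UncheckedIOException;

    class LazyResource {
      interface Conn { void use(); }

      private Conn conn; // created on first access, then reused

      private Conn getConn() {
        if (conn == null) {
          try {
            conn = open();
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          }
        }
        return conn;
      }

      private Conn open() throws IOException {
        return () -> System.out.println("using connection");
      }

      public static void main(String[] args) {
        LazyResource r = new LazyResource();
        r.getConn().use(); // opens on first call
        r.getConn().use(); // reuses the cached instance
      }
    }

Like the original, this assumes single-threaded access from one operator; concurrent callers would need synchronization or an AtomicReference.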
value = TwosComplementValuePair.this.value; @@ -107,6 +109,7 @@ public void populate(final int count){ vector.setValueCount(count); } + @Override public void allocate(){ vector.allocateNew(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasyFormatUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasyFormatUtils.java index 819a238ea8..e3852202e1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasyFormatUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasyFormatUtils.java @@ -113,6 +113,13 @@ public static RuntimeException handleExceptionDuringCoercion(String varcharValue return new RuntimeException(errorMessageBuilder.toString()); } + public static boolean isVarcharOptimizationPossible(ExtendedFormatOptions extendedFormatOptions, ArrowType type) { + // We can follow an optimised codepath for writing data when the following conditions are satisfied: + // 1. The target field type is VARCHAR + // 2. No string transformations like NULL_IF are needed i.e. areStringTransformationsNeeded is set to false. + return (!extendedFormatOptions.getAreStringTransformationsNeeded() && CompleteType.VARCHAR.getType().equals(type)); + } + public static String applyStringTransformations(String varcharValue, ExtendedFormatOptions extendedFormatOptions, Boolean trimSpace) { if (extendedFormatOptions == null) { return varcharValue; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasySplitReaderCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasySplitReaderCreator.java index 8ccc69d958..a38b567638 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasySplitReaderCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/EasySplitReaderCreator.java @@ -106,6 +106,7 @@ public SplitAndPartitionInfo getSplit() { public void createInputStreamProvider(InputStreamProvider lastInputStreamProvider, MutableParquetMetadata lastFooter) { } + @Override protected T handleEx(RunnableIO r) { Preconditions.checkNotNull(easySplitXAttr); try { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/arrow/ArrowFlatBufRecordReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/arrow/ArrowFlatBufRecordReader.java index 49b6962fbd..86fb655dac 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/arrow/ArrowFlatBufRecordReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/arrow/ArrowFlatBufRecordReader.java @@ -80,8 +80,8 @@ public void setup(OutputMutator output) { .build(logger); } - final long TAIL_SIZE_GUESS = MAGIC_STRING_LENGTH + FOOTER_OFFSET_SIZE; - final int tailSize = (int)Math.min(size, TAIL_SIZE_GUESS); + final long tailSizeGuess = MAGIC_STRING_LENGTH + FOOTER_OFFSET_SIZE; + final int tailSize = (int) Math.min(size, tailSizeGuess); final byte[] tailBytes = new byte[tailSize]; try (OperatorStats.WaitRecorder waitRecorder = OperatorStats.getWaitRecorder(context.getStats())) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/DocumentNode.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/DocumentNode.java index 0654b01434..4f4585a7a3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/DocumentNode.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/DocumentNode.java @@ -28,6 +28,7 @@ public class DocumentNode extends EntryNode implements DocumentEntry { super(property, parent); } + @Override public DocumentProperty 
getProperty() { return (DocumentProperty) super.getProperty(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/EntryNode.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/EntryNode.java index b1d36eeef1..dbbe553d07 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/EntryNode.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/poi/EntryNode.java @@ -74,6 +74,7 @@ protected boolean isRoot() { * * @return name */ + @Override public String getName() { return property.getName(); @@ -84,6 +85,7 @@ public String getName() * * @return true if the Entry is a DirectoryEntry, else false */ + @Override public boolean isDirectoryEntry() { return false; @@ -94,6 +96,7 @@ public boolean isDirectoryEntry() * * @return true if the Entry is a DocumentEntry, else false */ + @Override public boolean isDocumentEntry() { return false; @@ -105,6 +108,7 @@ public boolean isDocumentEntry() * * @return this Entry's parent; null iff this is the root Entry */ + @Override public DirectoryEntry getParent() { throw new IllegalStateException("Not Implemented"); } @@ -121,6 +125,7 @@ public DirectoryEntry getParent() { * * @return true if the Entry was successfully deleted, else false */ + @Override public boolean delete() { throw new IllegalStateException("Not Supported"); } @@ -140,6 +145,7 @@ public boolean delete() { * @return true if the operation succeeded, else false */ + @Override public boolean renameTo(final String newName) { throw new IllegalStateException("Not Supported"); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/Property.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/Property.java index 0a7a0c6b35..cebb6f6933 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/Property.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/Property.java @@ -64,8 +64,7 @@ protected Property(int index, byte [] array) if (name_length < 1) { _name = ""; - } - else + } else { char[] char_array = new char[ name_length ]; int name_offset = 0; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/PropertyTable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/PropertyTable.java index 97bf03558e..5a4fe061b2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/PropertyTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/excel/xls/properties/PropertyTable.java @@ -72,13 +72,13 @@ public PropertyTable(final XlsInputStream is, HeaderBlock header, BlockStore blo } private void processSector(int sectorOffset) { - final int property_count = sectorSize / POIFSConstants.PROPERTY_SIZE; + final int propertyCount = sectorSize / POIFSConstants.PROPERTY_SIZE; byte[] bytes = new byte[POIFSConstants.PROPERTY_SIZE]; int index = properties.size(); is.seek(sectorOffset); - for (int k = 0; k < property_count; k++, index++) { + for (int k = 0; k < propertyCount; k++, index++) { try { is.read(bytes, 0, bytes.length); } catch (IOException e) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/JSONRecordReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/JSONRecordReader.java index d7f343d485..520f9a1efe 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/JSONRecordReader.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/JSONRecordReader.java @@ -28,6 +28,7 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.CatalogOptions; import com.dremio.exec.physical.config.ExtendedFormatOptions; +import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.store.AbstractRecordReader; import com.dremio.exec.store.easy.json.JsonProcessor.ReadState; import com.dremio.exec.store.easy.json.reader.CountingJsonReader; @@ -179,7 +180,8 @@ public void setup(final OutputMutator output) throws ExecutionSetupException { final int maxLeafLimit = Math.toIntExact(this.context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)); this.jsonReader = new JsonReader( context.getManagedBuffer(), ImmutableList.copyOf(getColumns()), sizeLimit, maxLeafLimit, enableAllTextMode, true, readNumbersAsDouble, - schemaImposedMode, extendedFormatOptions, context, output.getContainer() != null && output.getContainer().hasSchema()? output.getContainer().getSchema() : null); + schemaImposedMode, extendedFormatOptions, context, output.getContainer() != null && output.getContainer().hasSchema()? output.getContainer().getSchema() : null, + context.getOptions().getOption(PlannerSettings.ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED)); } setupParser(); } catch(final Exception e) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/reader/CountingJsonReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/reader/CountingJsonReader.java index 2bde9edef7..37f443ec46 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/reader/CountingJsonReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/json/reader/CountingJsonReader.java @@ -55,8 +55,10 @@ public ReadState write(BaseWriter.ComplexWriter writer) throws IOException { return ReadState.END_OF_STREAM; } throwIllegalStartException(); + break; default: throwIllegalStartException(); + break; } parser.skipChildren(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/TextFormatPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/TextFormatPlugin.java index c3fef687dd..ac08c957fa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/TextFormatPlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/TextFormatPlugin.java @@ -45,7 +45,6 @@ import com.dremio.io.file.FileSystem; import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.sabot.exec.store.easy.proto.EasyProtobuf.EasyDatasetSplitXAttr; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; @@ -110,10 +109,10 @@ public static class TextFormatConfig implements FormatPluginConfig { public List extensions = ImmutableList.of("txt"); public String lineDelimiter = "\n"; - public char fieldDelimiter = '\u0000'; - public char quote = '"'; - public char escape = '"'; - public char comment = '#'; + public String fieldDelimiter = "\u0000"; + public String quote = "\""; + public String escape = "\""; + public String comment = "#"; public boolean skipFirstLine = false; public boolean extractHeader = false; public boolean autoGenerateColumnNames = false; @@ -128,15 +127,15 @@ public List getExtensions() { return extensions; } - public char getQuote() { + public String getQuote() { return quote; } - public char getEscape() { + public String 
getEscape() { return escape; } - public char getComment() { + public String getComment() { return comment; } @@ -144,7 +143,7 @@ public String getLineDelimiter() { return lineDelimiter; } - public char getFieldDelimiter() { + public String getFieldDelimiter() { return fieldDelimiter; } @@ -157,14 +156,9 @@ public boolean isAutoGenerateColumnNames() { return autoGenerateColumnNames; } - @JsonIgnore - public String getFieldDelimiterAsString(){ - return new String(new char[]{fieldDelimiter}); - } - @Deprecated @JsonProperty("delimiter") - public void setFieldDelimiter(char delimiter){ + public void setFieldDelimiter(String delimiter){ this.fieldDelimiter = delimiter; } @@ -181,12 +175,12 @@ public boolean isTrimHeaderEnabled() { public int hashCode() { final int prime = 31; int result = 1; - result = prime * result + comment; - result = prime * result + escape; + result = prime * result + ((comment == null) ? 0 : comment.hashCode()); + result = prime * result + ((escape == null) ? 0 : escape.hashCode()); result = prime * result + ((extensions == null) ? 0 : extensions.hashCode()); - result = prime * result + fieldDelimiter; + result = prime * result + ((fieldDelimiter == null) ? 0 : fieldDelimiter.hashCode()); result = prime * result + ((lineDelimiter == null) ? 0 : lineDelimiter.hashCode()); - result = prime * result + quote; + result = prime * result + ((quote == null) ? 0 : quote.hashCode()); result = prime * result + (skipFirstLine ? 1231 : 1237); result = prime * result + (extractHeader? 1231 : 1237); result = prime * result + (autoGenerateColumnNames ? 1231 : 1237); @@ -207,10 +201,10 @@ public boolean equals(Object obj) { return false; } TextFormatConfig other = (TextFormatConfig) obj; - if (comment != other.comment) { + if (!Objects.equals(comment, other.comment)) { return false; } - if (escape != other.escape) { + if (!Objects.equals(escape, other.escape)) { return false; } if (extensions == null) { @@ -220,7 +214,7 @@ public boolean equals(Object obj) { } else if (!extensions.equals(other.extensions)) { return false; } - if (fieldDelimiter != other.fieldDelimiter) { + if (!Objects.equals(fieldDelimiter, other.fieldDelimiter)) { return false; } if (lineDelimiter == null) { @@ -230,7 +224,7 @@ public boolean equals(Object obj) { } else if (!lineDelimiter.equals(other.lineDelimiter)) { return false; } - if (quote != other.quote) { + if (!Objects.equals(quote, other.quote)) { return false; } if (skipFirstLine != other.skipFirstLine) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/FieldTypeOutput.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/FieldTypeOutput.java index a3b8fc906a..6aecc0de3f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/FieldTypeOutput.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/FieldTypeOutput.java @@ -39,7 +39,7 @@ abstract class FieldTypeOutput extends TextOutput { // track chars within field protected int currentDataPointer = 0; // track if field is still getting appended - protected boolean fieldOpen = true; + protected boolean fieldOpen = false; // holds chars for a field protected final byte[] fieldBytes; @@ -96,7 +96,7 @@ public void append(byte data) { return; } - FieldSizeLimitExceptionHelper.checkSizeLimit(currentDataPointer, maxCellLimit, currentFieldIndex, logger); + FieldSizeLimitExceptionHelper.checkSizeLimit(currentDataPointer+1, maxCellLimit, currentFieldIndex, logger); fieldBytes[currentDataPointer++] = data; 
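Widening the delimiter fields from char to String forces hashCode and equals into null-safe forms, which the hunks above write out by hand with the prime-multiply idiom. java.util.Objects expresses the same contract more compactly; a sketch with a trimmed-down stand-in config class:

    import java.util.List;
    import java.util.Objects;

    final class TextConfig {
      String fieldDelimiter = "\u0000";
      String quote = "\"";
      String escape = "\"";
      String comment = "#";
      List<String> extensions = List.of("txt");

      @Override
      public int hashCode() {
        return Objects.hash(fieldDelimiter, quote, escape, comment, extensions);
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) {
          return true;
        }
        if (!(obj instanceof TextConfig)) {
          return false;
        }
        TextConfig other = (TextConfig) obj;
        return Objects.equals(fieldDelimiter, other.fieldDelimiter)
            && Objects.equals(quote, other.quote)
            && Objects.equals(escape, other.escape)
            && Objects.equals(comment, other.comment)
            && Objects.equals(extensions, other.extensions);
      }

      public static void main(String[] args) {
        System.out.println(new TextConfig().equals(new TextConfig())); // true
      }
    }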
      rowHasData = true;
    }
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/RepeatedVarCharOutput.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
index b8427d4bbc..36b87c5250 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/RepeatedVarCharOutput.java
@@ -107,7 +107,7 @@ public RepeatedVarCharOutput(OutputMutator outputMutator, Collection<SchemaPath>
     for (SchemaPath path : columns) {
       assert path.getRootSegment().isNamed() : "root segment should be named";
       pathStr = path.getRootSegment().getPath();
-      Preconditions.checkArgument(pathStr.equals(COL_NAME) || (pathStr.equals("*") && path.getRootSegment().getChild() == null),
+      Preconditions.checkArgument(pathStr.equals(COL_NAME) || ("*".equals(pathStr) && path.getRootSegment().getChild() == null),
         String.format("Selected column '%s' must have name 'columns' or must be plain '*'", pathStr));
 
       if (path.getRootSegment().getChild() != null) {
@@ -147,12 +147,11 @@ public void startBatch() {
   }
 
   private void expandTmpBufIfNecessary() {
+    FieldSizeLimitExceptionHelper.checkSizeLimit(charLengthOffset + 1, maxCellLimit, fieldIndex, logger);
     if (charLengthOffset < tmpBuf.capacity()) {
       return;
     }
 
-    FieldSizeLimitExceptionHelper.checkSizeLimit(charLengthOffset, maxCellLimit, fieldIndex, logger);
-
     byte[] tmp = new byte[LargeMemoryUtil.checkedCastToInt(tmpBuf.capacity())];
     tmpBuf.getBytes(0, tmp);
     tmpBuf = tmpBuf.reallocIfNeeded(Math.min(tmpBuf.capacity() * 2, maxCellLimit + 1));
@@ -209,20 +208,18 @@ public boolean rowHasData() {
 
   @Override
   public void finishRecord() {
-    hasData = false;
-
-    if(fieldOpen){
-      endField();
+    if (hasData) {
+      if (fieldOpen) {
+        endField();
+      }
+      listWriter.endList();
+      hasData = false;
+    } else {
+      listWriter.writeNull();
     }
-    listWriter.endList();
-
-    // if there were no defined fields, skip.
-// if(fieldIndex > -1){ - batchIndex++; - recordCount++; -// } - + batchIndex++; + recordCount++; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/SchemaImposedOutput.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/SchemaImposedOutput.java index 2b905e8cba..0e99d9feae 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/SchemaImposedOutput.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/SchemaImposedOutput.java @@ -16,6 +16,7 @@ package com.dremio.exec.store.easy.text.compliant; import static com.dremio.exec.store.easy.EasyFormatUtils.getValue; +import static com.dremio.exec.store.easy.EasyFormatUtils.isVarcharOptimizationPossible; import static com.dremio.exec.store.iceberg.IcebergUtils.writeToVector; import java.nio.charset.StandardCharsets; @@ -25,6 +26,7 @@ import java.util.Set; import java.util.stream.Collectors; +import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.types.pojo.Field; import com.dremio.common.exceptions.UserException; @@ -93,8 +95,20 @@ public SchemaImposedOutput(OperatorContext context, OutputMutator outputMutator, } @Override protected void writeValueInCurrentVector(int index, byte[] fieldBytes, int startIndex, int endIndex) { - String s = new String(fieldBytes, 0, currentDataPointer, StandardCharsets.UTF_8); - Object v = getValue(currentVector.getField(), s, extendedFormatOptions); - writeToVector(currentVector, recordCount, v); + if (isVarcharOptimizationPossible(extendedFormatOptions, currentVector.getField().getType())) { + // If we do not need to apply any string transformations and if our target field type is VARCHAR, + // then we can skip converting to String type and directly write to currentValueVector + if(currentDataPointer == 0 && extendedFormatOptions.getEmptyAsNull()) { + // We will enter this block when the input string is empty AND we are required to treat empty strings as null. + // Hence, write NULL to currentVector at position 'recordCount' + ((VarCharVector) currentVector).setNull(recordCount); + } else { + ((VarCharVector) currentVector).setSafe(recordCount, fieldBytes, 0, currentDataPointer); + } + } else { + String s = new String(fieldBytes, 0, currentDataPointer, StandardCharsets.UTF_8); + Object v = getValue(currentVector.getField(), s, extendedFormatOptions); + writeToVector(currentVector, recordCount, v); + } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextInput.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextInput.java index 6ba9754956..7f82e1da1e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextInput.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextInput.java @@ -132,7 +132,7 @@ public TextInput(TextParsingSettings settings, FSInputStream input, ArrowBuf rea * splitFirstLine is enabled, input will move to appropriate complete line. * @throws IOException */ - final void start() throws IOException { + final boolean start() throws IOException { lineCount = 0; if(startPos > 0) { input.setPosition(startPos); @@ -143,17 +143,11 @@ final void start() throws IOException { skipOptionalBOM(); } if (length > 0) { - if(startPos > 0 || settings.isSkipFirstLine()){ - - // move to next full record. 
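writeValueInCurrentVector above uses isVarcharOptimizationPossible to pick a fast path: when the target field is VARCHAR and no string transformations apply, the raw field bytes go into the vector directly, skipping the String round trip, and an empty field becomes NULL when empty-as-null is in effect. A runnable sketch of that direct write, assuming Arrow's vector classes on the classpath:

    import org.apache.arrow.memory.BufferAllocator;
    import org.apache.arrow.memory.RootAllocator;
    import org.apache.arrow.vector.VarCharVector;

    class VarCharFastPath {
      public static void main(String[] args) {
        try (BufferAllocator allocator = new RootAllocator();
             VarCharVector vector = new VarCharVector("col", allocator)) {
          vector.allocateNew();
          byte[] field = "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8);
          boolean emptyAsNull = true;

          if (field.length == 0 && emptyAsNull) {
            vector.setNull(0);                         // empty input treated as NULL
          } else {
            vector.setSafe(0, field, 0, field.length); // bytes written without a String hop
          }
          vector.setValueCount(1);
          System.out.println(vector.getObject(0));     // prints: hello
        }
      }
    }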
- try { - skipLines(1); - } catch (StreamFinishedPseudoException sfpe) { - // just stop parsing - as end of the input reached - throw new IllegalArgumentException("Only one data line detected. Please consider changing line delimiter."); - } + if(startPos > 0 ){ + return true; } } + return false; } @@ -241,28 +235,18 @@ private void updateLengthBasedOnConstraint() { } } - /** - * Get next byte from stream. Also maintains the current line count. Will throw a StreamFinishedPseudoException - * when the stream has run out of bytes. - * @return next byte from stream. - * @throws IOException - */ - public final byte nextChar() throws IOException { - byte byteChar = nextCharNoNewLineCheck(); - int bufferPtrTemp = bufferPtr - 1; - if (byteChar == lineSeparator[0]) { - for (int i = 1; i < lineSeparator.length; i++, bufferPtrTemp++) { - if (lineSeparator[i] != buffer.getByte(bufferPtrTemp)) { - return byteChar; + boolean match(byte byteChar, byte[] parameter) throws IOException { + if (byteChar == parameter[0]) { + int bufferPtrTemp = bufferPtr - 1; + for (int i = 1; i < parameter.length; i++, bufferPtrTemp++) { + if (parameter[i] != buffer.getByte(bufferPtrTemp)) { + return false; } } - lineCount++; - byteChar = normalizedLineSeparator; - - // we don't need to update buffer position if line separator is one byte long - if (lineSeparator.length > 1) { - bufferPtr += (lineSeparator.length - 1); + // we don't need to update buffer position if parameter is one byte long + if (parameter.length > 1) { + bufferPtr += (parameter.length - 1); if (bufferPtr > length) { if (length != -1) { updateBuffer(); @@ -271,51 +255,34 @@ public final byte nextChar() throws IOException { } } } + return true; } - - return byteChar; + return false; } /** - * Get next byte from stream. newLine means a new line. - * Also maintains the current line count. Will throw a StreamFinishedPseudoException + * Get next byte from stream. Also maintains the current line count. Will throw a StreamFinishedPseudoException * when the stream has run out of bytes. - * @param newLine the char that means a new line * @return next byte from stream. 
- * @throws IOException + * @throws IOException Reached End of Input */ - public final byte nextChar(byte newLine) throws IOException { + public byte[] nextChar() throws IOException { + byte[] byteNType = new byte[2]; byte byteChar = nextCharNoNewLineCheck(); - int bufferPtrTemp = bufferPtr - 1; - if (byteChar == lineSeparator[0]) { - for (int i = 1; i < lineSeparator.length; i++, bufferPtrTemp++) { - if (lineSeparator[i] != buffer.getByte(bufferPtrTemp)) { - return byteChar; - } - } - // a new line - + byteNType[1] = byteChar; + byte[] fieldSeparator = settings.getDelimiter(); + if (match(byteChar, lineSeparator)) { + byteNType[0] = 1; lineCount++; - byteChar = normalizedLineSeparator; - - // we don't need to update buffer position if line separator is one byte long - if (lineSeparator.length > 1) { - bufferPtr += (lineSeparator.length - 1); - if (bufferPtr > length) { - if (length != -1) { - updateBuffer(); - } else { - throw StreamFinishedPseudoException.INSTANCE; - } - } - } - } else if (byteChar == newLine) { - // a new line + byteNType[1] = -1; + } else if (byteChar == normalizedLineSeparator) { lineCount++; - byteChar = normalizedLineSeparator; + byteNType[0] = 1; + } else if (match(byteChar, fieldSeparator)) { + byteNType[0] = 2; } - return byteChar; + return byteNType; } /** @@ -382,31 +349,6 @@ public final void skipLines(int lines) throws IOException { } } - /** - * Skip forward the number of line delimiters. newLine means a new line. - * If you are in the middle of a line, a value of 1 will skip to the start of the next record. - * @param newLine the char that means a new line - * @param lines Number of lines to skip. - * @throws IOException - */ - public final void skipLines(int lines, byte newLine) throws IOException { - if (lines < 1) { - return; - } - long expectedLineCount = this.lineCount + lines; - - try { - do { - nextChar(newLine); - } while (lineCount < expectedLineCount /*&& bufferPtr < READ_CHARS_LIMIT*/); - if (lineCount < lines) { - throw new IllegalArgumentException("Unable to skip " + lines + " lines from line " + (expectedLineCount - lines) + ". End of input reached"); - } - } catch (EOFException ex) { - throw new IllegalArgumentException("Unable to skip " + lines + " lines from line " + (expectedLineCount - lines) + ". 
End of input reached"); - } - } - // Check if the input stream has a specific byte-order-mark (BOM) private final boolean checkBom(ByteOrderMark bom) { int bomLength = bom.length(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextParsingSettings.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextParsingSettings.java index 44a1817972..1794c7bfa8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextParsingSettings.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextParsingSettings.java @@ -17,6 +17,8 @@ import static java.nio.charset.StandardCharsets.UTF_8; +import java.util.Arrays; + import com.dremio.exec.store.easy.text.TextFormatPlugin.TextFormatConfig; import com.univocity.parsers.common.TextParsingException; @@ -27,17 +29,16 @@ public class TextParsingSettings { private String emptyValue = null; private boolean parseUnescapedQuotes = true; - private byte quote = b('"'); - private byte quoteEscape = b('"'); - private byte delimiter = b(','); - private byte comment = b('#'); + private byte[] quote = {b('"')}; + private byte[] quoteEscape = {b('"')}; + private byte[] delimiter = {b(',')}; + private byte[] comment = {b('#')}; private long maxCharsPerColumn = Character.MAX_VALUE; private byte normalizedNewLine = b('\n'); private byte[] newLineDelimiter = {normalizedNewLine}; private boolean ignoreLeadingWhitespaces = false; private boolean ignoreTrailingWhitespaces = false; - private String lineSeparatorString = "\n"; private boolean skipFirstLine = false; private boolean autoGenerateColumnNames = false; private boolean trimHeader = false; @@ -47,11 +48,11 @@ public class TextParsingSettings { private int numberOfRecordsToRead = -1; public void set(TextFormatConfig config){ - this.quote = bSafe(config.getQuote(), "quote"); - this.quoteEscape = bSafe(config.getEscape(), "escape"); + this.quote = config.getQuote().getBytes(UTF_8); + this.quoteEscape = config.getEscape().getBytes(UTF_8); this.newLineDelimiter = config.getLineDelimiter().getBytes(UTF_8); - this.delimiter = bSafe(config.getFieldDelimiter(), "fieldDelimiter"); - this.comment = bSafe(config.getComment(), "comment"); + this.delimiter = config.getFieldDelimiter().getBytes(UTF_8); + this.comment = config.getComment().getBytes(UTF_8); this.skipFirstLine = config.isSkipFirstLine(); this.headerExtractionEnabled = config.isHeaderExtractionEnabled(); this.autoGenerateColumnNames = config.isAutoGenerateColumnNames(); @@ -63,7 +64,7 @@ public void set(TextFormatConfig config){ } } - public byte getComment(){ + public byte[] getComment(){ return comment; } @@ -87,15 +88,6 @@ public void setUseRepeatedVarChar(boolean useRepeatedVarChar) { this.useRepeatedVarChar = useRepeatedVarChar; } - - private static byte bSafe(char c, String name){ - if(c > Byte.MAX_VALUE) { - throw new IllegalArgumentException(String.format("Failure validating configuration option %s. Expected a " - + "character between 0 and 127 but value was actually %d.", name, (int) c)); - } - return (byte) c; - } - private static byte b(char c){ return (byte) c; } @@ -105,83 +97,78 @@ public byte[] getNewLineDelimiter() { } /** - * Returns the character used for escaping values where the field delimiter is part of the value. Defaults to '"' - * @return the quote character + * Returns the string used for escaping values where the field delimiter is part of the value. 
Defaults to '"' + * @return the quote string */ - public byte getQuote() { + public byte[] getQuote() { return quote; } /** - * Defines the character used for escaping values where the field delimiter is part of the value. Defaults to '"' - * @param quote the quote character + * Defines the string used for escaping values where the field delimiter is part of the value. Defaults to '"' + * @param quote the quote string */ - public void setQuote(byte quote) { + public void setQuote(byte[] quote) { this.quote = quote; } - public String getLineSeparatorString(){ - return lineSeparatorString; - } - - /** - * Identifies whether or not a given character is used for escaping values where the field delimiter is part of the value - * @param ch the character to be verified - * @return true if the given character is the character used for escaping values, false otherwise + * Identifies whether or not a given String is used for escaping values where the field delimiter is part of the value + * @param str the string to be verified + * @return true if the given string is the string used for escaping values, false otherwise */ - public boolean isQuote(byte ch) { - return this.quote == ch; + public boolean isQuote(byte[] str) { + return Arrays.equals(this.quote, str); } /** - * Returns the character used for escaping quotes inside an already quoted value. Defaults to '"' - * @return the quote escape character + * Returns the string used for escaping quotes inside an already quoted value. Defaults to '"' + * @return the quote escape string */ - public byte getQuoteEscape() { + public byte[] getQuoteEscape() { return quoteEscape; } /** - * Defines the character used for escaping quotes inside an already quoted value. Defaults to '"' - * @param quoteEscape the quote escape character + * Defines the string used for escaping quotes inside an already quoted value. Defaults to '"' + * @param quoteEscape the quote escape string */ - public void setQuoteEscape(byte quoteEscape) { + public void setQuoteEscape(byte[] quoteEscape) { this.quoteEscape = quoteEscape; } /** - * Identifies whether or not a given character is used for escaping quotes inside an already quoted value. - * @param ch the character to be verified - * @return true if the given character is the quote escape character, false otherwise + * Identifies whether or not a given String is used for escaping quotes inside an already quoted value. + * @param str the String to be verified + * @return true if the given String is the quote escape String, false otherwise */ - public boolean isQuoteEscape(byte ch) { - return this.quoteEscape == ch; + public boolean isQuoteEscape(byte[] str) { + return Arrays.equals(this.quoteEscape, str); } /** - * Returns the field delimiter character. Defaults to ',' - * @return the field delimiter character + * Returns the field delimiter string. Defaults to ',' + * @return the field delimiter string */ - public byte getDelimiter() { + public byte[] getDelimiter() { return delimiter; } /** - * Defines the field delimiter character. Defaults to ',' - * @param delimiter the field delimiter character + * Defines the field delimiter string. 
Defaults to ','
+   * @param delimiter the field delimiter string
    */
-  public void setDelimiter(byte delimiter) {
+  public void setDelimiter(byte[] delimiter) {
     this.delimiter = delimiter;
   }
 
   /**
-   * Identifies whether or not a given character represents a field delimiter
-   * @param ch the character to be verified
-   * @return true if the given character is the field delimiter character, false otherwise
+   * Identifies whether or not a given string represents a field delimiter
+   * @param str the string to be verified
+   * @return true if the given string is the field delimiter string, false otherwise
    */
-  public boolean isDelimiter(byte ch) {
-    return this.delimiter == ch;
+  public boolean isDelimiter(byte[] str) {
+    return Arrays.equals(this.delimiter, str);
   }
 
   /**
@@ -264,7 +251,7 @@ public void setMaxCharsPerColumn(long maxCharsPerColumn) {
     this.maxCharsPerColumn = maxCharsPerColumn;
   }
 
-  public void setComment(byte comment) {
+  public void setComment(byte[] comment) {
     this.comment = comment;
   }
 
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextReader.java
index f70dbbdfb1..2708c433bb 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextReader.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/easy/text/compliant/TextReader.java
@@ -16,6 +16,7 @@
 package com.dremio.exec.store.easy.text.compliant;
 
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.arrow.memory.ArrowBuf;
 
@@ -36,43 +37,107 @@ final class TextReader implements AutoCloseable {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TextReader.class);
 
-  private static final byte NULL_BYTE = (byte) '\0';
-
   private final TextParsingContext context;
 
   private final long recordsToRead;
   private final TextParsingSettings settings;
 
   private final TextInput input;
-  private final TextOutput output;
+
   private final ArrowBuf workBuf;
 
+  // Record count, i.e. comments are excluded
+  private long recordCount = 0;
+
   private byte ch;
 
-  // index of the field within this record
-  private int fieldIndex;
+  /**
+   * 0 -> ch is a general byte
+   * 1 -> line delimiter or normalized newline is detected starting with ch
+   * 2 -> field delimiter is detected starting with ch
+   */
+  private byte chType;
+
+  private boolean chIsLineDelimiter() {
+    return (chType == 1);
+  }
+
+  private boolean chIsFieldDelimiter() {
+    return (chType == 2);
+  }
+
+  private boolean chIsDelimiter() {
+    return (chType == 1 || chType == 2);
+  }
+
+  /**
+   * Wrapper class to encapsulate the TextOutput to improve readability and
+   * simplify the testing needed by the calling code
+   * (i.e.
eliminate the repeated canAppend checks)
+   */
+  static class OutputWrapper {
+    /** 'canAppend' controls appending parsed content to output */
+    private boolean canAppend = true;
+    private final TextOutput output;
+
+    public OutputWrapper(TextOutput output) { this.output = output; }
+    public TextOutput Output() { return this.output; }
+
+    public boolean canAppend() { return canAppend; }
+    public void setCanAppend(boolean append) { canAppend = append; }
+
+    public void startBatch() { output.startBatch(); }
+    public void finishBatch() { output.finishBatch(); }
+
+    public void startField(int n) {
+      if (canAppend) { output.startField(n); }
+    }
+    public void endField() {
+      if (canAppend) { canAppend = output.endField(); }
+    }
+    public void endEmptyField() {
+      if (canAppend) { canAppend = output.endEmptyField(); }
+    }
+
+    public boolean rowHasData() { return output.rowHasData(); }
+    public void finishRecord() { output.finishRecord(); }
+
+    public void setFieldCurrentDataPointer(int cur) {
+      if (canAppend) { output.setFieldCurrentDataPointer(cur); }
+    }
+    public int getFieldCurrentDataPointer() { return output.getFieldCurrentDataPointer(); }
+
+    public void append(byte parameter) {
+      if (canAppend) { output.append(parameter); }
+    }
+    public void append(byte[] parameter) {
+      if (canAppend) {
+        for (byte pByte : parameter) {
+          output.append(pByte);
+        }
+      }
+    }
+    public void appendIgnoringWhitespace(byte cur) {
+      if (canAppend) { output.appendIgnoringWhitespace(cur); }
+    }
+  }
+
+  private final OutputWrapper output;
 
   /** Behavior settings **/
   private final boolean ignoreTrailingWhitespace;
   private final boolean ignoreLeadingWhitespace;
   private final boolean parseUnescapedQuotes;
 
-  /**
-   * Input line delimiter differs with normalized line delimiter in two cases:
-   * - It is multi-byte
-   * - It is single byte but not same as normalized one
-   * Is there a third case? What if it is multi-byte but the first byte is same as
-   * normalized one?
-   * This flag tells whether input line delimiter is same as normalized line delimiter.
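OutputWrapper above centralizes the append gate: once the underlying TextOutput signals it cannot take more (endField() returning false), every subsequent append silently becomes a no-op until the flag is reset for the next record. A stripped-down sketch of that gating, with a StringBuilder and a size limit standing in for the real output:

    final class GatedOutput {
      private final StringBuilder out = new StringBuilder();
      private boolean canAppend = true;

      void append(char c) {
        if (canAppend) {
          out.append(c);
        }
      }

      /** The sink decides whether further appends are allowed (e.g. a size limit was hit). */
      void endField() {
        if (canAppend) {
          canAppend = out.length() < 5; // stand-in for output.endField()
        }
      }

      /** Reset the gate so the next record is written normally. */
      void finishRecord() {
        canAppend = true;
        out.append('\n');
      }

      public static void main(String[] args) {
        GatedOutput g = new GatedOutput();
        for (char c : "abcdef".toCharArray()) {
          g.append(c);
          g.endField();
        }
        g.finishRecord();
        System.out.print(g.out); // "abcde" then newline: appends stop once the limit hits
      }
    }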
- */ - private final boolean isNormalLineDelimiter; + /** Temp buffer to save white spaces conditionally while parsing */ + private NettyArrowBuf tempWhiteSpaceBuff; - /** Key Characters **/ - private final byte comment; - private final byte delimiter; - private final byte quote; - private final byte quoteEscape; - private final byte newLine; + /** Key Parameters **/ + private final byte[] comment; + private final byte[] fieldDelimiter; + private final byte[] quote; + private final byte[] quoteEscape; + final byte[] lineDelimiter; private String filePath; private boolean schemaImposedMode; @@ -95,18 +160,14 @@ public TextReader(TextParsingSettings settings, TextInput input, TextOutput outp this.ignoreTrailingWhitespace = settings.isIgnoreTrailingWhitespaces(); this.ignoreLeadingWhitespace = settings.isIgnoreLeadingWhitespaces(); this.parseUnescapedQuotes = settings.isParseUnescapedQuotes(); - this.delimiter = settings.getDelimiter(); + this.fieldDelimiter = settings.getDelimiter(); this.quote = settings.getQuote(); this.quoteEscape = settings.getQuoteEscape(); - this.newLine = settings.getNormalizedNewLine(); this.comment = settings.getComment(); this.schemaImposedMode = false; - + this.lineDelimiter = settings.getNewLineDelimiter(); this.input = input; - this.output = output; - - final byte[] newLineDelimiter = settings.getNewLineDelimiter(); - isNormalLineDelimiter = newLineDelimiter.length == 1 && newLineDelimiter[0] == settings.getNormalizedNewLine(); + this.output = new OutputWrapper(output); } public TextReader(TextParsingSettings settings, TextInput input, TextOutput output, ArrowBuf workBuf, String filePath, boolean schemaImposedMode) { @@ -116,14 +177,14 @@ public TextReader(TextParsingSettings settings, TextInput input, TextOutput outp } public TextOutput getOutput(){ - return output; + return this.output.Output(); } /* Check if the given byte is a white space. As per the univocity text reader - * any ASCII <= ' ' is considered a white space. However since byte in JAVA is signed - * we have an additional check to make sure its not negative + * any ASCII <= ' ' is considered a white space. However, since byte in JAVA is signed + * we have an additional check to make sure it's not negative */ - static final boolean isWhite(byte b){ + static boolean isWhite(byte b){ return b <= ' ' && b > -1; } @@ -141,298 +202,289 @@ public long getPos(){ * fields to parseField() function. 
* We mark the start of the record, and if any failures are encountered (OOM, for example),
 * then we reset the input stream to the marked position
- * @return true if parsing this record was successful; false otherwise
- * @throws IOException
+ * @throws IOException if the end of the input stream is reached
 */
- private boolean parseRecord() throws IOException {
- final byte newLine = this.newLine;
- final TextInput input = this.input;
+ private void parseRecord() throws IOException {
+ // index of the field within this record
+ int fieldIndex = 0;
+
- fieldIndex = 0;
 if (isWhite(ch) && ignoreLeadingWhitespace) {
- skipWhitespace();
+ parseWhiteSpaces(true);
 }
- int fieldsWritten = 0;
 try{
- boolean earlyTerm = false;
- while (ch != newLine) {
- earlyTerm = !parseField();
- fieldsWritten++;
- if (ch != newLine) {
- ch = input.nextChar();
- if (ch == newLine) {
- output.startField(fieldsWritten++);
+ while ( !chIsLineDelimiter() ) {
+ parseField(fieldIndex);
+ fieldIndex++;
+ if ( !chIsLineDelimiter() ) {
+ parseNextChar();
+ if ( chIsLineDelimiter() ) {
+ output.startField(fieldIndex);
 output.endEmptyField();
- break;
- }
- }
- if(earlyTerm){
- if(ch != newLine){
- input.skipLines(1, newLine);
 }
- break;
 }
 }
- }catch(StreamFinishedPseudoException e){
+ // re-enable the output for the next record
+ output.setCanAppend(true);
+ recordCount++;
+
+ } catch(StreamFinishedPseudoException e){
 // if we've written part of a field or all of a field, we should send this row.
- if(fieldsWritten == 0 && !output.rowHasData()){
+ if(fieldIndex == 0 && !output.rowHasData()){
 throw e;
 }
 }
 output.finishRecord();
- return true;
+ }
+
+ private void parseNextChar() throws IOException {
+ byte[] byteNtype = input.nextChar();
+ ch = byteNtype[1];
+ chType = byteNtype[0];
 }

 /**
- * Function parses an individual field and ignores any white spaces encountered
+ * Function parses an individual field and, when ignoreTrailingWhitespace is set, skips trailing whitespace
 * by not appending it to the output vector
- * @throws IOException
+ * @throws IOException if the end of the input stream is reached
 */
- private void parseValueIgnore() throws IOException {
- final byte newLine = this.newLine;
- final byte delimiter = this.delimiter;
- final TextOutput output = this.output;
- final TextInput input = this.input;
-
- byte ch = this.ch;
- while (ch != delimiter && ch != newLine) {
- output.appendIgnoringWhitespace(ch);
-// fieldSize++;
- ch = input.nextChar();
- }
- this.ch = ch;
- }
-
- private void parseValueAndHandleTrailingWhitespaces() throws IOException {
- final byte newLine = this.newLine;
- final byte delimiter = this.delimiter;
- final TextOutput output = this.output;
- final TextInput input = this.input;
+ private void parseValue() throws IOException {
 int continuousSpace = 0;
-
- byte ch = this.ch;
 try {
- while (ch != delimiter && ch != newLine) {
- if (isWhite(ch)) {
- continuousSpace++;
+ while (!chIsDelimiter()) {
+ if (ignoreTrailingWhitespace) {
+ if (schemaImposedMode) {
+ if (isWhite(ch)) {
+ continuousSpace++;
+ } else {
+ continuousSpace = 0;
+ }
+ output.append(ch);
+ } else {
+ output.appendIgnoringWhitespace(ch);
+ }
 } else {
- continuousSpace = 0;
+ output.append(ch);
 }
- output.append(ch);
- ch = input.nextChar();
+ parseNextChar();
 }
 } finally {
- output.setFieldCurrentDataPointer(output.getFieldCurrentDataPointer() - continuousSpace); // in case input.nextChar() fails with some exception or even StreamFinishedPseudoException, we still want currentDataPointer to be set properly before exit. 
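Both the old and the new parseValue() rely on the same pointer-rollback trick: bytes are appended eagerly, a counter tracks the current run of whitespace, and the field's write pointer is wound back by that run once a delimiter is reached. A self-contained sketch of the idea against a plain byte array (names here are illustrative, not Dremio APIs):

import java.nio.charset.StandardCharsets;

final class TrailingTrimSketch {
  public static void main(String[] args) {
    byte[] field = "value  \t ".getBytes(StandardCharsets.UTF_8);
    byte[] out = new byte[field.length];
    int writePos = 0;        // stands in for getFieldCurrentDataPointer()
    int continuousSpace = 0; // length of the trailing whitespace run seen so far
    for (byte b : field) {
      continuousSpace = (b <= ' ' && b > -1) ? continuousSpace + 1 : 0; // same test as isWhite()
      out[writePos++] = b;   // append eagerly, trim afterwards
    }
    writePos -= continuousSpace; // wind the pointer back past the trailing run
    System.out.println("[" + new String(out, 0, writePos, StandardCharsets.UTF_8) + "]"); // prints [value]
  }
}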
- } - this.ch = ch; - } - - /** - * Function parses an individual field and appends all characters till the delimeter (or newline) - * to the output, including white spaces - * @throws IOException - */ - private void parseValueAll() throws IOException { - final byte newLine = this.newLine; - final byte delimiter = this.delimiter; - final TextOutput output = this.output; - final TextInput input = this.input; - - byte ch = this.ch; - while (ch != delimiter && ch != newLine) { - output.append(ch); - ch = input.nextChar(); - } - this.ch = ch; - } - - /** - * Function simply delegates the parsing of a single field to the actual implementation based on parsing config - * @throws IOException - */ - private void parseValue() throws IOException { - if (ignoreTrailingWhitespace) { - if (schemaImposedMode) { - parseValueAndHandleTrailingWhitespaces(); - } else { - parseValueIgnore(); + // in case parseNextChar fails with some exception or even StreamFinishedPseudoException + // we still want currentDataPointer to be set properly before exit. + if(continuousSpace > 0){ + output.setFieldCurrentDataPointer(output.getFieldCurrentDataPointer() - continuousSpace); } - }else{ - parseValueAll(); } } /** - * Recursive function invoked when a quote is encountered. Function also - * handles the case when there are non-white space characters in the field - * after the quoted value. - * @param prev previous byte read - * @throws IOException + * Function invoked when a quote is encountered. Function also + * handles the unescaped quotes conditionally. + * @throws IOException if End of Input stream is reached */ - private void parseQuotedValue(byte prev) throws IOException { - final byte newLine = this.newLine; - final byte delimiter = this.delimiter; - final TextOutput output = this.output; - final TextInput input = this.input; - final byte quote = this.quote; - - ch = input.nextCharNoNewLineCheck(); - - while (!(prev == quote && (ch == delimiter || ch == newLine || isWhite(ch)))) { - if (ch != quote) { - if (prev == quote) { // unescaped quote detected - if (parseUnescapedQuotes) { - output.append(quote); - if (ch != quoteEscape) { - output.append(ch); + private void parseQuotedValue() throws IOException { + boolean isPrevQuoteEscape = false; + boolean isPrevQuote = false; + boolean quoteNescapeSame = Arrays.equals(quote, quoteEscape); + boolean isQuoteMatched; + while (true) { + if (isPrevQuote) { // encountered quote previously + if ( chIsDelimiter() ) { // encountered delimiter (line or field) + break; + } + isQuoteMatched = input.match(ch, quote); + if (quoteNescapeSame) { // quote and escape are same + if (!isQuoteMatched) { + if (isEndOfQuotedField()) { + break; } - parseQuotedValue(ch); - break; } else { - throw new TextParsingException( - context, - "Unescaped quote character '" - + quote - + "' inside quoted value of CSV field. To allow unescaped quotes, set 'parseUnescapedQuotes' to 'true' in the CSV parser settings. 
Cannot parse CSV input."); + output.append(quote); + parseNextChar(); + } + } else { + if (isQuoteMatched){ + // previous was a quote, ch is a quote + // and since "" is equivalent to \" in SQL, treat previous as escaped quote + isPrevQuoteEscape = true; + } else if (isEndOfQuotedField()) { + break; } } - if (ch != quoteEscape) { - output.append(ch); - } - prev = ch; - } else if (prev == quoteEscape) { - output.append(quote); - prev = NULL_BYTE; - } else { - prev = ch; + isPrevQuote = false; } - ch = input.nextCharNoNewLineCheck(); - } - - // Handles whitespaces after quoted value: - // Whitespaces are ignored (i.e., ch <= ' ') if they are not used as delimiters (i.e., ch != ' ') - // For example, in tab-separated files (TSV files), '\t' is used as delimiter and should not be ignored - // Content after whitespaces may be parsed if 'parseUnescapedQuotes' is enabled. - if (ch != newLine && ch <= ' ' && ch != delimiter) { - final NettyArrowBuf workBuf = NettyArrowBuf.unwrapBuffer(this.workBuf); - workBuf.resetWriterIndex(); - do { - // saves whitespaces after value - workBuf.writeByte(ch); - ch = input.nextChar(); - // found a new line, go to next record. - if (ch == newLine) { - return; + if ( chIsLineDelimiter() ) { + if (isPrevQuoteEscape) { + output.append(quoteEscape); } - } while (ch <= ' ' && ch != delimiter); - - // there's more stuff after the quoted value, not only empty spaces. - if (!(ch == delimiter || ch == newLine) && parseUnescapedQuotes) { - output.append(quote); - for(int i =0; i < workBuf.writerIndex(); i++){ - output.append(workBuf.getByte(i)); + if (ch==-1) { + output.append(lineDelimiter); + } else { + output.append(ch); } - // the next character is not the escape character, put it there - if (ch != quoteEscape) { + isPrevQuoteEscape = false; + parseNextChar(); + continue; + } else if ( chIsFieldDelimiter() ) { + if (isPrevQuoteEscape) { + output.append(quoteEscape); + } + output.append(fieldDelimiter); + isPrevQuoteEscape = false; + parseNextChar(); + continue; + } + isQuoteMatched = input.match(ch, quote); + if (!isQuoteMatched) { + if (!quoteNescapeSame) { + if (isPrevQuoteEscape) { + output.append(quoteEscape); + } + if (input.match(ch, quoteEscape)) { + isPrevQuoteEscape = true; + } else { + isPrevQuoteEscape = false; + output.append(ch); + } + } else { output.append(ch); } - // sets this character as the previous character (may be escaping) - // calls recursively to keep parsing potentially quoted content - parseQuotedValue(ch); + } else { + if (!quoteNescapeSame) { + if (!isPrevQuoteEscape) { + isPrevQuote = true; + } else { + output.append(quote); + } + isPrevQuoteEscape = false; + } else { + isPrevQuote = true; + } } + parseNextChar(); } + } - if (!(ch == delimiter || ch == newLine)) { - throw new TextParsingException(context, "Unexpected character '" + ch - + "' following quoted value of CSV field. Expecting '" + delimiter + "'. 
Cannot parse CSV input.");
 }

+ private boolean isEndOfQuotedField() throws IOException {
+ boolean savedWhitespaces = false;
+ if (isWhite(ch)) {
+ // Handles whitespaces after quoted value:
+ // Whitespaces are ignored (i.e., ch <= ' ') if they are not used as delimiters (i.e., ch != ' ')
+ // For example, in tab-separated files (TSV files), '\t' is used as delimiter and should not be ignored
+ savedWhitespaces = true;
+ parseWhiteSpaces(false);
+ if ( chIsDelimiter() ) {
+ return true;
+ }
+ }
+ if (!parseUnescapedQuotes) {
+ throw new TextParsingException(
+ context,
+ String.format("Unescaped quote '%s' inside quoted value of CSV field. To allow unescaped quotes, set 'parseUnescapedQuotes' to 'true' in the CSV parser settings. Cannot parse CSV input.", Arrays.toString(quote)));
 }
+ output.append(quote);
+ if (savedWhitespaces) {
+ for (int i = 0; i < tempWhiteSpaceBuff.writerIndex(); i++) {
+ output.append(tempWhiteSpaceBuff.getByte(i));
+ }
+ }
+ return false;
 }

 /**
- * Captures the entirety of parsing a single field and based on the input delegates to the appropriate function
- * @return
- * @throws IOException
+ * Captures the entirety of parsing a single field
+ * @throws IOException if the end of the input stream is reached
 */
- private final boolean parseField() throws IOException {
+ private void parseField(int fieldIndex) throws IOException {

- output.startField(fieldIndex++);
+ output.startField(fieldIndex);

 if (isWhite(ch) && ignoreLeadingWhitespace) {
- skipWhitespace();
+ parseWhiteSpaces(true);
 }

- if (ch == delimiter) {
- return output.endEmptyField();
+ if ( chIsDelimiter() ) {
+ output.endEmptyField();
 } else {
- if (ch == quote) {
- parseQuotedValue(NULL_BYTE);
+ if (input.match(ch, quote)) {
+ parseNextChar();
+ parseQuotedValue();
 } else {
 parseValue();
 }
- return output.endField();
+ output.endField();
 }
- }

 /**
- * Helper function to skip white spaces occurring at the current input stream.
- * @throws IOException
+ * Helper function to skip whitespace at the current position in the input stream, optionally saving it to a temporary buffer.
+ * @throws IOException if the end of the input stream is reached
 */
- private void skipWhitespace() throws IOException {
- final byte delimiter = this.delimiter;
- final byte newLine = this.newLine;
- final TextInput input = this.input;
+ private void parseWhiteSpaces(boolean ignoreWhitespaces) throws IOException {

- while (isWhite(ch) && ch != delimiter && ch != newLine) {
- ch = input.nextChar();
+ // don't create buffers if code will not be able to output the cached bytes
+ boolean bufferOn = output.canAppend();
+
+ if (!chIsDelimiter()) {
+ if(bufferOn) {
+ tempWhiteSpaceBuff = NettyArrowBuf.unwrapBuffer(this.workBuf);
+ tempWhiteSpaceBuff.resetWriterIndex();
+ }
+ while (!chIsDelimiter() && isWhite(ch)) {
+ if (!ignoreWhitespaces && bufferOn) {
+ tempWhiteSpaceBuff.writeByte(ch);
+ }
+ parseNextChar();
+ }
+ }
 }

 /**
 * Starting point for the reader. Sets up the input interface.
- * @throws IOException
+ * @throws IOException if reading from the input fails
+ * @throws IllegalArgumentException if no record is detected, which usually points to a wrong line delimiter
 */
- public final void start() throws IOException {
+ public void start() throws IOException {
 context.stopped = false;
- input.start();
+ if (input.start() || settings.isSkipFirstLine()) {
+ // block output
+ output.setCanAppend(false);
+ parseNext();
+ if (recordCount == 0) {
+ // end of file most likely
+ throw new IllegalArgumentException("Only one data line detected. Please consider changing the line delimiter.");
+ }
+ }
 }

 /**
- * Parses the next record from the input. 
Will skip the line if its a comment, + * Parses the next record from the input. Will skip the line if it is a comment, * this is required when the file contains headers - * @throws IOException + * @throws IOException will rethrow some exceptions */ - public final boolean parseNext() throws IOException { + public boolean parseNext() throws IOException { try { while (!context.stopped) { - ch = input.nextChar(); - if (ch == comment) { + parseNextChar(); + if (chIsLineDelimiter()) { // empty line + break; + } else if (chIsFieldDelimiter()) { + break; + } else if (input.match(ch, comment)) { input.skipLines(1); continue; } - if ((ch == newLine) && !isNormalLineDelimiter) { - continue; - } break; } - final long initialLineNumber = input.lineCount(); - boolean success = parseRecord(); - if (initialLineNumber + 1 < input.lineCount()) { - throw new TextParsingException(context, "Cannot use newline character within quoted string"); - } + parseRecord(); - if(success){ - if (recordsToRead > 0 && context.currentRecord() >= recordsToRead) { - context.stop(); - } - return true; - }else{ - return false; + if (recordsToRead > 0 && context.currentRecord() >= recordsToRead) { + context.stop(); } + return true; } catch (StreamFinishedPseudoException ex) { stopParsing(); @@ -470,8 +522,8 @@ private String displayLineSeparators(String str, boolean addNewLine) { * Helper method to handle exceptions caught while processing text files and generate better error messages associated with * the exception. * @param ex Exception raised - * @return - * @throws IOException + * @return Exception replacement + * @throws IOException Selectively augments exception error messages and rethrows */ private TextParsingException handleException(Exception ex) throws IOException { @@ -501,7 +553,7 @@ private TextParsingException handleException(Exception ex) throws IOException { if (tmp.contains("\n") || tmp.contains("\r")) { tmp = displayLineSeparators(tmp, true); - String lineSeparator = displayLineSeparators(settings.getLineSeparatorString(), false); + String lineSeparator = displayLineSeparators(Arrays.toString(settings.getNewLineDelimiter()), false); message += "\nIdentified line separator characters in the parsed content. This may be the cause of the error. The line separator in your parser settings is set to '" + lineSeparator + "'. Parsed content:\n\t" + tmp; } @@ -539,14 +591,15 @@ private TextParsingException handleException(Exception ex) throws IOException { */ public void finishBatch(){ output.finishBatch(); -// System.out.println(String.format("line %d, cnt %d", input.getLineCount(), output.getRecordCount())); + // System.out.println(String.format("line %d, cnt %d", input.getLineCount(), output.getRecordCount())); } /** - * Invoked once there are no more records and we are done with the + * Invoked once there are no more records, and we are done with the * current record reader to clean up state. 
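+ *
+ * <p>For orientation, a sketch of the reader's full lifecycle as driven by the
+ * calling code (assumes a fully configured reader; error handling and batch
+ * setup are elided):
+ * <pre>{@code
+ * reader.start();                // may pre-parse one record to skip a header line
+ * while (reader.parseNext()) {
+ *   // each successful call appends one parsed record to the TextOutput
+ * }
+ * reader.finishBatch();
+ * reader.close();
+ * }</pre>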
- * @throws IOException + * @throws IOException nested exception */ + @Override public void close() throws IOException{ input.close(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/BaseIcebergExecutionDatasetAccessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/BaseIcebergExecutionDatasetAccessor.java index 4c74952d8a..2d85fcad5a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/BaseIcebergExecutionDatasetAccessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/BaseIcebergExecutionDatasetAccessor.java @@ -47,6 +47,7 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.FileConfigMetadata; import com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; import com.dremio.exec.planner.cost.ScanCostFactor; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.PartitionChunkListingImpl; @@ -123,6 +124,7 @@ public DatasetMetadata getDatasetMetadata(GetMetadataOption... options) { Long.parseLong(snapshot.summary().getOrDefault("total-equality-deletes", "0")) : 0L; long numDeleteFiles = snapshot != null ? Long.parseLong(snapshot.summary().getOrDefault("total-delete-files", "0")) : 0L; + long lastModTime = snapshot != null ? snapshot.timestampMillis() : 0L; if (numDeleteFiles > 0 && !optionResolver.getOption(ExecConstants.ENABLE_ICEBERG_MERGE_ON_READ_SCAN)) { throw UserException.unsupportedError() @@ -142,6 +144,7 @@ public DatasetMetadata getDatasetMetadata(GetMetadataOption... options) { final DatasetStats manifestStats = DatasetStats.of(numDataFiles, ScanCostFactor.EASY.getFactor()); final DatasetStats deleteStats = DatasetStats.of(numPositionDeletes + numEqualityDeletes, ScanCostFactor.PARQUET.getFactor()); + final DatasetStats equalityDeleteStats = DatasetStats.of(numEqualityDeletes, ScanCostFactor.PARQUET.getFactor()); final DatasetStats deleteManifestStats = DatasetStats.of(numDeleteFiles, ScanCostFactor.EASY.getFactor()); final SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.name()).setMapTypeEnabled(optionResolver.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).build(); @@ -161,12 +164,13 @@ public DatasetMetadata getDatasetMetadata(GetMetadataOption... options) { )); if (snapshot != null && !table.spec().isUnpartitioned()) { - String partitionStatsFile = IcebergUtils.getPartitionStatsFile( + ImmutableDremioFileAttrs partitionStatsFileAttrs = IcebergUtils.getPartitionStatsFileAttrs( getMetadataLocation(), snapshot.snapshotId(), - new Configuration(configuration), plugin); - if (partitionStatsFile != null) { - icebergDatasetBuilder.setPartitionStatsFile(partitionStatsFile); + table.io()); + if (partitionStatsFileAttrs.fileName() != null) { + icebergDatasetBuilder.setPartitionStatsFile(partitionStatsFileAttrs.fileName()); + icebergDatasetBuilder.setPartitionStatsFileSize(partitionStatsFileAttrs.fileLength()); } } final BytesOutput extraInfo = icebergDatasetBuilder.build()::writeTo; @@ -180,8 +184,8 @@ public DatasetMetadata getDatasetMetadata(GetMetadataOption... options) { final String metadataFileLocation = getMetadataLocation(); final long snapshotId = snapshot != null ? 
snapshot.snapshotId() : -1; - return new DatasetMetadataImpl(fileConfig, datasetStats, manifestStats, deleteStats, deleteManifestStats, - batchSchema, partitionColumns, extraInfo, metadataFileLocation, snapshotId, partitionSpecs, icebergSchema); + return new DatasetMetadataImpl(fileConfig, datasetStats, manifestStats, deleteStats, equalityDeleteStats, deleteManifestStats, + batchSchema, partitionColumns, extraInfo, metadataFileLocation, snapshotId, partitionSpecs, icebergSchema, lastModTime); } @Override @@ -210,6 +214,7 @@ private static class DatasetMetadataImpl implements FileConfigMetadata, Supports private final DatasetStats datasetStats; private final DatasetStats manifestStats; private final DatasetStats deleteStats; + private final DatasetStats equalityDeleteStats; private final DatasetStats deleteManifestStats; private final org.apache.arrow.vector.types.pojo.Schema batchSchema; private final List partitionColumns; @@ -218,25 +223,28 @@ private static class DatasetMetadataImpl implements FileConfigMetadata, Supports private final long snapshotId; private final BytesOutput partitionSpecs; private final String icebergSchema; + private final long modificationTime; private DatasetMetadataImpl( - FileConfig fileConfig, - DatasetStats datasetStats, - DatasetStats manifestStats, - DatasetStats deleteStats, - DatasetStats deleteManifestStats, - Schema batchSchema, - List partitionColumns, - BytesOutput extraInfo, - String metadataFileLocation, - long snapshotId, - BytesOutput partitionSpecs, - String icebergSchema - ) { + FileConfig fileConfig, + DatasetStats datasetStats, + DatasetStats manifestStats, + DatasetStats deleteStats, + DatasetStats equalityDeleteStats, + DatasetStats deleteManifestStats, + Schema batchSchema, + List partitionColumns, + BytesOutput extraInfo, + String metadataFileLocation, + long snapshotId, + BytesOutput partitionSpecs, + String icebergSchema, + long modificationTime) { this.fileConfig = fileConfig; this.datasetStats = datasetStats; this.manifestStats = manifestStats; this.deleteStats = deleteStats; + this.equalityDeleteStats = equalityDeleteStats; this.deleteManifestStats = deleteManifestStats; this.batchSchema = batchSchema; this.partitionColumns = partitionColumns; @@ -245,6 +253,7 @@ private DatasetMetadataImpl( this.snapshotId = snapshotId; this.partitionSpecs = partitionSpecs; this.icebergSchema = icebergSchema; + this.modificationTime = modificationTime; } @Override @@ -267,11 +276,19 @@ public DatasetStats getDeleteStats() { return deleteStats; } + @Override + public DatasetStats getEqualityDeleteStats() { + return equalityDeleteStats; + } + @Override public DatasetStats getDeleteManifestStats() { return deleteManifestStats; } + @Override + public long getMtime() { return modificationTime; } + @Override public org.apache.arrow.vector.types.pojo.Schema getRecordSchema() { return batchSchema; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DataFileContentReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DataFileContentReader.java index 35b22311f4..45411e21cf 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DataFileContentReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DataFileContentReader.java @@ -27,6 +27,7 @@ import java.util.stream.Collectors; import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferManager; import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.ValueVector; import 
org.apache.arrow.vector.complex.ListVector; @@ -34,14 +35,12 @@ import org.apache.arrow.vector.complex.writer.BaseWriter; import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.ContentFile; -import org.apache.iceberg.DataFile; import org.apache.iceberg.DremioManifestReaderUtils.ManifestEntryWrapper; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.Types; -import com.dremio.common.AutoCloseables; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.expr.TypeHelper; import com.dremio.exec.physical.config.TableFunctionContext; @@ -59,12 +58,14 @@ public class DataFileContentReader implements ManifestEntryProcessor { private Schema fileSchema; private PartitionSpec icebergPartitionSpec; private boolean doneWithCurrentDatafile; - private final ArrowBuf tmpBuf; private Map idTypeMap; + private ArrowBuf tmpBuf; + private BufferManager bufferManager; public DataFileContentReader(OperatorContext context, TableFunctionContext functionContext) { outputSchema = functionContext.getFullSchema(); - tmpBuf = context.getAllocator().buffer(4096); + bufferManager = context.getBufferManager(); + tmpBuf = bufferManager.getManagedBuffer(4096); } @Override @@ -87,7 +88,7 @@ public VectorValueSupplier(ValueVector valueVector, Supplier valueSuppli } @Override - public void initialise(PartitionSpec partitionSpec) { + public void initialise(PartitionSpec partitionSpec, int row) { icebergPartitionSpec = partitionSpec; fileSchema = icebergPartitionSpec.schema(); idTypeMap = fileSchema.columns().stream().collect( @@ -99,7 +100,7 @@ public void initialise(PartitionSpec partitionSpec) { */ @Override public int processManifestEntry(ManifestEntryWrapper> manifestEntry, int startOutIndex, int maxOutputCount) { - DataFile currentDataFile = (DataFile) manifestEntry.file(); + ContentFile currentDataFile = manifestEntry.file(); if (!shouldProcessCurrentDatafile(maxOutputCount)) { return 0; } @@ -119,7 +120,7 @@ public int processManifestEntry(ManifestEntryWrapper> m return 1; } - private Supplier getFieldValueSupplier(String fieldName, DataFile currentDataFile) { + private Supplier getFieldValueSupplier(String fieldName, ContentFile currentDataFile) { switch (fieldName){ case "content": return () -> currentDataFile.content().name(); case "file_path": @@ -212,7 +213,7 @@ private void writeStringValue(BaseWriter.StructWriter structWriter, String field structWriter.varChar(fieldName).writeNull(); } else { byte[] path = value.getBytes(StandardCharsets.UTF_8); - tmpBuf.reallocIfNeeded(path.length); + tmpBuf = tmpBuf.reallocIfNeeded(path.length); tmpBuf.setBytes(0, path); structWriter.varChar(fieldName).writeVarChar(0, path.length, tmpBuf); } @@ -223,16 +224,16 @@ private void writeStringValue(BaseWriter.StructWriter structWriter, String field * @param currentDataFile * @return */ - private String getPartitionData(DataFile currentDataFile) { + private String getPartitionData(ContentFile currentDataFile) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("{"); List fields = icebergPartitionSpec.partitionType().asStructType().fields(); for (int i = 0; i < fields.size(); i++) { - Types.NestedField nestedField = fields.get(i); - stringBuilder.append(currentDataFile.partition().get(i, nestedField.type().typeId().javaClass())); - if (i != fields.size()-1) { + if (i > 0) { stringBuilder.append(", "); } + Types.NestedField nestedField = fields.get(i); + 
stringBuilder.append(nestedField.name()).append("=").append(currentDataFile.partition().get(i, nestedField.type().typeId().javaClass())); } stringBuilder.append("}"); return stringBuilder.toString(); @@ -252,7 +253,8 @@ public void closeManifestEntry() { @Override public void close() throws Exception { - AutoCloseables.close(tmpBuf); + // release tmpBuf and allocate a zero-sized one, but rely on bufferManager to close the buffers it's allocated + bufferManager.replace(tmpBuf, 0); } private boolean shouldProcessCurrentDatafile(int maxOutputCount) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedDataFilesMetadataTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedFilesMetadataTableFunction.java similarity index 88% rename from sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedDataFilesMetadataTableFunction.java rename to sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedFilesMetadataTableFunction.java index 2052a14a00..47e97cedbd 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedDataFilesMetadataTableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DeletedFilesMetadataTableFunction.java @@ -24,6 +24,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.exec.physical.config.DeletedFilesMetadataTableFunctionContext; import com.dremio.exec.physical.config.TableFunctionConfig; import com.dremio.exec.record.VectorAccessible; import com.dremio.exec.store.OperationType; @@ -38,8 +39,8 @@ /** * A table function that handles updating of the metadata for deleted data files. */ -public class DeletedDataFilesMetadataTableFunction extends AbstractTableFunction { - private static final Logger LOGGER = LoggerFactory.getLogger(DeletedDataFilesMetadataTableFunction.class); +public class DeletedFilesMetadataTableFunction extends AbstractTableFunction { + private static final Logger LOGGER = LoggerFactory.getLogger(DeletedFilesMetadataTableFunction.class); private IntVector outputOperationTypeVector; private VarCharVector inputFilePathVector; @@ -55,9 +56,11 @@ public class DeletedDataFilesMetadataTableFunction extends AbstractTableFunction private Text inputFilePath; private boolean doneWithRow; private Optional icebergMetadataBytes; + private OperationType operationType; - public DeletedDataFilesMetadataTableFunction(OperatorContext context, TableFunctionConfig functionConfig) { + public DeletedFilesMetadataTableFunction(OperatorContext context, TableFunctionConfig functionConfig) { super(context, functionConfig); + operationType = ((DeletedFilesMetadataTableFunctionContext) functionConfig.getFunctionContext()).getOperationType(); } @Override @@ -98,7 +101,7 @@ public int processRow(int startOutIndex, int maxRecords) throws Exception { return 0; } else { // OperationType is always OperationType.DELETE_DATAFILE. 
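That comment describes the behavior being replaced: after this rename the emitted operation type comes from the function context instead of being hard-coded. A sketch of the new wiring, using only types that appear in this diff:

// The operation type is resolved once from the function context in the
// constructor, then stamped into the output vector for every processed row.
OperationType operationType =
    ((DeletedFilesMetadataTableFunctionContext) functionConfig.getFunctionContext())
        .getOperationType();
outputOperationTypeVector.setSafe(startOutIndex, operationType.value);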
- outputOperationTypeVector.setSafe(startOutIndex, OperationType.DELETE_DATAFILE.value); + outputOperationTypeVector.setSafe(startOutIndex, this.operationType.value); // inputFilePath is null for inserted rows in Merge query, skip if (inputFilePath != null) { outputPathVector.setSafe(startOutIndex, inputFilePath); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioFileIO.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioFileIO.java index 9d6950ffde..65dae1c4be 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioFileIO.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioFileIO.java @@ -17,22 +17,15 @@ import java.io.FileNotFoundException; import java.io.IOException; -import java.util.Iterator; +import java.io.UncheckedIOException; import java.util.List; -import java.util.Map; -import org.apache.hadoop.conf.Configuration; -import org.apache.iceberg.exceptions.RuntimeIOException; -import org.apache.iceberg.hadoop.DremioOutputFile; import org.apache.iceberg.io.FileIO; import org.apache.iceberg.io.InputFile; import org.apache.iceberg.io.OutputFile; import com.dremio.common.exceptions.UserException; -import com.dremio.common.util.Closeable; -import com.dremio.common.util.concurrent.ContextClassLoaderSwapper; -import com.dremio.exec.catalog.MutablePlugin; -import com.dremio.exec.hadoop.DremioHadoopUtils; +import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter; import com.dremio.io.file.FileAttributes; import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; @@ -40,84 +33,47 @@ import com.google.common.base.Preconditions; /** - * DremioFileIO is an implementation of Iceberg FileIO interface. - * It mainly is used for returning the Dremio implementation of - * Iceberg InputFile and Outputfile interfaces - * - * Calls to DremioFileIO APIs maybe done from Hive classes from a different - * class loader context. So, always changing the context to application class loader in each method. + * An implementation of Iceberg's FileIO interface that delegates to a Dremio FileSystem instance for all IO + * operations. */ public class DremioFileIO implements FileIO { private final FileSystem fs; private final OperatorContext context; private final List dataset; - private org.apache.hadoop.fs.FileSystem hadoopFs; - private MutablePlugin plugin; /* - * Send FileLength as non null if we want to use FileIO for single file read. + * Send FileLength as non-null if we want to use FileIO for single file read. * For multiple file read send fileLength as a null. 
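 *
 * A usage sketch (path illustrative; context, dataset, datasourcePluginUID and
 * fileLength are optional and passed as null here):
 *
 *   FileIO io = new DremioFileIO(fs, null, null, null, null, conf);
 *   InputFile in = io.newInputFile("/warehouse/tbl/metadata/v3.metadata.json");
 *   long size = in.getLength(); // resolved via fs.getFileAttributes() since fileLength was null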
*/ private final Long fileLength; - private final Configuration conf; + private final FileSystemConfigurationAdapter conf; private final String datasourcePluginUID; // this can be null if data files, metadata file can be accessed with same plugin - public DremioFileIO(Configuration conf, MutablePlugin plugin) { - this(null, null, null, null, null, conf, plugin); - } - - public DremioFileIO(FileSystem fs, Iterable> conf, MutablePlugin plugin) { - this(fs, null, null, null, null, conf.iterator(), plugin); - } - - public DremioFileIO(FileSystem fs, OperatorContext context, List dataset, String datasourcePluginUID, Long fileLength, Configuration conf, MutablePlugin plugin) { - Preconditions.checkNotNull(conf, "Configuration can not be null"); - Preconditions.checkNotNull(plugin, "Plugin can not be null"); - this.fs = fs; + public DremioFileIO(FileSystem fs, OperatorContext context, List dataset, String datasourcePluginUID, + Long fileLength, FileSystemConfigurationAdapter conf) { + this.fs = Preconditions.checkNotNull(fs); this.context = context; this.dataset = dataset; this.datasourcePluginUID = datasourcePluginUID; // this can be null if it is same as the plugin which created fs this.fileLength = fileLength; - this.conf = conf; - this.plugin = plugin; + this.conf = Preconditions.checkNotNull(conf); } - private DremioFileIO(FileSystem fs, OperatorContext context, List dataset, String datasourcePluginUID, Long fileLength, Iterator> conf, MutablePlugin plugin) { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { - Preconditions.checkNotNull(conf, "Configuration can not be null"); - Preconditions.checkNotNull(plugin, "Plugin can not be null"); - this.fs = fs; - this.context = context; - this.dataset = dataset; - this.datasourcePluginUID = datasourcePluginUID; // this can be null if it is same as the plugin which created fs - this.fileLength = fileLength; - this.conf = new Configuration(); - while (conf.hasNext()) { - Map.Entry property = conf.next(); - this.conf.set(property.getKey(), property.getValue()); - } - this.plugin = plugin; - } - } - - - - // In case if FS is null then reading of file will be take care by HadoopInputFile. 
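The newInputFile() implementation below boils down to a normalize-then-stat flow; sketched standalone with the com.dremio.io.file API as it appears in this diff (the variable names are illustrative):

Path p = Path.of(location);
if (!fs.supportsPathsWithScheme()) {
  // reduce a scheme-qualified location to a container-specific relative path
  p = Path.of(Path.getContainerSpecificRelativePath(p));
}
FileAttributes attrs = fs.getFileAttributes(p); // may throw FileNotFoundException
long size = attrs.size();
long mtime = attrs.lastModifiedTime().toMillis();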
@Override public InputFile newInputFile(String path) { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { + try { Long fileSize; Long mtime = 0L; - Path filePath = Path.of(path); - if (fs != null && !fs.supportsPathsWithScheme()) { - path = Path.getContainerSpecificRelativePath(filePath); - filePath = Path.of(path); + Path pluginRelativePath = Path.of(path); + if (!fs.supportsPathsWithScheme()) { + path = Path.getContainerSpecificRelativePath(pluginRelativePath); + pluginRelativePath = Path.of(path); } - if (fileLength == null && fs != null) { + if (fileLength == null) { try { - FileAttributes fileAttributes = fs.getFileAttributes(filePath); + FileAttributes fileAttributes = fs.getFileAttributes(pluginRelativePath); fileSize = fileAttributes.size(); mtime = fileAttributes.lastModifiedTime().toMillis(); } catch (FileNotFoundException e) { @@ -129,52 +85,46 @@ public InputFile newInputFile(String path) { fileSize = fileLength; } - initializeHadoopFs(filePath); - return new DremioInputFile(fs, filePath, fileSize, mtime, context, dataset, datasourcePluginUID, conf, hadoopFs); + return new DremioInputFile(this, pluginRelativePath, fileSize, mtime, conf); } catch (IOException e) { throw UserException.ioExceptionError(e).buildSilently(); } } - private void initializeHadoopFs(Path path) { - // initialize hadoop Fs firstTime - if(hadoopFs == null && (context == null || fs == null)) { - hadoopFs = plugin.getHadoopFsSupplier(path.toString(), conf).get(); - } - } - @Override public OutputFile newOutputFile(String path) { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { - if (fs == null || !fs.supportsPathsWithScheme()) { - path = Path.getContainerSpecificRelativePath(Path.of(path)); - } - initializeHadoopFs(Path.of(path)); - return new DremioOutputFile(path, conf, hadoopFs); + String pluginRelativePath = path; + if (!fs.supportsPathsWithScheme()) { + pluginRelativePath = Path.getContainerSpecificRelativePath(Path.of(pluginRelativePath)); } + return new DremioOutputFile(this, Path.of(pluginRelativePath), conf); } @Override public void deleteFile(String path) { - deleteFile(path, false /* not recursive */, true); + if (!fs.supportsPathsWithScheme()) { + path = Path.getContainerSpecificRelativePath(Path.of(path)); + } + try { + fs.delete(Path.of(path), false); + } catch (IOException e) { + throw new UncheckedIOException(String.format("Failed to delete file: %s", path), e); + } } - public void deleteFile(String path, boolean recursive, boolean getContainerSpecificRelativePath) { - try (Closeable swapper = ContextClassLoaderSwapper.swapClassLoader(DremioFileIO.class)) { - if ((fs == null || !fs.supportsPathsWithScheme()) && getContainerSpecificRelativePath) { - path = Path.getContainerSpecificRelativePath(Path.of(path)); - } - org.apache.hadoop.fs.Path toDelete = DremioHadoopUtils.toHadoopPath(path); - org.apache.hadoop.fs.FileSystem fs = plugin.getHadoopFsSupplier(toDelete.toString(), conf).get(); - try { - fs.delete(toDelete, recursive ); - } catch (IOException e) { - throw new RuntimeIOException(e, "Failed to delete file: %s", path); - } - } + FileSystem getFs() { + return fs; + } + + OperatorContext getContext() { + return context; + } + + List getDataset() { + return dataset; } - public MutablePlugin getPlugin(){ - return this.plugin; + String getDatasourcePluginUID() { + return datasourcePluginUID; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioInputFile.java 
b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioInputFile.java index 3d595d2951..146a98ee24 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioInputFile.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioInputFile.java @@ -16,17 +16,13 @@ package com.dremio.exec.store.iceberg; import java.io.IOException; -import java.util.List; +import java.io.UncheckedIOException; -import org.apache.hadoop.conf.Configuration; -import org.apache.iceberg.hadoop.HadoopInputFile; import org.apache.iceberg.io.InputFile; import org.apache.iceberg.io.SeekableInputStream; -import com.dremio.io.file.FileSystem; +import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter; import com.dremio.io.file.Path; -import com.dremio.sabot.exec.context.OperatorContext; -import com.google.common.base.Preconditions; /** * DremioInputFile is a dremio implementation of Iceberg InputFile interface. @@ -35,54 +31,32 @@ */ public class DremioInputFile implements InputFile { - private final FileSystem fs; + private final DremioFileIO io; + private final Path path; - private final Long fileSize; + private Long fileSize; private final Long mtime; - private final OperatorContext context; - private final List dataset; - private final String datasourcePluginUID; // this can be null if data files, metadata file can be accessed with same plugin private final String locationWithScheme; - private HadoopInputFile hadoopInputFile; - private final Configuration configuration; - private final org.apache.hadoop.fs.FileSystem hadoopFs; - private final String filePath; - public DremioInputFile(FileSystem fs, Path path, Long fileSize, Long mtime, OperatorContext context, List dataset, - String datasourcePluginUID, Configuration conf, org.apache.hadoop.fs.FileSystem hadoopFs) { - this.fs = fs; + public DremioInputFile(DremioFileIO io, Path path, Long fileSize, Long mtime, FileSystemConfigurationAdapter conf) { + this.io = io; this.path = path; this.fileSize = fileSize; this.mtime = mtime; - this.context = context; - this.dataset = dataset; - this.datasourcePluginUID = datasourcePluginUID; // this can be null if it is same as the plugin which created fs - this.configuration = conf; - String scheme; - if (fs == null) { - Preconditions.checkArgument(hadoopFs != null, "HadoopFs can not be null"); - filePath = Path.getContainerSpecificRelativePath(path); - scheme = hadoopFs.getScheme(); - } else { - scheme = fs.getScheme(); - filePath = this.path.toString(); - } - this.locationWithScheme = IcebergUtils.getValidIcebergPath(new org.apache.hadoop.fs.Path(filePath), conf, scheme); - this.hadoopFs = hadoopFs; - } - - private HadoopInputFile getHadoopInputFile() { - if (hadoopInputFile != null) { - return hadoopInputFile; - } - Preconditions.checkState(hadoopFs != null, "Unexpected state"); - this.hadoopInputFile = HadoopInputFile.fromPath(new org.apache.hadoop.fs.Path(filePath), hadoopFs, configuration); - return hadoopInputFile; + this.locationWithScheme = IcebergUtils.getValidIcebergPath(path.toString(), conf, io.getFs().getScheme()); } @Override public long getLength() { - return fileSize != null? 
fileSize : getHadoopInputFile().getLength(); + if (fileSize == null) { + try { + fileSize = io.getFs().getFileAttributes(path).size(); + } catch (IOException e) { + throw new UncheckedIOException(String.format("Failed to get file attributes for file: %s", path), e); + } + } + + return fileSize; } public long getVersion() { @@ -92,15 +66,14 @@ public long getVersion() { @Override public SeekableInputStream newStream() { try { - if(context != null && fs != null) { - SeekableInputStreamFactory factory = context.getConfig().getInstance(SeekableInputStreamFactory.KEY, SeekableInputStreamFactory.class, SeekableInputStreamFactory.DEFAULT); - return factory.getStream(fs, context, path, - fileSize, mtime, dataset, datasourcePluginUID); - } else { - return getHadoopInputFile().newStream(); - } + SeekableInputStreamFactory factory = io.getContext() == null || io.getDataset() == null ? + SeekableInputStreamFactory.DEFAULT : + io.getContext().getConfig().getInstance(SeekableInputStreamFactory.KEY, SeekableInputStreamFactory.class, + SeekableInputStreamFactory.DEFAULT); + return factory.getStream(io.getFs(), io.getContext(), + path, fileSize, mtime, io.getDataset(), io.getDatasourcePluginUID()); } catch (IOException e) { - throw new RuntimeException(String.format("Failed to open a new stream for file : %s", path), e); + throw new UncheckedIOException(String.format("Failed to create new input stream for file: %s", path), e); } } @@ -112,9 +85,9 @@ public String location() { @Override public boolean exists() { try { - return fs != null? fs.exists(path) : getHadoopInputFile().exists(); + return io.getFs().exists(path); } catch (IOException e) { - throw new RuntimeException(String.format("Failed to check existence of file %s", path.toString()), e); + throw new UncheckedIOException(String.format("Failed to check existence of file: %s", path), e); } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioOutputFile.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioOutputFile.java new file mode 100644 index 0000000000..7439203765 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/DremioOutputFile.java @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import java.io.IOException; +import java.io.UncheckedIOException; + +import org.apache.iceberg.io.InputFile; +import org.apache.iceberg.io.OutputFile; +import org.apache.iceberg.io.PositionOutputStream; + +import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter; +import com.dremio.io.FSOutputStream; +import com.dremio.io.file.Path; + +/** + * DremioOutputFile. used in DremioFileIO to output iceberg metadata file. 
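+ *
+ * <p>A minimal usage sketch (path and payload illustrative; error handling elided):
+ * <pre>{@code
+ * OutputFile out = io.newOutputFile("/warehouse/tbl/metadata/v4.metadata.json");
+ * try (PositionOutputStream os = out.createOrOverwrite()) {
+ *   os.write(metadataJsonBytes);
+ * }
+ * }</pre>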
+ */ +public class DremioOutputFile implements OutputFile { + + private final DremioFileIO io; + + private final Path path; + private final String locationWithScheme; + + public DremioOutputFile(DremioFileIO io, Path path, FileSystemConfigurationAdapter conf) { + this.io = io; + this.path = path; + this.locationWithScheme = IcebergUtils.getValidIcebergPath(path.toString(), conf, io.getFs().getScheme()); + } + + @Override + public PositionOutputStream create() { + return create(false); + } + + @Override + public PositionOutputStream createOrOverwrite() { + return create(true); + } + + private PositionOutputStream create(boolean overwrite) { + try { + return new PositionOutputStreamWrapper(io.getFs().create(path, overwrite)); + } catch (IOException ex) { + throw new UncheckedIOException(String.format("Failed to create file: %s", path), ex); + } + } + + @Override + public String location() { + return locationWithScheme; + } + + @Override + public InputFile toInputFile() { + return io.newInputFile(path.toString()); + } + + @Override + public String toString() { + return location(); + } + + private static class PositionOutputStreamWrapper extends PositionOutputStream { + + private final FSOutputStream inner; + + public PositionOutputStreamWrapper(FSOutputStream inner) { + this.inner = inner; + } + + @Override + public long getPos() throws IOException { + return inner.getPosition(); + } + + @Override + public void write(int b) throws IOException { + inner.write(b); + } + + @Override + public void write(byte[] b) throws IOException { + inner.write(b); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + inner.write(b, off, len); + } + + @Override + public void flush() throws IOException { + inner.flush(); + } + + @Override + public void close() throws IOException { + inner.close(); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/FieldIdBroker.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/FieldIdBroker.java index 6425473dcd..8c53ebed77 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/FieldIdBroker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/FieldIdBroker.java @@ -29,6 +29,7 @@ public interface FieldIdBroker { class UnboundedFieldIdBroker implements FieldIdBroker { private int id = 0; + @Override public int get(String fieldName) { int curid = id; id++; @@ -46,6 +47,7 @@ public SeededFieldIdBroker(CaseInsensitiveImmutableBiMap fieldIdMapping this.fieldIdMapping = fieldIdMapping; } + @Override public int get(String fieldName) { Integer fieldId = fieldIdMapping.get(fieldName); if (fieldId == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/HadoopFsCacheKey.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/HadoopFsCacheKey.java index bcd4641404..cdd530df06 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/HadoopFsCacheKey.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/HadoopFsCacheKey.java @@ -27,12 +27,14 @@ public class HadoopFsCacheKey { final String authority; final Configuration conf; final URI uri; + final String userName; - public HadoopFsCacheKey(URI uri, Iterable> conf) { + public HadoopFsCacheKey(URI uri, Iterable> conf, String userName) { this.conf = (Configuration) conf; this.uri = uri; scheme = uri.getScheme() == null ? "" : StringUtils.toLowerCase(uri.getScheme()); authority = uri.getAuthority() == null ? 
"" : StringUtils.toLowerCase(uri.getAuthority()); + this.userName = userName; } public URI getUri() { @@ -43,9 +45,13 @@ public Configuration getConf() { return conf; } + public String getUserName() { + return userName; + } + @Override public int hashCode() { - return Objects.hash(scheme, authority); + return Objects.hash(scheme, authority, userName); } @Override @@ -59,11 +65,12 @@ public boolean equals(Object o) { HadoopFsCacheKey key = (HadoopFsCacheKey) o; return com.google.common.base.Objects.equal(scheme, key.scheme) && - com.google.common.base.Objects.equal(authority, key.authority); + com.google.common.base.Objects.equal(authority, key.authority) && + com.google.common.base.Objects.equal(userName, key.userName); } @Override public String toString() { - return scheme + "://" + authority; + return userName + "@" + scheme + "://" + authority; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExecutionDatasetAccessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExecutionDatasetAccessor.java index 5435c0253f..0197d15f04 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExecutionDatasetAccessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExecutionDatasetAccessor.java @@ -100,8 +100,7 @@ public BytesOutput provideSignature(DatasetMetadata metadata) throws ConnectorEx .newBuilder() .addCachedEntities(cachedEntity) .build()::writeTo; - } - catch (IOException ioe) { + } catch (IOException ioe) { Throwables.propagateIfPossible(ioe, ConnectorException.class); throw new ConnectorException(ioe); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExpGenVisitor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExpGenVisitor.java index 25ae468fe6..3298b57317 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExpGenVisitor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergExpGenVisitor.java @@ -80,17 +80,18 @@ public Expression visitCall(RexCall call) { } if (inputRef != null && other != null) { Object val = getValueAsInputRef(inputRef, (RexLiteral) other); + String columnName = fieldNames.get(inputRef.getIndex()); + usedColumns.add(columnName); if (Objects.isNull(val)) { return Expressions.alwaysFalse(); } - String columnName = fieldNames.get(inputRef.getIndex()); - usedColumns.add(columnName); return getOperatorExpression(call, columnName, val, inputFirst); } else { boolean isAND = false; switch (call.getOperator().getKind()) { case AND: isAND = true; + // fall through case OR: List nodeList = call.getOperands(); Expression left = nodeList.get(0).accept(this); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergFileType.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergFileType.java new file mode 100644 index 0000000000..9449686d50 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergFileType.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import java.util.HashMap; +import java.util.Map; + +public enum IcebergFileType { + DATA(0), // Data file + POSITION_DELETES(1), + EQUALITY_DELETES(2), + MANIFEST(3), + MANIFEST_LIST(4), + PARTITION_STATS(5), // Partition Stats file or Partition Stats metadata file + OTHER(6); + + + public final Integer id; + + IcebergFileType(Integer id) { + this.id = id; + } + + private static final Map IDS = new HashMap<>(); + static { + for (IcebergFileType type : values()) { + IDS.put(type.id, type); + } + } + + private static final Map NAMES = new HashMap<>(); + static { + for (IcebergFileType type : values()) { + NAMES.put(type.name().toLowerCase(), type); + } + } + + public static IcebergFileType valueById(Integer id) { + if (IDS.containsKey(id)) { + return IDS.get(id); + } else { + throw new UnsupportedOperationException(String.format("Id %d is not recognized.", id)); + } + } + + public static IcebergFileType valueByName(String name) { + if (NAMES.containsKey(name.toLowerCase())) { + return NAMES.get(name.toLowerCase()); + } else { + throw new UnsupportedOperationException(String.format("Name %s is not recognized.", name)); + } + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergGroupScan.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergGroupScan.java index 1ef7ed97a6..3a9503370e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergGroupScan.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergGroupScan.java @@ -15,34 +15,26 @@ */ package com.dremio.exec.store.iceberg; +import static com.dremio.exec.store.iceberg.IcebergUtils.getMetadataLocation; +import static com.dremio.exec.store.iceberg.IcebergUtils.getSplitAndPartitionInfo; + import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.iceberg.ManifestContent; import org.apache.iceberg.expressions.Expression; import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.expression.SchemaPath; -import com.dremio.connector.metadata.DatasetSplit; -import com.dremio.connector.metadata.DatasetSplitAffinity; -import com.dremio.datastore.LegacyProtobufSerializer; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.physical.base.SubScan; import com.dremio.exec.planner.fragment.ExecutionNodeMap; -import com.dremio.exec.store.SplitAndPartitionInfo; import com.dremio.exec.store.SplitWork; import com.dremio.exec.store.SplitWorkWithRuntimeAffinity; import com.dremio.exec.store.TableMetadata; import com.dremio.exec.store.dfs.easy.EasyGroupScan; -import com.dremio.sabot.exec.store.easy.proto.EasyProtobuf; -import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf; -import com.dremio.service.namespace.MetadataProtoUtils; import com.dremio.service.namespace.dataset.proto.IcebergMetadata; -import com.dremio.service.namespace.dataset.proto.PartitionProtobuf; -import com.google.protobuf.InvalidProtocolBufferException; import 
io.protostuff.ByteString; @@ -52,61 +44,19 @@ public class IcebergGroupScan extends EasyGroupScan { private final Expression icebergFilterExpression; - private final ManifestContent manifestContent; + private final ManifestContentType manifestContent; public IcebergGroupScan(OpProps props, TableMetadata dataset, List columns, - Expression icebergFilterExpression, ManifestContent manifestContent) { + Expression icebergFilterExpression, ManifestContentType manifestContent) { super(props, dataset, columns); this.icebergFilterExpression = icebergFilterExpression; this.manifestContent = manifestContent; } - private String getMetadataLocation(TableMetadata dataset, List works) { - if (dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata() != null && - dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation() != null && - !dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation().isEmpty()) { - return dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation(); - } else { - EasyProtobuf.EasyDatasetSplitXAttr extended; - try { - if (works.size() == 0) { - //It's an in-valid scenario where splits size is zero. - throw new RuntimeException("Unexpected state with zero split."); - } - // All the split will have the same iceberg metadata location. - // It would be ideal to read it from any index in this case from the first index. - extended = LegacyProtobufSerializer.parseFrom(EasyProtobuf.EasyDatasetSplitXAttr.PARSER, - works.get(0).getSplitExtendedProperty()); - return extended.getPath(); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException("Could not deserialize split info", e); - } - } - } - - private List getSplitAndPartitionInfo(String splitPath) { - final List splits = new ArrayList<>(); - IcebergProtobuf.IcebergDatasetSplitXAttr splitExtended = IcebergProtobuf.IcebergDatasetSplitXAttr.newBuilder() - .setPath(splitPath) - .build(); - List splitAffinities = new ArrayList<>(); - DatasetSplit datasetSplit = DatasetSplit.of( - splitAffinities, 0, 0, splitExtended::writeTo); - - PartitionProtobuf.NormalizedPartitionInfo partitionInfo = PartitionProtobuf.NormalizedPartitionInfo.newBuilder().setId(String.valueOf(1)).build(); - PartitionProtobuf.NormalizedDatasetSplitInfo.Builder splitInfo = PartitionProtobuf.NormalizedDatasetSplitInfo - .newBuilder() - .setPartitionId(partitionInfo.getId()) - .setExtendedProperty(MetadataProtoUtils.toProtobuf(datasetSplit.getExtraInfo())); - splits.add(new SplitAndPartitionInfo(partitionInfo, splitInfo.build())); - return splits; - } - @Override public SubScan getSpecificScan(List works) throws ExecutionSetupException { final StoragePluginId pluginId; - final String metadataLocation = getMetadataLocation(dataset, works); if (dataset instanceof InternalIcebergScanTableMetadata) { InternalIcebergScanTableMetadata icebergDataset = (InternalIcebergScanTableMetadata) dataset; pluginId = icebergDataset.getIcebergTableStoragePlugin(); @@ -114,6 +64,7 @@ public SubScan getSpecificScan(List works) throws ExecutionSetupExcep pluginId = dataset.getStoragePluginId(); } + final String metadataLocation = getMetadataLocation(dataset, works); final IcebergExtendedProp icebergExtendedProp; try { ByteString partitionSpecMap = null; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestFileContentScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestFileContentScanPrel.java index 
bd010a9f99..8a808a4859 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestFileContentScanPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestFileContentScanPrel.java @@ -33,7 +33,6 @@ import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.hint.RelHint; import org.apache.calcite.rel.type.RelDataType; -import org.apache.iceberg.ManifestContent; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.physical.base.PhysicalOperator; @@ -57,7 +56,6 @@ * table_files Metadata Functions use IcebergManifestFileContentScanPrel to fetch data file content from manifest file. */ public class IcebergManifestFileContentScanPrel extends ScanPrelBase implements PrelFinalizable { - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(IcebergManifestFileContentScanPrel.class); public IcebergManifestFileContentScanPrel(RelOptCluster cluster, RelTraitSet traitSet, RelOptTable table, TableMetadata dataset, List projectedColumns, double observedRowcountAdjustment, List hints) { super(cluster, traitSet, table, dataset.getStoragePluginId(), dataset, projectedColumns, observedRowcountAdjustment, hints, ImmutableList.of()); @@ -117,7 +115,7 @@ public Prel finalizeRel() { RelTraitSet relTraitSet = getCluster().getPlanner().emptyTraitSet().plus(Prel.PHYSICAL).plus(distributionTrait); IcebergManifestListPrel manifestListPrel = new IcebergManifestListPrel(getCluster(), getTraitSet(), tableMetadata, manifestListReaderSchema, manifestListReaderColumns, - getRowTypeFromProjectedColumns(manifestListReaderColumns, manifestListReaderSchema, getCluster()), null, ManifestContent.DATA); + getRowTypeFromProjectedColumns(manifestListReaderColumns, manifestListReaderSchema, getCluster()), null, ManifestContentType.ALL); // exchange above manifest list scan, which is a leaf level easy scan diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListPrel.java index d04e2fdf74..ad5daf6ffe 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListPrel.java @@ -28,7 +28,6 @@ import org.apache.calcite.rel.RelWriter; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; -import org.apache.iceberg.ManifestContent; import org.apache.iceberg.expressions.Expression; import com.dremio.common.expression.SchemaPath; @@ -61,7 +60,7 @@ public class IcebergManifestListPrel extends AbstractRelNode implements LeafPre private final List projectedColumns; private final RelDataType relDataType; private final Expression icebergExpression; - private final ManifestContent manifestContent; + private final ManifestContentType manifestContent; public IcebergManifestListPrel( RelOptCluster cluster, @@ -71,7 +70,7 @@ public IcebergManifestListPrel( List projectedColumns, RelDataType relDataType, Expression icebergExpression, - ManifestContent manifestContent) { + ManifestContentType manifestContent) { super(cluster, traitSet); this.tableMetadata = tableMetadata; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListRecordReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListRecordReader.java index 19afebf7ba..6aa7dbcf8c 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListRecordReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListRecordReader.java @@ -38,7 +38,6 @@ import org.apache.arrow.vector.complex.StructVector; import org.apache.arrow.vector.complex.impl.NullableStructWriter; import org.apache.arrow.vector.types.pojo.Field; -import org.apache.iceberg.ManifestContent; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; @@ -48,13 +47,13 @@ import org.apache.iceberg.exceptions.RuntimeIOException; import org.apache.iceberg.expressions.Expression; import org.apache.iceberg.expressions.ManifestEvaluator; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.types.Type; import com.dremio.common.AutoCloseables; import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.PathUtils; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.RecordReader; @@ -82,18 +81,22 @@ public class IcebergManifestListRecordReader implements RecordReader { private Schema icebergTableSchema; private byte[] icebergDatasetXAttr; - private final SupportsIcebergRootPointer pluginForIceberg; + protected final SupportsIcebergRootPointer pluginForIceberg; private final OpProps props; private ArrowBuf tmpBuf; private boolean emptyTable; private final String datasourcePluginUID; private Expression icebergFilterExpression; - private final String path; - private final ManifestContent manifestContent; + private final String metadataLocation; + private final ManifestContentType manifestContent; private Map partitionSpecMap; + private StructVector splitIdentityVector; + private VarBinaryVector splitInfoVector; + private VarBinaryVector colIdsVector; + public IcebergManifestListRecordReader(OperatorContext context, - String path, + String metadataLocation, SupportsIcebergRootPointer pluginForIceberg, List dataset, String dataSourcePluginId, @@ -101,8 +104,8 @@ public IcebergManifestListRecordReader(OperatorContext context, OpProps props, List partitionCols, IcebergExtendedProp icebergExtendedProp, - ManifestContent manifestContent) { - this.path = path; + ManifestContentType manifestContent) { + this.metadataLocation = metadataLocation; this.context = context; this.pluginForIceberg = pluginForIceberg; this.dataset = dataset; @@ -129,13 +132,12 @@ public void setup(OutputMutator output) throws ExecutionSetupException { this.output = output; FileSystem fs; try { - fs = pluginForIceberg.createFSWithAsyncOptions(this.path, props.getUserName(), context); + fs = pluginForIceberg.createFSWithAsyncOptions(this.metadataLocation, props.getUserName(), context); } catch (IOException e) { throw new RuntimeException("Failed creating filesystem", e); } - DremioFileIO io = new DremioFileIO( - fs, context, dataset, datasourcePluginUID, null, pluginForIceberg.getFsConfCopy(), (MutablePlugin) pluginForIceberg); - TableMetadata tableMetadata = TableMetadataParser.read(io, this.path); + FileIO io = pluginForIceberg.createIcebergFileIO(fs, context, dataset, datasourcePluginUID, null); + TableMetadata tableMetadata = TableMetadataParser.read(io, this.metadataLocation); if (!context.getOptions().getOption(ENABLE_ICEBERG_SPEC_EVOL_TRANFORMATION)) { checkForPartitionSpecEvolution(tableMetadata); } @@ -167,13 +169,29 @@ public void 
setup(OutputMutator output) throws ExecutionSetupException { .message("Iceberg V2 tables with equality deletes are not supported.") .buildSilently(); } + List manifestFileList; + switch (manifestContent) { + case DATA: + manifestFileList = snapshot.dataManifests(io); + break; + case DELETES: + manifestFileList = snapshot.deleteManifests(io); + break; + case ALL: + manifestFileList = snapshot.allManifests(io); + break; + default: + throw new IllegalStateException("Invalid ManifestContentType " + manifestContent); + } - List manifestFileList = manifestContent == ManifestContent.DELETES ? - snapshot.deleteManifests(io) : snapshot.dataManifests(io); manifestFileList = filterManifestFiles(manifestFileList); manifestFileIterator = manifestFileList.iterator(); + Map colIdMap = manifestContent == ManifestContentType.DELETES ? + IcebergUtils.getColIDMapWithReservedDeleteFields(icebergTableSchema) : + IcebergUtils.getIcebergColumnNameToIDMap(icebergTableSchema); icebergDatasetXAttr = IcebergProtobuf.IcebergDatasetXAttr.newBuilder() - .addAllColumnIds(IcebergUtils.getIcebergColumnNameToIDMap(icebergTableSchema).entrySet().stream() + .addAllColumnIds(colIdMap.entrySet() + .stream() .map(c -> IcebergProtobuf.IcebergSchemaField.newBuilder() .setSchemaPath(c.getKey()) .setId(c.getValue()) @@ -182,6 +200,10 @@ public void setup(OutputMutator output) throws ExecutionSetupException { .build() .toByteArray(); tmpBuf = context.getAllocator().buffer(4096); + + splitIdentityVector = (StructVector) output.getVector(RecordReader.SPLIT_IDENTITY); + splitInfoVector = (VarBinaryVector)output.getVector(RecordReader.SPLIT_INFORMATION); + colIdsVector = (VarBinaryVector)output.getVector(RecordReader.COL_IDS); } @Override @@ -193,17 +215,18 @@ public void allocate(Map vectorMap) throws OutOfMemoryExcep @Override public int next() { + return nextBatch(0, context.getTargetBatchSize()); + } + + public int nextBatch(int startOutIndex, int maxOutIndex) { if (emptyTable) { return 0; } - int outIndex = 0; + int outIndex = startOutIndex; try { - StructVector splitIdentityVector = (StructVector) output.getVector(RecordReader.SPLIT_IDENTITY); NullableStructWriter splitIdentityWriter = splitIdentityVector.getWriter(); - VarBinaryVector splitInfoVector = (VarBinaryVector)output.getVector(RecordReader.SPLIT_INFORMATION); - VarBinaryVector colIdsVector = (VarBinaryVector)output.getVector(RecordReader.COL_IDS); - while (manifestFileIterator.hasNext() && outIndex < context.getTargetBatchSize()) { + while (manifestFileIterator.hasNext() && outIndex < maxOutIndex) { ManifestFile manifestFile = manifestFileIterator.next(); SplitIdentity splitIdentity = new SplitIdentity(manifestFile.path(), 0, manifestFile.length(), manifestFile.length()); @@ -219,15 +242,16 @@ public int next() { } outIndex++; } - int valueCount = outIndex; - output.getVectors().forEach(v -> v.setValueCount(valueCount)); + + int lastOutIndex = outIndex; + output.getVectors().forEach(v -> v.setValueCount(lastOutIndex)); + return lastOutIndex - startOutIndex; } catch (Exception e) { throw UserException .dataReadError(e) .message("Unable to read manifest list files for table '%s'", PathUtils.constructFullPath(dataset)) .build(logger); } - return outIndex; } private Object getStatValue(ManifestFile manifestFile, Field field) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanCreator.java index 5a7e31381d..a553162566 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanCreator.java @@ -54,10 +54,8 @@ public ProducerOperator create(FragmentExecutionContext fragmentExecContext, } catch (InvalidProtocolBufferException e) { throw new RuntimeException("Could not deserialize split info", e); } - final RecordReader reader = new - IcebergManifestListRecordReader(context, - splitXAttr.getPath(), plugin, config.getTableSchemaPath(), - config.getDatasourcePluginId().getName(), config.getFullSchema(), config.getProps(), + final RecordReader reader = new IcebergManifestListRecordReader(context, splitXAttr.getPath(), plugin, + config.getTableSchemaPath(), config.getDatasourcePluginId().getName(), config.getFullSchema(), config.getProps(), config.getPartitionColumns(), config.getIcebergExtendedProp(), config.getManifestContent()); return new ScanOperator(config, context, RecordReaderIterator.from(reader)); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanPrel.java new file mode 100644 index 0000000000..b1a065732b --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListScanPrel.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.planner.physical.PlannerSettings.ICEBERG_MANIFEST_SCAN_RECORDS_PER_THREAD; + +import java.util.List; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.rel.type.RelDataType; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.planner.common.ScanRelBase; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.planner.physical.PrelUtil; +import com.dremio.exec.planner.physical.TableFunctionPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.TableMetadata; + +/** + * Prel for the Iceberg manifest list scan table function. This supports both data and delete manifest scans. 
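The new ManifestContentType value threaded through these scans replaces Iceberg's two-valued ManifestContent so that an ALL mode can cover both manifest kinds (see ManifestContentType.ALL above and the DATA/DELETES/ALL switch added to IcebergManifestListRecordReader.setup()). A minimal sketch of such an enum, assuming only the Snapshot accessors that the hunks above already call; the enum name and shape here are illustrative, not the actual class:

import java.util.List;

import org.apache.iceberg.ManifestFile;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.io.FileIO;

// Sketch of a three-valued content type; DATA and DELETES mirror Iceberg's
// ManifestContent, ALL adds the combined mode used by table_files scans.
public enum ManifestContentTypeSketch {
  DATA, DELETES, ALL;

  // Pick the matching manifest list from a snapshot, as the switch added in
  // IcebergManifestListRecordReader.setup() does.
  public List<ManifestFile> select(Snapshot snapshot, FileIO io) {
    switch (this) {
      case DATA:
        return snapshot.dataManifests(io);
      case DELETES:
        return snapshot.deleteManifests(io);
      case ALL:
        return snapshot.allManifests(io);
      default:
        throw new IllegalStateException("Invalid content type " + this);
    }
  }
}
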
+ */ +public class IcebergManifestListScanPrel extends TableFunctionPrel { + + public IcebergManifestListScanPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + BatchSchema schema, + List projectedColumns, + Long survivingRecords) { + this( + cluster, + traitSet, + table, + child, + tableMetadata, + TableFunctionUtil.getManifestListScanTableFunctionConfig(tableMetadata, null, schema, projectedColumns), + ScanRelBase.getRowTypeFromProjectedColumns(projectedColumns, schema, cluster), + survivingRecords); + } + + protected IcebergManifestListScanPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + TableFunctionConfig functionConfig, + RelDataType rowType, + Long survivingRecords) { + super(cluster, traitSet, table, child, tableMetadata, functionConfig, rowType, survivingRecords); + } + + @Override + public RelNode copy(RelTraitSet traitSet, List inputs) { + return new IcebergManifestListScanPrel(getCluster(), getTraitSet(), getTable(), sole(inputs), getTableMetadata(), + getTableFunctionConfig(), getRowType(), getSurvivingRecords()); + } + + @Override + protected double defaultEstimateRowCount(TableFunctionConfig functionConfig, RelMetadataQuery mq) { + if (getSurvivingRecords() == null) { + // we should always have a surviving records count provided which would be based on the data file count from table + // metadata, but if not make a guess based on config + final PlannerSettings plannerSettings = PrelUtil.getPlannerSettings(getCluster().getPlanner()); + double rowMultiplier = ((double) plannerSettings.getSliceTarget() / + plannerSettings.getOptions().getOption(ICEBERG_MANIFEST_SCAN_RECORDS_PER_THREAD)); + return Math.max(mq.getRowCount(input) * rowMultiplier, 1); + } + + return (double) getSurvivingRecords(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListSubScan.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListSubScan.java index ef42c36868..824fd80542 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListSubScan.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestListSubScan.java @@ -17,8 +17,6 @@ import java.util.List; -import org.apache.iceberg.ManifestContent; - import com.dremio.common.expression.SchemaPath; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.physical.base.OpProps; @@ -48,7 +46,7 @@ public class IcebergManifestListSubScan extends SubScanWithProjection { private final List partitionColumns; private final IcebergExtendedProp icebergExtendedProp; private final String location; - private final ManifestContent manifestContent; + private final ManifestContentType manifestContent; @JsonIgnore private List splits; @@ -64,7 +62,7 @@ public IcebergManifestListSubScan( @JsonProperty("partitionColumns") List partitionColumns, @JsonProperty("extendedProperty") ByteString extendedProperty, @JsonProperty("icebergExtendedProperties") IcebergExtendedProp icebergExtendedProp, - @JsonProperty("manifestContent") ManifestContent manifestContent) { + @JsonProperty("manifestContent") ManifestContentType manifestContent) { this(props, location, fullSchema, null, tablePath, pluginId, datasourcePluginId, columns, partitionColumns, extendedProperty, icebergExtendedProp, manifestContent); } @@ -81,7 +79,7 @@ public IcebergManifestListSubScan( List partitionColumns, ByteString 
extendedProperty, IcebergExtendedProp icebergExtendedProp, - ManifestContent manifestContent) { + ManifestContentType manifestContent) { super(props, fullSchema, (tablePath == null) ? null : ImmutableList.of(tablePath), columns); this.location = location; this.pluginId = pluginId; @@ -121,7 +119,7 @@ public List getSplits() { return splits; } - public ManifestContent getManifestContent() { + public ManifestContentType getManifestContent() { return manifestContent; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestScanPrel.java index b6e4b6dc16..67090272fa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestScanPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergManifestScanPrel.java @@ -47,6 +47,29 @@ */ public class IcebergManifestScanPrel extends TableFunctionPrel { + public IcebergManifestScanPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + BatchSchema schema, + List projectedColumns, + ManifestScanFilters manifestScanFilters, + Long survivingRecords, + ManifestContent manifestContent) { + this( + cluster, + traitSet, + table, + child, + tableMetadata, + TableFunctionUtil.getManifestScanTableFunctionConfig(tableMetadata, projectedColumns, schema, null, + manifestContent, manifestScanFilters, false), + ScanRelBase.getRowTypeFromProjectedColumns(projectedColumns, schema, cluster), + survivingRecords); + } + public IcebergManifestScanPrel( RelOptCluster cluster, RelTraitSet traitSet, @@ -57,7 +80,8 @@ public IcebergManifestScanPrel( List projectedColumns, ManifestScanFilters manifestScanFilters, Long survivingRecords, - ManifestContent manifestContent) { + ManifestContent manifestContent, + boolean enableCarryForward) { this( cluster, traitSet, @@ -65,7 +89,7 @@ public IcebergManifestScanPrel( child, tableMetadata, TableFunctionUtil.getManifestScanTableFunctionConfig(tableMetadata, projectedColumns, schema, null, - manifestContent, manifestScanFilters), + manifestContent, manifestScanFilters, enableCarryForward), ScanRelBase.getRowTypeFromProjectedColumns(projectedColumns, schema, cluster), survivingRecords); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergModelCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergModelCreator.java index c8927f8ba8..364c2dd5e5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergModelCreator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergModelCreator.java @@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.iceberg.hadoop.IcebergHadoopModel; import com.dremio.exec.store.iceberg.model.IcebergCatalogType; @@ -45,7 +44,7 @@ public static IcebergModel createIcebergModel( FileSystem fs, /* if fs is null it will use Iceberg HadoopFileIO class else it will use DremioFileIO class */ OperatorContext operatorContext, List dataset, - MutablePlugin plugin) { + SupportsIcebergMutablePlugin plugin) { // if parameter is not set then using Hadoop as default IcebergCatalogType catalogType = getIcebergCatalogType(configuration, context); String namespace = configuration.get(ICEBERG_NAMESPACE_KEY, DREMIO_NESSIE_DEFAULT_NAMESPACE); diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOptimizeSingleFileTracker.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOptimizeSingleFileTracker.java index a40d24bf81..88e1f2e683 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOptimizeSingleFileTracker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOptimizeSingleFileTracker.java @@ -21,7 +21,9 @@ import java.util.Objects; import java.util.Set; +import org.apache.iceberg.ContentFile; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.StructLike; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,6 +48,12 @@ public void consumeDeletedDataFile(DataFile dataFile) { rewriteFiles.consumeDeletedFilePath(dataFile); } + public void consumeDeletedDeleteFile(DeleteFile deleteFile) { + PartitionInfo partitionInfo = new PartitionInfo(deleteFile.specId(), deleteFile.partition()); + RewritablePartitionFiles rewriteFiles = rewriteCandidates.computeIfAbsent(partitionInfo, p -> new RewritablePartitionFiles()); + rewriteFiles.consumeDeletedFilePath(deleteFile); + } + /** * Removes the tracked single file rewrites per partition from the input lists */ @@ -70,12 +78,13 @@ class RewritablePartitionFiles { private String firstDeletedFilePath = null; private long firstDeletedFileRecords = 0L; private boolean hasMultipleFiles = false; + private boolean hasDeletedDeleteFiles = false; public String getFirstAddedFilePath() { return firstAddedFilePath; } - public void consumeAddedFilePath(DataFile addedFile) { + public void consumeAddedFilePath(ContentFile addedFile) { if (this.firstAddedFilePath != null) { this.hasMultipleFiles = true; } else { @@ -88,7 +97,10 @@ public String getFirstDeletedFilePath() { return firstDeletedFilePath; } - public void consumeDeletedFilePath(DataFile deletedFile) { + public void consumeDeletedFilePath(ContentFile deletedFile) { + if (deletedFile instanceof DeleteFile) { + this.hasDeletedDeleteFiles = true; + } if (this.firstDeletedFilePath != null) { this.hasMultipleFiles = true; } else { @@ -103,6 +115,7 @@ public boolean hasMultipleFiles() { public boolean isSameFileChange() { return !this.hasMultipleFiles() + && !this.hasDeletedDeleteFiles && this.getFirstAddedFilePath() != null && this.getFirstDeletedFilePath() != null && this.firstAddedFileRecords == this.firstDeletedFileRecords; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeletePrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeletePrel.java new file mode 100644 index 0000000000..e255fbcc4c --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeletePrel.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.planner.physical.PlannerSettings.ORPHAN_FILE_DELETE_RECORDS_PER_THREAD; + +import java.util.List; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.rel.type.RelDataType; + +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.planner.physical.PrelUtil; +import com.dremio.exec.planner.physical.TableFunctionPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; +import com.dremio.exec.planner.sql.CalciteArrowHelper; +import com.dremio.exec.store.SystemSchemas; +import com.dremio.exec.store.TableMetadata; + +/** + * A prel for IcebergOrphanFileDeleteTableFunction + */ +public class IcebergOrphanFileDeletePrel extends TableFunctionPrel { + + public IcebergOrphanFileDeletePrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + Long survivingRecords) { + this( + cluster, + traitSet, + table, + child, + tableMetadata, + TableFunctionUtil.getIcebergOrphanFileDeleteFunctionConfig(SystemSchemas.ICEBERG_ORPHAN_FILE_DELETE_SCHEMA, tableMetadata), + CalciteArrowHelper.wrap(SystemSchemas.ICEBERG_ORPHAN_FILE_DELETE_SCHEMA) + .toCalciteRecordType(cluster.getTypeFactory(), + PrelUtil.getPlannerSettings(cluster).isFullNestedSchemaSupport()), + survivingRecords); + } + + private IcebergOrphanFileDeletePrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + TableFunctionConfig functionConfig, + RelDataType rowType, + Long survivingRecords) { + super(cluster, traitSet, table, child, tableMetadata, functionConfig, rowType, survivingRecords); + } + + @Override + public RelNode copy(RelTraitSet traitSet, List inputs) { + return new IcebergOrphanFileDeletePrel(getCluster(), getTraitSet(), getTable(), sole(inputs), + getTableMetadata(), getTableFunctionConfig(), getRowType(), getSurvivingRecords()); + } + + @Override + protected double defaultEstimateRowCount(TableFunctionConfig functionConfig, RelMetadataQuery mq) { + // Amplify the estimated row count so that the planner spreads the deletes across multiple threads. + final PlannerSettings plannerSettings = PrelUtil.getPlannerSettings(getCluster().getPlanner()); + double rowMultiplier = ((double) plannerSettings.getSliceTarget() / + plannerSettings.getOptions().getOption(ORPHAN_FILE_DELETE_RECORDS_PER_THREAD)); + return Math.max(getSurvivingRecords() * rowMultiplier, 1); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeleteTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeleteTableFunction.java new file mode 100644 index 0000000000..db1a4039cc --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergOrphanFileDeleteTableFunction.java @@ -0,0 +1,168 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.util.VectorUtil.getVectorFromSchemaPath; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.iceberg.exceptions.NotFoundException; +import org.apache.iceberg.util.Tasks; + +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.store.SystemSchemas; +import com.dremio.exec.store.dfs.AbstractTableFunction; +import com.dremio.exec.util.VectorUtil; +import com.dremio.io.file.FileSystem; +import com.dremio.io.file.Path; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.dremio.sabot.op.tablefunction.TableFunctionOperator; +import com.google.common.base.Stopwatch; + +/** + * A table function that deletes the orphan files + */ +public class IcebergOrphanFileDeleteTableFunction extends AbstractTableFunction { + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(IcebergOrphanFileDeleteTableFunction.class); + private static final int DELETE_NUM_RETRIES = 3; + + private final FragmentExecutionContext fragmentExecutionContext; + private final OpProps props; + private final OperatorStats operatorStats; + private SupportsIcebergMutablePlugin icebergMutablePlugin; + private FileSystem fs; + + private VarCharVector inputFilePath; + private VarCharVector inputFileType; + + private VarCharVector outputFilePath; + private VarCharVector outputFileType; + private BigIntVector outputRecords; + private int inputIndex; + private boolean doneWithRow; + + public IcebergOrphanFileDeleteTableFunction( + FragmentExecutionContext fragmentExecutionContext, + OperatorContext context, + OpProps props, + TableFunctionConfig functionConfig) { + super(context, functionConfig); + this.fragmentExecutionContext = fragmentExecutionContext; + this.props = props; + this.operatorStats = context.getStats(); + } + + @Override + public VectorAccessible setup(VectorAccessible accessible) throws Exception { + super.setup(accessible); + icebergMutablePlugin = fragmentExecutionContext.getStoragePlugin(functionConfig.getFunctionContext().getPluginId()); + fs = icebergMutablePlugin.createFSWithAsyncOptions( + functionConfig.getFunctionContext().getFormatSettings().getLocation(), props.getUserName(), context); + + inputFilePath = (VarCharVector) getVectorFromSchemaPath(incoming, SystemSchemas.FILE_PATH); + inputFileType = (VarCharVector) getVectorFromSchemaPath(incoming, SystemSchemas.FILE_TYPE); + + outputFilePath = (VarCharVector) VectorUtil.getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_PATH); + outputFileType = (VarCharVector) VectorUtil.getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_TYPE); + outputRecords = (BigIntVector) 
VectorUtil.getVectorFromSchemaPath(outgoing, SystemSchemas.RECORDS); + + return outgoing; + } + + @Override + public void startRow(int row) throws Exception { + inputIndex = row; + doneWithRow = false; + } + + @Override + public int processRow(int startOutIndex, int maxRecords) throws Exception { + if (doneWithRow) { + return 0; + } + byte[] filePathBytes = inputFilePath.get(inputIndex); + String orphanFilePath = new String(filePathBytes, StandardCharsets.UTF_8); + byte[] fileTypeBytes = inputFileType.get(inputIndex); + String orphanFileType = new String(fileTypeBytes, StandardCharsets.UTF_8); + int deleteRecord = deleteFile(orphanFilePath, orphanFileType); + outputRecords.setSafe(inputIndex, deleteRecord); + outputFilePath.setSafe(inputIndex, inputFilePath.get(inputIndex)); + outputFileType.setSafe(inputIndex, inputFileType.get(inputIndex)); + outgoing.setAllCount(inputIndex + 1); + doneWithRow = true; + return 1; + } + + @Override + public void startBatch(int records) { + outgoing.allocateNew(); + } + + @Override + public void closeRow() throws Exception { + } + + private int deleteFile(String orphanFilePath, String fileType) { + Stopwatch stopwatch = Stopwatch.createStarted(); + // The orphan file list can contain duplicate entries. When fs.delete() is asked to remove a file that has + // already been deleted, it returns false; 'deleteStatus' tracks that case. + // fs.delete() can also fail with an exception, leaving the file in place; 'failedToDelete' tracks that case + // so the failure can be recorded in a metric. + AtomicBoolean deleteStatus = new AtomicBoolean(true); + AtomicBoolean failedToDelete = new AtomicBoolean(false); + Tasks.foreach(orphanFilePath) + .retry(DELETE_NUM_RETRIES) + .stopRetryOn(NotFoundException.class) + .suppressFailureWhenFinished() + .onFailure( + (filePath, exc) -> { + logger.warn("Delete failed for {}: {}", fileType, filePath, exc); + failedToDelete.set(true); + }) + .run( + filePath -> { + try { + String containerRelativePath = Path.getContainerSpecificRelativePath(Path.of(filePath)); + boolean deleted = fs.delete(Path.of(containerRelativePath), false); + deleteStatus.set(deleted); + } catch (IOException e) { + logger.warn("Delete failed for {}: {}", fileType, filePath, e); + failedToDelete.set(true); + deleteStatus.set(false); + } + }); + long deleteTime = stopwatch.elapsed(TimeUnit.MILLISECONDS); + operatorStats.addLongStat(TableFunctionOperator.Metric.DELETE_ORPHAN_FILES_TIME, deleteTime); + + // Track whether the file was deleted successfully.
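deleteFile() above leans on Iceberg's Tasks utility for bounded retries that never abort the rest of the batch. A standalone sketch of the same pattern; the paths and the body of run() are placeholders, only the Tasks chain is taken from the code above:

import java.util.concurrent.atomic.AtomicInteger;

import org.apache.iceberg.exceptions.NotFoundException;
import org.apache.iceberg.util.Tasks;

public class RetryDeleteSketch {
  public static void main(String[] args) {
    AtomicInteger failures = new AtomicInteger();
    // Two placeholder paths; a real caller would pass actual orphan file paths.
    Tasks.foreach("s3://bucket/t/orphan-1.parquet", "s3://bucket/t/orphan-2.parquet")
        .retry(3)                             // retry each path up to 3 times
        .stopRetryOn(NotFoundException.class) // a missing file can never succeed
        .suppressFailureWhenFinished()        // keep deleting the rest on failure
        .onFailure((path, exc) -> failures.incrementAndGet())
        .run(path -> System.out.println("would delete " + path));
    System.out.println("failed deletes: " + failures.get());
  }
}
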
+ if (deleteStatus.get()) { + operatorStats.addLongStat(TableFunctionOperator.Metric.NUM_ORPHAN_FILES_DELETED, 1); + return 1; + } else if (failedToDelete.get()){ + operatorStats.addLongStat(TableFunctionOperator.Metric.NUM_ORPHAN_FILES_FAIL_TO_DELETE, 1); + } + return 0; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPlanBuilder.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPlanBuilder.java index d7c6a3da6f..aa90c90462 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPlanBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPlanBuilder.java @@ -15,7 +15,15 @@ */ package com.dremio.exec.store.iceberg; +import static com.dremio.exec.store.SystemSchemas.DATAFILE_PATH; +import static com.dremio.exec.store.SystemSchemas.DELETE_FILE_PATH; +import static com.dremio.exec.store.SystemSchemas.ICEBERG_POS_DELETE_FILE_SCHEMA; +import static com.dremio.exec.store.SystemSchemas.IMPLICIT_SEQUENCE_NUMBER; +import static com.dremio.exec.store.SystemSchemas.SEQUENCE_NUMBER; +import static org.apache.calcite.sql.fun.SqlStdOperatorTable.EQUALS; + import java.util.List; +import java.util.stream.Collectors; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptTable; @@ -28,17 +36,22 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.util.Pair; import org.apache.iceberg.ManifestContent; import com.dremio.common.expression.SchemaPath; import com.dremio.exec.ops.OptimizerRulesContext; +import com.dremio.exec.physical.config.ImmutableManifestScanFilters; import com.dremio.exec.physical.config.ManifestScanFilters; +import com.dremio.exec.planner.common.MoreRelOptUtil; import com.dremio.exec.planner.common.ScanRelBase; +import com.dremio.exec.planner.logical.partition.PruneFilterCondition; import com.dremio.exec.planner.physical.BroadcastExchangePrel; import com.dremio.exec.planner.physical.HashJoinPrel; import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.planner.physical.ProjectPrel; import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.DelegatingTableMetadata; import com.dremio.exec.store.SystemSchemas; import com.dremio.exec.store.TableMetadata; import com.dremio.exec.store.dfs.FilterableScan; @@ -59,7 +72,8 @@ public IcebergScanPlanBuilder( TableMetadata tableMetadata, List projectedColumns, OptimizerRulesContext context, - ManifestScanFilters manifestScanFilters) { + ManifestScanFilters manifestScanFilters, + PruneFilterCondition pruneFilterCondition) { this.icebergScanPrel = new IcebergScanPrel( cluster, traitSet, @@ -71,7 +85,7 @@ public IcebergScanPlanBuilder( ImmutableList.of(), null, false, - null, + pruneFilterCondition, context, false, null, @@ -158,8 +172,7 @@ public RelNode build() { new ImmutableManifestScanOptions.Builder().setManifestContent(ManifestContent.DELETES).build()); output = buildDataAndDeleteFileJoinAndAggregate(data, deletes); - output = buildSplitGen(output); - output = icebergScanPrel.buildDataFileScan(output); + output = buildDataScanWithSplitGen(output); } else { // no delete files, just return IcebergScanPrel which will get expanded in FinalizeRel stage output = icebergScanPrel; @@ -168,21 +181,74 @@ public RelNode build() { return output; } + /** + * Scan data manifests and HashJoin on file paths from reading delete files. 
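The LESS_THAN_OR_EQUAL condition built in the method body below encodes the Iceberg v2 applicability rule: a delete file can affect a data file only when the data file's sequence number is at or below the delete file's. A tiny self-contained illustration of just that rule, with hypothetical names:

public final class DeleteApplicability {
  private DeleteApplicability() {}

  // A positional delete applies to a data file committed at the same time as,
  // or before, the delete, i.e. dataFileSeq <= deleteFileSeq.
  public static boolean deleteApplies(long dataFileSeq, long deleteFileSeq) {
    return dataFileSeq <= deleteFileSeq;
  }

  public static void main(String[] args) {
    System.out.println(deleteApplies(3, 5)); // true: delete committed after the data
    System.out.println(deleteApplies(5, 3)); // false: data is newer than the delete
  }
}
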
+ * Consuming operation: Selecting files to be optimized. + * + * HashJoin -----------------------------------------------| + * on path, TODO: sequencenum(<=) | + * | | + * | HashAgg + * | | + * | DataFileScan + * | | + * ManifestListScan(DATA) ManifestListScan(DELETES) + */ + public RelNode buildDataManifestScanWithDeleteJoin(RelNode delete) { + RelOptCluster cluster = icebergScanPrel.getCluster(); + + ManifestScanOptions manifestScanOptions = new ImmutableManifestScanOptions.Builder() + .setIncludesSplitGen(false) + .setManifestContent(ManifestContent.DATA) + .setIncludesIcebergMetadata(true) + .build(); + + RelNode manifestScan = buildManifestRel(manifestScanOptions, false); + RexBuilder rexBuilder = cluster.getRexBuilder(); + + Pair dataFilePathCol = MoreRelOptUtil.findFieldWithIndex(manifestScan.getRowType().getFieldList(), DATAFILE_PATH); + Pair deleteDataFilePathCol = MoreRelOptUtil.findFieldWithIndex(delete.getRowType().getFieldList(), DELETE_FILE_PATH); + Pair dataFileSeqNoCol = MoreRelOptUtil.findFieldWithIndex(manifestScan.getRowType().getFieldList(), SEQUENCE_NUMBER); + Pair deleteFileSeqNoCol = MoreRelOptUtil.findFieldWithIndex(delete.getRowType().getFieldList(), IMPLICIT_SEQUENCE_NUMBER); + int probeFieldCount = manifestScan.getRowType().getFieldCount(); + RexNode joinCondition = rexBuilder.makeCall( + EQUALS, + rexBuilder.makeInputRef(dataFilePathCol.right.getType(), dataFilePathCol.left), + rexBuilder.makeInputRef(deleteDataFilePathCol.right.getType(), probeFieldCount + deleteDataFilePathCol.left)); + RexNode extraJoinCondition = rexBuilder.makeCall( + SqlStdOperatorTable.LESS_THAN_OR_EQUAL, + rexBuilder.makeInputRef(dataFileSeqNoCol.right.getType(), dataFileSeqNoCol.left), + rexBuilder.makeInputRef(deleteFileSeqNoCol.right.getType(), probeFieldCount + deleteFileSeqNoCol.left)); + + return HashJoinPrel.create(cluster, manifestScan.getTraitSet(), manifestScan, delete, joinCondition, extraJoinCondition, JoinRelType.LEFT); + } + /** * This builds manifest scan plans both with and without delete files. */ public RelNode buildManifestRel(ManifestScanOptions manifestScanOptions) { + return buildManifestRel(manifestScanOptions, true); + } - RelNode output = icebergScanPrel.buildManifestScan(getDataManifestRecordCount(), - new ImmutableManifestScanOptions.Builder().from(manifestScanOptions).setManifestContent(ManifestContent.DATA).build()); - - if (hasDeleteFiles()) { - RelNode deletes = icebergScanPrel.buildManifestScan(getDataManifestRecordCount(), - new ImmutableManifestScanOptions.Builder().from(manifestScanOptions).setManifestContent(ManifestContent.DELETES).build()); - output = buildDataAndDeleteFileJoinAndAggregate(output, deletes); + /** + * This builds manifest scan plans (With both data and delete files if combineScan is set to true + * , else for manifests that fit the {@code manifestScanOptions}). 
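ImmutableManifestScanOptions, used throughout the method above, is by its naming an Immutables-generated builder over a ManifestScanOptions value type. For readers unfamiliar with the pattern, a hand-rolled equivalent might look like the following; all names here are hypothetical stand-ins, not the generated class:

public final class ScanOptionsSketch {
  public enum Content { DATA, DELETES, ALL }

  private final Content content;
  private final boolean includesSplitGen;
  private final boolean includesIcebergMetadata;

  private ScanOptionsSketch(Content content, boolean splitGen, boolean metadata) {
    this.content = content;
    this.includesSplitGen = splitGen;
    this.includesIcebergMetadata = metadata;
  }

  public static final class Builder {
    private Content content = Content.DATA;
    private boolean splitGen = true;
    private boolean metadata = false;

    public Builder setManifestContent(Content c) { this.content = c; return this; }
    public Builder setIncludesSplitGen(boolean b) { this.splitGen = b; return this; }
    public Builder setIncludesIcebergMetadata(boolean b) { this.metadata = b; return this; }
    public ScanOptionsSketch build() { return new ScanOptionsSketch(content, splitGen, metadata); }
  }

  @Override
  public String toString() {
    return "ScanOptions{" + content + ", splitGen=" + includesSplitGen + ", metadata=" + includesIcebergMetadata + "}";
  }

  public static void main(String[] args) {
    // Mirrors the options built for the delete-manifest scan above.
    System.out.println(new Builder().setManifestContent(Content.DELETES).setIncludesSplitGen(false).setIncludesIcebergMetadata(true).build());
  }
}
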
+ */ + public RelNode buildManifestRel(ManifestScanOptions manifestScanOptions, boolean combineScan) { + if (combineScan) { + RelNode output = icebergScanPrel.buildManifestScan(getDataManifestRecordCount(), + new ImmutableManifestScanOptions.Builder().from(manifestScanOptions).setManifestContent(ManifestContent.DATA).build()); + + if (hasDeleteFiles()) { + RelNode deletes = icebergScanPrel.buildManifestScan(getDataManifestRecordCount(), + new ImmutableManifestScanOptions.Builder().from(manifestScanOptions).setManifestContent(ManifestContent.DELETES).build()); + output = buildDataAndDeleteFileJoinAndAggregate(output, deletes); + } + + return output; + } else { + return icebergScanPrel.buildManifestScan(getDataManifestRecordCount(), manifestScanOptions); } - - return output; } public RelNode buildWithDmlDataFileFiltering(RelNode dataFileFilterList) { @@ -202,9 +268,7 @@ public RelNode buildWithDmlDataFileFiltering(RelNode dataFileFilterList) { } // perform split gen - output = buildSplitGen(output); - - return icebergScanPrel.buildDataFileScan(output); + return buildDataScanWithSplitGen(output); } private RelNode buildSplitGen(RelNode input) { @@ -218,7 +282,12 @@ private RelNode buildSplitGen(RelNode input) { icebergScanPrel.getTableMetadata(), splitGenOutputSchema, icebergScanPrel.isConvertedIcebergDataset()); } - private RelNode buildDataAndDeleteFileJoinAndAggregate(RelNode data, RelNode deletes) { + public RelNode buildDataScanWithSplitGen(RelNode input) { + RelNode output = buildSplitGen(input); + return icebergScanPrel.buildDataFileScan(output); + } + + public RelNode buildDataAndDeleteFileJoinAndAggregate(RelNode data, RelNode deletes) { // put delete files on the build side... we will always broadcast as regular table maintenance is assumed to keep // delete file counts at a reasonable level RelOptCluster cluster = icebergScanPrel.getCluster(); @@ -342,6 +411,66 @@ private Prel buildDataFileFilteringSemiJoin(RelNode inputDataFiles, RelNode data JoinRelType.INNER); } + /** + * Builds Manifest and File Scan for positional delete files + * Consuming operation: Optimize with positional deletes - Delete File scan is needed to determine which data files + * have positional deletes linked to them and subsequently, need to be rewritten. 
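buildDeleteFileScan below wraps the scan's TableMetadata in an anonymous DelegatingTableMetadata subclass that overrides only getSchema(), so the delete-file scan reports the positional-delete schema while inheriting everything else. A generic, self-contained sketch of that delegate-and-override trick; the types here are hypothetical:

public class DelegationSketch {
  interface TableMeta {
    String schema();
    String location();
  }

  static class Base implements TableMeta {
    public String schema() { return "data-file-schema"; }
    public String location() { return "s3://bucket/table"; }
  }

  // Forwards every call to the wrapped instance by default.
  static class Delegating implements TableMeta {
    private final TableMeta delegate;
    Delegating(TableMeta delegate) { this.delegate = delegate; }
    public String schema() { return delegate.schema(); }
    public String location() { return delegate.location(); }
  }

  public static void main(String[] args) {
    // Override only the schema, as the anonymous DelegatingTableMetadata below does.
    TableMeta deleteScanMeta = new Delegating(new Base()) {
      @Override public String schema() { return "pos-delete-file-schema"; }
    };
    System.out.println(deleteScanMeta.schema() + " @ " + deleteScanMeta.location());
  }
}
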
+ * + * DataFileScan + * | + * | + * exchange on split identity + * | + * | + * SplitGen + * | + * | + * ManifestScan(DELETE) + * | + * | + * Exchange on split identity + * | + * | + * ManifestListScan(DELETE) + */ + public Prel buildDeleteFileScan(OptimizerRulesContext context) { + DelegatingTableMetadata deleteFileTableMetadata = new DelegatingTableMetadata(icebergScanPrel.getTableMetadata()) { + @Override + public BatchSchema getSchema() { + return ICEBERG_POS_DELETE_FILE_SCHEMA; + } + }; + IcebergScanPrel deleteFileScanPrel = new IcebergScanPrel( + icebergScanPrel.getCluster(), + icebergScanPrel.getTraitSet(), + icebergScanPrel.getTable(), + deleteFileTableMetadata.getStoragePluginId(), + deleteFileTableMetadata, + ICEBERG_POS_DELETE_FILE_SCHEMA.getFields().stream().map(i -> SchemaPath.getSimplePath(i.getName())).collect(Collectors.toList()), + 1.0, + ImmutableList.of(), + null, + false, + null, + context, + false, + null, + null, + false, + ImmutableManifestScanFilters.empty() + ); + + ManifestScanOptions deleteManifestScanOptions = new ImmutableManifestScanOptions.Builder() + .setIncludesSplitGen(false) + .setManifestContent(ManifestContent.DELETES) + .setIncludesIcebergMetadata(true) + .build(); + RelNode manifestDeleteScan = icebergScanPrel.buildManifestScan(getDeleteManifestRecordCount(), deleteManifestScanOptions); + manifestDeleteScan = new IcebergSplitGenPrel(manifestDeleteScan.getCluster(), manifestDeleteScan.getTraitSet(), icebergScanPrel.getTable(), manifestDeleteScan, + deleteFileTableMetadata, SystemSchemas.SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA, icebergScanPrel.isConvertedIcebergDataset()); + return deleteFileScanPrel.buildDataFileScanWithImplicitPartitionCols(manifestDeleteScan, ImmutableList.of(IMPLICIT_SEQUENCE_NUMBER)); + } + private static RelDataTypeField getField(RelNode rel, String fieldName) { return rel.getRowType().getField(fieldName, false, false); } @@ -366,7 +495,7 @@ private long getDeleteManifestRecordCount() { return deleteManifestStats != null ? deleteManifestStats.getRecordCount() : 0; } - private boolean hasDeleteFiles() { + public boolean hasDeleteFiles() { return getDeleteRecordCount() > 0; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPrel.java index 758bf93730..3838fa0313 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergScanPrel.java @@ -314,13 +314,15 @@ public Prel buildManifestScan(Long survivingManifestRecordCount, ManifestScanOpt mfconditions.add(pruneCondition.getPartitionRange()); } + ManifestContentType contentType = manifestScanOptions.getManifestContent().equals(ManifestContent.DATA) ? 
+ ManifestContentType.DATA : ManifestContentType.DELETES; IcebergManifestListPrel manifestListPrel = new IcebergManifestListPrel(getCluster(), getTraitSet(), tableMetadata, manifestListReaderSchema, manifestListReaderColumns, getRowTypeFromProjectedColumns(manifestListReaderColumns, manifestListReaderSchema, getCluster()), icebergPartitionPruneExpression, - manifestScanOptions.getManifestContent()); + contentType); RelNode input = manifestListPrel; @@ -381,6 +383,10 @@ public Prel buildManifestScan(Long survivingManifestRecordCount, ManifestScanOpt } public Prel buildDataFileScan(RelNode input2) { + return buildDataFileScanWithImplicitPartitionCols(input2, Collections.EMPTY_LIST); + } + + public Prel buildDataFileScanWithImplicitPartitionCols(RelNode input2, List implicitPartitionCols) { DistributionTrait.DistributionField distributionField = new DistributionTrait.DistributionField(0); DistributionTrait distributionTrait = new DistributionTrait(DistributionTrait.DistributionType.HASH_DISTRIBUTED, ImmutableList.of(distributionField)); RelTraitSet relTraitSet = getCluster().getPlanner().emptyTraitSet().plus(Prel.PHYSICAL).plus(distributionTrait); @@ -393,7 +399,8 @@ public Prel buildDataFileScan(RelNode input2) { // table scan phase TableFunctionConfig tableFunctionConfig = TableFunctionUtil.getDataFileScanTableFunctionConfig( - tableMetadata, filter, getProjectedColumns(), arrowCachingEnabled, isConvertedIcebergDataset, limitDataScanParallelism, survivingFileCount); + tableMetadata, filter, getProjectedColumns(), arrowCachingEnabled, isConvertedIcebergDataset, + limitDataScanParallelism, survivingFileCount, implicitPartitionCols); return new TableFunctionPrel(getCluster(), getTraitSet().plus(DistributionTrait.ANY), getTable(), parquetSplitsExchange, tableMetadata, tableFunctionConfig, getRowType(), getSurvivingRowCount()); @@ -421,6 +428,7 @@ private boolean isConditionOnImplicitCol() { int updateColIndex = projectedColumns.indexOf(SchemaPath.getSimplePath(IncrementalUpdateUtils.UPDATE_COLUMN)); final AtomicBoolean isImplicit = new AtomicBoolean(false); partitionExpression.accept(new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { isImplicit.set(updateColIndex==inputRef.getIndex()); return null; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSerDe.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSerDe.java index 7c31c6c125..cefc14088e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSerDe.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSerDe.java @@ -31,6 +31,7 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.PartitionSpecParser; @@ -68,6 +69,24 @@ public static DataFile deserializeDataFile(byte[] serialized) { } } + public static byte[] serializeDeleteFile(DeleteFile deleteFile) { + try { + return serializeToByteArray(deleteFile); + } catch (IOException e) { + throw new RuntimeIOException(e, "failed to serialize DeleteFile"); + } + } + + public static DeleteFile deserializeDeleteFile(byte[] serialized) { + try { + return (DeleteFile) deserializeFromByteArray(serialized); + } catch (IOException e) { + throw new RuntimeIOException(e, "failed to deserialize DeleteFile"); + } catch (ClassNotFoundException e) { + throw new RuntimeException("failed to deserialize 
DeleteFile", e); + } + } + public static byte[] serializeManifestFile(ManifestFile manifestFile) { try { return serializeToByteArray(manifestFile); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsGroupScan.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsGroupScan.java new file mode 100644 index 0000000000..1f162cc55d --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsGroupScan.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.store.iceberg.IcebergUtils.getMetadataLocation; +import static com.dremio.exec.store.iceberg.IcebergUtils.getSplitAndPartitionInfo; + +import java.util.List; + +import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.physical.base.AbstractGroupScan; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.base.SubScan; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.store.SplitWork; +import com.dremio.exec.store.TableMetadata; +import com.dremio.exec.store.dfs.IcebergTableProps; + +/** + * Iceberg snapshots group scan + */ +public class IcebergSnapshotsGroupScan extends AbstractGroupScan { + private final IcebergTableProps icebergTableProps; + private final SnapshotsScanOptions snapshotsScanOptions; + + public IcebergSnapshotsGroupScan(OpProps props, TableMetadata dataset, IcebergTableProps icebergTableProps, + List columns, SnapshotsScanOptions snapshotsScanOptions) { + super(props, dataset, columns); + this.icebergTableProps = icebergTableProps; + this.snapshotsScanOptions = snapshotsScanOptions; + } + + @Override + public int getMaxParallelizationWidth() { + return 1; + } + + @Override + public int getOperatorType() { + return UserBitShared.CoreOperatorType.ICEBERG_SNAPSHOTS_SUB_SCAN_VALUE; + } + + @Override + public SubScan getSpecificScan(List works) throws ExecutionSetupException { + final StoragePluginId pluginId; + if (dataset instanceof InternalIcebergScanTableMetadata) { + InternalIcebergScanTableMetadata icebergDataset = (InternalIcebergScanTableMetadata) dataset; + pluginId = icebergDataset.getIcebergTableStoragePlugin(); + } else { + pluginId = dataset.getStoragePluginId(); + } + + final String metadataLocation = getMetadataLocation(dataset, works); + return new IcebergSnapshotsSubScan( + props, + props.getSchema(), + getSplitAndPartitionInfo(metadataLocation), + getDataset().getName().getPathComponents(), + pluginId, + dataset.getStoragePluginId(), + columns, + icebergTableProps, + snapshotsScanOptions); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsPrel.java new file mode 100644 index 
0000000000..198aa94a5c --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsPrel.java @@ -0,0 +1,210 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.planner.common.ScanRelBase.getRowTypeFromProjectedColumns; +import static com.dremio.exec.store.SystemSchemas.ICEBERG_SNAPSHOTS_SCAN_SCHEMA; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.AbstractRelNode; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.RelWriter; +import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.rel.type.RelDataType; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.physical.base.PhysicalOperator; +import com.dremio.exec.planner.common.ScanRelBase; +import com.dremio.exec.planner.fragment.DistributionAffinity; +import com.dremio.exec.planner.physical.LeafPrel; +import com.dremio.exec.planner.physical.PhysicalPlanCreator; +import com.dremio.exec.planner.physical.Prel; +import com.dremio.exec.planner.physical.visitor.PrelVisitor; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.TableMetadata; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.options.Options; +import com.dremio.options.TypeValidators; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.IcebergMetadata; +import com.dremio.service.namespace.dataset.proto.PhysicalDataset; +import com.google.common.base.Preconditions; + +/** + * Iceberg Snapshots reader prel. It generates snapshots based on different Snapshots reader modes. 
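Conceptually, a snapshots scan boils down to parsing a table's metadata.json and emitting one row per snapshot. A hedged sketch using only public Iceberg APIs; the file path is a placeholder, and the actual reader (IcebergSnapshotsReader, added later in this patch) additionally goes through the plugin's filesystem and honors SnapshotsScanOptions modes:

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableMetadataParser;
import org.apache.iceberg.hadoop.HadoopFileIO;

public class SnapshotsListSketch {
  public static void main(String[] args) {
    // Placeholder path to a table's metadata.json.
    TableMetadata metadata = TableMetadataParser.read(
        new HadoopFileIO(new Configuration()), "/tmp/table/metadata/v1.metadata.json");
    // One output row per snapshot: id plus its manifest list location.
    for (Snapshot s : metadata.snapshots()) {
      System.out.println(s.snapshotId() + " -> " + s.manifestListLocation());
    }
  }
}
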
+ */ +@Options +public class IcebergSnapshotsPrel extends AbstractRelNode implements LeafPrel { + public static final TypeValidators.LongValidator RESERVE = new TypeValidators.PositiveLongValidator("planner.op.scan.iceberg.snapshots.reserve_bytes", Long.MAX_VALUE, DEFAULT_RESERVE); + public static final TypeValidators.LongValidator LIMIT = new TypeValidators.PositiveLongValidator("planner.op.scan.iceberg.snapshots.limit_bytes", Long.MAX_VALUE, DEFAULT_LIMIT); + + private static final BatchSchema SNAPSHOTS_READER_SCHEMA = ICEBERG_SNAPSHOTS_SCAN_SCHEMA; + private static final List PROJECTED_COLUMNS = SNAPSHOTS_READER_SCHEMA.getFields().stream().map(f -> SchemaPath.getSimplePath(f.getName())).collect(Collectors.toList()); + + protected final TableMetadata tableMetadata; + private final BatchSchema schema; + private final List projectedColumns; + private final RelDataType relDataType; + private final IcebergTableProps icebergTableProps; + private final SnapshotsScanOptions snapshotsScanOptions; + private final long estimatedRows; + private final int maxParallelizationWidth; + + + public IcebergSnapshotsPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + TableMetadata tableMetadata, + BatchSchema schema, + List projectedColumns, + RelDataType relDataType, + IcebergTableProps icebergTableProps, + SnapshotsScanOptions snapshotsScanOptions, + long estimatedRows, + int maxParallelizationWidth) { + super(cluster, traitSet); + this.tableMetadata = tableMetadata; + this.schema = schema; + this.projectedColumns = projectedColumns; + this.relDataType = relDataType; + this.icebergTableProps = icebergTableProps; + this.snapshotsScanOptions = Preconditions.checkNotNull(snapshotsScanOptions, "snapshotsScanOption cannot be null"); + this.estimatedRows = estimatedRows; + this.maxParallelizationWidth = maxParallelizationWidth; + } + + public IcebergSnapshotsPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + TableMetadata tableMetadata, + IcebergTableProps icebergTableProps, + SnapshotsScanOptions snapshotsScanOptions, + long estimatedRows, + int maxParallelizationWidth) { + this(cluster, traitSet, tableMetadata, SNAPSHOTS_READER_SCHEMA, PROJECTED_COLUMNS, + getRowTypeFromProjectedColumns(PROJECTED_COLUMNS, SNAPSHOTS_READER_SCHEMA, cluster), icebergTableProps, + snapshotsScanOptions, estimatedRows, maxParallelizationWidth); + } + + @Override + public double estimateRowCount(RelMetadataQuery mq) { + // The number of all snapshots in a table. 
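The explainTerms() method further below replaces nested null checks with an Optional.ofNullable(...).map(...) chain; once any step yields null the chain becomes empty and the remaining maps are skipped. A self-contained illustration with stand-in types (the real chain walks DatasetConfig to PhysicalDataset to IcebergMetadata to the metadata file location):

import java.util.Optional;

public class OptionalChainSketch {
  static class IcebergMetadata { String location() { return "/tmp/t/metadata/v3.metadata.json"; } }
  static class PhysicalDataset { IcebergMetadata iceberg() { return new IcebergMetadata(); } }
  static class DatasetConfig { PhysicalDataset physical() { return null; } } // simulate a missing link

  public static void main(String[] args) {
    Optional<String> location = Optional.ofNullable(new DatasetConfig())
        .map(DatasetConfig::physical)   // returns null, so the chain goes empty here
        .map(PhysicalDataset::iceberg)  // skipped once empty
        .map(IcebergMetadata::location);
    System.out.println(location.orElse("<no metadata location>")); // prints the fallback
  }
}
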
+  @Override
+  public double estimateRowCount(RelMetadataQuery mq) {
+    // The number of all snapshots in the table.
+    return estimatedRows;
+  }
+
+  @Override
+  public Iterator<Prel> iterator() {
+    return Collections.emptyIterator();
+  }
+
+  @Override
+  public BatchSchema.SelectionVectorMode[] getSupportedEncodings() {
+    return BatchSchema.SelectionVectorMode.DEFAULT;
+  }
+
+  @Override
+  public BatchSchema.SelectionVectorMode getEncoding() {
+    return BatchSchema.SelectionVectorMode.NONE;
+  }
+
+  @Override
+  public boolean needsFinalColumnReordering() {
+    return false;
+  }
+
+  @Override
+  public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws IOException {
+    return new IcebergSnapshotsGroupScan(
+      creator.props(this, tableMetadata.getUser(), schema, RESERVE, LIMIT),
+      tableMetadata,
+      icebergTableProps,
+      projectedColumns,
+      snapshotsScanOptions);
+  }
+
+  @Override
+  public Prel copy(RelTraitSet traitSet, List<RelNode> inputs) {
+    return new IcebergSnapshotsPrel(
+      getCluster(),
+      getTraitSet(),
+      tableMetadata,
+      schema,
+      projectedColumns,
+      relDataType,
+      icebergTableProps,
+      snapshotsScanOptions,
+      estimatedRows,
+      maxParallelizationWidth);
+  }
+
+  @Override
+  protected Object clone() throws CloneNotSupportedException {
+    return super.clone();
+  }
+
+  @Override
+  public <T, X, E extends Throwable> T accept(PrelVisitor<T, X, E> logicalVisitor, X value) throws E {
+    return logicalVisitor.visitLeaf(this, value);
+  }
+
+  @Override
+  public int getMaxParallelizationWidth() {
+    return maxParallelizationWidth;
+  }
+
+  @Override
+  public int getMinParallelizationWidth() {
+    return 1;
+  }
+
+  @Override
+  public DistributionAffinity getDistributionAffinity() {
+    return DistributionAffinity.NONE;
+  }
+
+  @Override
+  protected RelDataType deriveRowType() {
+    return relDataType;
+  }
+
+  @Override
+  public RelWriter explainTerms(RelWriter pw) {
+    pw = ScanRelBase.explainScanRel(pw, tableMetadata, null, 1.0);
+
+    // Optional is used to avoid NPEs in the getter chain below.
+    Optional<String> metadataLocation = Optional.ofNullable(tableMetadata.getDatasetConfig())
+      .map(DatasetConfig::getPhysicalDataset)
+      .map(PhysicalDataset::getIcebergMetadata)
+      .map(IcebergMetadata::getMetadataFileLocation);
+
+    if (metadataLocation.isPresent()) {
+      pw.item("metadataFileLocation", metadataLocation.get());
+    }
+    if (snapshotsScanOptions != null) {
+      pw.item("options value", snapshotsScanOptions);
+    }
+
+    return pw;
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsReader.java
new file mode 100644
index 0000000000..b9fe371652
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsReader.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.iceberg;
+
+import static com.dremio.exec.store.iceberg.model.IcebergOpCommitter.CONCURRENT_DML_OPERATION_ERROR;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import org.apache.arrow.memory.OutOfMemoryException;
+import org.apache.arrow.vector.BigIntVector;
+import org.apache.arrow.vector.ValueVector;
+import org.apache.arrow.vector.VarCharVector;
+import org.apache.iceberg.BaseTable;
+import org.apache.iceberg.Snapshot;
+import org.apache.iceberg.TableMetadata;
+import org.apache.iceberg.TableOperations;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.dremio.common.exceptions.ExecutionSetupException;
+import com.dremio.common.exceptions.UserException;
+import com.dremio.exec.physical.base.OpProps;
+import com.dremio.exec.store.RecordReader;
+import com.dremio.exec.store.SystemSchemas;
+import com.dremio.exec.store.dfs.IcebergTableProps;
+import com.dremio.exec.store.iceberg.model.IcebergModel;
+import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier;
+import com.dremio.io.file.FileSystem;
+import com.dremio.io.file.Path;
+import com.dremio.sabot.exec.context.OperatorContext;
+import com.dremio.sabot.op.scan.OutputMutator;
+import com.dremio.sabot.op.scan.ScanOperator;
+import com.google.common.base.Stopwatch;
+
+/**
+ * Outputs the list of snapshot IDs selected by the snapshots scan mode, together with their manifest list locations.
+ */
+public class IcebergSnapshotsReader implements RecordReader {
+  private static final Logger logger = LoggerFactory.getLogger(IcebergSnapshotsReader.class);
+
+  private final OperatorContext context;
+  private final String metadataLocation;
+  private final SupportsIcebergMutablePlugin icebergMutablePlugin;
+  private final OpProps props;
+  private final IcebergTableProps icebergTableProps;
+  private final SnapshotsScanOptions snapshotsScanOptions;
+
+  private OutputMutator output;
+  private boolean emptyTable;
+  private TableOperations ops;
+  private Iterator<Map.Entry<Long, String>> snapshotIdsIterator;
+  private VarCharVector metadataFilePathOutVector;
+  private VarCharVector manifestListOutVector;
+  private BigIntVector snapshotIdOutVector;
+
+  public IcebergSnapshotsReader(OperatorContext context,
+                                String metadataLocation,
+                                SupportsIcebergMutablePlugin icebergMutablePlugin,
+                                OpProps props,
+                                IcebergTableProps icebergTableProps,
+                                SnapshotsScanOptions snapshotsScanOptions) {
+    this.context = context;
+    this.metadataLocation = metadataLocation;
+    this.icebergMutablePlugin = icebergMutablePlugin;
+    this.props = props;
+    this.icebergTableProps = icebergTableProps;
+    this.snapshotsScanOptions = snapshotsScanOptions;
+  }
+
+  @Override
+  public void setup(OutputMutator output) throws ExecutionSetupException {
+    this.output = output;
+    FileSystem fs;
+    try {
+      // Do not use Async options, as it can result in different URI schemes - s3a -> dremioS3 -> s3
+      fs = icebergMutablePlugin.createFS(this.metadataLocation, props.getUserName(), context);
+    } catch (IOException e) {
+      logger.info("Failed creating filesystem", e);
+      throw new ExecutionSetupException("Failed creating filesystem", e);
+    }
+
+    IcebergModel icebergModel = icebergMutablePlugin.getIcebergModel(icebergTableProps, props.getUserName(), null, fs);
+    IcebergTableIdentifier tableIdentifier = icebergModel.getTableIdentifier(icebergMutablePlugin.getTableLocation(icebergTableProps));
+    BaseTable icebergTable = (BaseTable) icebergModel.getIcebergTableLoader(tableIdentifier).getIcebergTable();
+    ops = icebergTable.operations();
+    final TableMetadata tableMetadata = ops.current();
+    if (tableMetadata.snapshots() == null || tableMetadata.snapshots().isEmpty()) {
+      // Currently, this reader does not generate any output for an empty Iceberg table. However, it might be
+      // better to throw an exception telling users that the Iceberg table is empty.
+      emptyTable = true;
+      return;
+    }
+
+    if (isIcebergTableUpdated(tableMetadata.metadataFileLocation())) {
+      throw UserException.concurrentModificationError().message(CONCURRENT_DML_OPERATION_ERROR).buildSilently();
+    }
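+    // The two scan modes differ in whether the expiry is committed (see the switch below):
+    //   EXPIRED_SNAPSHOTS - computes the snapshots that would be removed via
+    //                       expireSnapshots().apply(), without committing the operation;
+    //   LIVE_SNAPSHOTS    - commits the expiry through the IcebergModel and reports
+    //                       the snapshots it returns.
+    // Minimal Iceberg sketch of the uncommitted variant (cutoff value is illustrative,
+    // not part of this change):
+    //   List<Snapshot> wouldExpire = icebergTable.expireSnapshots()
+    //       .expireOlderThan(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(5))
+    //       .retainLast(1)
+    //       .apply();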
+    Stopwatch stopwatchIceberg = Stopwatch.createStarted();
+    Map<Long, String> snapshotManifests;
+    switch (snapshotsScanOptions.getMode()) {
+      case EXPIRED_SNAPSHOTS:
+        snapshotManifests = icebergTable.expireSnapshots()
+          .expireOlderThan(snapshotsScanOptions.getOlderThanInMillis())
+          .retainLast(snapshotsScanOptions.getRetainLast())
+          .apply().stream().collect(Collectors.toMap(Snapshot::snapshotId, Snapshot::manifestListLocation));
+        break;
+      case LIVE_SNAPSHOTS:
+        // Commit the expire operation.
+        snapshotManifests = icebergModel.expireSnapshots(
+          tableIdentifier, snapshotsScanOptions.getOlderThanInMillis(), snapshotsScanOptions.getRetainLast());
+        break;
+      default:
+        throw new IllegalStateException(String.format("Unknown Snapshots scan mode: %s", snapshotsScanOptions.getMode()));
+    }
+    logger.info("Snapshot ids: {}", snapshotManifests);
+    snapshotIdsIterator = snapshotManifests.entrySet().iterator();
+    long totalExpireTime = stopwatchIceberg.elapsed(TimeUnit.MILLISECONDS);
+    context.getStats().addLongStat(ScanOperator.Metric.ICEBERG_COMMIT_TIME, totalExpireTime);
+
+    metadataFilePathOutVector = (VarCharVector) output.getVector(SystemSchemas.METADATA_FILE_PATH);
+    manifestListOutVector = (VarCharVector) output.getVector(SystemSchemas.MANIFEST_LIST_PATH);
+    snapshotIdOutVector = (BigIntVector) output.getVector(SystemSchemas.SNAPSHOT_ID);
+  }
+
+  @Override
+  public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
+    for (final ValueVector v : vectorMap.values()) {
+      v.allocateNew();
+    }
+  }
+
+  @Override
+  public int next() {
+    if (emptyTable || !snapshotIdsIterator.hasNext()) {
+      return 0;
+    }
+
+    int outIndex = 0;
+    while (snapshotIdsIterator.hasNext() && outIndex < context.getTargetBatchSize()) {
+      Map.Entry<Long, String> snapshotManifest = snapshotIdsIterator.next();
+      metadataFilePathOutVector.setSafe(outIndex, metadataLocation.getBytes(StandardCharsets.UTF_8));
+      snapshotIdOutVector.setSafe(outIndex, snapshotManifest.getKey());
+      manifestListOutVector.setSafe(outIndex, snapshotManifest.getValue().getBytes(StandardCharsets.UTF_8));
+      outIndex++;
+    }
+
+    int valueCount = outIndex;
+    output.getVectors().forEach(v -> v.setValueCount(valueCount));
+    return valueCount;
+  }
+
+  @Override
+  public void close() throws Exception {
+    context.getStats().setReadIOStats();
+  }
+
+  private boolean isIcebergTableUpdated(String rootPointer) {
+    return !Path.getContainerSpecificRelativePath(Path.of(rootPointer))
+      .equals(Path.getContainerSpecificRelativePath(Path.of(metadataLocation)));
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsScanCreator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsScanCreator.java
new file mode 100644
index 0000000000..f45f45b539
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsScanCreator.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2017-2019
Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import java.util.List; +import java.util.stream.Collectors; + +import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.datastore.LegacyProtobufSerializer; +import com.dremio.exec.store.RecordReader; +import com.dremio.exec.store.SplitAndPartitionInfo; +import com.dremio.exec.store.parquet.RecordReaderIterator; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf; +import com.dremio.sabot.op.scan.ScanOperator; +import com.dremio.sabot.op.spi.ProducerOperator; +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Iceberg snapshots scan operator creator + */ +public class IcebergSnapshotsScanCreator implements ProducerOperator.Creator { + + @Override + public ProducerOperator create(FragmentExecutionContext fragmentExecContext, + OperatorContext context, + IcebergSnapshotsSubScan config) throws ExecutionSetupException { + final SupportsIcebergMutablePlugin plugin = fragmentExecContext.getStoragePlugin(config.getPluginId()); + List splits = config.getSplits(); + List readers = splits.stream().map(s -> { + IcebergProtobuf.IcebergDatasetSplitXAttr splitXAttr; + try { + splitXAttr = LegacyProtobufSerializer.parseFrom(IcebergProtobuf.IcebergDatasetSplitXAttr.PARSER, + s.getDatasetSplitInfo().getExtendedProperty()); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException("Could not deserialize split info", e); + } + return new IcebergSnapshotsReader( + context, + splitXAttr.getPath(), + plugin, + config.getProps(), + config.getIcebergTableProps(), + config.getSnapshotsScanOptions() + ); + }).collect(Collectors.toList()); + return new ScanOperator(config, context, RecordReaderIterator.from(readers.listIterator())); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsSubScan.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsSubScan.java new file mode 100644 index 0000000000..7ac537bb25 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSnapshotsSubScan.java @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store.iceberg; + +import java.util.List; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.base.SubScanWithProjection; +import com.dremio.exec.planner.fragment.MinorDataReader; +import com.dremio.exec.planner.fragment.MinorDataWriter; +import com.dremio.exec.planner.fragment.SplitNormalizer; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.SplitAndPartitionInfo; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; +import com.google.common.collect.ImmutableList; + +/** + * Iceberg snapshots subscan POP + */ +@JsonTypeName("iceberg-snapshots-sub-scan") +public class IcebergSnapshotsSubScan extends SubScanWithProjection { + private final StoragePluginId pluginId; + private final StoragePluginId datasourcePluginId; + private final IcebergTableProps icebergTableProps; + private final SnapshotsScanOptions snapshotsScanOptions; + + @JsonIgnore + private List splits; + + public IcebergSnapshotsSubScan( + @JsonProperty("props") OpProps props, + @JsonProperty("fullSchema") BatchSchema fullSchema, + @JsonProperty("tableSchemaPath") List tablePath, + @JsonProperty("pluginId") StoragePluginId pluginId, + @JsonProperty("datasourcePluginId") StoragePluginId datasourcePluginId, + @JsonProperty("columns") List columns, + @JsonProperty("icebergTableProps") IcebergTableProps icebergTableProps, + @JsonProperty("snapshotsScanOptions") SnapshotsScanOptions snapshotsScanOptions) { + this(props, fullSchema, null, tablePath, pluginId, datasourcePluginId, columns, icebergTableProps, snapshotsScanOptions); + } + + public IcebergSnapshotsSubScan( + OpProps props, + BatchSchema fullSchema, + List splits, + List tablePath, + StoragePluginId pluginId, + StoragePluginId datasourcePluginId, + List columns, + IcebergTableProps icebergTableProps, + SnapshotsScanOptions snapshotsScanOptions) { + super(props, fullSchema, (tablePath == null) ? 
null : ImmutableList.of(tablePath), columns); + this.pluginId = pluginId; + this.datasourcePluginId = datasourcePluginId; + this.splits = splits; + this.icebergTableProps = icebergTableProps; + this.snapshotsScanOptions = snapshotsScanOptions; + } + + public StoragePluginId getPluginId() { + return pluginId; + } + + public StoragePluginId getDatasourcePluginId() { + return datasourcePluginId; + } + + public IcebergTableProps getIcebergTableProps() { + return icebergTableProps; + } + + public List getSplits() { + return splits; + } + + public SnapshotsScanOptions getSnapshotsScanOptions() { + return snapshotsScanOptions; + } + + @Override + public int getOperatorType() { + return UserBitShared.CoreOperatorType.ICEBERG_SNAPSHOTS_SUB_SCAN_VALUE; + } + + @Override + public void collectMinorSpecificAttrs(MinorDataWriter writer) { + SplitNormalizer.write(getProps(), writer, splits); + } + + @Override + public void populateMinorSpecificAttrs(MinorDataReader reader) throws Exception { + splits = SplitNormalizer.read(getProps(), reader); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSplitGenPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSplitGenPrel.java index 122be88427..965a8d7776 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSplitGenPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergSplitGenPrel.java @@ -74,6 +74,7 @@ public RelNode copy(RelTraitSet traitSet, List inputs) { getTableFunctionConfig(), getRowType()); } + @Override protected double defaultEstimateRowCount(TableFunctionConfig functionConfig, RelMetadataQuery mq) { // estimate output row count based on planner.num_splits_per_file final PlannerSettings plannerSettings = PrelUtil.getPlannerSettings(getCluster().getPlanner()); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergUtils.java index acad93e18b..ee1340a640 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/IcebergUtils.java @@ -79,11 +79,11 @@ import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexVisitorImpl; -import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.iceberg.DremioIndexByName; +import org.apache.iceberg.MetadataColumns; import org.apache.iceberg.PartitionField; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.PartitionStatsFileLocations; @@ -93,6 +93,7 @@ import org.apache.iceberg.exceptions.NotFoundException; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.expressions.Term; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -104,6 +105,9 @@ import com.dremio.common.expression.Describer; import com.dremio.common.map.CaseInsensitiveMap; import com.dremio.common.utils.protos.QueryIdHelper; +import com.dremio.connector.metadata.DatasetSplit; +import com.dremio.connector.metadata.DatasetSplitAffinity; +import com.dremio.datastore.LegacyProtobufSerializer; import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUtil; @@ -114,12 +118,14 @@ import 
com.dremio.exec.catalog.SourceCatalog; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.exec.physical.base.IcebergWriterOptions; import com.dremio.exec.physical.base.ImmutableIcebergWriterOptions; import com.dremio.exec.physical.base.ImmutableTableFormatWriterOptions; import com.dremio.exec.physical.base.TableFormatWriterOptions.TableFormatOperation; import com.dremio.exec.physical.base.WriterOptions; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; import com.dremio.exec.planner.logical.CreateTableEntry; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.physical.WriterPrel; @@ -129,15 +135,22 @@ import com.dremio.exec.planner.sql.handlers.query.OptimizeOptions; import com.dremio.exec.planner.sql.parser.PartitionDistributionStrategy; import com.dremio.exec.planner.sql.parser.SqlGrant; +import com.dremio.exec.planner.sql.parser.SqlOptimize; +import com.dremio.exec.planner.sql.parser.SqlVacuumTable; import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.record.SchemaBuilder; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.RecordReader; import com.dremio.exec.store.SchemaConfig; +import com.dremio.exec.store.SplitAndPartitionInfo; import com.dremio.exec.store.SplitIdentity; +import com.dremio.exec.store.SplitWork; import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.store.SystemSchemas; +import com.dremio.exec.store.TableMetadata; import com.dremio.exec.store.dfs.FileSystemConf; +import com.dremio.exec.store.dfs.FileSystemConfigurationAdapter; import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.exec.store.dfs.IcebergTableProps; import com.dremio.exec.store.dfs.PrimaryKeyOperations; @@ -147,7 +160,9 @@ import com.dremio.options.OptionManager; import com.dremio.sabot.exec.fragment.FragmentExecutionContext; import com.dremio.sabot.exec.store.easy.proto.EasyProtobuf; +import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf; import com.dremio.service.namespace.DatasetHelper; +import com.dremio.service.namespace.MetadataProtoUtils; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.PartitionChunkMetadata; import com.dremio.service.namespace.dataset.proto.DatasetConfig; @@ -187,7 +202,22 @@ public Void visitInputRef(RexInputRef inputRef) { * @return column name to integer ID mapping */ public static Map getIcebergColumnNameToIDMap(Schema schema) { - Map schemaNameIDMap = TypeUtil.visit(Types.StructType.of(schema.columns()), new DremioIndexByName()); + return getIcebergColumnNameToIDMap(schema.columns()); + } + + public static Map getColIDMapWithReservedDeleteFields(Schema schema) { + List allCols = new ArrayList<>(schema.columns()); + + // Not an exhaustive list, add the reserved columns as per usage + // https://iceberg.apache.org/spec/#reserved-field-ids + allCols.add(MetadataColumns.DELETE_FILE_PATH); + allCols.add(MetadataColumns.DELETE_FILE_POS); + + return getIcebergColumnNameToIDMap(allCols); + } + + private static Map getIcebergColumnNameToIDMap(List cols) { + Map schemaNameIDMap = TypeUtil.visit(Types.StructType.of(cols), new DremioIndexByName()); CaseInsensitiveMap nameToIDMap = CaseInsensitiveMap.newHashMap(); nameToIDMap.putAll(schemaNameIDMap); // if two fields have the same name, 
ignore one of them return CaseInsensitiveMap.newImmutableMap(nameToIDMap); @@ -303,7 +333,8 @@ public static boolean isNonAddOnField(String fieldName) { return fieldName.equalsIgnoreCase(RecordReader.SPLIT_IDENTITY) || fieldName.equalsIgnoreCase(RecordReader.SPLIT_INFORMATION) || fieldName.equalsIgnoreCase(RecordReader.COL_IDS) - || fieldName.equalsIgnoreCase(RecordReader.DATAFILE_PATH); + || fieldName.equalsIgnoreCase(RecordReader.DATAFILE_PATH) + || SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_COLS.contains(fieldName); } /** @@ -371,29 +402,59 @@ public static Optional checkTableExistenceAndMutability(Cat return Optional.empty(); } - public static String getPartitionStatsFile(String rootPointer, long snapshotId, Configuration conf, MutablePlugin plugin) { + public static ImmutableDremioFileAttrs getPartitionStatsFileAttrs(String rootPointer, long snapshotId, FileIO fileIO) { String partitionStatsMetadata = PartitionStatsMetadataUtil.toFilename(snapshotId); + String partitionStatsFile = null; + Long fileLength = null; + try { String fullPath = resolvePath(rootPointer, partitionStatsMetadata); - PartitionStatsFileLocations partitionStatsLocations = PartitionStatsMetadataUtil.readMetadata(new DremioFileIO(conf, plugin), fullPath); + PartitionStatsFileLocations partitionStatsLocations = PartitionStatsMetadataUtil.readMetadata( + fileIO, fullPath); if (partitionStatsLocations == null) { logger.debug("Partition stats metadata file: {} not found", partitionStatsMetadata); - return null; + return new ImmutableDremioFileAttrs.Builder() + .setFileName(partitionStatsFile) + .setFileLength(fileLength) + .build(); } Map partitionStatsFileBySpecId = partitionStatsLocations.all(); if (partitionStatsFileBySpecId.isEmpty()) { logger.debug("Partition stats metadata file: {} was empty", partitionStatsMetadata); - return null; + return new ImmutableDremioFileAttrs.Builder() + .setFileName(partitionStatsFile) + .setFileLength(fileLength) + .build(); } + int maxSpecId = 0; if (partitionStatsFileBySpecId.size() > 1) { - logger.warn("Partition stats metadata file: {} has multiple entries", partitionStatsMetadata); + logger.info("Partition stats metadata file: {} has multiple entries", partitionStatsMetadata); + maxSpecId = partitionStatsFileBySpecId.size() - 1; } // In the absence of partition spec evolution, we'll have just one partition spec file - return partitionStatsFileBySpecId.values().iterator().next(); + partitionStatsFile = partitionStatsLocations.getFileForSpecId(maxSpecId); + if (partitionStatsFile != null) { + try { + fileLength = fileIO.newInputFile(partitionStatsFile).getLength(); + } catch (UserException uex) { + // ignore UserException thrown by DremioFileIO while reading partition stats file + logger.warn("Unable to read partition stats file: {}. 
Ignoring partition stats", partitionStatsFile); + fileLength = null; + partitionStatsFile = null; + } + } } catch (NotFoundException | UncheckedIOException exception) { logger.debug("Partition stats metadata file: {} not found", partitionStatsMetadata); - return null; } + + return new ImmutableDremioFileAttrs.Builder() + .setFileName(partitionStatsFile) + .setFileLength(fileLength) + .build(); + } + + public static PartitionStatsFileLocations getPartitionStatsFiles(FileIO fileIO, String partitionStatsMetadataFilePath) { + return PartitionStatsMetadataUtil.readMetadata(fileIO, partitionStatsMetadataFilePath); } @VisibleForTesting @@ -540,48 +601,56 @@ public static PartitionSpec getIcebergPartitionSpec(BatchSchema batchSchema, } public static String getValidIcebergPath(Path path, Configuration conf, String fsScheme) { - try { - if (fsScheme == null || path.toUri().getScheme() != null) { - return path.toString(); - } - String modifiedPath = removeLeadingSlash(path.toString()); - if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.AZURE_STORAGE_FILE_SYSTEM_SCHEME.getScheme())) { - String accountName = conf.get("dremio.azure.account"); - StringBuilder urlBuilder = new StringBuilder(); - urlBuilder.append(AZURE_SCHEME); - urlBuilder.append(SCHEME_SEPARATOR); - urlBuilder.append(getContainerName(path)); - urlBuilder.append(CONTAINER_SEPARATOR + accountName + AZURE_AUTHORITY_SUFFIX); - urlBuilder.append(pathWithoutContainer(path).toString()); - return urlBuilder.toString(); - } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.S3_FILE_SYSTEM_SCHEME.getScheme())) { - return S3_SCHEME + SCHEME_SEPARATOR + modifiedPath; - } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.GOOGLE_CLOUD_FILE_SYSTEM.getScheme())) { - return GCS_SCHEME + SCHEME_SEPARATOR + modifiedPath; - } else if (fsScheme.equalsIgnoreCase(HDFS_SCHEME)) { - String hdfsEndPoint = conf.get("fs.defaultFS"); - if (hdfsEndPoint == null || !hdfsEndPoint.toLowerCase().startsWith(HDFS_SCHEME)) { - return HDFS_SCHEME + SCHEME_SEPARATOR + Path.SEPARATOR + modifiedPath; //Without authority - } else { - return hdfsEndPoint + modifiedPath; - } - } else if (fsScheme.equalsIgnoreCase(FILE_SCHEME)) { - return FILE_SCHEME + SCHEME_SEPARATOR + Path.SEPARATOR + modifiedPath; - } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.ADL_FILE_SYSTEM_SCHEME.getScheme())) { - String adlsEndPoint = conf.get("fs.defaultFS", SEPARATOR); - String[] endPointParts = adlsEndPoint.split(SCHEME_SEPARATOR); - adlsEndPoint = (endPointParts.length > 1) ? 
endPointParts[1] : SEPARATOR; - StringBuilder urlBuilder = new StringBuilder(); - return urlBuilder.append(ADL_SCHEME).append(SCHEME_SEPARATOR) - .append(adlsEndPoint).append(modifiedPath).toString(); - } else if (fsScheme.equalsIgnoreCase(MAPRFS_SCHEME)) { - return MAPRFS_SCHEME + SCHEME_SEPARATOR + SEPARATOR + modifiedPath; + return getValidIcebergPath(path, new HadoopFileSystemConfigurationAdapter(conf), fsScheme); + } + + public static String getValidIcebergPath(String path, FileSystemConfigurationAdapter conf, String fsScheme) { + return getValidIcebergPath(new Path(path), conf, fsScheme); + } + + public static String getValidIcebergPath(Path path, FileSystemConfigurationAdapter conf, String fsScheme) { + try { + if (fsScheme == null || path.toUri().getScheme() != null) { + return path.toString(); + } + String modifiedPath = removeLeadingSlash(path.toString()); + if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.AZURE_STORAGE_FILE_SYSTEM_SCHEME.getScheme())) { + String accountName = conf.get("dremio.azure.account"); + StringBuilder urlBuilder = new StringBuilder(); + urlBuilder.append(AZURE_SCHEME); + urlBuilder.append(SCHEME_SEPARATOR); + urlBuilder.append(getContainerName(path)); + urlBuilder.append(CONTAINER_SEPARATOR + accountName + AZURE_AUTHORITY_SUFFIX); + urlBuilder.append(pathWithoutContainer(path).toString()); + return urlBuilder.toString(); + } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.S3_FILE_SYSTEM_SCHEME.getScheme())) { + return S3_SCHEME + SCHEME_SEPARATOR + modifiedPath; + } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.GOOGLE_CLOUD_FILE_SYSTEM.getScheme())) { + return GCS_SCHEME + SCHEME_SEPARATOR + modifiedPath; + } else if (fsScheme.equalsIgnoreCase(HDFS_SCHEME)) { + String hdfsEndPoint = conf.get("fs.defaultFS"); + if (hdfsEndPoint == null || !hdfsEndPoint.toLowerCase().startsWith(HDFS_SCHEME)) { + return HDFS_SCHEME + SCHEME_SEPARATOR + Path.SEPARATOR + modifiedPath; //Without authority } else { - throw new Exception("No File System scheme matches"); + return hdfsEndPoint + modifiedPath; } - } catch (Exception ex) { - throw new UnknownFormatConversionException("Unknown format (" + fsScheme + ") conversion for path " + path + " Error Message : " + ex.getMessage()); + } else if (fsScheme.equalsIgnoreCase(FILE_SCHEME)) { + return FILE_SCHEME + SCHEME_SEPARATOR + Path.SEPARATOR + modifiedPath; + } else if (fsScheme.equalsIgnoreCase(FileSystemConf.CloudFileSystemScheme.ADL_FILE_SYSTEM_SCHEME.getScheme())) { + String adlsEndPoint = conf.get("fs.defaultFS", SEPARATOR); + String[] endPointParts = adlsEndPoint.split(SCHEME_SEPARATOR); + adlsEndPoint = (endPointParts.length > 1) ? 
endPointParts[1] : SEPARATOR; + StringBuilder urlBuilder = new StringBuilder(); + return urlBuilder.append(ADL_SCHEME).append(SCHEME_SEPARATOR) + .append(adlsEndPoint).append(modifiedPath).toString(); + } else if (fsScheme.equalsIgnoreCase(MAPRFS_SCHEME)) { + return MAPRFS_SCHEME + SCHEME_SEPARATOR + SEPARATOR + modifiedPath; + } else { + throw new Exception("No File System scheme matches"); } + } catch (Exception ex) { + throw new UnknownFormatConversionException("Unknown format (" + fsScheme + ") conversion for path " + path + " Error Message : " + ex.getMessage()); + } } public static SupportsInternalIcebergTable getSupportsInternalIcebergTablePlugin(FragmentExecutionContext fec, StoragePluginId pluginId) { @@ -650,8 +719,7 @@ public static void setPartitionSpecValue(IcebergPartitionData data, int position if(value != null) { BigInteger unscaledValue = new BigInteger((byte[])value); data.setBigDecimal(position, new BigDecimal(unscaledValue, type.getScale())); - } - else { + } else { data.setBigDecimal(position, null); } break; @@ -668,8 +736,7 @@ public static void setPartitionSpecValue(IcebergPartitionData data, int position if(value != null) { long days = TimeUnit.MILLISECONDS.toDays((Long)value); data.setInteger(position, Math.toIntExact(days)); - } - else { + } else { data.setInteger(position, null); } break; @@ -678,8 +745,7 @@ public static void setPartitionSpecValue(IcebergPartitionData data, int position if (value != null) { long longValue = ((Integer)(value)).longValue() * 1000L; data.setLong(position, longValue); - } - else { + } else { data.setLong(position, null); } break; @@ -851,6 +917,14 @@ public static boolean isIcebergDMLFeatureEnabled(SourceCatalog sourceCatalog, return options.getOption(ExecConstants.ENABLE_ICEBERG_DML); } + public static void validateTablePropertiesRequest(OptionManager options) { + if (!options.getOption(ExecConstants.ENABLE_ICEBERG_TABLE_PROPERTIES)) { + throw UserException.unsupportedError() + .message("TBLPROPERTIES is not supported in the query") + .buildSilently(); + } + } + public static boolean validatePluginSupportForIceberg(SourceCatalog sourceCatalog, NamespaceKey path) { StoragePlugin storagePlugin; try { @@ -893,8 +967,32 @@ public static Term getIcebergTerm(PartitionTransform transform) { } } + // TODO: TableProperties should not be part of iceberg, so this function should be moved out of icebergUtils. will do as part of DX-61190 + public static Map convertTableProperties(List tablePropertyNameList, List tablePropertyValueList, boolean expectEmptyValues) { + if (expectEmptyValues) { + if (!(tablePropertyValueList == null || tablePropertyValueList.isEmpty())) { + throw UserException.parseError() + .message("Property values list should be empty") + .buildSilently(); + } + } else { + if (tablePropertyNameList.size() != tablePropertyValueList.size()) { + throw UserException.parseError() + .message("Number of table property names does not match values") + .buildSilently(); + } + } + Map tableProperties = new HashMap<>(); + for (int index = 0; index < tablePropertyNameList.size(); index++) { + String nameString = tablePropertyNameList.get(index); + String valueString = expectEmptyValues ? 
"" : tablePropertyValueList.get(index); + tableProperties.put(nameString, valueString); + } + return tableProperties; + } + public static CreateTableEntry getIcebergCreateTableEntry(SqlHandlerConfig config, Catalog catalog, DremioTable table, - SqlKind sqlKind, OptimizeOptions optimizeOptions) { + SqlOperator sqlOperator, OptimizeOptions optimizeOptions) { final NamespaceKey key = table.getPath(); final DatasetConfig datasetConfig = table.getDatasetConfig(); final ReadDefinition readDefinition = datasetConfig.getReadDefinition(); @@ -910,7 +1008,7 @@ public static CreateTableEntry getIcebergCreateTableEntry(SqlHandlerConfig confi queryId, null, partitionColumnsList, - getIcebergCommandType(sqlKind), + getIcebergCommandType(sqlOperator), null, key.getName(), null, @@ -923,11 +1021,13 @@ public static CreateTableEntry getIcebergCreateTableEntry(SqlHandlerConfig confi IcebergWriterOptions icebergOptions = new ImmutableIcebergWriterOptions.Builder() .setIcebergTableProps(icebergTableProps).build(); ImmutableTableFormatWriterOptions.Builder tableFormatOptionsBuilder = new ImmutableTableFormatWriterOptions.Builder() - .setIcebergSpecificOptions(icebergOptions).setOperation(getTableFormatOperation(sqlKind)); + .setIcebergSpecificOptions(icebergOptions).setOperation(getTableFormatOperation(sqlOperator)); if (optimizeOptions != null) { tableFormatOptionsBuilder.setMinInputFilesBeforeOptimize(optimizeOptions.getMinInputFiles()); tableFormatOptionsBuilder.setTargetFileSize(optimizeOptions.getTargetFileSizeBytes()); + tableFormatOptionsBuilder.setSnapshotId( + table.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getSnapshotId()); isSingleWriter = optimizeOptions.isSingleDataWriter(); } @@ -955,33 +1055,45 @@ public static CreateTableEntry getIcebergCreateTableEntry(SqlHandlerConfig confi null); } - private static IcebergCommandType getIcebergCommandType(SqlKind sqlKind) { - switch (sqlKind) { + private static IcebergCommandType getIcebergCommandType(SqlOperator sqlOperator) { + switch (sqlOperator.getKind()) { case DELETE: return IcebergCommandType.DELETE; case UPDATE: return IcebergCommandType.UPDATE; case MERGE: return IcebergCommandType.MERGE; - case OTHER: - return IcebergCommandType.OPTIMIZE; + case OTHER: { + if (sqlOperator.getName().equalsIgnoreCase(SqlOptimize.OPERATOR.getName())) { + return IcebergCommandType.OPTIMIZE; + } else if (sqlOperator.getName().equalsIgnoreCase(SqlVacuumTable.OPERATOR.getName())) { + return IcebergCommandType.VACUUM; + } + throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlOperator.getKind())); + } default: - throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlKind)); + throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlOperator.getKind())); } } - private static TableFormatOperation getTableFormatOperation(SqlKind sqlKind) { - switch (sqlKind) { + private static TableFormatOperation getTableFormatOperation(SqlOperator sqlOperator) { + switch (sqlOperator.getKind()) { case DELETE: return TableFormatOperation.DELETE; case UPDATE: return TableFormatOperation.UPDATE; case MERGE: return TableFormatOperation.MERGE; - case OTHER: - return TableFormatOperation.OPTIMIZE; + case OTHER: { + if (sqlOperator.getName().equalsIgnoreCase(SqlOptimize.OPERATOR.getName())) { + return TableFormatOperation.OPTIMIZE; + } else if (sqlOperator.getName().equalsIgnoreCase(SqlVacuumTable.OPERATOR.getName())) { + return 
TableFormatOperation.VACUUM;
+      }
+      throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlOperator.getKind()));
+    }
     default:
-      throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlKind));
+      throw new UnsupportedOperationException(String.format("Unrecoverable Error: Invalid type: %s", sqlOperator.getKind()));
   }
 }
@@ -1065,4 +1177,45 @@ public static List<String> getPrimaryKeyFromTableMetadata(Table icebergTable) {
     .map(f -> f.getName().toLowerCase(Locale.ROOT))
     .collect(Collectors.toList());
 }
+
+  public static String getMetadataLocation(TableMetadata dataset, List<SplitWork> works) {
+    if (dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata() != null &&
+      dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation() != null &&
+      !dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation().isEmpty()) {
+      return dataset.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getMetadataFileLocation();
+    } else {
+      EasyProtobuf.EasyDatasetSplitXAttr extended;
+      try {
+        if (works.size() == 0) {
+          // It's an invalid scenario if the number of splits is zero.
+          throw new RuntimeException("Unexpected state with zero split.");
+        }
+        // All splits carry the same Iceberg metadata location, so it can be read
+        // from any of them; use the first one here.
+        extended = LegacyProtobufSerializer.parseFrom(EasyProtobuf.EasyDatasetSplitXAttr.PARSER,
+          works.get(0).getSplitExtendedProperty());
+        return extended.getPath();
+      } catch (InvalidProtocolBufferException e) {
+        throw new RuntimeException("Could not deserialize split info", e);
+      }
+    }
+  }
+
+  public static List<SplitAndPartitionInfo> getSplitAndPartitionInfo(String splitPath) {
+    final List<SplitAndPartitionInfo> splits = new ArrayList<>();
+    IcebergProtobuf.IcebergDatasetSplitXAttr splitExtended = IcebergProtobuf.IcebergDatasetSplitXAttr.newBuilder()
+      .setPath(splitPath)
+      .build();
+    List<DatasetSplitAffinity> splitAffinities = new ArrayList<>();
+    DatasetSplit datasetSplit = DatasetSplit.of(
+      splitAffinities, 0, 0, splitExtended::writeTo);
+
+    PartitionProtobuf.NormalizedPartitionInfo partitionInfo = PartitionProtobuf.NormalizedPartitionInfo.newBuilder().setId(String.valueOf(1)).build();
+    PartitionProtobuf.NormalizedDatasetSplitInfo.Builder splitInfo = PartitionProtobuf.NormalizedDatasetSplitInfo
+      .newBuilder()
+      .setPartitionId(partitionInfo.getId())
+      .setExtendedProperty(MetadataProtoUtils.toProtobuf(datasetSplit.getExtraInfo()));
+    splits.add(new SplitAndPartitionInfo(partitionInfo, splitInfo.build()));
+    return splits;
+  }
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/InputCarryForwardTableFunctionDecorator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/InputCarryForwardTableFunctionDecorator.java
new file mode 100644
index 0000000000..1f3d0ef8db
--- /dev/null
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/InputCarryForwardTableFunctionDecorator.java
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.iceberg;
+
+import static com.dremio.exec.util.VectorUtil.getVectorFromSchemaPath;
+
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.arrow.vector.ValueVector;
+import org.apache.arrow.vector.VarCharVector;
+import org.apache.arrow.vector.types.pojo.Field;
+import org.apache.arrow.vector.util.TransferPair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.dremio.common.expression.BasePath;
+import com.dremio.common.expression.SchemaPath;
+import com.dremio.exec.expr.TypeHelper;
+import com.dremio.exec.record.TypedFieldId;
+import com.dremio.exec.record.VectorAccessible;
+import com.dremio.exec.record.VectorContainer;
+import com.dremio.exec.record.VectorWrapper;
+import com.dremio.sabot.exec.fragment.OutOfBandMessage;
+import com.dremio.sabot.op.tablefunction.TableFunction;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Streams;
+
+/**
+ * Uses carry-forward columns to accumulate input rows.
+ *
+ * a. If carry-forward columns are present in the incoming schema, rows that have these columns populated are
+ * copied over as-is into the output.
+ * b. Carry-forward column values from the incoming row are set in a carry-forward row, as per a mapping rule.
+ */
+public class InputCarryForwardTableFunctionDecorator implements TableFunction {
+  private static final Logger LOGGER = LoggerFactory.getLogger(InputCarryForwardTableFunctionDecorator.class);
+  private final TableFunction baseTableFunction;
+  private final List<String> carryForwardCols;
+  private final Map<SchemaPath, SchemaPath> mappingRule;
+  private final String inputTypeCol;
+  private final String inputType;
+  private List<TransferPair> carryForwardColTransfers;
+  private List<TransferPair> mappingColTransfers = new ArrayList<>();
+  private List<ValueVector> carryForwardColVectors;
+  private VarCharVector inputTypeOutVector;
+  private boolean isCarryForwardRow = false;
+  private boolean mappingRuleProcessed = false;
+  private boolean rowCompleted = false;
+  private int row;
+  private VectorContainer outgoing;
+  private VectorContainer incoming;
+
+  public InputCarryForwardTableFunctionDecorator(TableFunction baseTableFunction, List<String> carryForwardCols,
+                                                 Map<SchemaPath, SchemaPath> mappingRule, String inputTypeCol, String inputType) {
+    Preconditions.checkState(!carryForwardCols.isEmpty());
+    Preconditions.checkState(!mappingRule.isEmpty());
+    this.baseTableFunction = baseTableFunction;
+    this.carryForwardCols = carryForwardCols;
+    this.mappingRule = mappingRule;
+
+    // Feed the constant value "inputType" into the "inputTypeCol" column.
+    this.inputTypeCol = inputTypeCol;
+    this.inputType = inputType;
+  }
+
+  @Override
+  public VectorAccessible setup(VectorAccessible incoming) throws Exception {
+    this.incoming = (VectorContainer) incoming;
+    this.outgoing = (VectorContainer) baseTableFunction.setup(incoming);
+
+    // create transfer pairs for any additional input columns
+    carryForwardColTransfers = Streams.stream(incoming)
+      .filter(vw -> carryForwardCols.contains(vw.getValueVector().getName()) &&
+        outgoing.getSchema().getFieldId(BasePath.getSimple(vw.getValueVector().getName())) != null)
+      .map(vw -> vw.getValueVector().makeTransferPair(
+        getVectorFromSchemaPath(outgoing, vw.getValueVector().getName())))
+      .collect(Collectors.toList());
+
+    mappingRule.keySet().forEach(inCol -> Preconditions.checkNotNull(incoming.getSchema().getFieldId(inCol),
+      String.format("Mapping rule source columns not found [required: %s]", mappingRule)));
+    mappingRule.values().forEach(outCol -> Preconditions.checkNotNull(outgoing.getSchema().getFieldId(outCol),
+      String.format("Mapping rule carry-forward columns not found in the outgoing schema [required: %s]", mappingRule)));
+
+    for (Map.Entry<SchemaPath, SchemaPath> mappedInputCol : mappingRule.entrySet()) {
+      ValueVector mapInputVector = getVector(incoming, mappedInputCol.getKey());
+      ValueVector mapOutVector = getVector(outgoing, mappedInputCol.getValue());
+      if (mapInputVector != null && mapOutVector != null) {
+        mappingColTransfers.add(mapInputVector.makeTransferPair(mapOutVector));
+      }
+    }
+
+    carryForwardColVectors = Streams.stream(incoming).filter(vw -> carryForwardCols.contains(vw.getValueVector().getName()))
+      .map(VectorWrapper::getValueVector).collect(Collectors.toList());
+
+    inputTypeOutVector = (VarCharVector) getVectorFromSchemaPath(outgoing, inputTypeCol);
+
+    return outgoing;
+  }
+
+  public static ValueVector getVector(VectorAccessible vectorAccessible, SchemaPath schemaPath) {
+    TypedFieldId typedFieldId = vectorAccessible.getSchema().getFieldId(schemaPath);
+    Field field = vectorAccessible.getSchema().getColumn(typedFieldId.getFieldIds()[0]);
+    return vectorAccessible.getValueAccessorById(TypeHelper.getValueVectorClass(field), typedFieldId.getFieldIds()).getValueVector();
+  }
+
+  @Override
+  public void startBatch(int records) throws Exception {
+    baseTableFunction.startBatch(records);
+  }
+
+  @Override
+  public void startRow(int row) throws Exception {
+    this.isCarryForwardRow = (carryForwardColVectors.stream().anyMatch(vv -> !vv.isNull(row)));
+    this.row = row;
+    if (!isCarryForwardRow) {
+      baseTableFunction.startRow(row);
+    }
+  }
+
+  @Override
+  public int processRow(int startOutIndex, int maxRecords) throws Exception {
+    if (rowCompleted) {
+      return 0;
+    }
+
+    // For a carry-forward row, values are copied directly from the input vectors to the output vectors
+    // for the carry-forward columns, so those values pass through the output unchanged.
+    if (isCarryForwardRow) {
+      carryForwardColTransfers.forEach(tx -> tx.copyValueSafe(row, startOutIndex));
+      rowCompleted = true;
+      outgoing.setRecordCount(startOutIndex + 1);
+      outgoing.forEach(vw -> vw.getValueVector().setValueCount(startOutIndex + 1));
+      return 1;
+    }
+
+    int outIdx = startOutIndex;
+    int records = 0;
+    // The mapping rule appends one extra row to the output. This row injects the file path and type info
+    // that the base function needs to process. For instance, if the base function is the
+    // ManifestScanTableFunction processing a manifest file, the manifest file path and type are injected
+    // into the output, so the manifest file info can itself be carried forward.
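+    // Illustrative wiring (column names are hypothetical; only the constructor shape matches this class):
+    //   TableFunction decorated = new InputCarryForwardTableFunctionDecorator(
+    //       manifestListScanTF,
+    //       ImmutableList.of("filePath", "fileType"),                    // carry-forward cols
+    //       ImmutableMap.of(SchemaPath.getSimplePath("manifestListPath"),
+    //                       SchemaPath.getSimplePath("filePath")),       // mapping rule
+    //       "fileType", "MANIFEST_LIST");
+    // With that setup, an input row whose "filePath" is already populated is copied straight through to
+    // the output, while a regular row first emits one mapped row ("filePath" = its manifest list path,
+    // "fileType" = "MANIFEST_LIST") followed by the base function's own output rows.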
+ if (!mappingRuleProcessed) { + int finalOutIdx = outIdx; + mappingColTransfers.forEach(tx -> tx.copyValueSafe(row, finalOutIdx)); + inputTypeOutVector.setSafe(outIdx, inputType.getBytes(StandardCharsets.UTF_8)); + outgoing.forEach(vw -> vw.getValueVector().setValueCount(finalOutIdx + 1)); + outgoing.setRecordCount(++outIdx); + mappingRuleProcessed = true; + records++; + } + int recordsBase = baseTableFunction.processRow(outIdx, maxRecords - records); + records += recordsBase; + LOGGER.debug("[IN:{}, row{}, out{}], base-func_processed:{}, total-records:{}, outgoing-record_cnt {}", + baseTableFunction.getClass().getSimpleName(), row, startOutIndex, recordsBase, records, outgoing.getRecordCount()); + return records; + } + + @Override + public void closeRow() throws Exception { + if (!isCarryForwardRow) { + baseTableFunction.closeRow(); + } + rowCompleted = false; + isCarryForwardRow = false; + mappingRuleProcessed = false; + } + + @Override + public boolean hasBufferedRemaining() { + return baseTableFunction.hasBufferedRemaining(); + } + + @Override + public void workOnOOB(OutOfBandMessage message) { + baseTableFunction.workOnOOB(message); + } + + @Override + public long getFirstRowSize() { + return baseTableFunction.getFirstRowSize(); + } + + @Override + public void noMoreToConsume() throws Exception { + baseTableFunction.noMoreToConsume(); + } + + @Override + public void close() throws Exception { + baseTableFunction.close(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestContentType.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestContentType.java new file mode 100644 index 0000000000..f6dee88f4c --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestContentType.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.store.iceberg; + +public enum ManifestContentType { + DATA, + DELETES, + ALL +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessor.java index c19df51b9c..d4d1c9962b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessor.java @@ -30,8 +30,7 @@ public interface ManifestEntryProcessor extends AutoCloseable { void setup(VectorAccessible incoming, VectorAccessible outgoing); - default void initialise(PartitionSpec partitionSpec) { - } + default void initialise(PartitionSpec partitionSpec, int row) {} int processManifestEntry(ManifestEntryWrapper> manifestEntry, int startOutIndex, int currentOutputCount) throws IOException; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessorHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessorHelper.java index fc60368a2d..6698f30be6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessorHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestEntryProcessorHelper.java @@ -32,6 +32,7 @@ import com.dremio.common.expression.CompleteType; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; +import com.dremio.exec.store.SystemSchemas; import com.dremio.service.namespace.dataset.proto.PartitionProtobuf; import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.ByteString; @@ -48,7 +49,8 @@ public static PartitionProtobuf.NormalizedPartitionInfo getDataFilePartitionInfo Schema fileSchema, Map nameToFieldMap, ContentFile> currentFile, - long version) { + long version, + long sequenceNo) { PartitionProtobuf.NormalizedPartitionInfo.Builder partitionInfoBuilder = PartitionProtobuf.NormalizedPartitionInfo.newBuilder().setId(String.valueOf(1)); // get table partition spec @@ -75,15 +77,20 @@ public static PartitionProtobuf.NormalizedPartitionInfo getDataFilePartitionInfo writePartitionValue(partitionValueBuilder, value, nameToFieldMap.get(partColName.toLowerCase())); partitionInfoBuilder.addValues(partitionValueBuilder.build()); } - addImplicitCols(partitionInfoBuilder, version); + addImplicitCols(partitionInfoBuilder, version, sequenceNo); return partitionInfoBuilder.build(); } - private static void addImplicitCols(PartitionProtobuf.NormalizedPartitionInfo.Builder partitionInfoBuilder, long version) { + private static void addImplicitCols(PartitionProtobuf.NormalizedPartitionInfo.Builder partitionInfoBuilder, long version, long sequenceNo) { PartitionProtobuf.PartitionValue.Builder partitionValueBuilder = PartitionProtobuf.PartitionValue.newBuilder(); partitionValueBuilder.setColumn(IncrementalUpdateUtils.UPDATE_COLUMN); partitionValueBuilder.setLongValue(version); partitionInfoBuilder.addValues(partitionValueBuilder.build()); + + // Sequence number as an implicit partition col value, so it can be projected if needed along with the data. 
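+    // For illustration (the reader side is not part of this change): a consumer could recover the
+    // sequence number from the partition info like
+    //   long seqNo = partitionInfo.getValuesList().stream()
+    //       .filter(v -> SystemSchemas.IMPLICIT_SEQUENCE_NUMBER.equals(v.getColumn()))
+    //       .findFirst()
+    //       .map(PartitionProtobuf.PartitionValue::getLongValue)
+    //       .orElse(-1L);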
+ partitionInfoBuilder.addValues( + PartitionProtobuf.PartitionValue.newBuilder() + .setColumn(SystemSchemas.IMPLICIT_SEQUENCE_NUMBER).setLongValue(sequenceNo).build()); } private static void writePartitionValue(PartitionProtobuf.PartitionValue.Builder partitionValueBuilder, Object value, Field field) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestFileProcessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestFileProcessor.java index f5b927bb66..3a08a7567e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestFileProcessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestFileProcessor.java @@ -33,12 +33,12 @@ import org.apache.iceberg.ManifestReader; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.io.CloseableIterator; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.io.FilterIterator; import com.dremio.common.AutoCloseables; import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.physical.config.ManifestScanFilters; @@ -67,8 +67,6 @@ public class ManifestFileProcessor implements AutoCloseable { private final OperatorStats operatorStats; private final ManifestEntryProcessor manifestEntryProcessor; private final Configuration conf; - private final ManifestContent manifestContent; - private ManifestEntryWrapper currentManifestEntry; private CloseableIterator> iterator; private ManifestReader manifestReader; @@ -96,8 +94,6 @@ public ManifestFileProcessor(FragmentExecutionContext fec, } else if (functionContext.getPartitionSpecMap() != null){ partitionSpecMap = IcebergSerDe.deserializePartitionSpecMap(functionContext.getPartitionSpecMap().toByteArray()); } - this.manifestContent = functionContext.getManifestContent(); - this.manifestScanFilters = ((ManifestScanTableFunctionContext) functionConfig.getFunctionContext()) .getManifestScanFilters(); } @@ -106,7 +102,7 @@ public void setup(VectorAccessible incoming, VectorContainer outgoing) { manifestEntryProcessor.setup(incoming, outgoing); } - public void setupManifestFile(ManifestFile manifestFile) { + public void setupManifestFile(ManifestFile manifestFile, int row) { manifestReader = getManifestReader(manifestFile); if (manifestScanFilters.doesIcebergAnyColExpressionExists()) { manifestReader.filterRows(manifestScanFilters.getIcebergAnyColExpressionDeserialized()); @@ -115,7 +111,7 @@ public void setupManifestFile(ManifestFile manifestFile) { iterator = DremioManifestReaderUtils.liveManifestEntriesIterator(manifestReader).iterator(); applyManifestScanFilters(manifestFile); - manifestEntryProcessor.initialise(manifestReader.spec()); + manifestEntryProcessor.initialise(manifestReader.spec(), row); } private void applyManifestScanFilters(ManifestFile manifestFile) { @@ -169,22 +165,21 @@ public void close() throws Exception { @VisibleForTesting ManifestReader> getManifestReader(ManifestFile manifestFile) { - if (manifestContent == ManifestContent.DATA) { + if (manifestFile.content() == ManifestContent.DATA) { return ManifestFiles.read(manifestFile, getFileIO(manifestFile), partitionSpecMap); } else { return ManifestFiles.readDeleteManifest(manifestFile, getFileIO(manifestFile), partitionSpecMap); } } - private DremioFileIO getFileIO(ManifestFile manifestFile) { + private FileIO 
getFileIO(ManifestFile manifestFile) { FileSystem fs = createFs(manifestFile.path(), context, opProps, icebergRootPointerPlugin); Preconditions.checkState(fs != null, "Unexpected state"); - return new DremioFileIO(fs, - context, dataset, datasourcePluginUID, manifestFile.length(), conf, (MutablePlugin) icebergRootPointerPlugin); + return icebergRootPointerPlugin.createIcebergFileIO(fs, context, dataset, datasourcePluginUID, + manifestFile.length()); } - private void nextDataFile() { currentManifestEntry = iterator.next(); incrementFileCountMetric(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestListScanTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestListScanTableFunction.java new file mode 100644 index 0000000000..c8345360ef --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestListScanTableFunction.java @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.planner.physical.TableFunctionUtil.getDataset; +import static com.dremio.exec.store.SystemSchemas.METADATA_FILE_PATH; +import static com.dremio.exec.store.SystemSchemas.SNAPSHOT_ID; +import static com.dremio.exec.util.VectorUtil.getVectorFromSchemaPath; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.iceberg.expressions.Expressions; + +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.physical.config.TableFunctionContext; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.record.VectorContainer; +import com.dremio.exec.store.dfs.AbstractTableFunction; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.dremio.sabot.op.scan.MutatorSchemaChangeCallBack; +import com.dremio.sabot.op.scan.ScanOperator.ScanMutator; +import com.dremio.sabot.op.tablefunction.TableFunctionOperator; + +/** + * Table function for Iceberg manifest list file scan + */ +public class ManifestListScanTableFunction extends AbstractTableFunction { + private static final org.slf4j.Logger LOGGER = org.slf4j.LoggerFactory.getLogger(ManifestListScanTableFunction.class); + private final FragmentExecutionContext fragmentExecutionContext; + private final OperatorStats operatorStats; + private final OpProps props; + + private SupportsIcebergMutablePlugin icebergMutablePlugin; + private List tablePath; + + private ScanMutator mutator; + private MutatorSchemaChangeCallBack callBack = new MutatorSchemaChangeCallBack(); + + private IcebergManifestListRecordReader manifestListRecordReader; + + private VarCharVector inputMetadataLocation; + private BigIntVector 
inputSnapshotId; + + private int inputIndex; + + public ManifestListScanTableFunction( + FragmentExecutionContext fragmentExecutionContext, + OperatorContext context, + OpProps props, + TableFunctionConfig functionConfig) { + super(context, functionConfig); + this.fragmentExecutionContext = fragmentExecutionContext; + this.props = props; + this.operatorStats = context.getStats(); + } + + @Override + public VectorAccessible setup(VectorAccessible accessible) throws Exception { + super.setup(accessible); + + icebergMutablePlugin = fragmentExecutionContext.getStoragePlugin(functionConfig.getFunctionContext().getPluginId()); + tablePath = getDataset(functionConfig); + + inputMetadataLocation = (VarCharVector) getVectorFromSchemaPath(incoming, METADATA_FILE_PATH); + inputSnapshotId = (BigIntVector) getVectorFromSchemaPath(incoming, SNAPSHOT_ID); + + VectorContainer outgoing = (VectorContainer) super.setup(incoming); + this.mutator = new ScanMutator(outgoing, context, callBack); + this.mutator.allocate(); + + return outgoing; + } + + @Override + public void startRow(int row) throws Exception { + inputIndex = row; + + // Initialize the reader for the current processing snapshot id + byte[] pathBytes = inputMetadataLocation.get(inputIndex); + String metadataLocation = new String(pathBytes, StandardCharsets.UTF_8); + Long snapshotId = inputSnapshotId.get(inputIndex); + TableFunctionContext functionContext = functionConfig.getFunctionContext(); + + final IcebergExtendedProp icebergExtendedProp = new IcebergExtendedProp( + null, + IcebergSerDe.serializeToByteArray(Expressions.alwaysTrue()), + snapshotId, + null + ); + + manifestListRecordReader = new IcebergManifestListRecordReader(context, metadataLocation, icebergMutablePlugin, + tablePath, functionContext.getPluginId().getName(), functionContext.getFullSchema(), props, + functionContext.getPartitionColumns(), icebergExtendedProp, ManifestContentType.ALL); + + manifestListRecordReader.setup(mutator); + operatorStats.addLongStat(TableFunctionOperator.Metric.NUM_SNAPSHOT_IDS, 1L); + } + + @Override + public int processRow(int startOutIndex, int maxRecords) throws Exception { + int outputCount = manifestListRecordReader.nextBatch(startOutIndex, startOutIndex + maxRecords); + int totalRecordCount = startOutIndex + outputCount; + outgoing.forEach(vw -> vw.getValueVector().setValueCount(totalRecordCount)); + outgoing.setRecordCount(totalRecordCount); + return outputCount; + } + + @Override + public void closeRow() throws Exception { + manifestListRecordReader.close(); + manifestListRecordReader = null; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestScanTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestScanTableFunction.java index 0bb2ce8086..8ccf7ec7e4 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestScanTableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/ManifestScanTableFunction.java @@ -69,7 +69,7 @@ public void startBatch(int records) { @Override public void startRow(int row) throws Exception { ManifestFile manifestFile = getManifestFile(row); - manifestFileProcessor.setupManifestFile(manifestFile); + manifestFileProcessor.setupManifestFile(manifestFile, row); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunction.java new file mode 100644 index 0000000000..af7ffb9c72 
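The manifest-list scan above passes ManifestContentType.ALL, so the reader surfaces both data and delete manifests for the requested snapshot. A standalone sketch of the same listing against Iceberg's public API (io, metadataLocation and snapshotId are placeholders; types come from org.apache.iceberg and org.apache.iceberg.io):

    // Resolve one snapshot from a metadata.json and list its manifest files.
    static List<ManifestFile> listManifests(FileIO io, String metadataLocation, long snapshotId) {
      TableMetadata meta = TableMetadataParser.read(io, metadataLocation);
      Snapshot snapshot = meta.snapshot(snapshotId);
      // allManifests(io) ~ ALL; dataManifests(io) ~ DATA; deleteManifests(io) ~ DELETES
      return snapshot.allManifests(io);
    }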
--- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunction.java @@ -0,0 +1,209 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static org.apache.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES; +import static org.apache.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES_DEFAULT; + +import java.io.IOException; +import java.util.function.Function; + +import org.apache.iceberg.DataFile; +import org.apache.iceberg.ManifestFile; +import org.apache.iceberg.RewriteManifests; +import org.apache.iceberg.Snapshot; +import org.apache.iceberg.Table; +import org.apache.iceberg.io.FileIO; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.common.exceptions.UserRemoteException; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.store.dfs.AbstractTableFunction; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.exec.store.iceberg.model.IcebergModel; +import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; +import com.dremio.io.file.FileSystem; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.dremio.sabot.op.tablefunction.TableFunctionOperator; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Stopwatch; + +/** + * Table function to invoke the rewrite manifests optimization function. + * The table function is pluggable to any input/output streams. The table information is picked from the context. + * The input data is not used, but forwarded as is to the output streams. + * To avoid repeated rewrite manifest attempts in every minor fragment, this TableFunction should only be used with a Singleton Trait. 
+ */ +public class OptimizeManifestsTableFunction extends AbstractTableFunction { + @VisibleForTesting + static final Function<DataFile, Object> NO_CLUSTERING_RULE = df -> 1; + + private static final Logger LOGGER = LoggerFactory.getLogger(OptimizeManifestsTableFunction.class); + private final OpProps opProps; + private final FragmentExecutionContext fec; + private int outputRecords; + + public OptimizeManifestsTableFunction(FragmentExecutionContext fec, OperatorContext context, OpProps props, TableFunctionConfig functionConfig) { + super(context, functionConfig); + this.opProps = props; + this.fec = fec; + } + + @Override + public VectorAccessible setup(VectorAccessible incoming) throws Exception { + super.setup(incoming); + + return incoming; // outgoing = incoming + } + + @Override + public void startBatch(int records) throws Exception { + this.outputRecords = records; + } + + @Override + public void noMoreToConsume() throws Exception { + Stopwatch timer = Stopwatch.createStarted(); + try { + rewriteManifests(); + } catch (Exception e) { + LOGGER.error("Error while rewriting table manifests.", e); + throw UserRemoteException.dataWriteError(e).message("Error while rewriting table manifests.").buildSilently(); + } finally { + LOGGER.info("Time taken on rewrite manifests {}", timer.elapsed()); + } + } + + private void rewriteManifests() throws ExecutionSetupException, IOException { + // Set up IcebergModel + OptimizeManifestsTableFunctionContext ctx = (OptimizeManifestsTableFunctionContext) functionConfig.getFunctionContext(); + SupportsIcebergMutablePlugin icebergMutablePlugin = fec.getStoragePlugin(ctx.getPluginId()); + IcebergTableProps tableProps = ctx.getIcebergTableProps(); + try (FileSystem fs = icebergMutablePlugin.createFS(tableProps.getTableLocation(), opProps.getUserName(), context)) { + IcebergModel icebergModel = icebergMutablePlugin.getIcebergModel(tableProps, opProps.getUserName(), context, fs); + icebergModel.refreshVersionContext(); + IcebergTableIdentifier icebergTableIdentifier = icebergModel.getTableIdentifier(tableProps.getTableLocation()); + + LOGGER.info("Attempting rewrite manifests"); + Table table = icebergModel.getIcebergTable(icebergTableIdentifier); + + if (table.currentSnapshot() == null) { + LOGGER.info("Aborting rewrite manifests as the table has no snapshots"); + setCommitStatus(0); + return; + } + + RewriteManifests rewriteManifests = table.rewriteManifests() + .rewriteIf(m -> isNotInOptimalSizeRange(m, icebergModel, icebergTableIdentifier)) + .clusterBy(NO_CLUSTERING_RULE); + Snapshot newSnapshot = rewriteManifests.apply(); + + if (hasNoManifestChanges(newSnapshot)) { + cleanOrphans(table.io(), newSnapshot); + LOGGER.info("Optimization of manifest files skipped"); + setCommitStatus(0); + return; + } + + try { + rewriteManifests.commit(); + } catch (RuntimeException e) { + cleanOrphans(table.io(), newSnapshot); + throw e; + } + LOGGER.info("Optimization of manifest files is successful with snapshot id {}", newSnapshot.snapshotId()); + setCommitStatus(1); + } + } + + @VisibleForTesting + static void cleanOrphans(FileIO io, Snapshot snapshot) { + snapshot.allManifests(io).stream() + .filter(m -> m.snapshotId() == snapshot.snapshotId()) + .map(ManifestFile::path) + .forEach(path -> tryDeleteOrphanFile(io, path)); + tryDeleteOrphanFile(io, snapshot.manifestListLocation()); + } + + private static void tryDeleteOrphanFile(FileIO io, String path) { + try { + io.deleteFile(path); + } catch (RuntimeException e) { + LOGGER.warn("Error while trying to clean up orphans", e); + // Not
throwing further as it's not a failure condition to leave the stale orphans. + } + } + + @VisibleForTesting + static boolean hasNoManifestChanges(Snapshot snapshot) { + // The iceberg implementation doesn't take care of skipping the NOOP cases. Hence, putting in this custom + // computation. NOOP if no manifests are created/replaced or the residual manifest was picked and + // was rewritten with the same content. + String manifestsCreated = snapshot.summary().get("manifests-created"); + String manifestsReplaced = snapshot.summary().get("manifests-replaced"); + String totalDeleteManifestsStr = snapshot.summary().get("total-delete-files"); + String totalDataManifestsStr = snapshot.summary().get("total-data-files"); + + int minDeleteManifests = totalDeleteManifestsStr != null && Integer.parseInt(totalDeleteManifestsStr) > 0 ? 1 : 0; + int minDataFileManifests = totalDataManifestsStr != null && Integer.parseInt(totalDataManifestsStr) > 0 ? 1 : 0; + + int minManifests = minDeleteManifests + minDataFileManifests; + + return manifestsCreated == null || manifestsReplaced == null || + (Integer.parseInt(manifestsCreated) == 0 && Integer.parseInt(manifestsReplaced) == 0) || + (Integer.parseInt(manifestsCreated) == minManifests && Integer.parseInt(manifestsReplaced) == minManifests); // only residual manifests + } + + private void setCommitStatus(long statusValue) { + OperatorStats operatorStats = context.getStats(); + if (operatorStats != null) { + operatorStats.addLongStat(TableFunctionOperator.Metric.SNAPSHOT_COMMIT_STATUS, statusValue); + } + } + + private boolean isNotInOptimalSizeRange(ManifestFile manifestFile, IcebergModel icebergModel, IcebergTableIdentifier icebergTableIdentifier) { + long manifestTargetSizeBytes = icebergModel.propertyAsLong(icebergTableIdentifier, + MANIFEST_TARGET_SIZE_BYTES, MANIFEST_TARGET_SIZE_BYTES_DEFAULT); + long minManifestFileSize = (long) (manifestTargetSizeBytes * 0.75); + long maxManifestFileSize = (long) (manifestTargetSizeBytes * 1.8); + + return manifestFile.length() < minManifestFileSize || manifestFile.length() > maxManifestFileSize; + } + + @Override + public void startRow(int row) throws Exception { + // Do nothing + } + + @Override + public int processRow(int startOutIndex, int maxRecords) throws Exception { + // Just indicate number of inputs as number of outputs. No action to be taken. + int rowsProjected = this.outputRecords; + this.outputRecords = 0; // reset to zero + return rowsProjected; + } + + @Override + public void closeRow() throws Exception { + // Do nothing + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunctionContext.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunctionContext.java new file mode 100644 index 0000000000..7f1657bf4e --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/OptimizeManifestsTableFunctionContext.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
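Stripped of the Dremio plumbing, the heart of OptimizeManifestsTableFunction is Iceberg's public rewrite-manifests API; the 0.75x/1.8x band below mirrors isNotInOptimalSizeRange, and the single-cluster clusterBy packs manifests purely by size. A condensed sketch (org.apache.iceberg.Table, TableProperties, RewriteManifests, Snapshot; org.apache.iceberg.util.PropertyUtil):

    static void optimizeManifests(Table table) {
      long target = PropertyUtil.propertyAsLong(table.properties(),
          TableProperties.MANIFEST_TARGET_SIZE_BYTES, TableProperties.MANIFEST_TARGET_SIZE_BYTES_DEFAULT);
      long min = (long) (target * 0.75);
      long max = (long) (target * 1.8);
      RewriteManifests rewrite = table.rewriteManifests()
          .rewriteIf(m -> m.length() < min || m.length() > max) // leave well-sized manifests alone
          .clusterBy(dataFile -> 1);                            // one cluster: repack by size only
      Snapshot staged = rewrite.apply(); // staged, not committed; summary() carries manifests-created/-replaced
      rewrite.commit();                  // atomic swap of table metadata
    }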
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import java.util.List; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.physical.config.TableFunctionContext; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.TableMetadata; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.service.namespace.dataset.proto.UserDefinedSchemaSettings; +import com.dremio.service.namespace.file.proto.FileConfig; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; +import com.google.common.collect.ImmutableList; + +import io.protostuff.ByteString; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonTypeName("optimize-manifests") +public class OptimizeManifestsTableFunctionContext extends TableFunctionContext { + + private final IcebergTableProps icebergTableProps; + + @JsonIgnore + public OptimizeManifestsTableFunctionContext(TableMetadata tableMetadata, + BatchSchema outputSchema, + IcebergTableProps icebergTableProps) { + this( + tableMetadata.getFormatSettings(), + outputSchema, + outputSchema.getFields().stream() + .map(f -> SchemaPath.getSimplePath(f.getName())) + .collect(ImmutableList.toImmutableList()), + ImmutableList.of(tableMetadata.getName().getPathComponents()), + tableMetadata.getStoragePluginId(), + tableMetadata.getReadDefinition().getPartitionColumnsList(), + tableMetadata.getReadDefinition().getExtendedProperty(), + tableMetadata.getDatasetConfig().getPhysicalDataset().getInternalSchemaSettings(), + icebergTableProps + ); + } + + @JsonCreator + public OptimizeManifestsTableFunctionContext(@JsonProperty("formatSettings") FileConfig formatSettings, + @JsonProperty("schema") BatchSchema outputSchema, + @JsonProperty("columns") List columns, + @JsonProperty("referencedTables") List> tablePath, + @JsonProperty("pluginId") StoragePluginId pluginId, + @JsonProperty("partitionColumns") List partitionColumns, + @JsonProperty("extendedProperty") ByteString extendedProperty, + @JsonProperty("userDefinedSchemaSettings") UserDefinedSchemaSettings userDefinedSchemaSettings, + @JsonProperty("icebergTableProps") IcebergTableProps icebergTableProps) { + super( + formatSettings, + outputSchema, + outputSchema, + tablePath, + null, + pluginId, + null, + columns, + partitionColumns, + null, + extendedProperty, + false, false, false, + userDefinedSchemaSettings + ); + this.icebergTableProps = icebergTableProps; + } + + public IcebergTableProps getIcebergTableProps() { + return icebergTableProps; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanPrel.java new file mode 100644 index 0000000000..7eddab28d8 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanPrel.java @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
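OptimizeManifestsTableFunctionContext follows the usual pattern for plan fragments that must round-trip as JSON: a convenience constructor hidden from Jackson with @JsonIgnore, plus a @JsonCreator constructor whose @JsonProperty names match the serialized fields. A minimal illustration with hypothetical Shape/Circle types (only the annotation wiring mirrors the real class):

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
    abstract class Shape {}

    @JsonTypeName("circle") // subtype tag written into the JSON, like "optimize-manifests" above
    class Circle extends Shape {
      private final double radius;
      @JsonCreator
      Circle(@JsonProperty("radius") double radius) { this.radius = radius; }
      public double getRadius() { return radius; }
    }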
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import java.util.List; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.rel.type.RelDataType; + +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.planner.physical.PrelUtil; +import com.dremio.exec.planner.physical.TableFunctionPrel; +import com.dremio.exec.planner.physical.TableFunctionUtil; +import com.dremio.exec.planner.sql.CalciteArrowHelper; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.TableMetadata; + +/** + * A prel for PartitionStatsScanTableFunction + */ +public class PartitionStatsScanPrel extends TableFunctionPrel { + public PartitionStatsScanPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + BatchSchema schema, + TableMetadata tableMetadata, + Long survivingRecords) { + this( + cluster, + traitSet, + table, + child, + tableMetadata, + TableFunctionUtil.getIcebergPartitionStatsFunctionConfig(schema, tableMetadata), + CalciteArrowHelper.wrap(schema) + .toCalciteRecordType(cluster.getTypeFactory(), + PrelUtil.getPlannerSettings(cluster).isFullNestedSchemaSupport()), + survivingRecords); + } + + private PartitionStatsScanPrel( + RelOptCluster cluster, + RelTraitSet traitSet, + RelOptTable table, + RelNode child, + TableMetadata tableMetadata, + TableFunctionConfig functionConfig, + RelDataType rowType, + Long survivingRecords) { + super(cluster, traitSet, table, child, tableMetadata, functionConfig, rowType, survivingRecords); + } + + @Override + public RelNode copy(RelTraitSet traitSet, List inputs) { + return new PartitionStatsScanPrel(getCluster(), getTraitSet(), getTable(), sole(inputs), + getTableMetadata(), getTableFunctionConfig(), getRowType(), getSurvivingRecords()); + } + + @Override + protected double defaultEstimateRowCount(TableFunctionConfig functionConfig, RelMetadataQuery mq) { + return (double) getSurvivingRecords(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanTableFunction.java new file mode 100644 index 0000000000..3c00a5d508 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PartitionStatsScanTableFunction.java @@ -0,0 +1,153 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
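A subtlety in defaultEstimateRowCount above: getSurvivingRecords() returns a Long, and the (double) cast auto-unboxes it, so a null would surface as a NullPointerException at planning time. A hedged defensive variant, only relevant if null were ever possible (the fallback constant is hypothetical):

    @Override
    protected double defaultEstimateRowCount(TableFunctionConfig functionConfig, RelMetadataQuery mq) {
      Long surviving = getSurvivingRecords();
      return surviving == null ? 1_000_000d : surviving.doubleValue();
    }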
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.store.SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_COLS; +import static com.dremio.exec.store.SystemSchemas.METADATA_FILE_PATH; +import static com.dremio.exec.store.SystemSchemas.SNAPSHOT_ID; +import static com.dremio.exec.store.iceberg.IcebergUtils.getPartitionStatsFiles; +import static com.dremio.exec.util.VectorUtil.getVectorFromSchemaPath; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.util.TransferPair; +import org.apache.iceberg.PartitionStatsFileLocations; +import org.apache.iceberg.Snapshot; +import org.apache.iceberg.TableMetadata; +import org.apache.iceberg.TableMetadataParser; +import org.apache.iceberg.io.FileIO; + +import com.dremio.common.expression.BasePath; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.record.VectorContainer; +import com.dremio.exec.store.SystemSchemas; +import com.dremio.exec.store.dfs.AbstractTableFunction; +import com.dremio.io.file.FileSystem; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.google.common.collect.Streams; + +public class PartitionStatsScanTableFunction extends AbstractTableFunction { + + private final FragmentExecutionContext fragmentExecutionContext; + private final OperatorStats operatorStats; + private final OpProps props; + private SupportsIcebergMutablePlugin icebergMutablePlugin; + private VarCharVector metadataInVector; + private BigIntVector snapshotIdInVector; + private VarCharVector filePathOutVector; + private VarCharVector fileTypeOutVector; + private List transfers; + private int inputRow = 0; + private int partitionFileIdx; + private List partitionStatsFiles = new ArrayList<>(); + private boolean transfersProcessed = false; + private TableMetadata tableMetadata = null; + private FileIO io = null; + + public PartitionStatsScanTableFunction( + FragmentExecutionContext fragmentExecutionContext, + OperatorContext context, + OpProps props, + TableFunctionConfig functionConfig) { + super(context, functionConfig); + this.fragmentExecutionContext = fragmentExecutionContext; + this.props = props; + this.operatorStats = context.getStats(); + } + + @Override + public VectorAccessible setup(VectorAccessible incoming) throws Exception { + VectorContainer outgoing = (VectorContainer) super.setup(incoming); + metadataInVector = (VarCharVector) getVectorFromSchemaPath(incoming, METADATA_FILE_PATH); + snapshotIdInVector = (BigIntVector) getVectorFromSchemaPath(incoming, SNAPSHOT_ID); + filePathOutVector = (VarCharVector) getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_PATH); + fileTypeOutVector = (VarCharVector) getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_TYPE); + + // create transfer pairs for any additional input columns + transfers = Streams.stream(incoming) + .filter(vw -> !CARRY_FORWARD_FILE_PATH_TYPE_COLS.contains(vw.getValueVector().getName()) && + outgoing.getSchema().getFieldId(BasePath.getSimple(vw.getValueVector().getName())) != null) + .map(vw -> 
vw.getValueVector().makeTransferPair( + getVectorFromSchemaPath(outgoing, vw.getValueVector().getName()))) + .collect(Collectors.toList()); + + icebergMutablePlugin = fragmentExecutionContext.getStoragePlugin(functionConfig.getFunctionContext().getPluginId()); + + return outgoing; + } + + @Override + public void startRow(int row) throws Exception { + inputRow = row; + partitionFileIdx = 0; + + if (tableMetadata == null || io == null) { + byte[] pathBytes = metadataInVector.get(row); + String metadataLocation = new String(pathBytes, StandardCharsets.UTF_8); + FileSystem fs = icebergMutablePlugin.createFSWithAsyncOptions(metadataLocation, props.getUserName(), context); + io = icebergMutablePlugin.createIcebergFileIO(fs, context, null, functionConfig.getFunctionContext().getPluginId().getName(), null); + tableMetadata = TableMetadataParser.read(io, metadataLocation); + } + long snapshotId = snapshotIdInVector.get(row); + Snapshot snapshot = tableMetadata.snapshot(snapshotId); + + if (snapshot.partitionStatsMetadata() != null) { + String partitionStatsMetadataLocation = snapshot.partitionStatsMetadata().metadataFileLocation(); + PartitionStatsFileLocations partitionStatsLocations = getPartitionStatsFiles(io, partitionStatsMetadataLocation); + if (partitionStatsLocations != null) { + // Partition stats have metadata file and partition files. + partitionStatsFiles.add(partitionStatsMetadataLocation.getBytes(StandardCharsets.UTF_8)); + partitionStatsFiles.addAll(partitionStatsLocations.all().entrySet().stream() + .map(e -> e.getValue().getBytes(StandardCharsets.UTF_8)).collect(Collectors.toList())); + } + } + } + + @Override + public int processRow(int startOutIndex, int maxRecords) throws Exception { + int outIdx = startOutIndex; + int maxIdx = startOutIndex + maxRecords; + if (!transfersProcessed) { + transfers.forEach(t -> t.copyValueSafe(inputRow, startOutIndex)); + outIdx++; + transfersProcessed = true; + } + + while (partitionFileIdx < partitionStatsFiles.size() && outIdx < maxIdx) { + filePathOutVector.setSafe(outIdx, partitionStatsFiles.get(partitionFileIdx++)); + fileTypeOutVector.setSafe(outIdx, IcebergFileType.PARTITION_STATS.name().getBytes(StandardCharsets.UTF_8)); + outIdx++; + } + outgoing.setAllCount(outIdx); + return outIdx - startOutIndex; + } + + @Override + public void closeRow() throws Exception { + partitionStatsFiles.clear(); + transfersProcessed = false; + partitionFileIdx = 0; + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PathGeneratingManifestEntryProcessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PathGeneratingManifestEntryProcessor.java index 5f913d2d32..fb0b051e35 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PathGeneratingManifestEntryProcessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/PathGeneratingManifestEntryProcessor.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -75,10 +76,12 @@ public class PathGeneratingManifestEntryProcessor implements ManifestEntryProces private static final Set BASE_OUTPUT_FIELDS = Stream.concat( + Stream.concat( Stream.concat( SystemSchemas.ICEBERG_MANIFEST_SCAN_SCHEMA.getFields().stream(), SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA.getFields().stream()), - Stream.of(SystemSchemas.ICEBERG_METADATA_FIELD)) + SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_SCHEMA.getFields().stream()) , + 
Stream.of(SystemSchemas.ICEBERG_METADATA_FIELD)) .map(f -> f.getName().toLowerCase()) .collect(Collectors.toSet()); @@ -88,6 +91,7 @@ public class PathGeneratingManifestEntryProcessor implements ManifestEntryProces private final Map columnStatsVectorMap = new HashMap<>(); private final ManifestContent manifestContent; + private byte[] colIdMapRaw; private Map colToIDMap; private VarBinaryVector inputColIds; private VarCharVector outputFilePath; @@ -98,6 +102,7 @@ public class PathGeneratingManifestEntryProcessor implements ManifestEntryProces private VarBinaryVector outputPartitionInfo; private VarBinaryVector outputColIds; private StructVector outputDeleteFile; + private VarCharVector outputFileContent; private PartitionSpec icebergPartitionSpec; private Map partColToKeyMap; private ArrowBuf tempBuf; @@ -128,13 +133,13 @@ public void setup(VectorAccessible incoming, VectorAccessible outgoing) { outputSequenceNumber = (BigIntVector) getVectorFromSchemaPath(outgoing, SystemSchemas.SEQUENCE_NUMBER); outputSpecId = (IntVector) getVectorFromSchemaPath(outgoing, SystemSchemas.PARTITION_SPEC_ID); outputPartitionKey = (VarBinaryVector) getVectorFromSchemaPath(outgoing, SystemSchemas.PARTITION_KEY); + outputFilePath = (VarCharVector) getVectorFromSchemaPath(outgoing, SystemSchemas.DATAFILE_PATH); + outputFileSize = (BigIntVector) getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_SIZE); + outputPartitionInfo = (VarBinaryVector) getVectorFromSchemaPath(outgoing, SystemSchemas.PARTITION_INFO); + outputColIds = (VarBinaryVector) getVectorFromSchemaPath(outgoing, SystemSchemas.COL_IDS); + outputFileContent = (VarCharVector) getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_CONTENT); // output columns vary between data and delete manifest scans - if (manifestContent == ManifestContent.DATA) { - outputFilePath = (VarCharVector) getVectorFromSchemaPath(outgoing, SystemSchemas.DATAFILE_PATH); - outputFileSize = (BigIntVector) getVectorFromSchemaPath(outgoing, SystemSchemas.FILE_SIZE); - outputPartitionInfo = (VarBinaryVector) getVectorFromSchemaPath(outgoing, SystemSchemas.PARTITION_INFO); - outputColIds = (VarBinaryVector) getVectorFromSchemaPath(outgoing, SystemSchemas.COL_IDS); - } else { + if (manifestContent == ManifestContent.DELETES) { outputDeleteFile = (StructVector) getVectorFromSchemaPath(outgoing, SystemSchemas.DELETE_FILE); } @@ -150,9 +155,9 @@ public void setup(VectorAccessible incoming, VectorAccessible outgoing) { } @Override - public void initialise(PartitionSpec partitionSpec) { + public void initialise(PartitionSpec partitionSpec, int row) { icebergPartitionSpec = partitionSpec; - colToIDMap = getColToIDMap(); + setColIdMap(row); partColToKeyMap = new HashMap<>(); for (int i = 0; i < icebergPartitionSpec.fields().size(); i++) { @@ -163,6 +168,13 @@ public void initialise(PartitionSpec partitionSpec) { } } + private void setColIdMap(int row) { + if (colToIDMap == null || colIdMapRaw == null) { + colToIDMap = getColToIDMap(row); + colIdMapRaw = inputColIds.get(row); + } + } + @Override public int processManifestEntry(ManifestEntryWrapper manifestEntry, int startOutIndex, int maxOutputCount) throws IOException { @@ -175,22 +187,25 @@ public int processManifestEntry(ManifestEntryWrapper manifestEntry, int start outputSequenceNumber.setSafe(startOutIndex, manifestEntry.sequenceNumber()); outputSpecId.setSafe(startOutIndex, manifestEntry.file().specId()); outputPartitionKey.setSafe(startOutIndex, serializePartitionKey(manifestEntry.file().partition())); + 
outputFilePath.setSafe(startOutIndex, path); + outputFileSize.setSafe(startOutIndex, manifestEntry.file().fileSizeInBytes()); + outputColIds.setSafe(startOutIndex, colIdMapRaw); + IcebergFileType fileType = IcebergFileType.valueById(manifestEntry.file().content().id()); + outputFileContent.setSafe(startOutIndex, fileType.name().getBytes(StandardCharsets.UTF_8)); long version = PathUtils.getQueryParam(manifestEntry.file().path().toString(), FILE_VERSION, 0L, Long::parseLong); - if (manifestContent == ManifestContent.DATA) { - outputFilePath.setSafe(startOutIndex, path); - outputFileSize.setSafe(startOutIndex, manifestEntry.file().fileSizeInBytes()); - outputColIds.setSafe(startOutIndex, inputColIds.get(0)); - Schema fileSchema = icebergPartitionSpec.schema(); - PartitionProtobuf.NormalizedPartitionInfo partitionInfo = ManifestEntryProcessorHelper.getDataFilePartitionInfo( - icebergPartitionSpec, - invalidColumnsForPruning, - fileSchema, - nameToFieldMap, - manifestEntry.file(), - version); - outputPartitionInfo.setSafe(startOutIndex, IcebergSerDe.serializeToByteArray(partitionInfo)); - } else { + Schema fileSchema = icebergPartitionSpec.schema(); + PartitionProtobuf.NormalizedPartitionInfo partitionInfo = ManifestEntryProcessorHelper.getDataFilePartitionInfo( + icebergPartitionSpec, + invalidColumnsForPruning, + fileSchema, + nameToFieldMap, + manifestEntry.file(), + version, + manifestEntry.sequenceNumber()); + outputPartitionInfo.setSafe(startOutIndex, IcebergSerDe.serializeToByteArray(partitionInfo)); + + if (manifestContent == ManifestContent.DELETES) { NullableStructWriter structWriter = outputDeleteFile.getWriter(); structWriter.setPosition(startOutIndex); structWriter.start(); @@ -251,12 +266,13 @@ private boolean shouldProcessCurrentEntry(int maxOutputCount) { return !doneWithCurrentEntry && maxOutputCount > 0; } - private Map<String, Integer> getColToIDMap() { + private Map<String, Integer> getColToIDMap(int row) { if (colToIDMap == null) { Preconditions.checkArgument(inputColIds.getValueCount() > 0); IcebergProtobuf.IcebergDatasetXAttr icebergDatasetXAttr; try { - icebergDatasetXAttr = LegacyProtobufSerializer.parseFrom(IcebergProtobuf.IcebergDatasetXAttr.PARSER, inputColIds.get(0)); + icebergDatasetXAttr = LegacyProtobufSerializer.parseFrom( + IcebergProtobuf.IcebergDatasetXAttr.PARSER, inputColIds.get(row)); } catch (InvalidProtocolBufferException ie) { throw new RuntimeException("Could not deserialize Iceberg dataset info", ie); } catch (Exception e) { @@ -295,7 +311,7 @@ private Map<String, Object> getColumnStats(ContentFile<?> } Type fieldType = icebergField.type(); - Object value; + Object value = null; switch (suffix) { case "min": @@ -313,9 +329,15 @@ private Map<String, Object> getColumnStats(ContentFile<?> value = getValueFromByteBuffer(upperBound, fieldType); break; case "val": - Preconditions.checkArgument(partColToKeyMap.containsKey(colName), "partition column not found"); - int partColPos = partColToKeyMap.get(colName); - value = currentFile.partition().get(partColPos, getPartitionColumnClass(icebergPartitionSpec, partColPos)); + // For SELECT, partColToKeyMap always contains colName: the scan only sees data files with identity partition values. + // OPTIMIZE, however, can pick up data files written under an older partition spec (partition evolution), whose partition values are non-identity, so the key may be absent (see the sketch that follows).
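Why the guard matters, in Iceberg's public terms: after partition evolution a data file keeps the spec it was written under, and only identity-transformed fields carry a source value that can be read back. A sketch (table, dataFile and "my_col" are placeholders):

    PartitionSpec spec = table.specs().get(dataFile.specId()); // the spec the file was written with
    boolean hasIdentityValue = spec.fields().stream()
        .anyMatch(f -> f.transform().isIdentity() && f.name().equals("my_col"));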
+ if (partColToKeyMap.containsKey(colName)) { + int partColPos = partColToKeyMap.get(colName); + value = currentFile.partition().get(partColPos, getPartitionColumnClass(icebergPartitionSpec, partColPos)); + } break; default: throw new RuntimeException("unexpected suffix for column: " + fieldName); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SnapshotsScanOptions.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SnapshotsScanOptions.java new file mode 100644 index 0000000000..b4801259ea --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SnapshotsScanOptions.java @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + + +import static com.dremio.exec.planner.sql.handlers.SqlHandlerUtil.getTimestampFromMillis; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class SnapshotsScanOptions { + public enum Mode { + EXPIRED_SNAPSHOTS, // Expired snapshots + LIVE_SNAPSHOTS // Live snapshots after expiration + } + + private final Mode mode; + private final Long olderThanInMillis; + private final Integer retainLast; + + public SnapshotsScanOptions( + @JsonProperty("mode") Mode mode, + @JsonProperty("olderThanInMillis") Long olderThanInMillis, + @JsonProperty("retainLast") Integer retainLast) { + this.mode = mode; + this.olderThanInMillis = olderThanInMillis; + this.retainLast = retainLast; + } + + public Mode getMode() { + return mode; + } + + public Long getOlderThanInMillis() { + return olderThanInMillis; + } + + public Integer getRetainLast() { + return retainLast; + } + + @Override + public String toString() { + StringBuilder s = new StringBuilder(); + s.append("[mode="); + s.append(mode.toString()); + if (olderThanInMillis != null) { + s.append(", olderThan=" + getTimestampFromMillis(olderThanInMillis)); + } + if(retainLast != null) { + s.append(", retainLast=" + retainLast); + } + s.append("]"); + return s.toString(); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SplitGeneratingDatafileProcessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SplitGeneratingDatafileProcessor.java index cd0d0ade0e..9e8775bb5f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SplitGeneratingDatafileProcessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SplitGeneratingDatafileProcessor.java @@ -147,7 +147,7 @@ public void setup(VectorAccessible incoming, VectorAccessible outgoing) { } @Override - public void initialise(PartitionSpec partitionSpec) { + public void initialise(PartitionSpec partitionSpec, int row) { icebergPartitionSpec = partitionSpec; fileSchema = icebergPartitionSpec.schema(); colToIDMap = getColToIDMap(); @@ -174,7 +174,7 @@ public int processManifestEntry(ManifestEntryWrapper> m final String fullPath = currentDataFile.path().toString(); long version = PathUtils.getQueryParam(fullPath, FILE_VERSION, 0L, 
Long::parseLong); - initialiseDatafileInfo(currentDataFile, version); + initialiseDatafileInfo(currentDataFile, version, manifestEntry.sequenceNumber()); int currentOutputCount = 0; final List<SplitIdentity> splitsIdentity = new ArrayList<>(); @@ -246,7 +246,7 @@ Map<String, Object> getDataFileStats(DataFile currentDataFile, long version) { } Type fieldType = icebergField.type(); - Object value; + Object value = null; switch (suffix) { case "min": @@ -264,9 +264,15 @@ Map<String, Object> getDataFileStats(DataFile currentDataFile, long version) { value = getValueFromByteBuffer(upperBound, fieldType); break; case "val": - Preconditions.checkArgument(partColToKeyMap.containsKey(colName), "partition column not found"); - int partColPos = partColToKeyMap.get(colName); - value = currentDataFile.partition().get(partColPos, getPartitionColumnClass(icebergPartitionSpec, partColPos)); + // For SELECT, partColToKeyMap always contains colName: the scan only sees data files with identity partition values. + // OPTIMIZE, however, can pick up data files written under an older partition spec (partition evolution), whose partition values are non-identity, so the key may be absent. + if (partColToKeyMap.containsKey(colName)) { + int partColPos = partColToKeyMap.get(colName); + value = currentDataFile.partition().get(partColPos, getPartitionColumnClass(icebergPartitionSpec, partColPos)); + } break; default: throw new RuntimeException("unexpected suffix for column: " + fieldName); @@ -294,7 +300,7 @@ Map<String, Integer> getColToIDMap() { } } - private void initialiseDatafileInfo(DataFile dataFile, long version) { + private void initialiseDatafileInfo(DataFile dataFile, long version, long sequenceNo) { if (currentDataFileOffset == 0) { dataFilePartitionAndStats = getDataFileStats(dataFile, version); dataFilePartitionInfo = ManifestEntryProcessorHelper.getDataFilePartitionInfo( @@ -303,7 +309,8 @@ private void initialiseDatafileInfo(DataFile dataFile, long version) { fileSchema, nameToFieldMap, dataFile, - version); + version, + sequenceNo); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergMutablePlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergMutablePlugin.java index 926bea7e97..04a946a13f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergMutablePlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergMutablePlugin.java @@ -49,6 +49,7 @@ default String getTableLocation(IcebergTableProps tableProps) { /** * @return A copy of the configuration to use for the plugin.
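The "min"/"max" branches of getDataFileStats above decode the per-column bounds that Iceberg stores as byte buffers on each data file. For orientation, the same decoding via Iceberg's public API (schema, dataFile and "my_col" are placeholders; Conversions is org.apache.iceberg.types.Conversions):

    Types.NestedField field = schema.findField("my_col");
    ByteBuffer lower = dataFile.lowerBounds().get(field.fieldId());
    Object min = Conversions.fromByteBuffer(field.type(), lower); // typed lower bound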
*/ + @Override default Configuration getFsConfCopy() { throw new UnsupportedOperationException(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergRootPointer.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergRootPointer.java index 169c806ba3..771fd6f515 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergRootPointer.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsIcebergRootPointer.java @@ -17,9 +17,11 @@ package com.dremio.exec.store.iceberg; import java.io.IOException; +import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.TableOperations; +import org.apache.iceberg.io.FileIO; import com.dremio.common.logical.FormatPluginConfig; import com.dremio.exec.planner.physical.PlannerSettings; @@ -102,4 +104,7 @@ default boolean isMetadataValidityCheckRecentEnough(Long lastMetadataValidityChe * Based on the source plugin, creates an instance of corresponding Iceberg Catalog table operations. */ TableOperations createIcebergTableOperations(FileSystem fs, String queryUserName, IcebergTableIdentifier tableIdentifier); + + FileIO createIcebergFileIO(FileSystem fs, OperatorContext context, List dataset, String datasourcePluginUID, + Long fileLength); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsInternalIcebergTable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsInternalIcebergTable.java index 8e79cbdfa8..c892366a8a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsInternalIcebergTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/SupportsInternalIcebergTable.java @@ -124,11 +124,9 @@ default ReadSignatureProvider createReadSignatureProvider(ByteString existingRea boolean isFullRefresh, boolean isPartialRefresh) { if (isFullRefresh) { return new FullRefreshReadSignatureProvider(dataTableRoot, queryStartTime); - } - else if (isPartialRefresh) { + } else if (isPartialRefresh) { return new PartialRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionExists); - } - else { + } else { return new IncrementalRefreshReadSignatureProvider(existingReadSignature, dataTableRoot, queryStartTime, partitionExists); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/deletes/PositionalDeleteFileReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/deletes/PositionalDeleteFileReader.java index 8c7e9cd3c3..026bd37302 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/deletes/PositionalDeleteFileReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/deletes/PositionalDeleteFileReader.java @@ -170,8 +170,7 @@ private int findFirstRecordInBatchForCurrentDataFile() { right = mid - 1; } else if (left != mid) { right = mid; - } - else { + } else { return mid; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueCommand.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueCommand.java index befe54fa0a..27ae24a91f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueCommand.java @@ -18,7 +18,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.TableOperations; -import com.dremio.exec.catalog.MutablePlugin; import 
com.dremio.exec.store.iceberg.model.IcebergBaseCommand; import com.dremio.io.file.FileSystem; @@ -30,8 +29,8 @@ */ public class IcebergGlueCommand extends IcebergBaseCommand { - public IcebergGlueCommand(Configuration configuration, String tableFolder, FileSystem fs, TableOperations tableOperations, MutablePlugin plugin) { - super(configuration, tableFolder, fs, tableOperations, plugin); + public IcebergGlueCommand(Configuration configuration, String tableFolder, FileSystem fs, TableOperations tableOperations) { + super(configuration, tableFolder, fs, tableOperations); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueModel.java index 3117c9727d..29dd2c644c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/glue/IcebergGlueModel.java @@ -17,8 +17,7 @@ import org.apache.iceberg.TableOperations; -import com.dremio.exec.catalog.MutablePlugin; -import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseModel; import com.dremio.exec.store.iceberg.model.IcebergCommand; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; @@ -32,7 +31,7 @@ */ public class IcebergGlueModel extends IcebergBaseModel { - private final SupportsIcebergRootPointer plugin; + private final SupportsIcebergMutablePlugin plugin; private final String tableName; private final String queryUserName; public static final String GLUE = "glue"; @@ -42,8 +41,8 @@ public IcebergGlueModel(String namespace, FileSystem fs, String queryUserName, OperatorContext context, - SupportsIcebergRootPointer plugin) { - super(namespace, plugin.getFsConfCopy(), fs, context, null, (MutablePlugin)plugin); + SupportsIcebergMutablePlugin plugin) { + super(namespace, plugin.getFsConfCopy(), fs, context, null, plugin); this.queryUserName = queryUserName; this.plugin = plugin; this.tableName = tableName; @@ -53,7 +52,7 @@ public IcebergGlueModel(String namespace, protected IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier) { TableOperations tableOperations = plugin.createIcebergTableOperations(fs, queryUserName, tableIdentifier); return new IcebergGlueCommand(configuration, - ((IcebergGlueTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations, (MutablePlugin) plugin); + ((IcebergGlueTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopModel.java index 879b4674b5..22ba54f8e7 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopModel.java @@ -21,7 +21,7 @@ import org.apache.hadoop.fs.Path; import org.apache.iceberg.TableOperations; -import com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseCommand; import com.dremio.exec.store.iceberg.model.IcebergBaseModel; import com.dremio.exec.store.iceberg.model.IcebergCommand; @@ -34,31 +34,31 @@ * Entry point for Hadoop based Iceberg tables */ public class 
IcebergHadoopModel extends IcebergBaseModel { - private final MutablePlugin plugin; - public IcebergHadoopModel(Configuration configuration, MutablePlugin plugin) { - this(EMPTY_NAMESPACE, configuration, null, null, null, null, plugin); + private final SupportsIcebergMutablePlugin plugin; + public IcebergHadoopModel(SupportsIcebergMutablePlugin plugin) { + this(EMPTY_NAMESPACE, plugin.getFsConfCopy(), plugin.getSystemUserFS(), null, null, null, plugin); } public IcebergHadoopModel(String namespace, Configuration configuration, FileSystem fs, OperatorContext context, List dataset, - DatasetCatalogGrpcClient datasetCatalogGrpcClient, MutablePlugin plugin) { - super(namespace, configuration, fs, context, datasetCatalogGrpcClient, plugin); - this.plugin = plugin; + DatasetCatalogGrpcClient datasetCatalogGrpcClient, SupportsIcebergMutablePlugin plugin) { + super(namespace, configuration, fs, context, datasetCatalogGrpcClient, plugin); + this.plugin = plugin; } + @Override protected IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier) { TableOperations tableOperations = new IcebergHadoopTableOperations( new Path(((IcebergHadoopTableIdentifier)tableIdentifier).getTableFolder()), configuration, fs, - context, plugin); - return new IcebergBaseCommand(configuration, - ((IcebergHadoopTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations, plugin - ); + plugin.createIcebergFileIO(fs, context, null, null, null)); + return new IcebergBaseCommand(configuration, + ((IcebergHadoopTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations); } @Override public IcebergTableIdentifier getTableIdentifier(String rootFolder) { - return new IcebergHadoopTableIdentifier(namespace, rootFolder); + return new IcebergHadoopTableIdentifier(namespace, rootFolder); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopTableOperations.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopTableOperations.java index 7d0ccb1ce5..1da412f0e1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopTableOperations.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hadoop/IcebergHadoopTableOperations.java @@ -15,30 +15,193 @@ */ package com.dremio.exec.store.iceberg.hadoop; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.nio.file.DirectoryStream; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.PositionedReadable; +import org.apache.hadoop.fs.Seekable; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.util.Progressable; import org.apache.iceberg.hadoop.HadoopTableOperations; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.util.LockManagers; -import com.dremio.exec.catalog.MutablePlugin; -import com.dremio.exec.store.iceberg.DremioFileIO; -import com.dremio.io.file.FileSystem; -import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.io.FSInputStream; +import com.dremio.io.FSOutputStream; +import com.dremio.io.file.FileAttributes; /** * Hadoop 
based iceberg table operations */ public class IcebergHadoopTableOperations extends HadoopTableOperations { - private final MutablePlugin plugin; + private final Configuration conf; + private final com.dremio.io.file.FileSystem fs; - public IcebergHadoopTableOperations(Path location, Configuration conf, FileSystem fs, OperatorContext context, MutablePlugin plugin) { - super(location, new DremioFileIO(fs, context, null, null, null, conf, plugin), conf, - LockManagers.defaultLockManager()); - this.plugin = plugin; + public IcebergHadoopTableOperations(Path location, Configuration conf, com.dremio.io.file.FileSystem fs, + FileIO fileIO) { + super(location, fileIO, conf, LockManagers.defaultLockManager()); + this.conf = conf; + this.fs = fs; } @Override protected org.apache.hadoop.fs.FileSystem getFileSystem(Path path, Configuration hadoopConf) { - return plugin.getHadoopFsSupplier(path.toString(), hadoopConf).get(); + return new DremioToHadoopFileSystemProxy(); + } + + /** + * Proxy class for exposing a Dremio FileSystem as a Hadoop FileSystem. + */ + private class DremioToHadoopFileSystemProxy extends FileSystem { + + @Override + public Configuration getConf() { + return conf; + } + + @Override + public URI getUri() { + return fs.getUri(); + } + + @Override + public FSDataInputStream open(Path f, int bufferSize) throws IOException { + FSInputStream fsInputStream = fs.open(com.dremio.io.file.Path.of(f.toUri())); + return new FSDataInputStream(new SeekableFSInputStream(fsInputStream)); + } + + @Override + public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, + short replication, long blockSize, Progressable progress) throws IOException { + FSOutputStream fsOutputStream = fs.create(com.dremio.io.file.Path.of(f.toUri()), overwrite); + return new FSDataOutputStream(fsOutputStream, null); + } + + @Override + public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public boolean rename(Path src, Path dst) throws IOException { + return fs.rename(com.dremio.io.file.Path.of(src.toUri()), com.dremio.io.file.Path.of(dst.toUri())); + } + + @Override + public boolean delete(Path f, boolean recursive) throws IOException { + return fs.delete(com.dremio.io.file.Path.of(f.toUri()), recursive); + } + + @Override + public FileStatus[] listStatus(Path f) throws IOException { + final List fileStatusList = new ArrayList<>(); + com.dremio.io.file.Path dremioPath = com.dremio.io.file.Path.of(f.toUri()); + DirectoryStream attributes = fs.list(dremioPath); + long defaultBlockSize = fs.getDefaultBlockSize(dremioPath); + attributes.forEach(attribute -> fileStatusList.add(getFileStatusFromAttributes(attribute, defaultBlockSize))); + return fileStatusList.toArray(new FileStatus[0]); + } + + @Override + public void setWorkingDirectory(Path new_dir) { + throw new UnsupportedOperationException(); + } + + @Override + public Path getWorkingDirectory() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean mkdirs(Path f, FsPermission permission) throws IOException { + String fsPerms = permission.toString(); + Set posixFilePermission = + PosixFilePermissions.fromString(fsPerms.substring(1)); + return fs.mkdirs(com.dremio.io.file.Path.of(f.toUri()), posixFilePermission); + } + + @Override + public FileStatus getFileStatus(Path f) throws IOException { + com.dremio.io.file.Path dremioPath = com.dremio.io.file.Path.of(f.toUri()); + FileAttributes 
attributes = fs.getFileAttributes(dremioPath); + long defaultBlockSize = fs.getDefaultBlockSize(dremioPath); + + return getFileStatusFromAttributes(attributes, defaultBlockSize); + } + + private FileStatus getFileStatusFromAttributes(FileAttributes attributes, long defaultBlockSize) { + return new FileStatus(attributes.size(), attributes.isDirectory(), 1, + defaultBlockSize, attributes.lastModifiedTime().toMillis(), new Path(String.valueOf(attributes.getPath()))); + } + + @Override + public String getScheme() { + return fs.getScheme(); + } + } + + private class SeekableFSInputStream extends InputStream implements Seekable, PositionedReadable { + private final FSInputStream fsInputStream; + + public SeekableFSInputStream(FSInputStream fsInputStream) { + this.fsInputStream = fsInputStream; + } + + @Override + public int read() throws IOException { + return fsInputStream.read(); + } + + @Override + public int read(long position, byte[] bytes, int offset, int length) throws IOException { + fsInputStream.setPosition(position); + return fsInputStream.read(bytes, offset, length); + } + + @Override + public void readFully(long position, byte[] bytes, int offset, int length) throws IOException { + fsInputStream.setPosition(position); + fsInputStream.read(bytes, offset, length); + } + + @Override + public void readFully(long position, byte[] bytes) throws IOException { + fsInputStream.setPosition(position); + fsInputStream.read(bytes); + } + + @Override + public void seek(long position) throws IOException { + fsInputStream.setPosition(position); + } + + @Override + public long getPos() throws IOException { + return fsInputStream.getPosition(); + } + + @Override + public boolean seekToNewSource(long position) throws IOException { + fsInputStream.setPosition(position); + return true; + } + + @Override + public void close() throws IOException { + fsInputStream.close(); + } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveCommand.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveCommand.java index a08f7fda3e..2bbe81b6ad 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveCommand.java @@ -18,7 +18,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.TableOperations; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseCommand; import com.dremio.io.file.FileSystem; @@ -27,8 +26,8 @@ */ public class IcebergHiveCommand extends IcebergBaseCommand { - public IcebergHiveCommand(Configuration configuration, String tableFolder, FileSystem fs, TableOperations tableOperations, MutablePlugin plugin) { - super(configuration, tableFolder, fs, tableOperations, plugin); + public IcebergHiveCommand(Configuration configuration, String tableFolder, FileSystem fs, TableOperations tableOperations) { + super(configuration, tableFolder, fs, tableOperations); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveModel.java index b30bad013f..bf9e38f73a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/hive/IcebergHiveModel.java @@ -17,7 +17,7 @@ import org.apache.iceberg.TableOperations; -import 
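A note on the SeekableFSInputStream adapter above: Hadoop's FSDataInputStream constructor rejects any InputStream that does not also implement Seekable and PositionedReadable, which is why the wrapper exists at all. One caveat worth flagging: the PositionedReadable.readFully contract is to read exactly `length` bytes or fail with an EOFException, while a single read call may legitimately return fewer bytes. The readFully overrides above issue one read and discard the returned count; a contract-conforming variant would loop, along these lines (a suggested fix sketch, not part of the change set; it assumes the wrapped `fsInputStream` field shown above and needs java.io.EOFException):

```java
@Override
public void readFully(long position, byte[] bytes, int offset, int length) throws IOException {
  fsInputStream.setPosition(position);
  int total = 0;
  while (total < length) {
    // read may return fewer bytes than requested; keep going until done
    int n = fsInputStream.read(bytes, offset + total, length - total);
    if (n < 0) {
      throw new EOFException("End of stream after " + total + " of " + length + " bytes");
    }
    total += n;
  }
}
```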
com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; import com.dremio.exec.store.iceberg.model.IcebergBaseModel; import com.dremio.exec.store.iceberg.model.IcebergCommand; @@ -40,8 +40,8 @@ public IcebergHiveModel(String namespace, FileSystem fs, String queryUserName, OperatorContext context, - SupportsIcebergRootPointer plugin) { - super(namespace, plugin.getFsConfCopy(), fs, context, null, (MutablePlugin) plugin); + SupportsIcebergMutablePlugin plugin) { + super(namespace, plugin.getFsConfCopy(), fs, context, null, plugin); this.queryUserName = queryUserName; this.plugin = plugin; this.tableName = tableName; @@ -51,7 +51,7 @@ public IcebergHiveModel(String namespace, protected IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier) { TableOperations tableOperations = plugin.createIcebergTableOperations(fs, queryUserName, tableIdentifier); return new IcebergHiveCommand(configuration, - ((IcebergHiveTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations, (MutablePlugin) plugin); + ((IcebergHiveTableIdentifier)tableIdentifier).getTableFolder(), fs, tableOperations); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/IcebergCommitOpHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/IcebergCommitOpHelper.java index c89d59c4b8..7882fda628 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/IcebergCommitOpHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/IcebergCommitOpHelper.java @@ -34,10 +34,12 @@ import org.apache.arrow.vector.complex.reader.FieldReader; import org.apache.arrow.vector.util.Text; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.GenericManifestFile; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.ManifestFiles; import org.apache.iceberg.io.CloseableIterable; +import org.apache.iceberg.io.FileIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,11 +52,11 @@ import com.dremio.exec.store.OperationType; import com.dremio.exec.store.RecordWriter; import com.dremio.exec.store.dfs.IcebergTableProps; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergMetadataInformation; import com.dremio.exec.store.iceberg.IcebergPartitionData; import com.dremio.exec.store.iceberg.IcebergSerDe; import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; +import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; import com.dremio.exec.store.iceberg.model.IcebergCommandType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.store.iceberg.model.IcebergOpCommitter; @@ -83,6 +85,7 @@ public class IcebergCommitOpHelper implements AutoCloseable { protected final WriterCommitterPOP config; protected final OperatorContext context; + protected final FileSystem fs; protected VarBinaryVector icebergMetadataVector; protected IntVector operationTypeVector; protected VarCharVector pathVector; @@ -97,22 +100,26 @@ public class IcebergCommitOpHelper implements AutoCloseable { protected ReadSignatureProvider readSigProvider = (added, deleted) -> ByteString.EMPTY; protected List icebergManifestFiles = new ArrayList<>(); protected boolean success; - private final DremioFileIO dremioFileIO; + private final FileIO fileIO; - public static 
IcebergCommitOpHelper getInstance(OperatorContext context, WriterCommitterPOP config) { + public static IcebergCommitOpHelper getInstance(OperatorContext context, WriterCommitterPOP config, FileSystem fs) { if (config.getIcebergTableProps() == null) { - return new NoOpIcebergCommitOpHelper(context, config); + return new NoOpIcebergCommitOpHelper(context, config, fs); } else if (config.getIcebergTableProps().isDetectSchema()) { - return new SchemaDiscoveryIcebergCommitOpHelper(context, config); + return new SchemaDiscoveryIcebergCommitOpHelper(context, config, fs); } else { - return new IcebergCommitOpHelper(context, config); + return new IcebergCommitOpHelper(context, config, fs); } } - protected IcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config) { + protected IcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config, FileSystem fs) { this.config = config; this.context = context; - this.dremioFileIO = new DremioFileIO(config.getPlugin().getFsConfCopy(), config.getPlugin()); + this.fs = fs; + Preconditions.checkArgument(config.getPlugin() instanceof SupportsIcebergRootPointer, + "Invalid plugin in IcebergCommitOpHelper - plugin does not support Iceberg"); + this.fileIO = ((SupportsIcebergRootPointer) config.getPlugin()).createIcebergFileIO(fs, context, + config.getDatasetPath().getPathComponents(), config.getPluginId().getName(), null); this.partitionPaths = createPartitionPathsSet(config); } @@ -124,7 +131,7 @@ public void setup(VectorAccessible incoming) { final IcebergModel icebergModel; final IcebergTableIdentifier icebergTableIdentifier; final SupportsIcebergMutablePlugin icebergMutablePlugin = (SupportsIcebergMutablePlugin) config.getPlugin(); - icebergModel = icebergMutablePlugin.getIcebergModel(icebergTableProps, config.getProps().getUserName(), context, null); + icebergModel = icebergMutablePlugin.getIcebergModel(icebergTableProps, config.getProps().getUserName(), context, fs); icebergTableIdentifier = icebergModel.getTableIdentifier(icebergMutablePlugin.getTableLocation(icebergTableProps)); TypedFieldId metadataFileId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.ICEBERG_METADATA_COLUMN)); @@ -168,6 +175,7 @@ public void setup(VectorAccessible incoming) { icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), config.getDatasetConfig().get(), config.getTableFormatOptions().getMinInputFilesBeforeOptimize(), + config.getTableFormatOptions().getSnapshotId(), icebergTableProps, getFS(config)); break; @@ -236,8 +244,13 @@ public void consumeData(int records) throws Exception { break; case ADD_DATAFILE: // Consuming operations: OPTIMIZE TABLE - DataFile addedFile = IcebergSerDe.deserializeDataFile(getIcebergMetadataInformation(i).getIcebergMetadataFileByte()); - icebergOpCommitter.consumeAddDataFile(addedFile); + DataFile addedDataFile = IcebergSerDe.deserializeDataFile(getIcebergMetadataInformation(i).getIcebergMetadataFileByte()); + icebergOpCommitter.consumeAddDataFile(addedDataFile); + break; + case DELETE_DELETEFILE: + // Consuming operations: OPTIMIZE TABLE + consumeDeletedDeleteFile(getDeleteDeleteFile(i)); + consumeDeletedDataFilePartitionData(i); break; default: throw new Exception("Unsupported File Type: " + operationType); @@ -285,6 +298,11 @@ protected void consumeDeleteDataFilePath(int row) { icebergOpCommitter.consumeDeleteDataFilePath(deletedDataFilePath); } + protected void consumeDeletedDeleteFile(DeleteFile deletedDeleteFile) { + logger.debug("Removing delete file: {}", 
deletedDeleteFile.path()); + icebergOpCommitter.consumeDeleteDeleteFile(deletedDeleteFile); + } + protected IcebergMetadataInformation getIcebergMetadataInformation(int row) throws IOException, ClassNotFoundException { return IcebergSerDe.deserializeFromByteArray(icebergMetadataVector.get(row)); } @@ -297,6 +315,10 @@ protected DataFile getDeleteDataFile(int row) throws IOException, ClassNotFoundE return IcebergSerDe.deserializeDataFile(getIcebergMetadataInformation(row).getIcebergMetadataFileByte()); } + protected DeleteFile getDeleteDeleteFile(int row) throws IOException, ClassNotFoundException { + return IcebergSerDe.deserializeDeleteFile(getIcebergMetadataInformation(row).getIcebergMetadataFileByte()); + } + protected boolean isDmlCommandType() { IcebergCommandType commandType = config.getIcebergTableProps().getIcebergOpType(); return commandType == IcebergCommandType.DELETE || commandType == IcebergCommandType.UPDATE @@ -319,7 +341,7 @@ public void commit(WriterCommitterOutputHandler outputHandler) throws Exception try (AutoCloseable ac = OperatorStats.getWaitRecorder(context.getStats())) { icebergOpCommitter.commit(outputHandler); } catch (Exception ex) { - icebergOpCommitter.cleanup(dremioFileIO); + icebergOpCommitter.cleanup(fileIO); throw ex; } success = true; @@ -441,7 +463,7 @@ public void close() throws Exception { // we can safely delete the files as exception thrown before if ((icebergOpCommitter == null || (icebergOpCommitter != null && !icebergOpCommitter.isIcebergTableUpdated()))) { - deleteManifestFiles(dremioFileIO, icebergManifestFiles, false); + deleteManifestFiles(fileIO, icebergManifestFiles, false); } } } catch (Exception e) { @@ -461,29 +483,29 @@ private void addMetricStat(WriterCommitterOperator.Metric metric, long time) { /** * Delete all data files referenced in a manifestFile */ - private static void deleteDataFilesInManifestFile(DremioFileIO dremioFileIO, ManifestFile manifestFile) { + private static void deleteDataFilesInManifestFile(FileIO fileIO, ManifestFile manifestFile) { try { // ManifestFiles.readPaths requires snapshotId not null, created manifestFile has null snapshot id // use a NonNullSnapshotIdManifestFileWrapper to provide a non-null dummy snapshotId ManifestFile nonNullSnapshotIdManifestFile = manifestFile.snapshotId() == null ? 
GenericManifestFile.copyOf(manifestFile).withSnapshotId(-1L).build() : manifestFile; - CloseableIterable dataFiles = ManifestFiles.readPaths(nonNullSnapshotIdManifestFile, dremioFileIO); - dataFiles.forEach(f -> dremioFileIO.deleteFile(f)); + CloseableIterable dataFiles = ManifestFiles.readPaths(nonNullSnapshotIdManifestFile, fileIO); + dataFiles.forEach(f -> fileIO.deleteFile(f)); } catch (Exception e) { logger.warn(String.format("Failed to delete up data files in manifestFile %s", manifestFile), e); } } - public static void deleteManifestFiles(DremioFileIO dremioFileIO, List manifestFiles, boolean deleteDataFiles) { + public static void deleteManifestFiles(FileIO fileIO, List manifestFiles, boolean deleteDataFiles) { try { for (ManifestFile manifestFile : manifestFiles) { // delete data files if (deleteDataFiles) { - deleteDataFilesInManifestFile(dremioFileIO, manifestFile); + deleteDataFilesInManifestFile(fileIO, manifestFile); } // delete manifest file and corresponding crc file - ManifestWritesHelper.deleteManifestFileIfExists(dremioFileIO, manifestFile.path()); + ManifestWritesHelper.deleteManifestFileIfExists(fileIO, manifestFile.path()); } } catch (Exception e) { logger.warn("Failed to clean up manifest files", e); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/LazyManifestWriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/LazyManifestWriter.java new file mode 100644 index 0000000000..e74bb4dc84 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/LazyManifestWriter.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
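Stepping back to deleteDataFilesInManifestFile in IcebergCommitOpHelper above, two details deserve a gloss: ManifestFiles.readPaths requires a non-null snapshot id, hence the GenericManifestFile copy carrying the dummy -1 id, and the returned CloseableIterable wraps an open Avro reader. A self-contained sketch of the same pattern, with an explicit try-with-resources close that the helper above (which relies on forEach alone) does not perform:

```java
import java.io.IOException;

import org.apache.iceberg.GenericManifestFile;
import org.apache.iceberg.ManifestFile;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.io.FileIO;

final class ManifestCleanupSketch {
  /** Delete every data file referenced by the given manifest. */
  static void deleteReferencedDataFiles(FileIO io, ManifestFile manifestFile) throws IOException {
    // readPaths requires a non-null snapshot id; substitute a dummy (-1) copy if needed
    ManifestFile readable = manifestFile.snapshotId() == null
        ? GenericManifestFile.copyOf(manifestFile).withSnapshotId(-1L).build()
        : manifestFile;
    try (CloseableIterable<String> paths = ManifestFiles.readPaths(readable, io)) {
      paths.forEach(io::deleteFile);
    }
  }
}
```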
+ */
+
+package com.dremio.exec.store.iceberg.manifestwriter;
+
+import org.apache.iceberg.DataFile;
+import org.apache.iceberg.ManifestFiles;
+import org.apache.iceberg.ManifestWriter;
+import org.apache.iceberg.PartitionSpec;
+import org.apache.iceberg.io.FileIO;
+import org.apache.iceberg.io.OutputFile;
+
+
+public class LazyManifestWriter {
+  private ManifestWriter<DataFile> manifestWriter = null;
+
+  private final FileIO fileIO;
+  private final String manifestLocation;
+  private final PartitionSpec partitionSpec;
+
+  public LazyManifestWriter(FileIO fileIO, String manifestLocation, PartitionSpec partitionSpec) {
+    this.fileIO = fileIO;
+    this.manifestLocation = manifestLocation;
+    this.partitionSpec = partitionSpec;
+  }
+
+  public synchronized ManifestWriter<DataFile> getInstance() {
+    if (manifestWriter == null) {
+      final OutputFile manifestFile = fileIO.newOutputFile(manifestLocation);
+      this.manifestWriter = ManifestFiles.write(partitionSpec, manifestFile);
+    }
+    return manifestWriter;
+  }
+
+  public boolean isInitialized() {
+    return manifestWriter != null;
+  }
+
+}
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/ManifestWritesHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/ManifestWritesHelper.java
index 212ff4ed87..db037b868c 100644
--- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/ManifestWritesHelper.java
+++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/ManifestWritesHelper.java
@@ -34,12 +34,10 @@
 import org.apache.avro.file.DataFileConstants;
 import org.apache.iceberg.DataFile;
 import org.apache.iceberg.ManifestFile;
-import org.apache.iceberg.ManifestFiles;
-import org.apache.iceberg.ManifestWriter;
 import org.apache.iceberg.PartitionSpec;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.TableProperties;
-import org.apache.iceberg.io.OutputFile;
+import org.apache.iceberg.io.FileIO;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,7 +49,6 @@
 import com.dremio.exec.store.OperationType;
 import com.dremio.exec.store.RecordWriter;
 import com.dremio.exec.store.dfs.IcebergTableProps;
-import com.dremio.exec.store.iceberg.DremioFileIO;
 import com.dremio.exec.store.iceberg.FieldIdBroker;
 import com.dremio.exec.store.iceberg.IcebergManifestWriterPOP;
 import com.dremio.exec.store.iceberg.IcebergMetadataInformation;
@@ -59,6 +56,7 @@
 import com.dremio.exec.store.iceberg.IcebergSerDe;
 import com.dremio.exec.store.iceberg.IcebergUtils;
 import com.dremio.exec.store.iceberg.SchemaConverter;
+import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer;
 import com.dremio.io.file.FileSystem;
 import com.dremio.io.file.Path;
 import com.dremio.sabot.exec.store.iceberg.proto.IcebergProtobuf;
@@ -77,10 +75,11 @@ public class ManifestWritesHelper {
   private final String ICEBERG_METADATA_FOLDER = "metadata";
   private final List<String> listOfFilesCreated;
 
-  protected ManifestWriter<DataFile> manifestWriter;
+  protected LazyManifestWriter manifestWriter;
   protected IcebergManifestWriterPOP writer;
   protected long currentNumDataFileAdded = 0;
-  protected DremioFileIO dremioFileIO;
+  protected DataFile currentDataFile;
+  protected FileIO fileIO;
 
   protected VarBinaryVector inputDatafiles;
   protected IntVector operationTypes;
@@ -111,7 +110,9 @@ protected ManifestWritesHelper(IcebergManifestWriterPOP writer) {
       throw new RuntimeException("Unable to create File System", e);
     }
-    this.dremioFileIO = new DremioFileIO(fs, writer.getPlugin().getFsConfCopy(), writer.getPlugin());
+
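A hypothetical usage sketch for the new LazyManifestWriter, to make the point of the indirection explicit: the manifest output file is only created when the first data file arrives, so a writer that never receives a row leaves no empty manifest behind (`fileIO`, `manifestLocation`, `partitionSpec` and `dataFiles` are assumed to be in scope):

```java
LazyManifestWriter lazy = new LazyManifestWriter(fileIO, manifestLocation, partitionSpec);
for (DataFile file : dataFiles) {
  lazy.getInstance().add(file);   // the first call creates the writer and its output file
}
if (lazy.isInitialized()) {       // nothing to close or commit if no file was ever added
  lazy.getInstance().close();
  ManifestFile manifest = lazy.getInstance().toManifestFile();
}
```

The same laziness is why deleteRunningManifestFile further down now checks isInitialized() before attempting a close-and-delete.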
Preconditions.checkArgument(writer.getPlugin() instanceof SupportsIcebergRootPointer, + "Invalid plugin in ManifestWritesHelper - plugin does not support Iceberg"); + this.fileIO = ((SupportsIcebergRootPointer) writer.getPlugin()).createIcebergFileIO(fs, null, null, null, null); } public void setIncoming(VectorAccessible incoming) { @@ -127,10 +128,10 @@ public void startNewWriter() { final PartitionSpec partitionSpec = getPartitionSpec(writer.getOptions()); this.partitionSpecId = Optional.of(partitionSpec.specId()); final String icebergManifestFileExt = "." + outputExtension; - final OutputFile manifestLocation = dremioFileIO.newOutputFile(baseMetadataLocation + Path.SEPARATOR + UUID.randomUUID() + icebergManifestFileExt); - listOfFilesCreated.add(manifestLocation.location()); + final String manifestLocation = baseMetadataLocation + Path.SEPARATOR + UUID.randomUUID() + icebergManifestFileExt; + listOfFilesCreated.add(manifestLocation); partitionDataInCurrentManifest.clear(); - this.manifestWriter = ManifestFiles.write(partitionSpec, manifestLocation); + this.manifestWriter = new LazyManifestWriter(fileIO, manifestLocation, partitionSpec); } public void processIncomingRow(int recordIndex) throws IOException { @@ -140,14 +141,19 @@ public void processIncomingRow(int recordIndex) throws IOException { final Integer operationTypeValue = operationTypes.get(recordIndex); final IcebergMetadataInformation icebergMetadataInformation = IcebergSerDe.deserializeFromByteArray(metaInfoBytes); final OperationType operationType = OperationType.valueOf(operationTypeValue); + currentDataFile = IcebergSerDe.deserializeDataFile(icebergMetadataInformation.getIcebergMetadataFileByte()); + if (currentDataFile == null) { + throw new IOException("Iceberg data file cannot be empty or null."); + } switch (operationType) { case ADD_DATAFILE: - final DataFile dataFile = IcebergSerDe.deserializeDataFile(icebergMetadataInformation.getIcebergMetadataFileByte()); - addDataFile(dataFile); + addDataFile(currentDataFile); currentNumDataFileAdded++; break; case DELETE_DATAFILE: - deletedDataFiles.put(IcebergSerDe.deserializeDataFile(icebergMetadataInformation.getIcebergMetadataFileByte()), metaInfoBytes); + deletedDataFiles.put(currentDataFile, metaInfoBytes); + break; + case DELETE_DELETEFILE: break; default: throw new IOException("Unsupported File type - " + operationType); @@ -165,7 +171,7 @@ protected void addDataFile(DataFile dataFile) { if (writer.getOptions().isReadSignatureSupport()) { partitionDataInCurrentManifest.add(ipd); } - manifestWriter.add(dataFile); + manifestWriter.getInstance().add(dataFile); } public void processDeletedFiles(BiConsumer processLogic) { @@ -175,7 +181,7 @@ public void processDeletedFiles(BiConsumer processLogic) { public long length() { Preconditions.checkNotNull(manifestWriter); - return manifestWriter.length(); + return manifestWriter.getInstance().length(); } public boolean hasReachedMaxLen() { @@ -188,8 +194,8 @@ public Optional write() throws IOException { deleteRunningManifestFile(); return Optional.empty(); } - manifestWriter.close(); - return Optional.of(manifestWriter.toManifestFile()); + manifestWriter.getInstance().close(); + return Optional.of(manifestWriter.getInstance().toManifestFile()); } public byte[] getWrittenSchema() { @@ -242,13 +248,13 @@ protected Schema getIcebergSchema(ByteString extendedProperty, BatchSchema batch protected void deleteRunningManifestFile() { try { - if (manifestWriter == null) { + if (manifestWriter == null || !manifestWriter.isInitialized()) { 
return; } - manifestWriter.close(); - ManifestFile manifestFile = manifestWriter.toManifestFile(); + manifestWriter.getInstance().close(); + ManifestFile manifestFile = manifestWriter.getInstance().toManifestFile(); logger.debug("Removing {} as it'll be re-written with a new schema", manifestFile.path()); - deleteManifestFileIfExists(dremioFileIO, manifestFile.path()); + deleteManifestFileIfExists(fileIO, manifestFile.path()); manifestWriter = null; } catch (Exception e) { logger.warn("Error while closing stale manifest", e); @@ -261,14 +267,14 @@ Set partitionDataInCurrentManifest() { protected void abort() { for (String path : this.listOfFilesCreated) { - ManifestWritesHelper.deleteManifestFileIfExists(dremioFileIO, path); + ManifestWritesHelper.deleteManifestFileIfExists(fileIO, path); } } - public static boolean deleteManifestFileIfExists(DremioFileIO dremioFileIO, String filePath) { + public static boolean deleteManifestFileIfExists(FileIO fileIO, String filePath) { try { - dremioFileIO.deleteFile(filePath); - deleteManifestCrcFileIfExists(dremioFileIO, filePath); + fileIO.deleteFile(filePath); + deleteManifestCrcFileIfExists(fileIO, filePath); return true; } catch (Exception e) { logger.warn("Error while deleting file {}", filePath, e); @@ -276,13 +282,13 @@ public static boolean deleteManifestFileIfExists(DremioFileIO dremioFileIO, Stri } } - public static boolean deleteManifestCrcFileIfExists(DremioFileIO dremioFileIO, String manifestFilePath) { + public static boolean deleteManifestCrcFileIfExists(FileIO fileIO, String manifestFilePath) { try{ com.dremio.io.file.Path p = com.dremio.io.file.Path.of(manifestFilePath); String fileName = p.getName(); com.dremio.io.file.Path parentPath = p.getParent(); String crcFilePath = parentPath + com.dremio.io.file.Path.SEPARATOR + "." + fileName + "." 
+ CRC_FILE_EXTENTION; - dremioFileIO.deleteFile(crcFilePath); + fileIO.deleteFile(crcFilePath); return true; } catch (Exception e) { logger.warn("Error while deleting crc file for {}", manifestFilePath, e); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/NoOpIcebergCommitOpHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/NoOpIcebergCommitOpHelper.java index 375b20dbd5..43c3037ee1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/NoOpIcebergCommitOpHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/NoOpIcebergCommitOpHelper.java @@ -17,6 +17,7 @@ import com.dremio.exec.physical.config.WriterCommitterPOP; import com.dremio.exec.record.VectorAccessible; +import com.dremio.io.file.FileSystem; import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.sabot.op.writer.WriterCommitterOutputHandler; @@ -25,23 +26,23 @@ */ public class NoOpIcebergCommitOpHelper extends IcebergCommitOpHelper { - public NoOpIcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config) { - super(context, config); - } + public NoOpIcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config, FileSystem fs) { + super(context, config, fs); + } - @Override - public void setup(VectorAccessible incoming) { - } + @Override + public void setup(VectorAccessible incoming) { + } @Override - public void consumeData(int records) throws Exception { - } + public void consumeData(int records) throws Exception { + } - @Override - public void commit(WriterCommitterOutputHandler outputHandler) throws Exception { - } + @Override + public void commit(WriterCommitterOutputHandler outputHandler) throws Exception { + } - @Override - public void close() { - } + @Override + public void close() { + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryIcebergCommitOpHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryIcebergCommitOpHelper.java index 8e5fe81bfc..d7ee9af0c5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryIcebergCommitOpHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryIcebergCommitOpHelper.java @@ -49,6 +49,7 @@ import com.dremio.exec.store.iceberg.model.IcebergCommandType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.util.VectorUtil; +import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.sabot.exec.context.OperatorStats; @@ -59,150 +60,151 @@ * the icebergCommitterOp is lazily initialized only at the commit time. 
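SchemaDiscoveryIcebergCommitOpHelper builds that final schema by folding each file's observed schema into the current one via mergeWithUpPromotion, failing with NoSupportedUpPromotionOrCoercionException when two types cannot be reconciled. Purely as an illustration of that idea (this is not Dremio's implementation, and the widening rules here are invented for the sketch):

```java
import java.util.LinkedHashMap;
import java.util.Map;

final class SchemaMergeSketch {
  enum Kind { INT, BIGINT, FLOAT, DOUBLE, VARCHAR }

  /** Union of both schemas; fields present in both are widened to a common type or rejected. */
  static Map<String, Kind> mergeWithUpPromotion(Map<String, Kind> current, Map<String, Kind> observed) {
    Map<String, Kind> merged = new LinkedHashMap<>(current);
    observed.forEach((field, type) -> merged.merge(field, type, SchemaMergeSketch::widen));
    return merged;
  }

  static Kind widen(Kind a, Kind b) {
    if (a == b) return a;
    if (isPair(a, b, Kind.INT, Kind.BIGINT)) return Kind.BIGINT;   // assumed widening rule
    if (isPair(a, b, Kind.FLOAT, Kind.DOUBLE)) return Kind.DOUBLE; // assumed widening rule
    // plays the role of NoSupportedUpPromotionOrCoercionException in the real code
    throw new IllegalArgumentException("No supported up-promotion between " + a + " and " + b);
  }

  private static boolean isPair(Kind a, Kind b, Kind x, Kind y) {
    return (a == x && b == y) || (a == y && b == x);
  }
}
```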
*/ public class SchemaDiscoveryIcebergCommitOpHelper extends IcebergCommitOpHelper implements SupportsTypeCoercionsAndUpPromotions { - private static final Logger logger = LoggerFactory.getLogger(SchemaDiscoveryIcebergCommitOpHelper.class); - - private VarBinaryVector schemaVector; - private BatchSchema currentSchema; - private List deletedDataFiles = new ArrayList<>(); - private List partitionColumns; - private final int implicitColSize; - - protected SchemaDiscoveryIcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config) { - super(context, config); - this.partitionColumns = Optional.ofNullable(config.getIcebergTableProps().getPartitionColumnNames()).orElse(Collections.EMPTY_LIST); - this.implicitColSize = (int) partitionColumns.stream().filter(IncrementalUpdateUtils.UPDATE_COLUMN::equals).count(); - this.currentSchema = config.getIcebergTableProps().getFullSchema(); - } + private static final Logger logger = LoggerFactory.getLogger(SchemaDiscoveryIcebergCommitOpHelper.class); + + private VarBinaryVector schemaVector; + private BatchSchema currentSchema; + private List deletedDataFiles = new ArrayList<>(); + private List partitionColumns; + private final int implicitColSize; + + protected SchemaDiscoveryIcebergCommitOpHelper(OperatorContext context, WriterCommitterPOP config, FileSystem fs) { + super(context, config, fs); + this.partitionColumns = Optional.ofNullable(config.getIcebergTableProps().getPartitionColumnNames()).orElse(Collections.EMPTY_LIST); + this.implicitColSize = (int) partitionColumns.stream().filter(IncrementalUpdateUtils.UPDATE_COLUMN::equals).count(); + this.currentSchema = config.getIcebergTableProps().getFullSchema(); + } - @Override - public void setup(VectorAccessible incoming) { - TypedFieldId schemaFieldId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.FILE_SCHEMA_COLUMN)); - schemaVector = incoming.getValueAccessorById(VarBinaryVector.class, schemaFieldId.getFieldIds()).getValueVector(); + @Override + public void setup(VectorAccessible incoming) { + TypedFieldId schemaFieldId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.FILE_SCHEMA_COLUMN)); + schemaVector = incoming.getValueAccessorById(VarBinaryVector.class, schemaFieldId.getFieldIds()).getValueVector(); - TypedFieldId metadataFileId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.ICEBERG_METADATA_COLUMN)); - icebergMetadataVector = incoming.getValueAccessorById(VarBinaryVector.class, metadataFileId.getFieldIds()).getValueVector(); - TypedFieldId operationTypeId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.OPERATION_TYPE_COLUMN)); - operationTypeVector = incoming.getValueAccessorById(IntVector.class, operationTypeId.getFieldIds()).getValueVector(); - partitionDataVector = (ListVector) VectorUtil.getVectorFromSchemaPath(incoming, RecordWriter.PARTITION_DATA_COLUMN); - } + TypedFieldId metadataFileId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.ICEBERG_METADATA_COLUMN)); + icebergMetadataVector = incoming.getValueAccessorById(VarBinaryVector.class, metadataFileId.getFieldIds()).getValueVector(); + TypedFieldId operationTypeId = RecordWriter.SCHEMA.getFieldId(SchemaPath.getSimplePath(RecordWriter.OPERATION_TYPE_COLUMN)); + operationTypeVector = incoming.getValueAccessorById(IntVector.class, operationTypeId.getFieldIds()).getValueVector(); + partitionDataVector = (ListVector) VectorUtil.getVectorFromSchemaPath(incoming, RecordWriter.PARTITION_DATA_COLUMN); + } - @Override - 
public void consumeData(int records) throws Exception { - super.consumeData(records); - IntStream.range(0, records).filter(i -> schemaVector.isSet(i) != 0).forEach(this::consumeSchema); - IntStream.range(0, records).forEach(this::consumePartitionData); - } + @Override + public void consumeData(int records) throws Exception { + super.consumeData(records); + IntStream.range(0, records).filter(i -> schemaVector.isSet(i) != 0).forEach(this::consumeSchema); + IntStream.range(0, records).forEach(this::consumePartitionData); + } - private void consumeSchema(int recordIdx) { - byte[] schemaBytes = schemaVector.get(recordIdx); - BatchSchema schemaAtThisRow = BatchSchema.deserialize(schemaBytes); - if (!currentSchema.equals(schemaAtThisRow)) { - try { - currentSchema = currentSchema.mergeWithUpPromotion(schemaAtThisRow, this); - } catch (NoSupportedUpPromotionOrCoercionException e) { - e.addDatasetPath(config.getDatasetPath().getPathComponents()); + private void consumeSchema(int recordIdx) { + byte[] schemaBytes = schemaVector.get(recordIdx); + BatchSchema schemaAtThisRow = BatchSchema.deserialize(schemaBytes); + if (!currentSchema.equals(schemaAtThisRow)) { + try { + currentSchema = currentSchema.mergeWithUpPromotion(schemaAtThisRow, this); + } catch (NoSupportedUpPromotionOrCoercionException e) { + e.addDatasetPath(config.getDatasetPath().getPathComponents()); throw UserException.unsupportedError(e).message(e.getMessage()).build(); - } - if (currentSchema.getTotalFieldCount() > context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)) { - throw new ColumnCountTooLargeException((int) context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)); - } + } + if (currentSchema.getTotalFieldCount() > context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)) { + throw new ColumnCountTooLargeException((int) context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)); } } + } - private void consumePartitionData(int recordIdx) { - List partitionDataForThisManifest = getPartitionData(recordIdx); - int existingPartitionDepth = partitionColumns.size() - implicitColSize; - partitionDataForThisManifest.stream().forEach(x -> { - if(x.size() > existingPartitionDepth) { - partitionColumns = x.getPartitionType().fields().stream().map(Types.NestedField::name).collect(Collectors.toList()); - } - }); - } + private void consumePartitionData(int recordIdx) { + List partitionDataForThisManifest = getPartitionData(recordIdx); + int existingPartitionDepth = partitionColumns.size() - implicitColSize; + partitionDataForThisManifest.stream().forEach(x -> { + if(x.size() > existingPartitionDepth) { + partitionColumns = x.getPartitionType().fields().stream().map(Types.NestedField::name).collect(Collectors.toList()); + } + }); + } - @Override - protected void consumeManifestFile(ManifestFile manifestFile) { - logger.debug("Adding manifest file: {}", manifestFile.path()); - icebergManifestFiles.add(manifestFile); + @Override + protected void consumeManifestFile(ManifestFile manifestFile) { + logger.debug("Adding manifest file: {}", manifestFile.path()); + icebergManifestFiles.add(manifestFile); - int existingPartitionDepth = partitionColumns.size() - implicitColSize; - if(config.getIcebergTableProps().isDetectSchema() && manifestFile.partitions().size() > existingPartitionDepth - && config.getIcebergTableProps().getIcebergOpType() == IcebergCommandType.INCREMENTAL_METADATA_REFRESH) { - throw new UnsupportedOperationException ("Addition of a new level dir is not allowed in 
incremental refresh. Please forget and " + - "promote the table again."); - } + int existingPartitionDepth = partitionColumns.size() - implicitColSize; + if(config.getIcebergTableProps().isDetectSchema() && manifestFile.partitions().size() > existingPartitionDepth + && config.getIcebergTableProps().getIcebergOpType() == IcebergCommandType.INCREMENTAL_METADATA_REFRESH) { + throw new UnsupportedOperationException ("Addition of a new level dir is not allowed in incremental refresh. Please forget and " + + "promote the table again."); } + } - @Override - protected void consumeDeletedDataFile(DataFile deletedDataFile) { - logger.debug("Removing data file: {}", deletedDataFile.path()); - deletedDataFiles.add(deletedDataFile); - } + @Override + protected void consumeDeletedDataFile(DataFile deletedDataFile) { + logger.debug("Removing data file: {}", deletedDataFile.path()); + deletedDataFiles.add(deletedDataFile); + } - @Override - public void commit(WriterCommitterOutputHandler outputHandler) throws Exception { - initializeIcebergOpCommitter(); - super.commit(outputHandler); - icebergManifestFiles.clear(); - deletedDataFiles.clear(); - } + @Override + public void commit(WriterCommitterOutputHandler outputHandler) throws Exception { + initializeIcebergOpCommitter(); + super.commit(outputHandler); + icebergManifestFiles.clear(); + deletedDataFiles.clear(); + } - private void initializeIcebergOpCommitter() throws Exception { - // TODO: doesn't track wait times currently. need to use dremioFileIO after implementing newOutputFile method - IcebergModel icebergModel = ((SupportsIcebergMutablePlugin)config.getPlugin()). - getIcebergModel(config.getIcebergTableProps(), config.getProps().getUserName(), context, null); - IcebergTableProps icebergTableProps = config.getIcebergTableProps(); - - switch (icebergTableProps.getIcebergOpType()) { - case CREATE: - icebergOpCommitter = icebergModel.getCreateTableCommitter( - icebergTableProps.getTableName(), - icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), - currentSchema, - partitionColumns, context.getStats(), null); - break; - case INSERT: - icebergOpCommitter = icebergModel.getInsertTableCommitter(icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), context.getStats()); - break; - case FULL_METADATA_REFRESH: - createReadSignProvider(icebergTableProps, true); - icebergOpCommitter = icebergModel.getFullMetadataRefreshCommitter( + private void initializeIcebergOpCommitter() throws Exception { + // TODO: doesn't track wait times currently. need to use dremioFileIO after implementing newOutputFile method + IcebergModel icebergModel = ((SupportsIcebergMutablePlugin)config.getPlugin()). 
+ getIcebergModel(config.getIcebergTableProps(), config.getProps().getUserName(), context, fs); + IcebergTableProps icebergTableProps = config.getIcebergTableProps(); + + switch (icebergTableProps.getIcebergOpType()) { + case CREATE: + icebergOpCommitter = icebergModel.getCreateTableCommitter( icebergTableProps.getTableName(), - config.getDatasetPath().getPathComponents(), - icebergTableProps.getDataTableLocation(), - icebergTableProps.getUuid(), icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), currentSchema, - partitionColumns, - config.getDatasetConfig().orElseThrow(() -> new IllegalStateException("DatasetConfig not found")), - context.getStats(), - null - ); - break; - case INCREMENTAL_METADATA_REFRESH: - createReadSignProvider(icebergTableProps, false); - icebergOpCommitter = icebergModel.getIncrementalMetadataRefreshCommitter( - context, - icebergTableProps.getTableName(), - config.getDatasetPath().getPathComponents(), - icebergTableProps.getDataTableLocation(), - icebergTableProps.getUuid(), - icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), - icebergTableProps.getFullSchema(), - partitionColumns, - true, - config.getDatasetConfig().orElseThrow(() -> new IllegalStateException("DatasetConfig not found")) - ); - icebergOpCommitter.updateSchema(currentSchema); - break; - } + partitionColumns, context.getStats(), null); + break; + case INSERT: + icebergOpCommitter = icebergModel.getInsertTableCommitter(icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), context.getStats()); + break; + case FULL_METADATA_REFRESH: + createReadSignProvider(icebergTableProps, true); + icebergOpCommitter = icebergModel.getFullMetadataRefreshCommitter( + icebergTableProps.getTableName(), + config.getDatasetPath().getPathComponents(), + icebergTableProps.getDataTableLocation(), + icebergTableProps.getUuid(), + icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), + currentSchema, + partitionColumns, + config.getDatasetConfig().orElseThrow(() -> new IllegalStateException("DatasetConfig not found")), + context.getStats(), + null + ); + break; + case INCREMENTAL_METADATA_REFRESH: + createReadSignProvider(icebergTableProps, false); + icebergOpCommitter = icebergModel.getIncrementalMetadataRefreshCommitter( + context, + icebergTableProps.getTableName(), + config.getDatasetPath().getPathComponents(), + icebergTableProps.getDataTableLocation(), + icebergTableProps.getUuid(), + icebergModel.getTableIdentifier(icebergTableProps.getTableLocation()), + icebergTableProps.getFullSchema(), + partitionColumns, + true, + config.getDatasetConfig().orElseThrow(() -> new IllegalStateException("DatasetConfig not found")) + ); + icebergOpCommitter.updateSchema(currentSchema); + break; + } - try (AutoCloseable ac = OperatorStats.getWaitRecorder(context.getStats())) { - icebergManifestFiles.forEach(icebergOpCommitter::consumeManifestFile); - deletedDataFiles.forEach(icebergOpCommitter::consumeDeleteDataFile); - } + try (AutoCloseable ac = OperatorStats.getWaitRecorder(context.getStats())) { + icebergManifestFiles.forEach(icebergOpCommitter::consumeManifestFile); + deletedDataFiles.forEach(icebergOpCommitter::consumeDeleteDataFile); } + } + @Override protected void createPartitionExistsPredicate(WriterCommitterPOP config, boolean isFullRefresh) { // SchemaDiscoveryIcebergCommitOpHelper is used for non-Hive sources where partition paths won't be // provided via IcebergTableProps.getPartitionPaths. 
Partition existence will be done only for paths diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryManifestWritesHelper.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryManifestWritesHelper.java index e3be9286a8..a2c84976c6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryManifestWritesHelper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/manifestwriter/SchemaDiscoveryManifestWritesHelper.java @@ -85,6 +85,9 @@ public void processIncomingRow(int recordIndex) throws IOException { try { currentSchema = currentSchema.mergeWithUpPromotion(newSchema, this); } catch (NoSupportedUpPromotionOrCoercionException e) { + if (currentDataFile != null) { + e.addFilePath(currentDataFile.path().toString()); + } throw UserException.unsupportedError(e).message(e.getMessage()).build(); } if (currentSchema.getTotalFieldCount() > columnLimit) { @@ -100,7 +103,7 @@ public void processIncomingRow(int recordIndex) throws IOException { @Override protected void addDataFile(DataFile dataFile) { - manifestWriter.add(dataFile); + manifestWriter.getInstance().add(dataFile); dataFiles.add(dataFile); // File system partitions follow dremio-derived nomenclature - dir[idx]. Example - dir0, dir1.. and so on. @@ -124,7 +127,7 @@ public Optional write() throws IOException { deleteRunningManifestFile(); super.startNewWriter(); // using currentSchema addPartitionData(); - dataFiles.stream().forEach(manifestWriter::add); + dataFiles.stream().forEach(manifestWriter.getInstance()::add); hasSchemaChanged = false; currentNumDataFileAdded = dataFiles.size(); dataFiles.clear(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/FullMetadataRefreshCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/FullMetadataRefreshCommitter.java index d1845db551..e51d1a4d9c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/FullMetadataRefreshCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/FullMetadataRefreshCommitter.java @@ -29,7 +29,9 @@ import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.MutablePlugin; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogGrpcClient; import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogRequestBuilder; import com.dremio.sabot.exec.context.OperatorStats; @@ -92,7 +94,10 @@ public Snapshot commit() { datasetCatalogRequestBuilder.setNumOfRecords(numRecords); long numDataFiles = Long.parseLong(snapshot.summary().getOrDefault("total-data-files", "0")); datasetCatalogRequestBuilder.setNumOfDataFiles(numDataFiles); - datasetCatalogRequestBuilder.setIcebergMetadata(getRootPointer(), tableUuid, snapshot.snapshotId(), conf, isPartitioned, getCurrentSpecMap(), plugin, getCurrentSchema()); + ImmutableDremioFileAttrs partitionStatsFileAttrs = IcebergUtils.getPartitionStatsFileAttrs(getRootPointer(), snapshot.snapshotId(), + icebergCommand.getFileIO()); + datasetCatalogRequestBuilder.setIcebergMetadata(getRootPointer(), tableUuid, snapshot.snapshotId(), + getCurrentSpecMap(), getCurrentSchema(), partitionStatsFileAttrs.fileName(), partitionStatsFileAttrs.fileLength()); try { addOrUpdateDataSet(); diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseCommand.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseCommand.java index a30f26411e..602a5a374c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseCommand.java @@ -19,7 +19,9 @@ import static org.apache.iceberg.Transactions.createTableTransaction; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -35,7 +37,9 @@ import org.apache.iceberg.AppendFiles; import org.apache.iceberg.BaseTable; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.DeleteFiles; +import org.apache.iceberg.ExpireSnapshots; import org.apache.iceberg.ManageSnapshots; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.OverwriteFiles; @@ -49,23 +53,20 @@ import org.apache.iceberg.Transaction; import org.apache.iceberg.UpdateProperties; import org.apache.iceberg.UpdateSchema; -import org.apache.iceberg.exceptions.RuntimeIOException; import org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.expressions.Term; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; import com.dremio.common.exceptions.UserException; import com.dremio.common.expression.CompleteType; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.PartitionSpecAlterOption; import com.dremio.exec.catalog.RollbackOption; import com.dremio.exec.planner.sql.CalciteArrowHelper; import com.dremio.exec.planner.sql.PartitionTransform; import com.dremio.exec.planner.sql.parser.SqlAlterTablePartitionColumns; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.dfs.FileSystemPlugin; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.FieldIdBroker; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SchemaConverter; @@ -90,23 +91,20 @@ public class IcebergBaseCommand implements IcebergCommand { protected final Path fsPath; private final FileSystem fs; private Snapshot currentSnapshot; - private MutablePlugin mutablePlugin; public IcebergBaseCommand(Configuration configuration, - String tableFolder, - FileSystem fs, - TableOperations tableOperations, - MutablePlugin mutablePlugin) { - Preconditions.checkArgument(mutablePlugin != null, "Mutable plugin can not be null"); + String tableFolder, + FileSystem fs, + TableOperations tableOperations) { this.configuration = configuration; transaction = null; currentSnapshot = null; fsPath = new Path(tableFolder); this.fs = fs; this.tableOperations = tableOperations; - this.mutablePlugin = mutablePlugin; } + @Override public void beginCreateTableTransaction(String tableName, BatchSchema writerSchema, List partitionColumns, Map tableParameters, PartitionSpec partitionSpec) { Preconditions.checkState(transaction == null, "Unexpected state - transaction should be null"); @@ -163,12 +161,12 @@ public Snapshot finishOverwrite() { } @Override - public Snapshot rewriteDataFiles(Set removedFiles, Set addedFiles) { + public Snapshot rewriteFiles(Set removedDataFiles, Set removedDeleteFiles, Set addedDataFiles, Set addedDeleteFiles, Long snapshotId) { if (transaction == null) { beginTransaction(); } 
    try {
-      transaction.newRewrite().rewriteFiles(removedFiles, addedFiles).commit();
+      transaction.newRewrite().validateFromSnapshot(snapshotId).rewriteFiles(removedDataFiles, removedDeleteFiles, addedDataFiles, addedDeleteFiles).commit();
       return transaction.table().currentSnapshot();
     } finally {
       endTransaction();
@@ -237,20 +235,21 @@ public Snapshot finishInsert() {
   }
 
   @Override
-  public Snapshot expireSnapshots(Long olderThanInMillis, int retainLast) {
+  public Map<Long, String> expireSnapshots(long olderThanInMillis, int retainLast) {
     Stopwatch stopwatch = Stopwatch.createStarted();
-    if (transaction == null) {
-      beginTransaction();
-    }
+    // perform expiration
+    Table table = loadTable();
+    ExpireSnapshots expireSnapshots = table.expireSnapshots();
     String olderThanTimestamp = getTimestampFromMillis(olderThanInMillis);
     try {
       logger.info("Trying to expire snapshots older than {}, and retain last {} snapshots.", olderThanTimestamp, retainLast);
-      transaction.expireSnapshots()
+      expireSnapshots
         .expireOlderThan(olderThanInMillis)
         .retainLast(retainLast)
+        .cleanExpiredFiles(false)
         .commit();
-      transaction.table().refresh();
-      return transaction.table().currentSnapshot();
+      table.refresh();
+      return findSnapshots(tableOperations.refresh());
     } catch (Exception e) {
       final String errorMsg = String.format("Cannot expire snapshots older than %s and retain last %d snapshots.", olderThanTimestamp, retainLast);
@@ -259,12 +258,18 @@ public Snapshot expireSnapshots(Long olderThanInMillis, int retainLast) {
         .message(errorMsg)
         .buildSilently();
     } finally {
-      endTransaction();
       long totalCommitTime = stopwatch.elapsed(TimeUnit.MILLISECONDS);
       logger.info("Iceberg ExpireSnapshots call takes {} milliseconds.", totalCommitTime);
     }
   }
 
+  private Map<Long, String> findSnapshots(TableMetadata metadata) {
+    if (metadata.snapshots() == null) {
+      return Collections.emptyMap();
+    }
+    return metadata.snapshots().stream().collect(Collectors.toMap(Snapshot::snapshotId, Snapshot::manifestListLocation));
+  }
+
   @Override
   public void rollback(RollbackOption rollbackOption) {
     Stopwatch stopwatch = Stopwatch.createStarted();
@@ -351,6 +356,7 @@ public void consumeDeleteDataFilesByPaths(List<String> filePathsList) {
     filePathsList.forEach(p -> deleteFiles.deleteFile(p));
   }
 
+  @Override
   public void truncateTable() {
     Preconditions.checkState(transaction == null, "Unexpected state");
     Table table = loadTable();
@@ -379,15 +385,15 @@ public void addColumns(List<Field> columnsToAdd) {
     updateSchema.commit();
   }
 
+  @Override
   public void deleteTable() {
     try {
       com.dremio.io.file.Path p = com.dremio.io.file.Path.of(fsPath.toString());
-      DremioFileIO dremioFileIO = new DremioFileIO(mutablePlugin.getFsConfCopy(), mutablePlugin);
-      dremioFileIO.deleteFile(p.toString(), true, mutablePlugin instanceof FileSystemPlugin);
-    } catch (RuntimeIOException e) {
-      String message = String.format("The dataset is now forgotten by dremio, but there was an error while cleaning up respective data and metadata files residing at %s.", fsPath.toString());
+      fs.delete(p, true);
+    } catch (IOException e) {
+      String message = String.format("The dataset is now forgotten by dremio, but there was an error while cleaning up respective data and metadata files residing at %s.", fsPath);
       logger.error(message);
-      throw new RuntimeException(e);
+      throw new UncheckedIOException(message, e);
     }
   }
@@ -443,6 +449,7 @@ public void dropColumnInternalTable(String columnToDrop) {
     dropColumn(columnToDrop, transaction.table(), transaction.updateSchema(), true);
   }
 
+  @Override
   public void changeColumnForInternalTable(String
columnToChange, Field batchField) { UpdateSchema schema = transaction.updateSchema(); dropColumn(columnToChange, transaction.table(), schema, false); @@ -452,6 +459,7 @@ public void changeColumnForInternalTable(String columnToChange, Field batchField schema.commit(); } + @Override public void updatePropertiesMap(Map propertiesMap) { UpdateProperties properties = transaction.table().updateProperties(); for (Map.Entry property : propertiesMap.entrySet()) { @@ -460,6 +468,7 @@ public void updatePropertiesMap(Map propertiesMap) { properties.commit(); } + @Override public void dropColumn(String columnToDrop) { Table table = loadTable(); dropColumn(columnToDrop, table, table.updateSchema(), true); @@ -477,6 +486,7 @@ public void dropColumn(String columnToDrop, Table table, UpdateSchema updateSche } } + @Override public void changeColumn(String columnToChange, Field batchField) { Table table = loadTable(); UpdateSchema updateSchema = table.updateSchema(); @@ -490,11 +500,13 @@ public void changeColumn(String columnToChange, Field batchField) { * @param name existing name in the table * @param newName new name for the column */ + @Override public void renameColumn(String name, String newName) { Table table = loadTable(); table.updateSchema().renameColumn(name, newName).commit(); } + @Override public void updatePrimaryKey(List columns) { beginTransaction(); updatePropertiesMap(PrimaryKeyUpdateCommitter.getPropertiesMap(columns)); @@ -516,7 +528,7 @@ public String getTableName() { } public String getTableLocation() { - return IcebergUtils.getValidIcebergPath(fsPath, configuration, mutablePlugin.getHadoopFsSupplier(fsPath.toString(), configuration).get().getScheme()); + return IcebergUtils.getValidIcebergPath(fsPath, configuration, fs.getScheme()); } @Override @@ -650,4 +662,13 @@ private TableOperations getTableOps() { return tableOperations; } + @Override + public long propertyAsLong(String propertyName, long defaultValue) { + return tableOperations.current().propertyAsLong(propertyName, defaultValue); + } + + @Override + public FileIO getFileIO() { + return getTableOps().io(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseModel.java index 053241f0f6..27374cd3b5 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergBaseModel.java @@ -16,6 +16,7 @@ package com.dremio.exec.store.iceberg.model; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.arrow.vector.types.pojo.Field; @@ -26,19 +27,19 @@ import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.AlterTableOption; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.PartitionSpecAlterOption; import com.dremio.exec.catalog.RollbackOption; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.dfs.ColumnOperations; import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogGrpcClient; import com.dremio.io.file.FileSystem; import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.sabot.exec.context.OperatorStats; import com.dremio.sabot.op.writer.WriterCommitterOperator; import 
com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; /** @@ -49,18 +50,18 @@ public abstract class IcebergBaseModel implements IcebergModel { protected static final String EMPTY_NAMESPACE = ""; protected final String namespace; protected final Configuration configuration; - protected final FileSystem fs; /* if fs is null it will use iceberg HadoopFileIO class instead of DremioFileIO class */ + protected final FileSystem fs; protected final OperatorContext context; private final DatasetCatalogGrpcClient client; - private final MutablePlugin plugin; + protected final SupportsIcebergMutablePlugin plugin; protected IcebergBaseModel(String namespace, Configuration configuration, FileSystem fs, OperatorContext context, - DatasetCatalogGrpcClient datasetCatalogGrpcClient, MutablePlugin plugin) { + DatasetCatalogGrpcClient datasetCatalogGrpcClient, SupportsIcebergMutablePlugin plugin) { this.namespace = namespace; this.configuration = configuration; - this.fs = fs; + this.fs = Preconditions.checkNotNull(fs); this.context = context; this.client = datasetCatalogGrpcClient; this.plugin = plugin; @@ -68,7 +69,7 @@ protected IcebergBaseModel(String namespace, protected abstract IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier); - private IcebergCommand getIcebergCommandWithMetricStat(IcebergTableIdentifier tableIdentifier) { + private IcebergCommand getIcebergCommandWithMetricStat(IcebergTableIdentifier tableIdentifier) { Stopwatch stopwatch = Stopwatch.createStarted(); try { return getIcebergCommand(tableIdentifier); @@ -139,10 +140,12 @@ public IcebergOpCommitter getOptimizeCommitter(OperatorStats operatorStats, IcebergTableIdentifier tableIdentifier, DatasetConfig datasetConfig, Long minInputFilesBeforeOptimize, + Long snapshotId, IcebergTableProps icebergTableProps, FileSystem fs) { IcebergCommand icebergCommand = getIcebergCommandWithMetricStat(tableIdentifier); - return new IcebergOptimizeOperationCommitter(icebergCommand, operatorStats, datasetConfig, minInputFilesBeforeOptimize, icebergTableProps, fs); + return new IcebergOptimizeOperationCommitter(icebergCommand, operatorStats, datasetConfig, + minInputFilesBeforeOptimize,snapshotId, icebergTableProps, fs); } @Override @@ -152,11 +155,11 @@ public void rollbackTable(IcebergTableIdentifier tableIdentifier, RollbackOption } @Override - public void vacuumTable(IcebergTableIdentifier tableIdentifier, VacuumOption vacuumOption) { + public Map expireSnapshots(IcebergTableIdentifier tableIdentifier, long olderThanInMillis, int retainLast) { IcebergCommand icebergCommand = getIcebergCommandWithMetricStat(tableIdentifier); - icebergCommand.expireSnapshots( - vacuumOption.getOlderThanInMillis(), - vacuumOption.getRetainLast()); + return icebergCommand.expireSnapshots( + olderThanInMillis, + retainLast); } @Override @@ -235,4 +238,10 @@ public IcebergTableLoader getIcebergTableLoader(IcebergTableIdentifier tableIden IcebergCommand icebergCommand = getIcebergCommandWithMetricStat(tableIdentifier); return new IcebergTableLoader(icebergCommand); } + + @Override + public long propertyAsLong(IcebergTableIdentifier tableIdentifier, String propertyName, long defaultValue) { + IcebergCommand icebergCommand = getIcebergCommandWithMetricStat(tableIdentifier); + return icebergCommand.propertyAsLong(propertyName, defaultValue); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergCommand.java 
b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergCommand.java index 3f82d3133d..3d2596666d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergCommand.java @@ -21,11 +21,13 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; +import org.apache.iceberg.io.FileIO; import org.apache.iceberg.types.Types; import com.dremio.exec.catalog.PartitionSpecAlterOption; @@ -36,159 +38,163 @@ * represents an Iceberg catalog */ public interface IcebergCommand { - /** - * Start of Create table command - * @param tableName name of the table - * @param writerSchema schema of the table - * @param partitionColumns partition specification of the table - * @param tableParameters icebeg table parameters - */ - void beginCreateTableTransaction(String tableName, BatchSchema writerSchema, List partitionColumns, Map tableParameters, PartitionSpec partitionSpec); - /** - * Start of a tansaction - */ - void beginTransaction(); + /** + * Start of Create table command + * @param tableName name of the table + * @param writerSchema schema of the table + * @param partitionColumns partition specification of the table + * @param tableParameters icebeg table parameters + */ + void beginCreateTableTransaction(String tableName, BatchSchema writerSchema, List partitionColumns, Map tableParameters, PartitionSpec partitionSpec); - /** - * End of a tansaction - */ + /** + * Start of a tansaction + */ + void beginTransaction(); - Table endTransaction(); + /** + * End of a tansaction + */ + + Table endTransaction(); - /** - * Start the overwrite operation - */ - void beginOverwrite(long snapshotId); + /** + * Start the overwrite operation + */ + void beginOverwrite(long snapshotId); - /** - * Commit the overwrite operation - */ - Snapshot finishOverwrite(); + /** + * Commit the overwrite operation + */ + Snapshot finishOverwrite(); /** * Performs rewrite operation and commits the transaction - * @param removedFiles - * @param addedFiles + * @param removedDataFiles + * @param removedDeleteFiles + * @param addedDataFiles + * @param addedDeleteFiles * @return updated snapshot */ - Snapshot rewriteDataFiles(Set removedFiles, Set addedFiles); - - /** - * Consumes list of deleted data files using Overwrite - * @param filePathsList list of DataFile entries - */ - void consumeDeleteDataFilesWithOverwriteByPaths(List filePathsList); - - /** - * Consumes list of Manifest files using Overwrite - * @param filesList list of DataFile entries - */ - void consumeManifestFilesWithOverwrite(List filesList); - - /** - * Start the delete operation - */ - void beginDelete(); - - /** - * Commit the delete operation - */ - Snapshot finishDelete(); - - /** - * Start the insert operation - */ - void beginInsert(); - - /** - * Finish the insert operation - */ - Snapshot finishInsert(); - - /** - * Remove older snapshots and their files which are no longer needed. - */ - Snapshot expireSnapshots(Long olderThanInMillis, int retainLast); - - /** - * Roll a table's data back to a specific snapshot identified either by id or before a given timestamp. 
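Note: the reworked rewrite hook above maps naturally onto Iceberg's RewriteFiles API. A minimal sketch, assuming an open org.apache.iceberg.Transaction and sets typed as the new @param names suggest (Set of DataFile / Set of DeleteFile); this is an illustration, not necessarily Dremio's actual implementation:

    // Replace data and delete files in one atomic rewrite commit.
    RewriteFiles rewrite = transaction.newRewrite();
    if (snapshotId != null) {
      // Validate conflicts against the snapshot the rewrite was planned from.
      rewrite.validateFromSnapshot(snapshotId);
    }
    rewrite.rewriteFiles(removedDataFiles, removedDeleteFiles, addedDataFiles, addedDeleteFiles);
    rewrite.commit(); // the resulting snapshot is what rewriteFiles() returns
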
- * @param rollbackOption rollback table option - */ - void rollback(RollbackOption rollbackOption); - - /** - * consumes list of Manifest files as part of the current transaction - * @param filesList list of DataFile entries - */ - void consumeManifestFiles(List filesList); - - /** - * consumes list of data files to be deleted as a part of - * the current transaction - * @param filesList list of DataFile entries - */ - void consumeDeleteDataFiles(List filesList); - - /** - * consumes list of deleted data files by file paths as a part of - * the current transaction - * @param filePathsList list of data file paths - */ - void consumeDeleteDataFilesByPaths(List filePathsList); - - /** - * consumes list of columns to be dropped - * as part of metadata refresh transaction. - * Used only in new metadata refresh flow - */ - void consumeDroppedColumns(List columns); - - /** - * consumes list of columns to be updated - * as part of metadata refresh transaction. - * Used only in new metadata refresh flow - */ - void consumeUpdatedColumns(List columns); - - /** - * consumes list of columns to be added to the schema - * as part of metadata refresh transaction. Used - * only in new metadata refresh flow - */ - void consumeAddedColumns(List columns); - - /** - * truncates the table - */ - void truncateTable(); - - /** - * adds new columns - * @param columnsToAdd list of columns fields to add - */ - void addColumns(List columnsToAdd); - - /** - * drop an existing column - * @param columnToDrop existing column name - */ - void dropColumn(String columnToDrop); - - /** - * change column type - * @param columnToChange existing column name - * @param batchField new column type - */ - void changeColumn(String columnToChange, Field batchField); - - /** - * change column name - * @param name existing column name - * @param newName new column name - */ - void renameColumn(String name, String newName); + Snapshot rewriteFiles(Set removedDataFiles, Set removedDeleteFiles, Set addedDataFiles, Set addedDeleteFiles, Long snapshotId); + + /** + * Consumes list of deleted data files using Overwrite + * @param filePathsList list of DataFile entries + */ + void consumeDeleteDataFilesWithOverwriteByPaths(List filePathsList); + + /** + * Consumes list of Manifest files using Overwrite + * @param filesList list of DataFile entries + */ + void consumeManifestFilesWithOverwrite(List filesList); + + /** + * Start the delete operation + */ + void beginDelete(); + + /** + * Commit the delete operation + */ + Snapshot finishDelete(); + + /** + * Start the insert operation + */ + void beginInsert(); + + /** + * Finish the insert operation + */ + Snapshot finishInsert(); + + /** + * Expire older snapshots, but don't clean orphan files. + * @return Live snapshots and their manifest list file paths + */ + Map expireSnapshots(long olderThanInMillis, int retainLast); + + /** + * Roll a table's data back to a specific snapshot identified either by id or before a given timestamp. 
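Note: the expire-without-cleanup contract above could be realized with Iceberg's ExpireSnapshots builder. A minimal sketch, assuming 'table' is the loaded org.apache.iceberg.Table and the returned raw Map is keyed by snapshot id with the manifest list path as value:

    // Expire old snapshots but skip physical file cleanup (no orphan removal here).
    table.expireSnapshots()
        .expireOlderThan(olderThanInMillis)
        .retainLast(retainLast)
        .cleanExpiredFiles(false)
        .commit();
    // Report the surviving snapshots and their manifest list locations.
    Map<Long, String> liveSnapshots = new HashMap<>();
    for (Snapshot s : table.snapshots()) {
      liveSnapshots.put(s.snapshotId(), s.manifestListLocation());
    }
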
+ * @param rollbackOption rollback table option + */ + void rollback(RollbackOption rollbackOption); + + /** + * consumes list of Manifest files as part of the current transaction + * @param filesList list of DataFile entries + */ + void consumeManifestFiles(List filesList); + + /** + * consumes list of data files to be deleted as a part of + * the current transaction + * @param filesList list of DataFile entries + */ + void consumeDeleteDataFiles(List filesList); + + /** + * consumes list of deleted data files by file paths as a part of + * the current transaction + * @param filePathsList list of data file paths + */ + void consumeDeleteDataFilesByPaths(List filePathsList); + + /** + * consumes list of columns to be dropped + * as part of metadata refresh transaction. + * Used only in new metadata refresh flow + */ + void consumeDroppedColumns(List columns); + + /** + * consumes list of columns to be updated + * as part of metadata refresh transaction. + * Used only in new metadata refresh flow + */ + void consumeUpdatedColumns(List columns); + + /** + * consumes list of columns to be added to the schema + * as part of metadata refresh transaction. Used + * only in new metadata refresh flow + */ + void consumeAddedColumns(List columns); + + /** + * truncates the table + */ + void truncateTable(); + + /** + * adds new columns + * @param columnsToAdd list of columns fields to add + */ + void addColumns(List columnsToAdd); + + /** + * drop an existing column + * @param columnToDrop existing column name + */ + void dropColumn(String columnToDrop); + + /** + * change column type + * @param columnToChange existing column name + * @param batchField new column type + */ + void changeColumn(String columnToChange, Field batchField); + + /** + * change column name + * @param name existing column name + * @param newName new column name + */ + void renameColumn(String name, String newName); /** * Update primary key @@ -213,41 +219,45 @@ public interface IcebergCommand { * Load an Iceberg table from disk * @return Iceberg table instance */ - Table loadTable(); + Table loadTable(); /** * @return returns the latest snapshot on which the transaction is performed */ - Snapshot getCurrentSnapshot(); + Snapshot getCurrentSnapshot(); /** * @return return Iceberg table metadata file location */ - String getRootPointer(); + String getRootPointer(); /** * Delete the root pointer of the table * */ - void deleteTableRootPointer(); + void deleteTableRootPointer(); + + void deleteTable(); + + Map getPartitionSpecMap(); - void deleteTable(); + Schema getIcebergSchema(); - Map getPartitionSpecMap(); + void beginAlterTableTransaction(); - Schema getIcebergSchema(); + Table endAlterTableTransaction(); - void beginAlterTableTransaction(); + void addColumnsInternalTable(List columnsToAdd); - Table endAlterTableTransaction(); + void dropColumnInternalTable(String columnToDrop); - void addColumnsInternalTable(List columnsToAdd); + void changeColumnForInternalTable(String columnToChange, Field batchField); - void dropColumnInternalTable(String columnToDrop); + void updatePropertiesMap(Map propertiesMap); - void changeColumnForInternalTable(String columnToChange, Field batchField); + void updatePartitionSpec(PartitionSpecAlterOption partitionSpecAlterOption); - void updatePropertiesMap(Map propertiesMap); + long propertyAsLong(String propertyName, long defaultValue); - void updatePartitionSpec(PartitionSpecAlterOption partitionSpecAlterOption); + FileIO getFileIO(); } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergConstants.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergConstants.java index 488c4401e2..a19c4799ee 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergConstants.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergConstants.java @@ -21,4 +21,6 @@ public interface IcebergConstants { String FILE_VERSION = "version"; + String ADDED_DATA_FILES = "added-data-files"; + String DELETED_DATA_FILES = "deleted-data-files"; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergDmlOperationCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergDmlOperationCommitter.java index 688212e82c..5f800d8214 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergDmlOperationCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergDmlOperationCommitter.java @@ -30,12 +30,12 @@ import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; import org.apache.iceberg.exceptions.ValidationException; +import org.apache.iceberg.io.FileIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.manifestwriter.IcebergCommitOpHelper; import com.dremio.io.file.Path; import com.dremio.sabot.exec.context.OperatorStats; @@ -74,18 +74,20 @@ public IcebergDmlOperationCommitter(IcebergCommand icebergCommand, public Snapshot commit() { Stopwatch stopwatch = Stopwatch.createStarted(); SnapshotCommitStatus commitStatus = NONE; + Snapshot snapshot = null; try { beginDmlOperationTransaction(); Snapshot currentSnapshot = icebergCommand.getCurrentSnapshot(); performUpdates(); - Snapshot snapshot = endDmlOperationTransaction().currentSnapshot(); + snapshot = endDmlOperationTransaction().currentSnapshot(); commitStatus = (currentSnapshot != null) && (snapshot.snapshotId() == currentSnapshot.snapshotId()) ? 
NONE : COMMITTED; return snapshot; } finally { long totalCommitTime = stopwatch.elapsed(TimeUnit.MILLISECONDS); operatorStats.addLongStat(WriterCommitterOperator.Metric.ICEBERG_COMMIT_TIME, totalCommitTime); - operatorStats.addLongStat(WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS, commitStatus.value()); + + IcebergOpCommitter.writeSnapshotStats(operatorStats, commitStatus, snapshot); } } @@ -144,8 +146,8 @@ public void performUpdates() { } @Override - public void cleanup(DremioFileIO dremioFileIO) { - IcebergCommitOpHelper.deleteManifestFiles(dremioFileIO, manifestFileList, true); + public void cleanup(FileIO fileIO) { + IcebergCommitOpHelper.deleteManifestFiles(fileIO, manifestFileList, true); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergInsertOperationCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergInsertOperationCommitter.java index 3b7c5085ea..0a08e32444 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergInsertOperationCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergInsertOperationCommitter.java @@ -30,11 +30,11 @@ import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.manifestwriter.IcebergCommitOpHelper; import com.dremio.sabot.exec.context.OperatorStats; import com.dremio.sabot.op.writer.WriterCommitterOperator; @@ -75,7 +75,7 @@ public Snapshot commit() { Snapshot snapshot = icebergCommand.endTransaction().currentSnapshot(); SnapshotCommitStatus commitStatus = (currentSnapshot != null) && (snapshot.snapshotId() == currentSnapshot.snapshotId()) ? 
NONE : COMMITTED; - operatorStats.addLongStat(WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS, commitStatus.value()); + IcebergOpCommitter.writeSnapshotStats(operatorStats, commitStatus, snapshot); long totalCommitTime = stopwatch.elapsed(TimeUnit.MILLISECONDS); operatorStats.addLongStat(WriterCommitterOperator.Metric.ICEBERG_COMMIT_TIME, totalCommitTime); return snapshot; @@ -117,8 +117,8 @@ public Schema getCurrentSchema() { } @Override - public void cleanup(DremioFileIO dremioFileIO) { - IcebergCommitOpHelper.deleteManifestFiles(dremioFileIO, manifestFileList, true); + public void cleanup(FileIO fileIO) { + IcebergCommitOpHelper.deleteManifestFiles(fileIO, manifestFileList, true); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergModel.java index 471c183c4d..6e736b185e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergModel.java @@ -16,6 +16,7 @@ package com.dremio.exec.store.iceberg.model; import java.util.List; +import java.util.Map; import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.PartitionSpec; @@ -24,7 +25,6 @@ import com.dremio.exec.catalog.AlterTableOption; import com.dremio.exec.catalog.RollbackOption; -import com.dremio.exec.catalog.VacuumOption; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.dfs.ColumnOperations; import com.dremio.exec.store.dfs.IcebergTableProps; @@ -43,15 +43,17 @@ IcebergOpCommitter getCreateTableCommitter(String tableName, IcebergTableIdentif List partitionColumnNames, OperatorStats operatorStats, PartitionSpec partitionSpec); /** - * Get Iceberg Op committer for Insert command - * @param tableIdentifier Table identifier - * @param operatorStats + * Get Iceberg Op committer for Insert command + * + * @param tableIdentifier Table identifier + * @param operatorStats * @return Insert committer - */ - IcebergOpCommitter getInsertTableCommitter(IcebergTableIdentifier tableIdentifier, OperatorStats operatorStats); + */ + IcebergOpCommitter getInsertTableCommitter(IcebergTableIdentifier tableIdentifier, OperatorStats operatorStats); /** * Get committer for Full metadata refresh + * * @param tableName * @param datasetPath * @param tableLocation @@ -68,6 +70,7 @@ IcebergOpCommitter getFullMetadataRefreshCommitter(String tableName, List columnTypes); + IcebergOpCommitter getAlterTableCommitter(IcebergTableIdentifier tableIdentifier, ColumnOperations.AlterOperationType alterOperationType, BatchSchema droppedColumns, BatchSchema updatedColumns, + String columnName, List columnTypes); /** * Iceberg Op committer for DML (Delete, Merge, Update) commands + * * @param operatorStats * @param tableIdentifier * @param datasetConfig @@ -116,69 +121,73 @@ IcebergOpCommitter getAlterTableCommitter(IcebergTableIdentifier tableIdentifier */ IcebergOpCommitter getOptimizeCommitter(OperatorStats operatorStats, IcebergTableIdentifier tableIdentifier, DatasetConfig datasetConfig, Long minInputFilesBeforeOptimize, - IcebergTableProps icebergTableProps, FileSystem fs); + Long snapshotId, IcebergTableProps icebergTableProps, FileSystem fs); /** * Roll table back to the older snapshot. 
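Note: the IcebergOpCommitter.writeSnapshotStats() helper invoked in the committers above (and defined later in IcebergOpCommitter.java) derives its file counts from the standard Iceberg snapshot summary. Roughly, assuming a non-null committed snapshot:

    // "added-data-files" / "deleted-data-files" are standard snapshot-summary keys,
    // mirrored by IcebergConstants.ADDED_DATA_FILES / DELETED_DATA_FILES.
    Map<String, String> summary = snapshot.summary();
    long added = Long.parseLong(summary.getOrDefault("added-data-files", "0"));
    long removed = Long.parseLong(summary.getOrDefault("deleted-data-files", "0"));
    stats.recordAddedFiles(added);
    stats.recordRemovedFiles(removed);
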
+ * * @param tableIdentifier table identifier - * @param rollbackOption rollback table option + * @param rollbackOption rollback table option */ void rollbackTable(IcebergTableIdentifier tableIdentifier, RollbackOption rollbackOption); /** - * Vacuum a table to remove older snapshots or orphan files - * @param tableIdentifier table identifier - * @param vacuumOption vacuum table option + * Expire table's snapshots, and return live snapshots after expiry and their manifest list file paths */ - void vacuumTable(IcebergTableIdentifier tableIdentifier, VacuumOption vacuumOption); + Map expireSnapshots(IcebergTableIdentifier tableIdentifier, long olderThanInMillis, int retainLast); - /** - * Truncate a table - * @param tableIdentifier table identifier - */ - void truncateTable(IcebergTableIdentifier tableIdentifier); + /** + * Truncate a table + * + * @param tableIdentifier table identifier + */ + void truncateTable(IcebergTableIdentifier tableIdentifier); - void deleteTable(IcebergTableIdentifier tableIdentifier); + void deleteTable(IcebergTableIdentifier tableIdentifier); - void alterTable(IcebergTableIdentifier tableIdentifier, AlterTableOption alterTableOption); + void alterTable(IcebergTableIdentifier tableIdentifier, AlterTableOption alterTableOption); - void deleteTableRootPointer(IcebergTableIdentifier tableIdentifier); + void deleteTableRootPointer(IcebergTableIdentifier tableIdentifier); /** - * Add columns to a table - * @param tableIdentifier table identifier - * @param columnsToAdd list of columns to add - * @return New root pointer for iceberg table + * Add columns to a table + * + * @param tableIdentifier table identifier + * @param columnsToAdd list of columns to add + * @return New root pointer for iceberg table */ - String addColumns(IcebergTableIdentifier tableIdentifier, List columnsToAdd); + String addColumns(IcebergTableIdentifier tableIdentifier, List columnsToAdd); - /** - * Drop a column from a table - * @param tableIdentifier table identifier - * @param columnToDrop Column name to drop - * @return New root pointer for iceberg table - */ - String dropColumn(IcebergTableIdentifier tableIdentifier, String columnToDrop); + /** + * Drop a column from a table + * + * @param tableIdentifier table identifier + * @param columnToDrop Column name to drop + * @return New root pointer for iceberg table + */ + String dropColumn(IcebergTableIdentifier tableIdentifier, String columnToDrop); - /** - * Change column type of a table - * @param tableIdentifier table identifier - * @param columnToChange existing column name - * @param newDef new type - * @return New root pointer for iceberg table - */ - String changeColumn(IcebergTableIdentifier tableIdentifier, String columnToChange, Field newDef); + /** + * Change column type of a table + * + * @param tableIdentifier table identifier + * @param columnToChange existing column name + * @param newDef new type + * @return New root pointer for iceberg table + */ + String changeColumn(IcebergTableIdentifier tableIdentifier, String columnToChange, Field newDef); /** - * Rename an existing column of a table - * @param tableIdentifier table identifier - * @param name existing column name - * @param newName new column name - * @return New root pointer for iceberg table - */ - String renameColumn(IcebergTableIdentifier tableIdentifier, String name, String newName); + * Rename an existing column of a table + * + * @param tableIdentifier table identifier + * @param name existing column name + * @param newName new column name + * @return New root 
pointer for iceberg table + */ + String renameColumn(IcebergTableIdentifier tableIdentifier, String name, String newName); /** * @param tableIdentifier table identifier @@ -188,24 +197,38 @@ IcebergOpCommitter getOptimizeCommitter(OperatorStats operatorStats, IcebergTabl String updatePrimaryKey(IcebergTableIdentifier tableIdentifier, List columns); /** - * Load and return an Iceberg table - * @param tableIdentifier table identifier - * @return Iceberg table - */ - Table getIcebergTable(IcebergTableIdentifier tableIdentifier); + * Load and return an Iceberg table + * + * @param tableIdentifier table identifier + * @return Iceberg table + */ + Table getIcebergTable(IcebergTableIdentifier tableIdentifier); - /** - * Get table identifer - * - * @param rootFolder path to root folder of the table - * @return table identifier - */ - IcebergTableIdentifier getTableIdentifier(String rootFolder); + /** + * Get table identifier + * + * @param rootFolder path to root folder of the table + * @return table identifier + */ + IcebergTableIdentifier getTableIdentifier(String rootFolder); /** * Returns an instance of Iceberg table loader + * * @param tableIdentifier table identifier * @return An instance of Iceberg table loader */ - IcebergTableLoader getIcebergTableLoader(IcebergTableIdentifier tableIdentifier); + IcebergTableLoader getIcebergTableLoader(IcebergTableIdentifier tableIdentifier); + + /** + * Finds the value of a table property + */ + long propertyAsLong(IcebergTableIdentifier tableIdentifier, String propertyName, long defaultValue); + + /** + * Fetch and reset to the latest version context + */ + default void refreshVersionContext() { + // do nothing + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOpCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOpCommitter.java index 6a2d5a1c46..aa291fdcdb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOpCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOpCommitter.java @@ -15,16 +15,25 @@ */ package com.dremio.exec.store.iceberg.model; +import static com.dremio.exec.store.iceberg.model.IcebergConstants.ADDED_DATA_FILES; +import static com.dremio.exec.store.iceberg.model.IcebergConstants.DELETED_DATA_FILES; +import static com.dremio.sabot.op.writer.WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS; +import static com.dremio.sabot.op.writer.WriterCommitterOperator.SnapshotCommitStatus.COMMITTED; + import java.util.Map; +import java.util.Optional; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.op.writer.WriterCommitterOperator.SnapshotCommitStatus; import com.dremio.sabot.op.writer.WriterCommitterOutputHandler; import com.google.protobuf.ByteString; @@ -57,6 +66,15 @@ default Snapshot commit(WriterCommitterOutputHandler outputHandler) { */ void consumeDeleteDataFile(DataFile icebergDeleteDatafile) throws UnsupportedOperationException; + /** + * Stores the DeleteFile instance to delete during commit operation + * @param icebergDeleteDeletefile DeleteFile instance to delete from table + * @throws
UnsupportedOperationException + */ + default void consumeDeleteDeleteFile(DeleteFile icebergDeleteDeletefile) throws UnsupportedOperationException { + throw new UnsupportedOperationException(); + } + /** * Stores data file path to delete during commit operation * @param icebergDeleteDatafilePath The path to data file to delete from table @@ -116,5 +134,19 @@ default void updateReadSignature(ByteString newReadSignature) {} /** * Cleanup in case of exceptions during commit */ - default void cleanup(DremioFileIO dremioFileIO) {} + default void cleanup(FileIO fileIO) {} + + /** + * Writes operator stats if a new snapshot is created + */ + static void writeSnapshotStats(OperatorStats stats, SnapshotCommitStatus commitStatus, Snapshot snapshot) { + stats.addLongStat(SNAPSHOT_COMMIT_STATUS, commitStatus.value()); + + if (commitStatus.equals(COMMITTED) && snapshot != null) { + long addedFiles = Optional.ofNullable(snapshot.summary().get(ADDED_DATA_FILES)).map(Long::parseLong).orElse(0L); + long removedFiles = Optional.ofNullable(snapshot.summary().get(DELETED_DATA_FILES)).map(Long::parseLong).orElse(0L); + stats.recordAddedFiles(addedFiles); + stats.recordRemovedFiles(removedFiles); + } + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOptimizeOperationCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOptimizeOperationCommitter.java index 4a6a95ee35..b34a2ccf09 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOptimizeOperationCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergOptimizeOperationCommitter.java @@ -16,9 +16,11 @@ package com.dremio.exec.store.iceberg.model; import static com.dremio.sabot.op.writer.WriterCommitterOperator.SnapshotCommitStatus.COMMITTED; +import static com.dremio.sabot.op.writer.WriterCommitterOperator.SnapshotCommitStatus.NONE; import static com.dremio.sabot.op.writer.WriterCommitterOperator.SnapshotCommitStatus.SKIPPED; import java.io.IOException; +import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Optional; @@ -27,10 +29,12 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.DataFile; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,7 +42,6 @@ import com.dremio.exec.store.OperationType; import com.dremio.exec.store.RecordWriter; import com.dremio.exec.store.dfs.IcebergTableProps; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergOptimizeSingleFileTracker; import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; @@ -62,6 +65,7 @@ public class IcebergOptimizeOperationCommitter implements IcebergOpCommitter { private final Set addedDataFiles = new HashSet<>(); private final Set removedDataFiles = new HashSet<>(); + private final Set removedDeleteFiles = new HashSet<>(); private final IcebergCommand icebergCommand; private final OperatorStats operatorStats; @@ -70,11 +74,13 @@ public class IcebergOptimizeOperationCommitter implements IcebergOpCommitter { private final IcebergOptimizeSingleFileTracker singleRewriteTracker; private final IcebergTableProps tableProps; private final FileSystem fs; + private final Long startingSnapshotId; public 
IcebergOptimizeOperationCommitter(IcebergCommand icebergCommand, OperatorStats operatorStats, DatasetConfig datasetConfig, Long minInputFiles, + Long snapshotId, IcebergTableProps tableProps, FileSystem fs) { Preconditions.checkState(icebergCommand != null, "Unexpected state"); @@ -86,6 +92,7 @@ public IcebergOptimizeOperationCommitter(IcebergCommand icebergCommand, this.singleRewriteTracker = new IcebergOptimizeSingleFileTracker(); this.tableProps = tableProps; this.fs = fs; + this.startingSnapshotId = snapshotId; } @Override @@ -96,33 +103,45 @@ public Snapshot commit() { @Override public Snapshot commit(WriterCommitterOutputHandler outputHandler) { Stopwatch stopwatch = Stopwatch.createStarted(); + Snapshot snapshot = null; + WriterCommitterOperator.SnapshotCommitStatus commitStatus = NONE; try { Set skippedSingleRewrites = singleRewriteTracker.removeSingleFileChanges(addedDataFiles, removedDataFiles); boolean shouldCommit = hasAnythingChanged() && hasMinInputFilesCriteriaPassed(); - final Snapshot lastCommittedSnapshot = shouldCommit ? - icebergCommand.rewriteDataFiles(removedDataFiles, addedDataFiles) : icebergCommand.loadTable().currentSnapshot(); - writeOutput(outputHandler, !shouldCommit, removedDataFiles.size(), addedDataFiles.size()); + snapshot = shouldCommit ? + icebergCommand.rewriteFiles(removedDataFiles, removedDeleteFiles, addedDataFiles, Collections.EMPTY_SET, startingSnapshotId) + : icebergCommand.loadTable().currentSnapshot(); + writeOutput(outputHandler, !shouldCommit, removedDataFiles.size(), removedDeleteFiles.size(), addedDataFiles.size()); - LOGGER.info("OPTIMIZE ACTION: Rewritten data files count - {}, Added data files count - {}, Min input files - {}, Commit skipped {}", - removedDataFiles.size(), addedDataFiles.size(), minInputFiles.map(String::valueOf).orElse("NONE"), !shouldCommit); + LOGGER.info("OPTIMIZE ACTION: Rewritten data files count - {}, Rewritten delete files count - {}, Added data files count - {}, Min input files - {}, Commit skipped {}", + removedDataFiles.size(), removedDeleteFiles.size(), addedDataFiles.size(), minInputFiles.map(String::valueOf).orElse("NONE"), !shouldCommit); clear(shouldCommit, skippedSingleRewrites); + commitStatus = shouldCommit ? 
COMMITTED : SKIPPED; - return lastCommittedSnapshot; + return snapshot; } finally { long totalCommitTime = stopwatch.elapsed(TimeUnit.MILLISECONDS); operatorStats.addLongStat(WriterCommitterOperator.Metric.ICEBERG_COMMIT_TIME, totalCommitTime); + IcebergOpCommitter.writeSnapshotStats(operatorStats, commitStatus, snapshot); } } - private void writeOutput(WriterCommitterOutputHandler outputHandler, boolean commitSkipped, long rewrittenFilesCount, long addedFilesCount) { + private void writeOutput(WriterCommitterOutputHandler outputHandler, boolean commitSkipped, + long rewrittenFilesCount, long rewrittenDeleteFilesCount, long addedFilesCount) { if (commitSkipped) { - rewrittenFilesCount = addedFilesCount = 0L; + rewrittenFilesCount = 0L; + rewrittenDeleteFilesCount = 0L; + addedFilesCount = 0L; } WriterCommitterRecord rewrittenFiles = new ImmutableWriterCommitterRecord.Builder() .setOperationType(OperationType.DELETE_DATAFILE.value).setRecords(rewrittenFilesCount).build(); outputHandler.write(rewrittenFiles); + WriterCommitterRecord rewrittenDeleteFiles = new ImmutableWriterCommitterRecord.Builder() + .setOperationType(OperationType.DELETE_DELETEFILE.value).setRecords(rewrittenDeleteFilesCount).build(); + outputHandler.write(rewrittenDeleteFiles); + WriterCommitterRecord addedFiles = new ImmutableWriterCommitterRecord.Builder() .setOperationType(OperationType.ADD_DATAFILE.value).setRecords(addedFilesCount).build(); outputHandler.write(addedFiles); @@ -132,17 +151,15 @@ private void clear(boolean isCommitted, Set skippedFiles) { Stopwatch stopwatch = Stopwatch.createStarted(); skippedFiles.forEach(this::deleteOrphan); - if (isCommitted) { - operatorStats.addLongStat(WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS, COMMITTED.value()); - } else { + if (!isCommitted) { // Remove new files, as they're now orphan addedDataFiles.forEach(file -> deleteOrphan(file.path().toString())); final String orphanDir = Path.of(tableProps.getTableLocation()).resolve(tableProps.getUuid()).toString(); deleteOrphan(orphanDir); - operatorStats.addLongStat(WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS, SKIPPED.value()); } removedDataFiles.clear(); + removedDeleteFiles.clear(); addedDataFiles.clear(); long clearTime = stopwatch.elapsed(TimeUnit.MILLISECONDS); @@ -160,8 +177,8 @@ private void deleteOrphan(String path) { } @Override - public void cleanup(DremioFileIO dremioFileIO) { - addedDataFiles.forEach(addedDataFile -> dremioFileIO.deleteFile(addedDataFile.path().toString())); + public void cleanup(FileIO fileIO) { + addedDataFiles.forEach(addedDataFile -> fileIO.deleteFile(addedDataFile.path().toString())); } @Override @@ -186,6 +203,12 @@ public void consumeDeleteDataFilePath(String icebergDeleteDatafilePath) throws U throw new UnsupportedOperationException("OPTIMIZE TABLE can't consume string paths"); } + @Override + public void consumeDeleteDeleteFile(DeleteFile deleteFile) throws UnsupportedOperationException { + this.removedDeleteFiles.add(deleteFile); + this.singleRewriteTracker.consumeDeletedDeleteFile(deleteFile); + } + @Override public void updateSchema(BatchSchema newSchema) { throw new UnsupportedOperationException("Updating schema is not supported for OPTIMIZE TABLE transaction"); @@ -216,8 +239,12 @@ private boolean hasAnythingChanged() { return !removedDataFiles.isEmpty() && !addedDataFiles.isEmpty(); } + /* + * MIN_INPUT_FILES is applied to a total of removed files (both data and delete files). + * e.g. 
1 Data file with 4 Delete files linked to it would qualify the default MIN_INPUT_FILES criteria of 5. + */ private boolean hasMinInputFilesCriteriaPassed() { - return minInputFiles.map(m -> m > 0 && removedDataFiles.size() >= m).orElse(true); + return minInputFiles.map(m -> m > 0 && (removedDataFiles.size() + removedDeleteFiles.size()) >= m).orElse(true); } @VisibleForTesting @@ -229,4 +256,9 @@ Set getAddedDataFiles() { Set getRemovedDataFiles() { return ImmutableSet.copyOf(removedDataFiles); } + + @VisibleForTesting + Set getRemovedDeleteFiles() { + return ImmutableSet.copyOf(removedDeleteFiles); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableCreationCommitter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableCreationCommitter.java index 91c9162174..bd40488dbc 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableCreationCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableCreationCommitter.java @@ -31,9 +31,9 @@ import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.io.FileIO; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.manifestwriter.IcebergCommitOpHelper; import com.dremio.sabot.exec.context.OperatorStats; import com.dremio.sabot.op.writer.WriterCommitterOperator; @@ -77,7 +77,7 @@ public Snapshot commit() { /* OperatorStats are null when create empty table is executed via Coordinator*/ if(operatorStats != null) { operatorStats.addLongStat(WriterCommitterOperator.Metric.ICEBERG_COMMIT_TIME, totalCommitTime); - operatorStats.addLongStat(WriterCommitterOperator.Metric.SNAPSHOT_COMMIT_STATUS, COMMITTED.value()); + IcebergOpCommitter.writeSnapshotStats(operatorStats, COMMITTED, snapshot); } return snapshot; @@ -95,8 +95,8 @@ public Snapshot commit() { } @Override - public void cleanup(DremioFileIO dremioFileIO) { - IcebergCommitOpHelper.deleteManifestFiles(dremioFileIO, manifestFileList, true); + public void cleanup(FileIO fileIO) { + IcebergCommitOpHelper.deleteManifestFiles(fileIO, manifestFileList, true); } @Override @@ -114,6 +114,7 @@ public void consumeDeleteDataFilePath(String icebergDeleteDatafilePath) throws U throw new UnsupportedOperationException("Delete data file operation not allowed in Create table Transaction"); } + @Override public void updateSchema(BatchSchema newSchema) { throw new UnsupportedOperationException("Updating schema is not supported for Creation table Transaction"); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableLoader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableLoader.java index bad7e73fa9..67fdb49ed8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableLoader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IcebergTableLoader.java @@ -31,7 +31,7 @@ public IcebergTableLoader(IcebergCommand icebergCommand) { public Table getIcebergTable() { if (icebergTable == null) { - return icebergTable = icebergCommand.loadTable(); + icebergTable = icebergCommand.loadTable(); } return icebergTable; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IncrementalMetadataRefreshCommitter.java 
b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IncrementalMetadataRefreshCommitter.java index d345b6c89e..f3d057e91b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IncrementalMetadataRefreshCommitter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/model/IncrementalMetadataRefreshCommitter.java @@ -45,7 +45,9 @@ import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.exception.NoSupportedUpPromotionOrCoercionException; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; +import com.dremio.exec.planner.common.ImmutableDremioFileAttrs; import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SchemaConverter; import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogGrpcClient; import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogRequestBuilder; @@ -207,7 +209,10 @@ public Snapshot postCommitTransaction() { datasetCatalogRequestBuilder.setNumOfRecords(numRecords); long numDataFiles = Long.parseLong(table.currentSnapshot().summary().getOrDefault("total-data-files", "0")); datasetCatalogRequestBuilder.setNumOfDataFiles(numDataFiles); - datasetCatalogRequestBuilder.setIcebergMetadata(getRootPointer(), tableUuid, table.currentSnapshot().snapshotId(), conf, isPartitioned, getCurrentSpecMap(), plugin, getCurrentSchema()); + ImmutableDremioFileAttrs partitionStatsFileAttrs = IcebergUtils.getPartitionStatsFileAttrs(getRootPointer(), + table.currentSnapshot().snapshotId(), icebergCommand.getFileIO()); + datasetCatalogRequestBuilder.setIcebergMetadata(getRootPointer(), tableUuid, table.currentSnapshot().snapshotId(), + getCurrentSpecMap(), getCurrentSchema(), partitionStatsFileAttrs.fileName(), partitionStatsFileAttrs.fileLength()); BatchSchema newSchemaFromIceberg = SchemaConverter.getBuilder().setMapTypeEnabled(isMapDataTypeEnabled).build().fromIceberg(table.schema()); newSchemaFromIceberg = BatchSchema.newBuilder().addFields(newSchemaFromIceberg.getFields()) .addField(Field.nullable(IncrementalUpdateUtils.UPDATE_COLUMN, new ArrowType.Int(64, true))).build(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieCommand.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieCommand.java index 320fb83939..5898d08540 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieCommand.java @@ -17,7 +17,6 @@ import org.apache.hadoop.conf.Configuration; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseCommand; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; import com.dremio.io.file.FileSystem; @@ -32,12 +31,12 @@ class IcebergNessieCommand extends IcebergBaseCommand { public IcebergNessieCommand(IcebergTableIdentifier tableIdentifier, Configuration configuration, FileSystem fs, - IcebergNessieTableOperations tableOperations, MutablePlugin plugin) { - super(configuration, ((IcebergNessieTableIdentifier) tableIdentifier).getTableFolder(), fs, tableOperations, - plugin); + IcebergNessieTableOperations tableOperations) { + super(configuration, ((IcebergNessieTableIdentifier) tableIdentifier).getTableFolder(), fs, tableOperations); this.nessieTableOperations = tableOperations; } + @Override public void deleteTable() { RuntimeException ex = null; try { diff 
--git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieModel.java index c32609103a..f32c9b02df 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieModel.java @@ -20,8 +20,7 @@ import org.apache.hadoop.conf.Configuration; import org.projectnessie.client.api.NessieApiV1; -import com.dremio.exec.catalog.MutablePlugin; -import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseModel; import com.dremio.exec.store.iceberg.model.IcebergCommand; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; @@ -34,24 +33,25 @@ */ public class IcebergNessieModel extends IcebergBaseModel { private final Provider nessieApi; - private final MutablePlugin plugin; + private final SupportsIcebergMutablePlugin plugin; public IcebergNessieModel(String namespace, Configuration configuration, Provider api, FileSystem fs, OperatorContext context, DatasetCatalogGrpcClient datasetCatalogGrpcClient, - MutablePlugin plugin) { + SupportsIcebergMutablePlugin plugin) { super(namespace, configuration, fs, context, datasetCatalogGrpcClient, plugin); this.nessieApi = api; this.plugin = plugin; } + @Override protected IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier) { IcebergNessieTableOperations tableOperations = new IcebergNessieTableOperations((context == null ? null : context.getStats()), nessieApi, - new DremioFileIO(fs, context, null, null, null, configuration, plugin), + plugin.createIcebergFileIO(fs, context, null, null, null), ((IcebergNessieTableIdentifier) tableIdentifier)); - return new IcebergNessieCommand(tableIdentifier, configuration, fs, tableOperations, plugin); + return new IcebergNessieCommand(tableIdentifier, configuration, fs, tableOperations); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieTableOperations.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieTableOperations.java index 38627820dc..f4a336f443 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieTableOperations.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieTableOperations.java @@ -36,10 +36,12 @@ import com.dremio.plugins.NessieClientTableMetadata; import com.dremio.sabot.exec.context.OperatorStats; import com.dremio.sabot.op.writer.WriterCommitterOperator; +import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import io.grpc.Status; import io.grpc.StatusRuntimeException; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Iceberg nessie table operations @@ -49,6 +51,7 @@ class IcebergNessieTableOperations extends BaseMetastoreTableOperations { private final FileIO fileIO; private final Provider nessieApi; private final IcebergNessieTableIdentifier nessieTableIdentifier; + private final List nessieTableKey; private ResolvedVersionContext reference; private final OperatorStats operatorStats; private String baseContentId; @@ -57,6 +60,7 @@ public IcebergNessieTableOperations(OperatorStats operatorStats, Provider getNessieKey(TableIdentifier tableIdentifier) { + private static List parseNessieKey(TableIdentifier tableIdentifier) { return 
Arrays.asList( tableIdentifier.namespace().toString(), tableIdentifier.name()); @@ -106,12 +117,14 @@ protected void doCommit(TableMetadata base, TableMetadata metadata) { boolean threw = true; try { Stopwatch stopwatchCatalogUpdate = Stopwatch.createStarted(); - nessieClient().commitTable(getNessieKey(nessieTableIdentifier.getTableIdentifier()), + nessieClient().commitTable( + nessieTableKey, newMetadataLocation, new NessieClientTableMetadata( metadata.currentSnapshot().snapshotId(), metadata.currentSchemaId(), metadata.defaultSpecId(), metadata.sortOrder().orderId()), reference, baseContentId, + null, null); threw = false; long totalCatalogUpdateTime = stopwatchCatalogUpdate.elapsed(TimeUnit.MILLISECONDS); @@ -136,20 +149,19 @@ protected void doCommit(TableMetadata base, TableMetadata metadata) { } } - public void deleteKey(){ - nessieClient().deleteCatalogEntry( - getNessieKey(nessieTableIdentifier.getTableIdentifier()), - getDefaultBranch()); + public void deleteKey() { + NessieClient nessieClient = nessieClient(); + ResolvedVersionContext version = getDefaultBranch(nessieClient); + nessieClient.deleteCatalogEntry(nessieTableKey, version, null); } - private ResolvedVersionContext getDefaultBranch() { + private ResolvedVersionContext getDefaultBranch(NessieClient nessieClient) { try { - return nessieClient().getDefaultBranch(); + return nessieClient.getDefaultBranch(); } catch (NoDefaultBranchException e) { throw UserException.sourceInBadState(e) .message("No default branch set.") .buildSilently(); } } - } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedCommand.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedCommand.java index 88b222ca9a..5b0a395cff 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedCommand.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedCommand.java @@ -17,7 +17,6 @@ import org.apache.hadoop.conf.Configuration; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseCommand; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; import com.dremio.io.file.FileSystem; @@ -29,11 +28,12 @@ public class IcebergNessieVersionedCommand extends IcebergBaseCommand { public IcebergNessieVersionedCommand(IcebergTableIdentifier tableIdentifier, Configuration configuration, FileSystem fs, - IcebergNessieVersionedTableOperations tableOperations, MutablePlugin plugin) { - super(configuration, ((IcebergNessieVersionedTableIdentifier) tableIdentifier).getTableFolder(), fs, tableOperations, plugin); + IcebergNessieVersionedTableOperations tableOperations) { + super(configuration, ((IcebergNessieVersionedTableIdentifier) tableIdentifier).getTableFolder(), fs, tableOperations); this.versionedTableOperations = tableOperations; } + @Override public void deleteTable() { //super.deleteTable(); // TODO Okay to only delete in Nessie? 
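Note: on the Nessie-backed paths, dropping a table ultimately reduces to deleting its key on the resolved reference. A schematic view of the refactored deleteKey() flow shown above (client resolved once and reused; names as in the surrounding code):

    // Resolve the client and default branch once, then delete the catalog entry.
    NessieClient client = nessieClient();
    ResolvedVersionContext branch = getDefaultBranch(client); // UserException if no default branch is set
    client.deleteCatalogEntry(nessieTableKey, branch, null);  // null user name on this path
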
versionedTableOperations.deleteKey(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedModel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedModel.java index 571a9b3901..41d2b29897 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedModel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedModel.java @@ -20,12 +20,13 @@ import org.apache.hadoop.conf.Configuration; import com.dremio.common.utils.protos.QueryIdHelper; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.ResolvedVersionContext; -import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergBaseModel; import com.dremio.exec.store.iceberg.model.IcebergCommand; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; +import com.dremio.io.file.FileSystem; import com.dremio.plugins.NessieClient; import com.dremio.sabot.exec.context.OperatorContext; import com.google.common.base.Preconditions; @@ -33,44 +34,66 @@ public class IcebergNessieVersionedModel extends IcebergBaseModel { private final List tableKey; private final NessieClient nessieClient; - private final ResolvedVersionContext version; - private final MutablePlugin plugin; + private ResolvedVersionContext version; + private final String userName; public IcebergNessieVersionedModel(List tableKey, Configuration fsConf, + FileSystem fs, final NessieClient nessieClient, OperatorContext context, // Used to create DremioInputFile (valid only for insert/ctas) ResolvedVersionContext version, - MutablePlugin plugin) { - super(null, fsConf, null, context, null, plugin); + SupportsIcebergMutablePlugin plugin, + String userName) { + super(null, fsConf, fs, context, null, plugin); this.tableKey = tableKey; this.nessieClient = nessieClient; + this.userName = userName; Preconditions.checkNotNull(version); this.version = version; - this.plugin = plugin; } + @Override protected IcebergCommand getIcebergCommand(IcebergTableIdentifier tableIdentifier) { - String jobId = null; - - //context is only available for executors - if (context != null) { - jobId = QueryIdHelper.getQueryId(context.getFragmentHandle().getQueryId()); - } - IcebergNessieVersionedTableOperations tableOperations = new IcebergNessieVersionedTableOperations( context == null ? 
null : context.getStats(), - new DremioFileIO(fs, context, null, null, null, configuration, plugin), + plugin.createIcebergFileIO(fs, context, null, null, null), nessieClient, - ((IcebergNessieVersionedTableIdentifier) tableIdentifier), jobId); + ((IcebergNessieVersionedTableIdentifier) tableIdentifier), getJobId(), userName); + + return new IcebergNessieVersionedCommand(tableIdentifier, configuration, fs, tableOperations); + } - return new IcebergNessieVersionedCommand(tableIdentifier, configuration, fs, tableOperations, plugin); + @Override + public void refreshVersionContext() { + VersionContext versionContext; + switch (version.getType()) { + case BRANCH: + versionContext = VersionContext.ofBranch(version.getRefName()); + break; + case TAG: + versionContext = VersionContext.ofTag(version.getRefName()); + break; + default: + throw new UnsupportedOperationException("refreshVersionContext is supported for branch and tag ref types only"); + } + version = nessieClient.resolveVersionContext(versionContext, getJobId()); } @Override public IcebergTableIdentifier getTableIdentifier(String rootFolder) { return new IcebergNessieVersionedTableIdentifier(tableKey, rootFolder, version); } + + private String getJobId() { + String jobId = null; + + //context is only available for executors + if (context != null) { + jobId = QueryIdHelper.getQueryId(context.getFragmentHandle().getQueryId()); + } + return jobId; + } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedTableOperations.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedTableOperations.java index 2f88581abd..b468c3cc1b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedTableOperations.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedTableOperations.java @@ -43,13 +43,15 @@ public class IcebergNessieVersionedTableOperations extends BaseMetastoreTableOpe private final String fullTableName; private final ResolvedVersionContext version; private final String jobId; + private final String userName; private String baseContentId; public IcebergNessieVersionedTableOperations(OperatorStats operatorStats, FileIO fileIO, NessieClient nessieClient, IcebergNessieVersionedTableIdentifier nessieVersionedTableIdentifier, - String jobId) { + String jobId, + String userName) { this.operatorStats = operatorStats; this.fileIO = fileIO; this.fullTableName = nessieVersionedTableIdentifier.getTableIdentifier().toString(); @@ -60,6 +62,7 @@ public IcebergNessieVersionedTableOperations(OperatorStats operatorStats, this.version = nessieVersionedTableIdentifier.getVersion(); this.jobId = jobId; this.baseContentId = null; + this.userName = userName; } @Override @@ -75,7 +78,14 @@ protected String tableName() { @Override protected void doRefresh() { baseContentId = nessieClient.getContentId(tableKey, version, jobId); - String metadataLocation = nessieClient.getMetadataLocation(tableKey, version, jobId); + + String metadataLocation = null; + if (baseContentId != null) { + metadataLocation = nessieClient.getMetadataLocation(tableKey, version, jobId); + Preconditions.checkState(metadataLocation != null, + "No metadataLocation for iceberg table: " + tableKey + " ref: " + version); + } + refreshFromMetadataLocation(metadataLocation, 2); } @@ -99,7 +109,8 @@ protected void doCommit(TableMetadata base, TableMetadata metadata) { metadata.sortOrder().orderId()), version, baseContentId, - jobId); 
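Note: refreshVersionContext() above presumably exists because a branch or tag head can advance between query planning and the executor-side commit. A hypothetical call sequence (the model variable and arguments are assumptions, not taken from this change):

    // Re-resolve the reference so the Nessie commit validates against the
    // current head rather than the planning-time one.
    icebergModel.refreshVersionContext();
    IcebergOpCommitter committer =
        icebergModel.getInsertTableCommitter(tableIdentifier, operatorStats);
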
+ jobId, + userName); threw = false; long totalCatalogUpdateTime = stopwatchCatalogUpdate.elapsed(TimeUnit.MILLISECONDS); if (operatorStats != null) { @@ -114,6 +125,6 @@ protected void doCommit(TableMetadata base, TableMetadata metadata) { } public void deleteKey() { - nessieClient.deleteCatalogEntry(tableKey, version); + nessieClient.deleteCatalogEntry(tableKey, version, userName); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViewOperations.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViewOperations.java index 54b48d76db..69f1d23b3c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViewOperations.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViewOperations.java @@ -31,6 +31,7 @@ import com.dremio.exec.catalog.ResolvedVersionContext; import com.dremio.plugins.NessieClient; +import com.google.common.base.Preconditions; /** * Versioned iceberg view operations. @@ -47,6 +48,7 @@ public class IcebergNessieVersionedViewOperations extends BaseMetastoreViewOpera private final List viewKey; private final String dialect; private final ResolvedVersionContext version; + private final String userName; private IcebergView icebergView; private String baseContentId; @@ -55,21 +57,26 @@ public IcebergNessieVersionedViewOperations( NessieClient nessieClient, List viewKey, String dialect, - ResolvedVersionContext version) { + ResolvedVersionContext version, + String userName) { this.fileIO = fileIO; this.nessieClient = requireNonNull(nessieClient); this.viewKey = requireNonNull(viewKey); this.dialect = dialect; this.version = version; this.baseContentId = null; - + this.userName = userName; } @Override public ViewVersionMetadata refresh() { baseContentId = nessieClient.getContentId(viewKey, version, null); - final String metadataLocation = nessieClient.getMetadataLocation(viewKey, version, null); - + String metadataLocation = null; + if (baseContentId != null) { + metadataLocation = nessieClient.getMetadataLocation(viewKey, version, null); + Preconditions.checkState(metadataLocation != null, + "No metadataLocation for iceberg view: " + viewKey + " ref: " + version); + } refreshFromMetadataLocation(metadataLocation, RETRY_IF, MAX_RETRIES, this::loadViewMetadata); return current(); @@ -83,7 +90,7 @@ private ViewVersionMetadata loadViewMetadata(String metadataLocation) { @Override public void drop(String viewIdentifier) { logger.debug("Deleting key for view {} at version {} from Nessie ", viewKey, version); - nessieClient.deleteCatalogEntry(viewKey, version); + nessieClient.deleteCatalogEntry(viewKey, version, userName); } @Override @@ -95,7 +102,7 @@ public void commit( boolean isFailedOperation = true; try { - nessieClient.commitView(viewKey, newMetadataLocation, icebergView, target, dialect, version, baseContentId); + nessieClient.commitView(viewKey, newMetadataLocation, icebergView, target, dialect, version, baseContentId, userName); isFailedOperation = false; } finally { if (isFailedOperation) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViews.java b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViews.java index f6393bed0c..8ed3b4a540 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViews.java +++ 
b/sabot/kernel/src/main/java/com/dremio/exec/store/iceberg/nessie/IcebergNessieVersionedViews.java @@ -33,9 +33,8 @@ import org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.ResolvedVersionContext; -import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.plugins.NessieClient; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; @@ -49,7 +48,8 @@ public class IcebergNessieVersionedViews implements IcebergVersionedViews { private final String warehouseLocation; private final NessieClient nessieClient; private final Configuration fileSystemConfig; - private final MutablePlugin plugin; + private final SupportsIcebergMutablePlugin plugin; + private final String userName; public static final String DIALECT = "DREMIO"; @@ -57,7 +57,8 @@ public IcebergNessieVersionedViews( String warehouseLocation, NessieClient nessieClient, Configuration fileSystemConfig, - MutablePlugin plugin) { + SupportsIcebergMutablePlugin plugin, + String userName) { requireNonNull(warehouseLocation); this.warehouseLocation = warehouseLocation.endsWith("/") @@ -66,6 +67,7 @@ public IcebergNessieVersionedViews( this.nessieClient = nessieClient; this.fileSystemConfig = fileSystemConfig; this.plugin = plugin; + this.userName = userName; } protected String defaultWarehouseLocation(List viewKey) { @@ -175,6 +177,7 @@ public void drop(List viewKey, ResolvedVersionContext version) { protected BaseMetastoreViewOperations newViewOps( List viewKey, ResolvedVersionContext version) { return new IcebergNessieVersionedViewOperations( - new DremioFileIO(fileSystemConfig, plugin), nessieClient, viewKey, DIALECT, version); + plugin.createIcebergFileIO(plugin.getSystemUserFS(), null, null, null, null), + nessieClient, viewKey, DIALECT, version, userName); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/ischema/ExpressionConverter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/ischema/ExpressionConverter.java index 4090e50347..b84298b63e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/ischema/ExpressionConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/ischema/ExpressionConverter.java @@ -45,7 +45,7 @@ /** * Enables conversion of a filter condition into a search query and remainder for pushdown purposes. 
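* For example (illustrative): a predicate such as TABLE_NAME LIKE 'ice%' can be pushed down
* as a prefix search against the table-name index key, while any conjunct that cannot be
* converted is returned as the remainder and evaluated after the scan.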
*/ -public class ExpressionConverter { +public final class ExpressionConverter { private static final ImmutableMap FIELDS = ImmutableMap.of( "TABLE_SCHEMA".toLowerCase(), DatasetIndexKeys.UNQUOTED_SCHEMA, "TABLE_NAME".toLowerCase(), DatasetIndexKeys.UNQUOTED_NAME, @@ -183,16 +183,15 @@ private SearchQuery handleLike(RexCall call) { switch(operands.size()) { case 3: - RexNode op3 = operands.get(2); if(op3 instanceof RexLiteral) { escape = ((RexLiteral) op3).getValue3().toString(); } else { return null; } + // fall through case 2: - RexNode op1 = operands.get(0); if(op1 instanceof RexInputRef) { RexInputRef input = ((RexInputRef) op1); @@ -208,7 +207,6 @@ private SearchQuery handleLike(RexCall call) { } else { return null; } - break; default: diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/MetadataRefreshUtils.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/MetadataRefreshUtils.java index fc0cc97a06..36a2c530d2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/MetadataRefreshUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/MetadataRefreshUtils.java @@ -46,8 +46,7 @@ public static boolean metadataSourceAvailable(final CatalogService catalogServic FileSystemPlugin metadataPlugin = null; try { metadataPlugin = catalogService.getSource(METADATA_STORAGE_PLUGIN_NAME); - } - catch (Exception e) { + } catch (Exception e) { logger.debug("Exception while getting the plugin for the source [{}].", METADATA_STORAGE_PLUGIN_NAME, e); return false; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/committer/DatasetCatalogRequestBuilder.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/committer/DatasetCatalogRequestBuilder.java index 83ec0a4388..32560bf95d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/committer/DatasetCatalogRequestBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/committer/DatasetCatalogRequestBuilder.java @@ -23,19 +23,16 @@ import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; -import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; import com.dremio.exec.planner.cost.ScanCostFactor; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.iceberg.IcebergSerDe; -import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.sabot.exec.store.easy.proto.EasyProtobuf; import com.dremio.service.catalog.AddOrUpdateDatasetRequest; import com.dremio.service.catalog.GetDatasetRequest; @@ -228,7 +225,8 @@ public void overrideSchema(BatchSchema batchSchema) { .setBatchSchema(ByteString.copyFrom(batchSchema.serialize())); } - public void setIcebergMetadata(String rootPointer, String tableUuid, long snapshotId, Configuration conf, boolean isPartitioned, Map partitionSpecMap, MutablePlugin plugin, Schema schema) { + public void setIcebergMetadata(String rootPointer, String tableUuid, long snapshotId, + Map partitionSpecMap, Schema schema, String partitionStatsFile, Long partitionsStatsFileLength) { Preconditions.checkState(request != null, "Unexpected state"); 
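// Note: the partition-stats file and its length are now supplied by the caller instead of
// being re-resolved here from the root pointer; either may be null (e.g. for an
// unpartitioned table), in which case the corresponding metadata field is left unset below.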
Preconditions.checkState(request.getDatasetConfigBuilder().getIcebergMetadataEnabled(), "Unexpected state"); byte[] specs = IcebergSerDe.serializePartitionSpecAsJsonMap(partitionSpecMap); @@ -238,11 +236,11 @@ public void setIcebergMetadata(String rootPointer, String tableUuid, long snapsh .setSnapshotId(snapshotId) .setPartitionSpecsJsonMap(ByteString.copyFrom(specs)) .setJsonSchema(serializedSchemaAsJson(schema)); - if (isPartitioned) { - String partitionStatsFile = IcebergUtils.getPartitionStatsFile(rootPointer, snapshotId, conf, plugin); - if (partitionStatsFile != null) { - metadataBuilder.setPartitionStatsFile(partitionStatsFile); - } + if (partitionStatsFile != null) { + metadataBuilder.setPartitionStatsFile(partitionStatsFile); + } + if (partitionsStatsFileLength != null) { + metadataBuilder.setPartitionStatsFileSize(partitionsStatsFileLength); } request.getDatasetConfigBuilder().setIcebergMetadata(metadataBuilder.build()); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingInvocationPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingInvocationPrel.java index 01707ea5fa..0c376e570e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingInvocationPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingInvocationPrel.java @@ -38,7 +38,6 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.util.Pair; -import org.apache.iceberg.ManifestContent; import com.dremio.common.expression.SchemaPath; import com.dremio.common.utils.PathUtils; @@ -67,6 +66,7 @@ import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.exec.store.iceberg.IcebergManifestListPrel; import com.dremio.exec.store.iceberg.InternalIcebergScanTableMetadata; +import com.dremio.exec.store.iceberg.ManifestContentType; import com.dremio.exec.store.metadatarefresh.MetadataRefreshExecConstants; import com.dremio.options.Options; import com.google.common.collect.ImmutableList; @@ -180,7 +180,7 @@ private Prel generateManifestListScanPrel(TableMetadata icebergScanTableMetadata final RelDataType manifestListRowType = getRowTypeFromProjectedColumns(manifestListReaderColumns, manifestListReaderSchema, getCluster()); IcebergManifestListPrel manifestListPrel = new IcebergManifestListPrel(getCluster(), traitSet, icebergScanTableMetadata, manifestListReaderSchema, manifestListReaderColumns, - manifestListRowType, null, ManifestContent.DATA); //TODO: check icebergExpression can be null or not + manifestListRowType, null, ManifestContentType.DATA); //TODO: check icebergExpression can be null or not return manifestListPrel; } @@ -230,6 +230,7 @@ private Pair findFieldWithIndex(RelNode relNode, Stri return fieldPair; } + @Override public double getObservedRowcountAdjustment() { return 1.0; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReader.java index 69e3495e7e..a0fa7a217b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReader.java @@ -106,7 +106,7 @@ public class DirListingRecordReader implements RecordReader { private 
BigIntVector sizeVector; private VarCharVector pathVector; private VarBinaryVector partitionInfoVector; - private Retryer retryer = new Retryer.Builder() + private final Retryer retryer = Retryer.newBuilder() .retryIfExceptionOfType(IOException.class) .retryIfExceptionOfType(RuntimeException.class) .setWaitStrategy(Retryer.WaitStrategy.EXPONENTIAL, 250, 2500) @@ -152,8 +152,7 @@ public void setup(OutputMutator output) { partitionInfoVector = (VarBinaryVector) outgoing.getVector(DirList.OUTPUT_SCHEMA.PARTITION_INFO); try { initDirIterator(isFile); - } - catch (IOException e) { + } catch (IOException e) { throw new IllegalStateException("Error listing directory " + operatingPath.toString(), e); } } @@ -176,7 +175,7 @@ public int next() { String errorMessage = "Failed to list files of directory " + operatingPath.toString(); if (isRateLimitingException(e)) { try { - generatedRecords = (int) retryer.call(() -> iterateDirectory()); + generatedRecords = retryer.call(() -> iterateDirectory()); } catch (Retryer.OperationFailedAfterRetriesException retriesException) { hasExceptionHandled = false; errorMessage = "With retry attempt failed to list files of directory " + operatingPath.toString(); @@ -248,7 +247,7 @@ protected void initDirIterator(boolean isFile) throws IOException { } catch (IOException e) { if (isRateLimitingException(e)) { try { - dirIterator = (Iterator) retryer.call(() -> fs.listFiles(operatingPath, isRecursive).iterator()); + dirIterator = retryer.call(() -> fs.listFiles(operatingPath, isRecursive).iterator()); } catch (Retryer.OperationFailedAfterRetriesException retriesException) { String retryErrorMessage = "Retry attempted "; if (e.getMessage() != null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReaderIterator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReaderIterator.java index 086c994e32..2ac6a04f7a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReaderIterator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingRecordReaderIterator.java @@ -87,8 +87,7 @@ public RecordReader next() { if (fs == null) { try { fs = plugin.createFSWithoutHDFSCache(dirListInputSplit.getRootPath(), config.getProps().getUserName(), context); - } - catch (IOException e) { + } catch (IOException e) { throw UserException.ioExceptionError(e).buildSilently(); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingScanPrel.java index 600ab7900b..9e3703d030 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingScanPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/dirlisting/DirListingScanPrel.java @@ -83,6 +83,7 @@ public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws tableMetadata, tableMetadata.getSchema(), projectedColumns, pluginId, allowRecursiveListing); } + @Override public double getObservedRowcountAdjustment() { return 1.0; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/footerread/ParquetFooterReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/footerread/ParquetFooterReader.java index 3cf7af61be..176a4a870d 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/footerread/ParquetFooterReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/metadatarefresh/footerread/ParquetFooterReader.java @@ -50,6 +50,8 @@ import com.dremio.exec.store.dfs.implicit.AdditionalColumnsRecordReader; import com.dremio.exec.store.parquet.InputStreamProvider; import com.dremio.exec.store.parquet.MutableParquetMetadata; +import com.dremio.exec.store.parquet.ParquetFilterCreator; +import com.dremio.exec.store.parquet.ParquetFilters; import com.dremio.exec.store.parquet.ParquetReaderUtility; import com.dremio.exec.store.parquet.ParquetScanProjectedColumns; import com.dremio.exec.store.parquet.ParquetTypeHelper; @@ -122,13 +124,12 @@ private long getEstimatedRowCount(long fileSize) { } private BatchSchema createBatchSchemaIfNeeded(MutableParquetMetadata parquetMetadata, String path, long fileSize) throws IOException { - if (tableSchema != null) { - return tableSchema; + if (tableSchema == null) { + tableSchema = batchSchemaFromParquetFooter(parquetMetadata, path, fileSize); } - return tableSchema = batchSchemaFromParquetFooter(parquetMetadata, path, fileSize); + return tableSchema; } - private List getFieldsUsingParquetTypeHelper(MutableParquetMetadata footer) throws Exception { List fields = new ArrayList<>(); final ParquetReaderUtility.DateCorruptionStatus dateStatus = ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_NO_CORRUPTION; @@ -209,7 +210,7 @@ private BatchSchema getBatchSchemaFromReader(final FileSystem fs, final String p final long maxFooterLen = opContext.getOptions().getOption(ExecConstants.PARQUET_MAX_FOOTER_LEN_VALIDATOR); - try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, Path.of(path), fileSize, maxFooterLen, false, null, null, false); + try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, Path.of(path), fileSize, maxFooterLen, false, null, null, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT); RecordReader reader = new AdditionalColumnsRecordReader(opContext, new ParquetRowiseReader(opContext, mutableParquetMetadata, 0, path, ParquetScanProjectedColumns.fromSchemaPaths(GroupScan.ALL_COLUMNS), fs, schemaHelper, streamProvider, codec, true), new ArrayList<>(), sampleAllocator)) { @@ -224,6 +225,8 @@ private BatchSchema getBatchSchemaFromReader(final FileSystem fs, final String p return mutator.getContainer().getSchema(); } catch (Exception e) { throw new IOException(e.getMessage()); + } finally { + codec.release(); } } } @@ -231,7 +234,7 @@ private BatchSchema getBatchSchemaFromReader(final FileSystem fs, final String p private MutableParquetMetadata readFooter(String path, long fileSize) throws IOException { logger.debug("Reading footer of file [{}]", path); try (SingleStreamProvider singleStreamProvider = new SingleStreamProvider(this.fs, Path.of(path), fileSize, - maxFooterLen(), false, null, opContext, false)) { + maxFooterLen(), false, null, opContext, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT)) { return singleStreamProvider.getFooter(); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsRecordReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsRecordReader.java index cf1d72f653..b0c811303a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsRecordReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsRecordReader.java @@ 
-36,10 +36,8 @@ import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.exceptions.UserException; import com.dremio.common.expression.SchemaPath; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.store.RecordReader; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.SupportsIcebergRootPointer; import com.dremio.io.file.FileSystem; import com.dremio.sabot.exec.context.OperatorContext; @@ -61,6 +59,7 @@ final class IcebergMetadataFunctionsRecordReader implements RecordReader { private IcebergMetadataValueVectorWriter valueWriter; private final List columns; private final Table icebergTable ; + private final MetadataTableType tableType; public IcebergMetadataFunctionsRecordReader(OperatorContext context, SupportsIcebergRootPointer pluginForIceberg, @@ -75,18 +74,26 @@ public IcebergMetadataFunctionsRecordReader(OperatorContext context, this.dataset = config.getReferencedTables().iterator().next(); this.props = config.getProps(); this.columns = config.getColumns(); - MetadataTableType tableType = IcebergMetadataFunctionsTable.valueOf(config.getmFunction().getName().toUpperCase(Locale.ROOT)).getTableType(); + this.tableType = IcebergMetadataFunctionsTable.valueOf(config.getmFunction().getName().toUpperCase(Locale.ROOT)).getTableType(); this.icebergTable = MetadataTableUtils.createMetadataTableInstance(getTableOps(),null ,null, tableType); } @Override public void setup(OutputMutator output) throws ExecutionSetupException { this.tmpBuf = context.getAllocator().buffer(4096); - this.valueWriter = new IcebergMetadataValueVectorWriter(output, context.getTargetBatchSize(), - columns, icebergTable.schema(), icebergTable - .newScan() - .planFiles() - .iterator(),tmpBuf); + if (this.tableType == MetadataTableType.PARTITIONS) { + this.valueWriter = new IcebergMetadataValueVectorWriter(output, context.getTargetBatchSize(), + columns, icebergTable.schema(), getTableOps().current().spec(), icebergTable + .newScan() + .planFiles() + .iterator(), tmpBuf); + } else { + this.valueWriter = new IcebergMetadataValueVectorWriter(output, context.getTargetBatchSize(), + columns, icebergTable.schema(), icebergTable + .newScan() + .planFiles() + .iterator(), tmpBuf); + } } @@ -118,8 +125,8 @@ private TableOperations getTableOps() { } catch (IOException e) { throw new RuntimeException("Failed creating filesystem", e); } - return new StaticTableOperations(metadataLocation, new DremioFileIO( - fs, context, dataset, null, null, pluginForIceberg.getFsConfCopy(), (MutablePlugin)pluginForIceberg)); + return new StaticTableOperations(metadataLocation, pluginForIceberg.createIcebergFileIO( + fs, context, dataset, null, null)); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsSchema.java b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsSchema.java index 5b4299fd42..583648361a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsSchema.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsSchema.java @@ -33,11 +33,18 @@ final class IcebergMetadataFunctionsSchema { private static final BatchSchema SNAPSHOTS; private static final BatchSchema MANIFESTS; private static final BatchSchema TABLE_FILES; + private static final BatchSchema PARTITIONS; private static final SchemaConverter schemaConverter = 
SchemaConverter.getBuilder().build(); static { HISTORY = schemaConverter.fromIceberg(MetadataTableUtils.createMetadataTableInstance(null, null,null, MetadataTableType.HISTORY).schema()); + PARTITIONS = schemaConverter.fromIceberg(new Schema( + Types.NestedField.required(1, "partition", Types.StringType.get()), + Types.NestedField.required(2, "record_count", Types.LongType.get()), + Types.NestedField.required(3, "file_count", Types.IntegerType.get()), + Types.NestedField.required(4, "spec_id", Types.IntegerType.get()) + )); SNAPSHOTS = schemaConverter.fromIceberg(new Schema( Types.NestedField.required(1, "committed_at", Types.TimestampType.withZone()), Types.NestedField.required(2, "snapshot_id", Types.LongType.get()), @@ -109,6 +116,10 @@ public static BatchSchema getTableFilesRecordSchema() { return TABLE_FILES; } + public static BatchSchema getPartitionsRecordSchema() { + return PARTITIONS; + } + private IcebergMetadataFunctionsSchema() { } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsTable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsTable.java index 8011c2e9b7..4992ac547a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataFunctionsTable.java @@ -27,7 +27,8 @@ public enum IcebergMetadataFunctionsTable { TABLE_HISTORY(IcebergMetadataFunctionsSchema.getHistoryRecordSchema(),MetadataTableType.HISTORY), TABLE_SNAPSHOT(IcebergMetadataFunctionsSchema.getSnapshotRecordSchema(),MetadataTableType.SNAPSHOTS), TABLE_MANIFESTS(IcebergMetadataFunctionsSchema.getManifestFilesRecordSchema(),MetadataTableType.MANIFESTS), - TABLE_FILES(IcebergMetadataFunctionsSchema.getTableFilesRecordSchema(),MetadataTableType.FILES); + TABLE_FILES(IcebergMetadataFunctionsSchema.getTableFilesRecordSchema(),MetadataTableType.FILES), + TABLE_PARTITIONS(IcebergMetadataFunctionsSchema.getPartitionsRecordSchema(),MetadataTableType.PARTITIONS); private final BatchSchema recordSchema; private final MetadataTableType tableType; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataValueVectorWriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataValueVectorWriter.java index f0b5336a7e..c9a9494698 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataValueVectorWriter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/IcebergMetadataValueVectorWriter.java @@ -19,6 +19,7 @@ import java.nio.charset.StandardCharsets; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Optional; @@ -32,9 +33,11 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.iceberg.Accessor; import org.apache.iceberg.FileScanTask; +import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.StructLike; import org.apache.iceberg.io.CloseableIterator; +import org.apache.iceberg.types.Types; import org.apache.iceberg.types.Types.NestedField; import com.dremio.common.exceptions.UserException; @@ -56,6 +59,21 @@ final class IcebergMetadataValueVectorWriter { private final Map> accessorByColumn; private final ArrowBuf tmpBuf; private CloseableIterator recordsIterator; + private PartitionSpec spec; + + public IcebergMetadataValueVectorWriter(OutputMutator output, int targetBatchSize, List columns, Schema icebergSchema, + 
PartitionSpec spec, CloseableIterator iterator, ArrowBuf tmpBuf) { + Preconditions.checkNotNull(tmpBuf); + this.output = output; + this.targetBatchSize = targetBatchSize; + this.projectedColumns = columns; + this.icebergSchema = icebergSchema; + this.accessorByColumn = Maps.newHashMap(); + this.tmpBuf = tmpBuf; + this.iterator = iterator; + this.recordsIterator = null; + this.spec = spec; + } public IcebergMetadataValueVectorWriter(OutputMutator output, int targetBatchSize, List columns, Schema icebergSchema, CloseableIterator iterator, ArrowBuf tmpBuf) { @@ -68,6 +86,7 @@ public IcebergMetadataValueVectorWriter(OutputMutator output, int targetBatchSiz this.tmpBuf = tmpBuf; this.iterator = iterator; this.recordsIterator = null; + this.spec = null; } public int write() { @@ -89,6 +108,8 @@ public int write() { } else if (targetVector instanceof StructVector) { StructVector vector = (StructVector) targetVector; writeToMapVector(vector, outIndex, (Map) valueToWrite); + } else if (rootColumn.toLowerCase(Locale.ROOT).equals("partition") && valueToWrite instanceof StructLike) { + writeToVector(targetVector, outIndex, getPartitionData((StructLike) valueToWrite)); } else { writeToVector(targetVector, outIndex, valueToWrite); } @@ -171,6 +192,27 @@ private void writeIntValue(BaseWriter.StructWriter structWriter, String fieldNam } } + /** + * Renders partition data as a human-readable string, e.g. {col1=val1, col2=val2}. + */ + private String getPartitionData(StructLike data) { + StringBuilder stringBuilder = new StringBuilder(); + stringBuilder.append("{"); + Preconditions.checkNotNull(spec, "Partition Spec not found"); + List<Types.NestedField> fields = spec.partitionType().asStructType().fields(); + Preconditions.checkState(fields.size() == data.size(), "Number of partition columns in the partition spec does not match the partition data"); + for (int pos = 0; pos < fields.size(); pos++) { + Types.NestedField nestedField = fields.get(pos); + Object partitionValue = data.get(pos, nestedField.type().typeId().javaClass()); + // Don't output null partition values; check the buffer length rather than pos so a + // leading null value doesn't produce a stray separator + if (partitionValue != null) { + if (stringBuilder.length() > 1) { + stringBuilder.append(", "); + } + stringBuilder.append(nestedField.name()).append("=").append(partitionValue); + } + } + stringBuilder.append("}"); + return stringBuilder.toString(); + } + /** * Use this method to write to list vector. It reads from structLike data.
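* Each element of the struct-like value becomes one element of the output list vector.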
* eg: manifests -> partition_summaries diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/MetadataFunctionsGroupScan.java b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/MetadataFunctionsGroupScan.java index 94a951b6e1..8eb14db10d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/MetadataFunctionsGroupScan.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/mfunctions/MetadataFunctionsGroupScan.java @@ -71,6 +71,7 @@ public SubScan getSpecificScan(List work) throws ExecutionSe metadataLocation); } + @Override public int getOperatorType() { return UserBitShared.CoreOperatorType.ICEBERG_METADATA_FUNCTIONS_READER.getNumber(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/AllRowGroupsParquetReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/AllRowGroupsParquetReader.java index fbc3778dad..178ffd2e66 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/AllRowGroupsParquetReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/AllRowGroupsParquetReader.java @@ -263,7 +263,7 @@ private InputStreamProvider createInputStreamProvider(int rowGroupIndex) { dataset, fileAttributes.lastModifiedTime().toMillis(), false, - true); + true, filters, parquetReaderFactory.newFilterCreator(context, ParquetReaderFactory.ManagedSchemaType.ICEBERG, null, context.getAllocator())); } catch (IOException ex) { throw new UncheckedIOException(ex); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProvider.java index c7d5579413..0ba46781cb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProvider.java @@ -87,4 +87,9 @@ default AsyncByteReader getAsyncByteReader() { */ void enableColumnIndices(List selectedColumns) throws IOException; + default boolean isRowGroupFilteredForReading() { + return true; + } + + } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProviderFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProviderFactory.java index 7296c2acad..2cc72a8f9b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProviderFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/InputStreamProviderFactory.java @@ -35,14 +35,14 @@ public interface InputStreamProviderFactory { InputStreamProvider create(FileSystem fs, OperatorContext context, Path path, long fileLength, long splitSize, ParquetScanProjectedColumns projectedColumns, MutableParquetMetadata footerIfKnown, InputStreamProvider inputStreamProviderIfKnown, Function rowGroupIndexProvider, - boolean readFullFile, List dataset, long mTime, boolean enableBoosting, boolean readIndices) throws IOException; + boolean readFullFile, List dataset, long mTime, boolean enableBoosting, boolean readIndices, ParquetFilters parquetFilters, ParquetFilterCreator parquetFilterCreator) throws IOException; InputStreamProviderFactory DEFAULT = new InputStreamProviderFactory() { @Override public InputStreamProvider create(FileSystem fs, OperatorContext context, Path path, long fileLength, long splitSize, ParquetScanProjectedColumns projectedColumns, MutableParquetMetadata footerIfKnown, InputStreamProvider inputStreamProviderIfKnown, Function rowGroupIndexProvider, - boolean readFullFile, List 
dataset, long mTime, boolean enableBoosting, boolean readColumnIndices) throws IOException { + boolean readFullFile, List dataset, long mTime, boolean enableBoosting, boolean readColumnIndices, ParquetFilters parquetFilters, ParquetFilterCreator parquetFilterCreator) throws IOException { OptionManager options = context.getOptions(); boolean useSingleStream = // option is set for single stream @@ -57,8 +57,8 @@ public InputStreamProvider create(FileSystem fs, OperatorContext context, final long maxFooterLen = context.getOptions().getOption(ExecConstants.PARQUET_MAX_FOOTER_LEN_VALIDATOR); return useSingleStream - ? new SingleStreamProvider(fs, path, fileLength, maxFooterLen, readFullFile, footerIfKnown, context, readColumnIndices) - : new StreamPerColumnProvider(fs, path, fileLength, maxFooterLen, footerIfKnown, context, readColumnIndices); + ? new SingleStreamProvider(fs, path, fileLength, maxFooterLen, readFullFile, footerIfKnown, context, readColumnIndices, parquetFilters, parquetFilterCreator) + : new StreamPerColumnProvider(fs, path, fileLength, maxFooterLen, footerIfKnown, context, readColumnIndices, parquetFilters, parquetFilterCreator); } }; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/MutableParquetMetadata.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/MutableParquetMetadata.java index 1ed3bee389..271ba4c285 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/MutableParquetMetadata.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/MutableParquetMetadata.java @@ -197,6 +197,7 @@ public void removePageHeaders(int rowGroupIndex) { blocks.set(rowGroupIndex, newBlockMetaData); } + @Override public String toString() { final long numNonNullBlocks = getBlocks() .stream() diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/OutputFile.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/OutputFile.java index 7ffaaa6433..865ece7b14 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/OutputFile.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/OutputFile.java @@ -19,53 +19,31 @@ import org.apache.parquet.io.PositionOutputStream; -import com.dremio.exec.hadoop.FSDataOutputStreamWithStatsWrapper; -import com.dremio.exec.hadoop.FSDataOutputStreamWrapper; import com.dremio.io.FSOutputStream; import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; -import com.dremio.sabot.exec.context.OperatorStats; public final class OutputFile implements org.apache.parquet.io.OutputFile { private final FileSystem fs; private final Path path; - private final org.apache.hadoop.fs.FileSystem hadoopFs; - final OperatorStats operatorStats; - - private OutputFile(FileSystem fs, Path path, org.apache.hadoop.fs.FileSystem hadoopFs, OperatorStats operatorStats) { + private OutputFile(FileSystem fs, Path path) { this.fs = fs; this.path = path; - this.hadoopFs = hadoopFs; - this.operatorStats = operatorStats; - } - public static OutputFile of(FileSystem fs, Path path, org.apache.hadoop.fs.FileSystem hadoopFs, OperatorStats operatorStats) { - return new OutputFile(fs, path, hadoopFs, operatorStats); - } - - private FSOutputStream getFSOutputStreamWithStats(OperatorStats operatorStats, boolean overwrite) throws IOException { - org.apache.hadoop.fs.Path fsPath = new org.apache.hadoop.fs.Path(path.toString()); - FSOutputStream fsOutputStream = new FSDataOutputStreamWrapper(hadoopFs.create(fsPath, overwrite)); - if (operatorStats != null) { - fsOutputStream = new 
FSDataOutputStreamWithStatsWrapper(fsOutputStream, operatorStats, path.toString()); - } - return fsOutputStream; + public static OutputFile of(FileSystem fs, Path path) { + return new OutputFile(fs, path); } @Override public PositionOutputStream create(long blockSizeHint) throws IOException { - try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { - return new PositionOutputStreamWrapper(getFSOutputStreamWithStats(operatorStats, false)); - } + return new PositionOutputStreamWrapper(fs.create(path)); } @Override public PositionOutputStream createOrOverwrite(long blockSizeHint) throws IOException { - try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(operatorStats)) { - return new PositionOutputStreamWrapper(getFSOutputStreamWithStats(operatorStats, true)); - } + return new PositionOutputStreamWrapper(fs.create(path, true)); } @Override @@ -122,6 +100,4 @@ public void close() throws IOException { os.close(); } } - - } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnDefaultResolver.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnDefaultResolver.java index aeaeb4d698..86d3d140e6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnDefaultResolver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnDefaultResolver.java @@ -38,42 +38,52 @@ public ParquetColumnDefaultResolver(List projectedColumns) { this.projectedColumns = projectedColumns; } + @Override public List getBatchSchemaProjectedColumns() { return projectedColumns; } + @Override public List getProjectedParquetColumns() { return projectedColumns; } + @Override public String getBatchSchemaColumnName(String columnInParquetFile) { return columnInParquetFile; } + @Override public List getBatchSchemaColumnName(List columnInParquetFile) { return columnInParquetFile; } + @Override public String getParquetColumnName(String name) { return name; } + @Override public List getBatchSchemaColumns(List parquestSchemaPaths) { return parquestSchemaPaths; } + @Override public SchemaPath getBatchSchemaColumnPath(SchemaPath pathInParquetFile) { return pathInParquetFile; } + @Override public List getNameSegments(SchemaPath schemaPath) { return schemaPath.getNameSegments(); } + @Override public List convertColumnDescriptor(MessageType schema, ColumnDescriptor columnDesc) { return ParquetReaderUtility.convertColumnDescriptor(schema, columnDesc); } + @Override public String toDotString(SchemaPath schemaPath, ValueVector vector) { StringBuilder pathBuilder = new StringBuilder(); boolean isListChild = (vector instanceof ListVector); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnIcebergResolver.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnIcebergResolver.java index 2373644259..0923ea95fa 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnIcebergResolver.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetColumnIcebergResolver.java @@ -74,10 +74,12 @@ private void initializeProjectedColumnIDs(List getBatchSchemaProjectedColumns() { return projectedColumns; } + @Override public List getProjectedParquetColumns() { return this.projectedColumns.stream() .map(this::getParquetColumnPath) @@ -85,6 +87,7 @@ public List getProjectedParquetColumns() { .collect(Collectors.toList()); } + @Override public String getBatchSchemaColumnName(String columnInParquetFile) { if 
(!this.parquetColumnIDs.containsKey(columnInParquetFile)) { return null; @@ -99,6 +102,7 @@ public String getBatchSchemaColumnName(String columnInParquetFile) { return this.icebergColumnIDMap.inverse().get(id); } + @Override public List getBatchSchemaColumnName(List columnInParquetFile) { String columnName = String.join(".", columnInParquetFile); @@ -121,6 +125,7 @@ public List getBatchSchemaColumnName(List columnInParquetFile) { return Lists.newArrayList(columnInSchema.split("\\.")); } + @Override public String getParquetColumnName(String name) { if (!this.icebergColumnIDMap.containsKey(name)) { return null; @@ -135,6 +140,7 @@ public String getParquetColumnName(String name) { return this.parquetColumnIDs.inverse().get(id); } + @Override public List getBatchSchemaColumns(List parquestSchemaPaths) { return parquestSchemaPaths.stream() .map(this::getBatchSchemaColumnPath) @@ -142,6 +148,7 @@ public List getBatchSchemaColumns(List parquestSchemaPat .collect(Collectors.toList()); } + @Override public SchemaPath getBatchSchemaColumnPath(SchemaPath pathInParquetFile) { List pathSegmentsInParquet = pathInParquetFile.getComplexNameSegments(); List pathSegmentsInBatchSchema = getBatchSchemaColumnName(pathSegmentsInParquet); @@ -150,6 +157,7 @@ public SchemaPath getBatchSchemaColumnPath(SchemaPath pathInParquetFile) { SchemaPath.getCompoundPath(pathSegmentsInBatchSchema.toArray(new String[0])); } + @Override public List getNameSegments(SchemaPath schemaPath) { return shouldUseBatchSchemaForResolvingProjectedColumn && this.fieldInfoMap != null ? getComplexNameSegments(schemaPath) : schemaPath.getComplexNameSegments(); @@ -230,10 +238,12 @@ private List getParquetSchemaColumnName(List columnInBatchSchema return Lists.newArrayList(columnInParquet.split("\\.")); } + @Override public List convertColumnDescriptor(MessageType schema, ColumnDescriptor columnDescriptor) { return Lists.newArrayList(columnDescriptor.getPath()); } + @Override public String toDotString(SchemaPath schemaPath, ValueVector vector) { return schemaPath.toDotString().toLowerCase(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatDatasetAccessor.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatDatasetAccessor.java index f23c435d7f..cbeec3910a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatDatasetAccessor.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatDatasetAccessor.java @@ -291,7 +291,7 @@ private BatchSchema getBatchSchemaFromReader(final FileSelection selection, fina final ImplicitFilesystemColumnFinder finder = new ImplicitFilesystemColumnFinder(context.getOptionManager(), fs, GroupScan.ALL_COLUMNS, isAccelerator); final long maxFooterLen = context.getOptionManager().getOption(ExecConstants.PARQUET_MAX_FOOTER_LEN_VALIDATOR); - try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, firstFile.getPath(), firstFile.size(), maxFooterLen, false, null, null, false); + try (InputStreamProvider streamProvider = new SingleStreamProvider(fs, firstFile.getPath(), firstFile.size(), maxFooterLen, false, null, null, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT); RecordReader reader = new AdditionalColumnsRecordReader(operatorContext, new ParquetRowiseReader(operatorContext, mutableParquetMetadata, 0, firstFile.getPath().toString(), ParquetScanProjectedColumns.fromSchemaPaths(GroupScan.ALL_COLUMNS), fs, schemaHelper, streamProvider, codec, true), 
finder.getImplicitFieldsForSample(selection), sampleAllocator)) { @@ -304,6 +304,8 @@ private BatchSchema getBatchSchemaFromReader(final FileSelection selection, fina mutator.getContainer().buildSchema(BatchSchema.SelectionVectorMode.NONE); return mutator.getContainer().getSchema(); + } finally { + codec.release(); } } } catch (Exception e) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatPlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatPlugin.java index 24067f443a..33835b03b8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatPlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetFormatPlugin.java @@ -183,7 +183,7 @@ public PreviewReader( this.fs = fs; this.attributes = attributes; final long maxFooterLen = context.getOptions().getOption(ExecConstants.PARQUET_MAX_FOOTER_LEN_VALIDATOR); - this.streamProvider = new SingleStreamProvider(fs, attributes.getPath(), attributes.size(), maxFooterLen, false, null, null, false); + this.streamProvider = new SingleStreamProvider(fs, attributes.getPath(), attributes.size(), maxFooterLen, false, null, null, false, ParquetFilters.NONE, ParquetFilterCreator.DEFAULT); this.footer = this.streamProvider.getFooter(); boolean autoCorrectCorruptDates = context.getOptions().getOption(ExecConstants.PARQUET_AUTO_CORRECT_DATES_VALIDATOR) && getConfig().autoCorrectCorruptDates; @@ -200,7 +200,7 @@ public PreviewReader( @Override public void close() throws Exception { - AutoCloseables.close(current, streamProvider); + AutoCloseables.close(current, streamProvider, codec::release); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetReaderFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetReaderFactory.java index df605f589d..877af58538 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetReaderFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetReaderFactory.java @@ -35,7 +35,7 @@ enum ManagedSchemaType { ICEBERG } - boolean isSupported(ColumnChunkMetaData chunk); + boolean isSupported(ColumnChunkMetaData chunk, OperatorContext context); RecordReader newReader(OperatorContext context, ParquetScanProjectedColumns projectedColumns, @@ -64,7 +64,7 @@ RecordReader newReader(OperatorContext context, ParquetReaderFactory NONE = new ParquetReaderFactory(){ @Override - public boolean isSupported(ColumnChunkMetaData chunk) { + public boolean isSupported(ColumnChunkMetaData chunk, OperatorContext context) { return false; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetRecordWriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetRecordWriter.java index 44e09e0a39..7e7c9f773b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetRecordWriter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetRecordWriter.java @@ -115,6 +115,7 @@ import com.dremio.exec.testing.ExecutionControls; import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; +import com.dremio.options.OptionManager; import com.dremio.parquet.reader.ParquetDirectByteBufferAllocator; import com.dremio.sabot.exec.context.MetricDef; import com.dremio.sabot.exec.context.OperatorContext; @@ -226,7 +227,7 @@ public ParquetRecordWriter(OperatorContext context, ParquetWriter writer, Parque this.context = context; this.codecAllocator = 
context.getAllocator().newChildAllocator("ParquetCodecFactory", 0, Long.MAX_VALUE); this.columnEncoderAllocator = context.getAllocator().newChildAllocator("ParquetColEncoder", 0, Long.MAX_VALUE); - this.codecFactory = CodecFactory.createDirectCodecFactory(new Configuration(), + this.codecFactory = CodecFactory.createDirectCodecFactory(createConfigForCodecFactory(context.getOptions()), new ParquetDirectByteBufferAllocator(codecAllocator), pageSize); this.extraMetaData.put(DREMIO_VERSION_PROPERTY, DremioVersionInfo.getVersion()); this.extraMetaData.put(IS_DATE_CORRECT_PROPERTY, "true"); @@ -276,6 +277,9 @@ public ParquetRecordWriter(OperatorContext context, ParquetWriter writer, Parque case "gzip": codec = CompressionCodecName.GZIP; break; + case "zstd": + codec = CompressionCodecName.ZSTD; + break; case "none": case "uncompressed": codec = CompressionCodecName.UNCOMPRESSED; @@ -292,6 +296,16 @@ public ParquetRecordWriter(OperatorContext context, ParquetWriter writer, Parque parquetFileWriteIoRateThresholdMbps = context.getOptions().getOption(ExecConstants.PARQUET_WRITE_IO_RATE_THRESHOLD_MBPS_VALIDATOR); } + private Configuration createConfigForCodecFactory(OptionManager options) { + Configuration conf = new Configuration(); + int zstdLevel = (int) options.getOption(ExecConstants.PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL_VALIDATOR); + // Set config for the Hadoop ZSTD codec + conf.setInt("io.compression.codec.zstd.level", zstdLevel); + // Set config for the Parquet zstd-jni codec + conf.setInt("parquet.compression.codec.zstd.level", zstdLevel); + return conf; + } + @Override public void setup() throws IOException { this.fs = plugin.createFS(location, queryUser, context); @@ -354,8 +368,9 @@ private org.apache.hadoop.fs.FileSystem getHadoopFs(Path path) throws IOExceptio private void initRecordWriter() throws IOException { this.path = fs.canonicalizePath(partition.qualified(location, prefix + "_" + index + "." + extension)); - parquetFileWriter = new ParquetFileWriter(OutputFile.of(fs, path, plugin.getHadoopFsSupplier(path.toString(), plugin.getFsConfCopy(), queryUser).get(), context.getStats()), checkNotNull(schema), ParquetFileWriter.Mode.CREATE, DEFAULT_BLOCK_SIZE, - MAX_PADDING_SIZE_DEFAULT, DEFAULT_COLUMN_INDEX_TRUNCATE_LENGTH, false); + parquetFileWriter = new ParquetFileWriter(OutputFile.of(fs, path), checkNotNull(schema), + ParquetFileWriter.Mode.CREATE, DEFAULT_BLOCK_SIZE, MAX_PADDING_SIZE_DEFAULT, + DEFAULT_COLUMN_INDEX_TRUNCATE_LENGTH, false); parquetFileWriter.start(); } @@ -938,10 +953,10 @@ public FieldConverter getNewNullableDateMilliConverter(int fieldId, String field @Override public void startPartition(WritePartition partition) throws Exception { if (index >= maxPartitions) { + logger.error(String.format("Throwing dataWriteError() from startPartition() because the index of %d is greater than or equal to the limit of %d set by store.max_partitions.", index, maxPartitions)); throw UserException.dataWriteError() - .message("Materialization cancelled due to excessive partition creation. A single thread can only generate %d partitions. " + - "Typically, this is a problem if you configure a partition or distribution column that has high cardinality. " + - "If you want to increase this limit, you can change the \"store.max_partitions\" system option.", maxPartitions) + .message("CTAS query cancelled because it will generate more than the limit of %d partitions. 
" + + "You can retry the query using a different column for PARTITION BY.", maxPartitions) .build(logger); } @@ -1068,6 +1083,7 @@ public void writeField() throws IOException { } } + @Override public FieldConverter getNewMapConverter(int fieldId, String fieldName, FieldReader reader) { MapParquetConverter converter = new MapParquetConverter(fieldId, fieldName, reader); if (converter.keyConverter == null || converter.valueConverter == null) { @@ -1234,13 +1250,7 @@ public void close() throws Exception { try { NoExceptionAutoCloseables.close(store, pageStore, parquetFileWriter); } finally { - AutoCloseables.close(new AutoCloseable() { - @Override - public void close() throws Exception { - codecFactory.release(); - } - }, - codecAllocator, columnEncoderAllocator); + AutoCloseables.close(codecFactory::release, codecAllocator, columnEncoderAllocator); } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanFilter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanFilter.java index 1f7cb6fbf0..aeedb8006d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanFilter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanFilter.java @@ -119,6 +119,7 @@ public List getPaths() { return conditions.stream().map(ParquetFilterCondition::getPath).collect(Collectors.toList()); } + @Override @JsonIgnore public RexNode getRexFilter() { if (conditions.get(0) == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanPrel.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanPrel.java index cc7bcfd90f..71bc3e9507 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanPrel.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetScanPrel.java @@ -112,6 +112,7 @@ public boolean hasFilter() { return filter != null; } + @Override public ParquetScanFilter getFilter() { return filter; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetSplitReaderCreatorIterator.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetSplitReaderCreatorIterator.java index 2a5f1c0241..5963b5ec5a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetSplitReaderCreatorIterator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ParquetSplitReaderCreatorIterator.java @@ -646,13 +646,11 @@ private void expandBlockSplit(ParquetBlockBasedSplit blockSplit) throws IOExcept private MutableParquetMetadata safelyGetFooter() throws IOException { try { return inputStreamProviderOfFirstRowGroup.getFooter(); - } - catch (IOException e) { + } catch (IOException e) { //Close the inputStreamProvider try { inputStreamProviderOfFirstRowGroup.close(); - } - catch (Exception ex) { + } catch (Exception ex) { logger.debug("Ignoring the exception on inputStreamProvider close.", ex); } throw e; @@ -696,8 +694,13 @@ public InputStreamProvider createInputStreamProvider(InputStreamProvider lastInp Preconditions.checkArgument(formatSettings.getType() != FileType.ICEBERG || icebergSchemaFields != null); ParquetScanProjectedColumns projectedColumns = ParquetScanProjectedColumns.fromSchemaPathAndIcebergSchema( - realFields, icebergSchemaFields, isConvertedIcebergDataset, context, fullSchema); - + realFields, icebergSchemaFields, isConvertedIcebergDataset, context, fullSchema); + ParquetReaderFactory.ManagedSchemaType schemaType = null; + if (!isConvertedIcebergDataset && 
DatasetHelper.isIcebergFile(formatSettings)) { + schemaType = ParquetReaderFactory.ManagedSchemaType.ICEBERG; + } else if (DatasetHelper.isDeltaLake(formatSettings)) { + schemaType = ParquetReaderFactory.ManagedSchemaType.ICEBERG; + } // If the ExecOption to ReadColumnIndexes is True and the configuration has a Filter, set readColumnIndices to true. boolean readColumnIndices = context.getOptions().getOption(READ_COLUMN_INDEXES) && filters.hasPushdownFilters(); @@ -715,7 +718,9 @@ public InputStreamProvider createInputStreamProvider(InputStreamProvider lastInp dataset, mTime, arrowCachingEnabled, - readColumnIndices); + readColumnIndices, + filters, + readerFactory.newFilterCreator(context, schemaType, null, context.getAllocator())); } public void setIcebergExtendedProperty(byte[] extendedProperty) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ScanTableFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ScanTableFunction.java index 47f0326b64..331e1d5f21 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ScanTableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/ScanTableFunction.java @@ -142,8 +142,7 @@ public void startRow(int row) throws Exception { try (ByteArrayInputStream bis = new ByteArrayInputStream(inputSplits.get(record)); ObjectInput in = new ObjectInputStream(bis)) { splits.add((SplitAndPartitionInfo) in.readObject()); - } - catch (Exception e) { + } catch (Exception e) { throw UserException .dataReadError(e) .message("Failed to read input split information.") @@ -230,6 +229,7 @@ protected void addBoostSplits() throws IOException { return; } + @Override public boolean hasBufferedRemaining() { produceFromBufferedSplits = true; getRecordReaderIterator().produceFromBuffered(true); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingleStreamProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingleStreamProvider.java index d67c13f607..9b3e0f0c6d 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingleStreamProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingleStreamProvider.java @@ -48,7 +48,11 @@ public class SingleStreamProvider implements InputStreamProvider { private MutableParquetMetadata footer; private boolean readColumnOffsetIndices; - public SingleStreamProvider(FileSystem fs, Path path, long fileLength, long maxFooterLen, boolean readFullFile, MutableParquetMetadata footer, OperatorContext context, boolean readColumnOffsetIndices) { + private ParquetFilters parquetFilters; + private ParquetFilterCreator parquetFilterCreator; + + public SingleStreamProvider(FileSystem fs, Path path, long fileLength, long maxFooterLen, boolean readFullFile, MutableParquetMetadata footer, OperatorContext context, boolean readColumnOffsetIndices, ParquetFilters parquetFilters, + ParquetFilterCreator parquetFilterCreator) { this.fs = fs; this.path = path; this.fileLength = fileLength; @@ -62,6 +66,8 @@ public SingleStreamProvider(FileSystem fs, Path path, long fileLength, long maxF this.allocator = null; } this.readColumnOffsetIndices = readColumnOffsetIndices; + this.parquetFilters = parquetFilters; + this.parquetFilterCreator = parquetFilterCreator; } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingletonParquetFooterCache.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingletonParquetFooterCache.java index c082b2bde7..4ad39e7f64 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingletonParquetFooterCache.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/SingletonParquetFooterCache.java @@ -18,6 +18,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Arrays; +import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.ArrayUtils; import org.apache.parquet.bytes.BytesUtils; @@ -31,6 +32,7 @@ import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; import com.google.common.base.Preconditions; +import com.google.common.base.Stopwatch; /** * Single object cache that holds the parquet footer for last file. @@ -104,7 +106,7 @@ private ParquetMetadata readFooter(BulkInputStream file, String path, long fileL private static ParquetMetadata readFooter(BulkInputStream file, String path, long fileLength, MetadataFilter filter, FileSystem fs, long maxFooterLen) throws IOException { Preconditions.checkArgument(fileLength >= MIN_FILE_SIZE || fileLength == -1, "%s is not a Parquet file (too small)", path); - + Stopwatch w = Stopwatch.createStarted(); if (fileLength == -1) { fileLength = fs.getFileAttributes(Path.of(path)).size(); } @@ -135,7 +137,7 @@ private static ParquetMetadata readFooter(BulkInputStream file, String path, lon int start = footerBytes.length - (size + FOOTER_METADATA_SIZE); footerBytes = ArrayUtils.subarray(footerBytes, start, start + size); } - + logger.debug("Read footer of {} of length {} in {} ms ", path, size, w.elapsed(TimeUnit.MILLISECONDS)); return ParquetFormatPlugin.parquetMetadataConverter.readParquetMetadata(new ByteArrayInputStream(footerBytes), filter); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/StreamPerColumnProvider.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/StreamPerColumnProvider.java index 04b332f9f3..478de2aaba 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/StreamPerColumnProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/StreamPerColumnProvider.java @@ -43,9 +43,13 @@ public class StreamPerColumnProvider implements InputStreamProvider { private final BufferAllocator allocator; private final OperatorContext context; + private ParquetFilters parquetFilters; + private ParquetFilterCreator parquetFilterCreator; + private final List streams = new ArrayList<>(); - public StreamPerColumnProvider(FileSystem fs, Path path, long length, long maxFooterLen, MutableParquetMetadata footer, OperatorContext context, boolean readColumnOffsetIndexes) { + public StreamPerColumnProvider(FileSystem fs, Path path, long length, long maxFooterLen, MutableParquetMetadata footer, OperatorContext context, boolean readColumnOffsetIndexes, + ParquetFilters parquetFilters, ParquetFilterCreator parquetFilterCreator) { this.fs = fs; this.path = path; this.length = length; @@ -58,6 +62,8 @@ public StreamPerColumnProvider(FileSystem fs, Path path, long length, long maxFo this.allocator = null; } this.context = context; + this.parquetFilters = parquetFilters; + this.parquetFilterCreator = parquetFilterCreator; } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/UnifiedParquetReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/UnifiedParquetReader.java index 8846faa908..a8add6896e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/UnifiedParquetReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/UnifiedParquetReader.java @@ -294,7 +294,9 @@ public void 
setIgnoreSchemaLearning(boolean ignoreSchemaLearning) { private void computeLocality(MutableParquetMetadata footer) throws ExecutionSetupException { try { BlockMetaData block = footer.getBlocks().get(readEntry.getRowGroupIndex()); - Preconditions.checkArgument(block != null, "Parquet footer does not contain information about row group"); + Preconditions.checkArgument(block != null, + "Parquet file '%s' footer does not have information about row group %s", + this.readEntry.getPath(), readEntry.getRowGroupIndex()); Iterable blockLocations = fs.getFileBlockLocations(Path.of(readEntry.getPath()), block.getStartingPos(), block.getCompressedSize()); @@ -536,6 +538,7 @@ public void close() throws Exception { closeables.add(validityBuf); closeables.add(sv2); closeables.add(filters); + closeables.add(codecFactory::release); AutoCloseables.close(closeables); } finally { delegates = null; @@ -549,7 +552,9 @@ private void splitColumns(final MutableParquetMetadata footer, List nonVectorizableReaderColumns) { boolean isArrowSchemaPresent = DremioArrowSchema.isArrowSchemaPresent(footer.getFileMetaData().getKeyValueMetaData()); final BlockMetaData block = footer.getBlocks().get(readEntry.getRowGroupIndex()); - Preconditions.checkArgument(block != null, "Parquet footer does not contain information about row group"); + Preconditions.checkArgument(block != null, + "Parquet file '%s' footer does not have information about row group %s", + this.readEntry.getPath(), readEntry.getRowGroupIndex()); final Map fieldsWithEncodingsSupportedByVectorizedReader = new HashMap<>(); final List nonVectorizableTypes = new ArrayList<>(); final List vectorizableTypes = new ArrayList<>(); @@ -557,7 +562,7 @@ private void splitColumns(final MutableParquetMetadata footer, for (ColumnChunkMetaData c : block.getColumns()) { String field = c.getPath().iterator().next(); - if (!readerFactory.isSupported(c)) { + if (!readerFactory.isSupported(c, context)) { // we'll skip columns we can't read. 
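An aside on the closeables.add(codecFactory::release) line added to UnifiedParquetReader.close() above: AutoCloseable is a functional interface whose only method is void close() throws Exception, so any no-argument void method reference can be queued for deferred cleanup next to real AutoCloseables. A minimal sketch of the pattern — CodecFactoryStub is a hypothetical stand-in for parquet's CompressionCodecFactory, and the loop is a simplified version of what a helper like Dremio's AutoCloseables.close() provides:

import java.util.ArrayList;
import java.util.List;

public final class CloseablesSketch {
  // Hypothetical stand-in for CompressionCodecFactory: release() is a no-arg void
  // method, which is exactly the shape AutoCloseable.close() requires.
  static final class CodecFactoryStub {
    void release() {
      System.out.println("codec factory released");
    }
  }

  public static void main(String[] args) throws Exception {
    CodecFactoryStub codecFactory = new CodecFactoryStub();

    List<AutoCloseable> closeables = new ArrayList<>();
    closeables.add(() -> System.out.println("validity buffer closed")); // lambda form
    closeables.add(codecFactory::release);                              // method-reference form

    // Simplified close loop: close everything, keep the first exception,
    // and attach any later ones as suppressed.
    Exception first = null;
    for (AutoCloseable c : closeables) {
      try {
        c.close();
      } catch (Exception e) {
        if (first == null) {
          first = e;
        } else {
          first.addSuppressed(e);
        }
      }
    }
    if (first != null) {
      throw first;
    }
  }
}

This keeps the codec factory's release on the same failure-tolerant path as the reader's other resources, with no extra try/finally nesting.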
fieldsWithPartialOrNoEncodingsSupportedByVectorizedReader.add(field); fieldsWithEncodingsSupportedByVectorizedReader.remove(field); @@ -745,7 +750,7 @@ public List getReaders(UnifiedParquetReader unifiedReader) throws "Deprecated vectorized reader does not support reading the file name column"); Preconditions.checkArgument( !unifiedReader.tableSchema.findFieldIgnoreCase(ColumnUtils.ROW_INDEX_COLUMN_NAME).isPresent(), - "Deprecated vectorized reader does not support reading the file name column"); + "Deprecated vectorized reader does not support reading the row index column"); List returnList = new ArrayList<>(); returnList.add(unifiedReader.addFilterIfNecessary( @@ -895,7 +900,9 @@ public List getReaders(final UnifiedParquetReader unifiedReader) t } BlockMetaData block = blocks.get(rowGroupIdx); - Preconditions.checkArgument(block != null, "Parquet footer does not contain information about row group"); + Preconditions.checkArgument(block != null, + "Parquet file '%s' footer does not have information about row group %s", + unifiedReader.readEntry.getPath(), unifiedReader.readEntry.getRowGroupIndex()); final long rowCount = block.getRowCount(); final long accumulatedRowCount = footer.getAccumulatedRowCount(rowGroupIdx); logger.debug("Row group {}, accumulated row count {}", rowGroupIdx, accumulatedRowCount); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ColumnReaderFactory.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ColumnReaderFactory.java index 9d26808ca7..9fbb452c00 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ColumnReaderFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ColumnReaderFactory.java @@ -71,8 +71,7 @@ static ColumnReader createFixedColumnReader(DeprecatedParquetVectorizedReader } } else if (convertedType == ConvertedType.INTERVAL) { throw new UnsupportedOperationException("unsupported type " + type); - } - else { + } else { return new FixedByteAlignedReader.FixedBinaryReader(recordReader, allocateSize, descriptor, columnChunkMetaData, (VariableWidthVector) v, schemaElement); } } else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT32 && convertedType == ConvertedType.DATE){ @@ -91,7 +90,7 @@ static ColumnReader createFixedColumnReader(DeprecatedParquetVectorizedReader recordReader.getDateCorruptionStatus())); } } else{ - if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) { + if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY) || columnChunkMetaData.getEncodings().contains(Encoding.RLE_DICTIONARY)) { switch (columnChunkMetaData.getType()) { case INT32: if (convertedType == null) { @@ -136,8 +135,7 @@ static ColumnReader createFixedColumnReader(DeprecatedParquetVectorizedReader fixedLength, v, schemaElement); } } - } - else { // if the column is nullable + } else { // if the column is nullable if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN){ return new BitReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (BitVector) v, schemaElement); @@ -217,7 +215,7 @@ public static NullableColumnReader getNullableColumnReader(DeprecatedParquetV SchemaElement schemaElement) throws ExecutionSetupException { ConvertedType convertedType = schemaElement.getConverted_type(); - if (! 
columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) { + if (!columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY) && !columnChunkMetaData.getEncodings().contains(Encoding.RLE_DICTIONARY)) { if (columnDescriptor.getType() == PrimitiveType.PrimitiveTypeName.INT96) { // TODO: check convertedType once parquet support TIMESTAMP_NANOS type annotation. if (parentReader.readInt96AsTimeStamp()) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/DeprecatedParquetVectorizedReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/DeprecatedParquetVectorizedReader.java index d3b12fa4e6..4899f9cde9 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/DeprecatedParquetVectorizedReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/DeprecatedParquetVectorizedReader.java @@ -258,6 +258,8 @@ public void setup(OutputMutator output) throws ExecutionSetupException { mockRecordsRead = 0; rowGroupMetadata = footer.getBlocks().get(rowGroupIndex); - Preconditions.checkArgument(rowGroupMetadata != null, "Parquet footer does not contain information about row group"); + Preconditions.checkArgument(rowGroupMetadata != null, + "Parquet file '%s' footer does not have information about row group %s", + this.getFsPath(), rowGroupIndex); Field field; // ParquetMetadataConverter metaConverter = new ParquetMetadataConverter(); @@ -321,8 +324,7 @@ public void setup(OutputMutator output) throws ExecutionSetupException { if (columnsToScan != 0 && allFieldsFixedLength) { rowsPerBatch = (int) Math.min(Math.min(numBytesPerBatch / bitWidthAllFixedFields, rowGroupMetadata.getColumns().get(0).getValueCount()), 65535); - } - else { + } else { rowsPerBatch = DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH; } this.numRowsPerBatch = Math.min(this.numRowsPerBatch, rowsPerBatch); @@ -495,12 +497,10 @@ public int next() { ColumnReader firstColumnStatus; if (columnStatuses.size() > 0) { firstColumnStatus = columnStatuses.iterator().next(); - } - else{ + } else { if (varLengthReader.columns.size() > 0) { firstColumnStatus = varLengthReader.columns.iterator().next(); - } - else{ + } else { firstColumnStatus = null; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java index 5f85fef855..b9427b6f95 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/NullableVarLengthValuesColumn.java @@ -83,8 +83,7 @@ protected boolean readAndStoreValueSizeInformation() throws IOException { currDictValToWrite = currLengthDeterminingDictVal; // re-purposing this field here for length in BYTES to prevent repetitive multiplication/division dataTypeLengthInBits = currLengthDeterminingDictVal.length(); - } - else { + } else { // re-purposing this field here for length in BYTES to prevent repetitive multiplication/division dataTypeLengthInBits = pageReader.pageData.getInt((int) pageReader.readyToReadPosInBytes); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ParquetToMinorTypeConverter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ParquetToMinorTypeConverter.java index dfa3c729c0..297900e5f4 100644 ---
a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ParquetToMinorTypeConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/ParquetToMinorTypeConverter.java @@ -116,6 +116,8 @@ private static TypeProtos.MinorType getMinorType(PrimitiveType.PrimitiveTypeName return TypeProtos.MinorType.INTERVALDAY; case YEAR_MONTH: return TypeProtos.MinorType.INTERVALYEAR; + default: + break; } } throw new IllegalArgumentException("incompatible type " + arrowField); @@ -123,6 +125,7 @@ private static TypeProtos.MinorType getMinorType(PrimitiveType.PrimitiveTypeName // TODO: older versions of Drill generated this return TypeProtos.MinorType.VARBINARY; } + // fall through default: throw new UnsupportedOperationException("Type not supported: " + primitiveTypeName); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthColumn.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthColumn.java index f0af489b48..ff8e1e4ea1 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthColumn.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthColumn.java @@ -35,10 +35,10 @@ public abstract class VarLengthColumn extends ColumnReade ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, V v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) { + if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY) + || columnChunkMetaData.getEncodings().contains(Encoding.RLE_DICTIONARY)) { usingDictionary = true; - } - else { + } else { usingDictionary = false; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthValuesColumn.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthValuesColumn.java index 3ccbbda6fe..7be5c16a05 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthValuesColumn.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet/columnreaders/VarLengthValuesColumn.java @@ -40,10 +40,10 @@ public abstract class VarLengthValuesColumn extends VarLe SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); variableWidthVector = (VariableWidthVector) valueVec; - if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) { + if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY) + || columnChunkMetaData.getEncodings().contains(Encoding.RLE_DICTIONARY)) { usingDictionary = true; - } - else { + } else { usingDictionary = false; } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/LogicalListL1Converter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/LogicalListL1Converter.java index 9396b1ef02..ae6e49e9cc 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/LogicalListL1Converter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/LogicalListL1Converter.java @@ -125,6 +125,7 @@ public void end() { listWriter.endList(); } + @Override public boolean hasWritten() { return written; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetGroupConverter.java 
b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetGroupConverter.java index fe3d4ff63c..2771102aed 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetGroupConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetGroupConverter.java @@ -367,6 +367,14 @@ protected PrimitiveConverter getConverterForType(String fieldName, PrimitiveType TimeStampMilliWriter writer = isRepeated ? list(name).timeStampMilli() : getWriterProvider().timeStamp(name); return new TimeStampConverter(writer); } + case TIMESTAMP_MICROS: { + TimeStampMilliWriter writer = isRepeated ? list(name).timeStampMilli() : getWriterProvider().timeStamp(name); + return new TimeStampMicroConverter(writer); + } + case TIME_MICROS: { + TimeMilliWriter writer = isRepeated ? list(name).timeMilli() : getWriterProvider().time(name); + return new TimeMicroConverter(writer); + } default: // fall back to primitive type } @@ -467,6 +475,7 @@ void endListWriters() { } } + @Override public boolean hasWritten() { return written; } @@ -497,6 +506,7 @@ protected void setWritten() { written = true; } + @Override public boolean hasWritten() { return written; } @@ -656,6 +666,27 @@ public void writeNullListElement() { } } + private static class TimeMicroConverter extends ParquetPrimitiveConverter { + private TimeMilliWriter writer; + private TimeMicroConverter(TimeMilliWriter writer) { + this.writer = writer; + } + @Override + public void addInt(int value) { + writer.writeTimeMilli(value / 1000); + setWritten(); + } + @Override + public void addLong(long value) { + writer.writeTimeMilli((int)(value / 1000)); + setWritten(); + } + @Override + public void writeNullListElement() { + ((UnionListWriter)writer).writeNull(); + } + } + private static class BigIntConverter extends ParquetPrimitiveConverter { private BigIntWriter writer; private BigIntHolder holder = new BigIntHolder(); @@ -695,7 +726,24 @@ public void writeNullListElement() { ((UnionListWriter)writer).writeNull(); } } - + private static class TimeStampMicroConverter extends ParquetPrimitiveConverter { + private TimeStampMilliWriter writer; + private TimeStampMicroConverter(TimeStampMilliWriter writer) { + this.writer = writer; + } + @Override + public void addLong(long value) { + //value is microseconds since epoch. Truncate the microseconds + //to fit it into a millisecond range. 
+ //ie 1674475994560123 / 1000 = 1674475994560 + writer.writeTimeStampMilli(value / 1000); + setWritten(); + } + @Override + public void writeNullListElement() { + ((UnionListWriter)writer).writeNull(); + } + } private static class Decimal18Converter extends ParquetPrimitiveConverter { private DecimalWriter writer; private DecimalHolder holder = new DecimalHolder(); @@ -809,7 +857,8 @@ public void addBinary(Binary value) { if (value.length() > this.varValueSizeLimit) { throw createFieldSizeLimitException(value.length(), this.varValueSizeLimit); } - holder.buffer = buf = buf.reallocIfNeeded(value.length()); + buf = buf.reallocIfNeeded(value.length()); + holder.buffer = buf; buf.setBytes(0, value.toByteBuffer()); holder.start = 0; holder.end = value.length(); @@ -840,7 +889,8 @@ public void addBinary(Binary value) { if (value.length() > this.varValueSizeLimit) { throw createFieldSizeLimitException(value.length(), this.varValueSizeLimit); } - holder.buffer = buf = buf.reallocIfNeeded(value.length()); + buf = buf.reallocIfNeeded(value.length()); + holder.buffer = buf; buf.setBytes(0, value.toByteBuffer()); holder.start = 0; holder.end = value.length(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetRowiseReader.java b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetRowiseReader.java index 81449ed534..99da9f8309 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetRowiseReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/parquet2/ParquetRowiseReader.java @@ -32,7 +32,7 @@ import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.ColumnReader; import org.apache.parquet.compression.CompressionCodecFactory; @@ -274,7 +274,9 @@ public void setup(OutputMutator output) throws ExecutionSetupException { Path filePath = Path.of(path); BlockMetaData blockMetaData = footer.getBlocks().get(rowGroupIndex); - Preconditions.checkArgument(blockMetaData != null, "Parquet footer does not contain information about row group"); + Preconditions.checkArgument(blockMetaData != null, + "Parquet file '%s' footer does not have information about row group %s", + this.path, rowGroupIndex); recordCount = blockMetaData.getRowCount(); @@ -307,7 +309,8 @@ public void setup(OutputMutator output) throws ExecutionSetupException { recordReader = columnIO.getRecordReader(pageReadStore, recordMaterializer, new UnboundRecordFilter() { @Override public RecordFilter bind(Iterable readers) { - return vectorizedBasedFilter = new VectorizedBasedFilter(readers, deltas); + vectorizedBasedFilter = new VectorizedBasedFilter(readers, deltas); + return vectorizedBasedFilter; } }); } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/pojo/AbstractWriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/pojo/AbstractWriter.java index 35c10d41f6..e7deaa3bcc 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/pojo/AbstractWriter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/pojo/AbstractWriter.java @@ -53,6 +53,7 @@ public void allocate() { vector.allocateNew(); } + @Override public void setValueCount(int valueCount){ vector.setValueCount(valueCount); } diff --git 
a/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/EndpointByteMapImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/EndpointByteMapImpl.java index 26c7f59fdf..16b4af55a3 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/EndpointByteMapImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/EndpointByteMapImpl.java @@ -28,23 +28,28 @@ public class EndpointByteMapImpl implements EndpointByteMap{ private long maxBytes; + @Override public boolean isSet(HostAndPort endpoint){ return map.containsKey(endpoint); } + @Override public long get(HostAndPort endpoint){ return map.get(endpoint); } + @Override public boolean isEmpty(){ return map.isEmpty(); } + @Override public void add(HostAndPort endpoint, long bytes){ assert endpoint != null; maxBytes = Math.max(maxBytes, map.putOrAdd(endpoint, bytes, bytes)+1); } + @Override public long getMaxBytes() { return maxBytes; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/SimpleCompleteWork.java b/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/SimpleCompleteWork.java index 40a0b33fd0..f20077af01 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/SimpleCompleteWork.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/schedule/SimpleCompleteWork.java @@ -39,6 +39,7 @@ public long getSize() { return size; } + @Override public List getAffinity() { return affinity; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/SystemStoragePlugin.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/SystemStoragePlugin.java index c20254657c..4dd81d611f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/SystemStoragePlugin.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/SystemStoragePlugin.java @@ -54,6 +54,7 @@ import com.google.common.collect.Iterables; public class SystemStoragePlugin implements StoragePlugin, SupportsReadSignature, SupportsListingDatasets { + public static final String NAME = "sys"; private static final ImmutableMap DATASET_MAP = ImmutableMap.copyOf(Stream.of(SystemTable.values()) @@ -66,7 +67,7 @@ public class SystemStoragePlugin implements StoragePlugin, SupportsReadSignature private final JobResultInfoProvider jobResultInfoProvider; SystemStoragePlugin(SabotContext context, String name) { - Preconditions.checkArgument("sys".equals(name)); + Preconditions.checkArgument(NAME.equals(name)); this.context = context; this.jobResultInfoProvider = context.getJobResultInfoProvider(); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/TimezoneNames.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/TimezoneNames.java index 3211b03839..39b575a50a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/TimezoneNames.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/TimezoneNames.java @@ -45,12 +45,12 @@ public static Iterator getIterator() { .getSeconds()); String standardOffsetId = standardOffset.getId(); - if (standardOffsetId.equals("Z")) { + if ("Z".equals(standardOffsetId)) { standardOffsetId = "+00:00"; } String dsOffsetId = dsOffset.getId(); - if (dsOffsetId.equals("Z")) { + if ("Z".equals(dsOffsetId)) { dsOffsetId = "+00:00"; } return new TimezoneRegion(zone, standardOffsetId, dsOffsetId, zoneRules.isDaylightSavings(now)); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/accel/AccelerationManager.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/accel/AccelerationManager.java index 
8876c06838..27caab9d3b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/accel/AccelerationManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/accel/AccelerationManager.java @@ -18,13 +18,14 @@ import java.util.List; import com.dremio.exec.ops.ReflectionContext; +import com.dremio.exec.planner.sql.SchemaUtilities; public interface AccelerationManager { void dropAcceleration(List path, boolean raiseErrorIfNotFound); - void addLayout(List path, LayoutDefinition definition, ReflectionContext reflectionContext); + void addLayout(SchemaUtilities.TableWithPath tableWithPath, LayoutDefinition definition, ReflectionContext reflectionContext); void addExternalReflection(String name, List table, List targetTable, ReflectionContext reflectionContext); - void dropLayout(List path, String layoutId, ReflectionContext reflectionContext); - void toggleAcceleration(List path, LayoutDefinition.Type type, boolean enable, ReflectionContext reflectionContext); + void dropLayout(SchemaUtilities.TableWithPath tableWithPath, String layoutId, ReflectionContext reflectionContext); + void toggleAcceleration(SchemaUtilities.TableWithPath tableWithPath, LayoutDefinition.Type type, boolean enable, ReflectionContext reflectionContext); void replanlayout(String layoutId); T unwrap(Class clazz); @@ -39,7 +40,7 @@ public void dropAcceleration(List path, boolean raiseErrorIfNotFound) { } @Override - public void addLayout(List path, LayoutDefinition definition, ReflectionContext reflectionContext) { + public void addLayout(SchemaUtilities.TableWithPath tableWithPath, LayoutDefinition definition, ReflectionContext reflectionContext) { throw new UnsupportedOperationException("AccelerationManager.addLayout() called on a non-coordinator node"); } @@ -49,12 +50,12 @@ public void addExternalReflection(String name, List table, List } @Override - public void dropLayout(List path, String layoutId, ReflectionContext reflectionContext) { + public void dropLayout(SchemaUtilities.TableWithPath tableWithPath, String layoutId, ReflectionContext reflectionContext) { throw new UnsupportedOperationException("AccelerationManager.dropLayout() called on a non-coordinator node"); } @Override - public void toggleAcceleration(List path, LayoutDefinition.Type type, boolean enable, ReflectionContext reflectionContext) { + public void toggleAcceleration(SchemaUtilities.TableWithPath tableWithPath, LayoutDefinition.Type type, boolean enable, ReflectionContext reflectionContext) { throw new UnsupportedOperationException("AccelerationManager.toggleAcceleration() called on a non-coordinator node"); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/statistics/StatisticsService.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/statistics/StatisticsService.java index 6e7e0b040f..6a17671a79 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/statistics/StatisticsService.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/statistics/StatisticsService.java @@ -50,10 +50,13 @@ interface Histogram { boolean isItemsSketchSet(); } + @Override String requestStatistics(List fields, NamespaceKey key, Double samplingRate); + @Override List deleteStatistics(List fields, NamespaceKey key); + @Override boolean deleteRowCountStatistics(NamespaceKey key); @Override diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/FunctionOperatorTable.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/FunctionOperatorTable.java index 511f7ed11e..15ad023467 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/FunctionOperatorTable.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/FunctionOperatorTable.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; import org.apache.calcite.schema.FunctionParameter; import org.apache.calcite.sql.SqlFunctionCategory; @@ -34,8 +35,10 @@ public class FunctionOperatorTable implements SqlOperatorTable { public FunctionOperatorTable(String udfName, List functionParameters) { - this.functionParameterList = - UserDefinedFunctionArgumentOperator.createArgumentOperator(udfName, functionParameters); + this.functionParameterList = functionParameters + .stream() + .map(parameter -> UserDefinedFunctionArgumentOperator.createArgumentOperator(udfName, parameter)) + .collect(Collectors.toList()); } @Override public void lookupOperatorOverloads( diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunction.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunction.java index 0961041ae2..c264324e5e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunction.java @@ -23,32 +23,26 @@ import com.dremio.common.expression.CompleteType; -public class UserDefinedFunction { - +public final class UserDefinedFunction { private final String name; private final String functionSql; private final CompleteType returnType; private final List functionArgList; private final List fullPath; + private final byte[] serializedFunctionPlan; @Nullable private Timestamp createdAt; @Nullable private Timestamp modifiedAt; - public UserDefinedFunction(String name, String functionSql, CompleteType returnType, List functionArgList, List fullPath) { - this.name = name; - this.functionSql = functionSql; - this.returnType = returnType; - this.functionArgList = functionArgList; - this.fullPath = fullPath; - } - - public UserDefinedFunction(String name, + public UserDefinedFunction( + String name, String functionSql, CompleteType returnType, List functionArgList, List fullPath, + byte[] serializedFunctionPlan, Timestamp createdAt, Timestamp modifiedAt) { this.name = name; @@ -56,6 +50,7 @@ public UserDefinedFunction(String name, this.returnType = returnType; this.functionArgList = functionArgList; this.fullPath = fullPath; + this.serializedFunctionPlan = serializedFunctionPlan; this.createdAt = createdAt; this.modifiedAt = modifiedAt; } @@ -90,6 +85,10 @@ public List getFullPath() { return fullPath; } + public byte[] getSerializedFunctionPlan() { + return serializedFunctionPlan; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -111,10 +110,12 @@ public int hashCode() { public static class FunctionArg { private final CompleteType dataType; private final String name; + private final String defaultExpression; - public FunctionArg(String name, CompleteType dataType) { + public FunctionArg(String name, CompleteType dataType, String defaultExpression) { this.dataType = dataType; this.name = name; + this.defaultExpression = defaultExpression; } public String getName() { @@ -125,6 +126,10 @@ public CompleteType getDataType() { return dataType; } + public String getDefaultExpression() { + return defaultExpression; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -134,17 +139,19 @@ public boolean equals(Object o) { return false; } FunctionArg that = (FunctionArg) o; - return 
this.dataType.equals(that.dataType) && name.equals(that.name); + return this.dataType.equals(that.dataType) + && name.equals(that.name) + && defaultExpression.equals(that.defaultExpression); } @Override public String toString() { - return String.format("{\"name\": \"%s\", \"type\": \"%s\"}", name, dataType.toString()); + return String.format("{\"name\": \"%s\", \"type\": \"%s\", \"default_expr\": \"%s\"}", name, dataType.toString(), defaultExpression); } @Override public int hashCode() { - return Objects.hash(dataType, name); + return Objects.hash(dataType, name, defaultExpression); } } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerde.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerde.java index 8db5ab4571..06fc03e4fc 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerde.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerde.java @@ -29,23 +29,31 @@ import io.protostuff.ByteString; -public class UserDefinedFunctionSerde { +public final class UserDefinedFunctionSerde { + private UserDefinedFunctionSerde() {} public static UserDefinedFunction fromProto(FunctionConfig functionConfig) { FunctionDefinition functionDefinition = functionConfig.getFunctionDefinitionsList().get(0); List functionArgList = functionDefinition.getFunctionArgList() == null ? ImmutableList.of() - : functionDefinition.getFunctionArgList().stream() + : functionDefinition + .getFunctionArgList() + .stream() .map(functionArg -> - new UserDefinedFunction.FunctionArg(functionArg.getName(), - CompleteType.deserialize(functionArg.getRawDataType().toByteArray()))) + new UserDefinedFunction.FunctionArg( + functionArg.getName(), + CompleteType.deserialize(functionArg.getRawDataType().toByteArray()), + functionArg.getDefaultExpression())) .collect(ImmutableList.toImmutableList()); return new UserDefinedFunction( functionConfig.getName(), functionDefinition.getFunctionBody().getRawBody(), CompleteType.deserialize(functionConfig.getReturnType().getRawDataType().toByteArray()), - functionArgList, functionConfig.getFullPathList(), (functionConfig.getCreatedAt() != null) ? new Timestamp(functionConfig.getCreatedAt()) : null, + functionArgList, + functionConfig.getFullPathList(), + functionDefinition.getFunctionBody().getSerializedPlan().toByteArray(), + (functionConfig.getCreatedAt() != null) ? new Timestamp(functionConfig.getCreatedAt()) : null, (functionConfig.getLastModified() != null) ? 
new Timestamp(functionConfig.getLastModified()):null); } @@ -53,14 +61,19 @@ public static FunctionConfig toProto(UserDefinedFunction userDefinedFunction) { return new FunctionConfig() .setName(userDefinedFunction.getName()) .setFunctionDefinitionsList(ImmutableList.of( - new FunctionDefinition().setFunctionBody(new FunctionBody().setRawBody(userDefinedFunction.getFunctionSql())) + new FunctionDefinition().setFunctionBody( + new FunctionBody() + .setRawBody(userDefinedFunction.getFunctionSql()) + .setSerializedPlan(ByteString.copyFrom(userDefinedFunction.getSerializedFunctionPlan()))) .setFunctionArgList(userDefinedFunction.getFunctionArgsList().stream() .map(functionArg -> new FunctionArg() .setName(functionArg.getName()) - .setRawDataType(ByteString.copyFrom(functionArg.getDataType().serialize()))) + .setRawDataType(ByteString.copyFrom(functionArg.getDataType().serialize())) + .setDefaultExpression(functionArg.getDefaultExpression())) .collect(Collectors.toList())) )) .setFullPathList(userDefinedFunction.getFullPath()) .setReturnType(new ReturnType().setRawDataType(ByteString.copyFrom(userDefinedFunction.getReturnType().serialize()))); - }} + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionServiceImpl.java b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionServiceImpl.java index 1fb3b844bc..e4cbdffd2a 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionServiceImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionServiceImpl.java @@ -58,8 +58,7 @@ public UserDefinedFunctionServiceImpl( Provider allocatorProvider, Provider dremioConfigProvider, boolean isMaster, - boolean isCoordinator - ) { + boolean isCoordinator) { this.namespaceServiceProvider = Preconditions.checkNotNull(namespaceServiceProvider, "NamespaceService service required"); this.allocatorProvider = Preconditions.checkNotNull(allocatorProvider, "buffer allocator required"); this.fabric = Preconditions.checkNotNull(fabric, "fabric service required"); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/store/text/TextRecordWriter.java b/sabot/kernel/src/main/java/com/dremio/exec/store/text/TextRecordWriter.java index fce0c39401..5bc3dc8cb6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/store/text/TextRecordWriter.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/store/text/TextRecordWriter.java @@ -71,7 +71,7 @@ public TextRecordWriter(OperatorContext context, EasyWriter config, TextFormatCo this.context = context; this.location = config.getLocation(); this.prefix = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId()); - this.fieldDelimiter = textConfig.getFieldDelimiterAsString(); + this.fieldDelimiter = textConfig.getFieldDelimiter(); this.lineDelimiter = textConfig.getLineDelimiter(); this.extension = textConfig.outputExtension; this.currentRecord = new StringBuilder(); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/tablefunctions/MetadataFunctionsMacro.java b/sabot/kernel/src/main/java/com/dremio/exec/tablefunctions/MetadataFunctionsMacro.java index 867a9f9f3e..168235192e 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/tablefunctions/MetadataFunctionsMacro.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/tablefunctions/MetadataFunctionsMacro.java @@ -29,6 +29,7 @@ * select * from table(table_manifests('iceberg_table')) * select * from table(table_snapshot('iceberg_table')) * select * from 
table(table_files('iceberg_table')) + * select * from table(table_partitions('iceberg_table')) * will be translated into a call to this table macro, with the parsed TableVersionContext passed as a * parameter to apply(). The parsed table-id will be provided as a string in the 1st parameter to the macro. */ @@ -38,7 +39,8 @@ public enum MacroName { TABLE_HISTORY("table_history"), TABLE_MANIFESTS("table_manifests"), TABLE_SNAPSHOT("table_snapshot"), - TABLE_FILES("table_files"); + TABLE_FILES("table_files"), + TABLE_PARTITIONS("table_partitions"); private final String name; MacroName(String name) { this.name = name; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/BatchSchemaField.java b/sabot/kernel/src/main/java/com/dremio/exec/util/BatchSchemaField.java index a3d1bff3c0..13e418ae1f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/BatchSchemaField.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/BatchSchemaField.java @@ -34,6 +34,7 @@ public BatchSchemaField(String name, boolean nullable, ArrowType type, List dictionaryEncodedColumns = createGlobalDictionaries(codecFactory, fs, tableDir, bufferAllocator).getColumnsToDictionaryFiles(); - long version = getDictionaryVersion(fs, tableDir); - Path dictionaryRootDir = getDictionaryVersionedRootPath(fs, tableDir, version); - for (ColumnDescriptor columnDescriptor: dictionaryEncodedColumns.keySet()) { - final VectorContainer data = readDictionary(fs, dictionaryRootDir, columnDescriptor, bufferAllocator); - System.out.println("Dictionary for column [" + columnDescriptor.toString() + " size " + data.getRecordCount()); - BatchPrinter.printBatch(data, true, false); - data.clear(); + try { + final FileSystem fs = HadoopFileSystem.get(tableDir, conf); + if (fs.exists(tableDir) && fs.isDirectory(tableDir)) { + Map dictionaryEncodedColumns = createGlobalDictionaries(codecFactory, fs, tableDir, bufferAllocator).getColumnsToDictionaryFiles(); + long version = getDictionaryVersion(fs, tableDir); + Path dictionaryRootDir = getDictionaryVersionedRootPath(fs, tableDir, version); + for (ColumnDescriptor columnDescriptor : dictionaryEncodedColumns.keySet()) { + final VectorContainer data = readDictionary(fs, dictionaryRootDir, columnDescriptor, bufferAllocator); + System.out.println("Dictionary for column [" + columnDescriptor.toString() + " size " + data.getRecordCount()); + BatchPrinter.printBatch(data, true, false); + data.clear(); + } } + } finally { + codecFactory.release(); } } catch (IOException ioe) { logger.error("Failed ", ioe); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/IntDecimalComparator.java b/sabot/kernel/src/main/java/com/dremio/exec/util/IntDecimalComparator.java index 0943796d35..9c4a42998c 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/IntDecimalComparator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/IntDecimalComparator.java @@ -19,12 +19,14 @@ public class IntDecimalComparator extends DecimalMixedEndianComparatorImpl { + @Override public int compare(ArrowBuf left, int startIndexLeft, int valueLength, ArrowBuf right, int startIndexRight) { setCommonValues(left, startIndexLeft, right, startIndexRight); return compareInner(left, startIndexLeft, valueLength, right, startIndexRight); } + @Override protected int compareInner(ArrowBuf left, int startIndexLeft, int valueLength, ArrowBuf right, int startIndexRight) { long leftValLow = left.getInt(startIndexLeft); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/LocalDictionariesReader.java 
b/sabot/kernel/src/main/java/com/dremio/exec/util/LocalDictionariesReader.java index 8565141833..efeda4166f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/LocalDictionariesReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/LocalDictionariesReader.java @@ -128,11 +128,15 @@ public static void main(String[] args) { final FileSystem fs = HadoopFileSystem.getLocal(fsConf); final Path filePath = Path.of(args[0]); final CompressionCodecFactory codecFactory = CodecFactory.createDirectCodecFactory(fsConf, new ParquetDirectByteBufferAllocator(bufferAllocator), 0); - final Pair, Set> dictionaries = readDictionaries(fs, filePath, codecFactory); - for (Map.Entry entry : dictionaries.getLeft().entrySet()) { - printDictionary(entry.getKey(), entry.getValue()); + try { + final Pair, Set> dictionaries = readDictionaries(fs, filePath, codecFactory); + for (Map.Entry entry : dictionaries.getLeft().entrySet()) { + printDictionary(entry.getKey(), entry.getValue()); + } + System.out.println("Binary columns which are not dictionary encoded: " + dictionaries.getRight()); + } finally { + codecFactory.release(); } - System.out.println("Binary columns which are not dictionary encoded: " + dictionaries.getRight()); } catch (IOException ioe) { logger.error("Failed ", ioe); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/LongDecimalComparator.java b/sabot/kernel/src/main/java/com/dremio/exec/util/LongDecimalComparator.java index 900424d931..89a4315497 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/LongDecimalComparator.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/LongDecimalComparator.java @@ -19,12 +19,14 @@ public class LongDecimalComparator extends DecimalMixedEndianComparatorImpl { + @Override public int compare(ArrowBuf left, int startIndexLeft, int valueLength, ArrowBuf right, int startIndexRight) { setCommonValues(left, startIndexLeft, right, startIndexRight); return compareInner(left, startIndexLeft, valueLength, right, startIndexRight); } + @Override protected int compareInner(ArrowBuf left, int startIndexLeft, int valueLength, ArrowBuf right, int startIndexRight) { long leftValLow = left.getLong(startIndexLeft); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/TestUtilities.java b/sabot/kernel/src/main/java/com/dremio/exec/util/TestUtilities.java index 8984f77236..02b2d2715b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/TestUtilities.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/TestUtilities.java @@ -183,7 +183,7 @@ public static void addDefaultTestPlugins(CatalogService catalog, final String tm private static void addIcebergHadoopTables(CatalogService catalog, final String tmpDirPath) { CatalogServiceImpl catalogImpl = (CatalogServiceImpl) catalog; - // add dfs. + // add dfs_hadoop. 
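Both dictionary command-line tools above are reworked so the direct-memory codec factory is released on failure paths as well as on success. A compact sketch of the idiom under assumed names (ResourceStub and readDictionaries are hypothetical, not parquet or Dremio APIs):

import java.io.IOException;

public final class ReleaseSketch {
  // Hypothetical stand-in for parquet's CompressionCodecFactory: cleanup is a
  // no-arg void method named release(), not close().
  static final class ResourceStub {
    void release() {
      System.out.println("released");
    }
  }

  static void readDictionaries(ResourceStub r, String path) throws IOException {
    if (path.isEmpty()) {
      throw new IOException("no file given");
    }
    System.out.println("read dictionaries from " + path);
  }

  public static void main(String[] args) {
    ResourceStub factory = new ResourceStub();
    try {
      // Work that may throw goes inside the try...
      readDictionaries(factory, args.length > 0 ? args[0] : "");
    } catch (IOException ioe) {
      System.err.println("Failed: " + ioe.getMessage());
    } finally {
      // ...so the release runs on success and failure alike.
      factory.release();
    }
  }
}

Since release() is itself a no-arg void method, the method-reference adaptation shown earlier would equally allow try (AutoCloseable ac = factory::release) { ... } here.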
{ SourceConfig c = new SourceConfig(); InternalFileConf conf = new InternalFileConf(); @@ -195,6 +195,19 @@ private static void addIcebergHadoopTables(CatalogService catalog, final String c.setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); catalogImpl.getSystemUserCatalog().createSource(c); } + //dfs_hadoop_mutable + { + SourceConfig c = new SourceConfig(); + InternalFileConf conf = new InternalFileConf(); + conf.connection = "file:///"; + conf.path = "/tmp"; + conf.mutability = SchemaMutability.ALL; + conf.defaultCtasFormat = DefaultCtasFormatSelection.ICEBERG; + c.setConnectionConf(conf); + c.setName("dfs_hadoop_mutable"); + c.setMetadataPolicy(CatalogService.NEVER_REFRESH_POLICY_WITH_AUTO_PROMOTE); + catalogImpl.getSystemUserCatalog().createSource(c); + } // add dfs_test { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/Utilities.java b/sabot/kernel/src/main/java/com/dremio/exec/util/Utilities.java index e736f907c6..4784a10933 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/Utilities.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/Utilities.java @@ -115,6 +115,10 @@ public static UserBitShared.WorkloadType getByClientType(UserBitShared.RpcEndpoi } final String name = clientInfos.getName().toLowerCase(Locale.ROOT); + if (name.contains("dremio-to-dremio")) { + return WorkloadType.D2D; + } + if (name.contains("jdbc") || name.contains("java")) { return WorkloadType.JDBC; } @@ -138,6 +142,8 @@ public static String getHumanReadableWorkloadType(WorkloadType workloadType) { return "Internal Run"; case INTERNAL_PREVIEW: return "Internal Preview"; + case D2D: + return "D2D"; case JDBC: return "JDBC"; case ODBC: diff --git a/sabot/kernel/src/main/java/com/dremio/exec/util/VectorUtil.java b/sabot/kernel/src/main/java/com/dremio/exec/util/VectorUtil.java index 78b5727eb8..249fa7ebdb 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/util/VectorUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/util/VectorUtil.java @@ -65,8 +65,7 @@ public static void showVectorAccessibleContent(VectorAccessible va, final String //null value String value = "null"; System.out.printf("%s%s", value, lastColumn ? "\n" : delimiter); - } - else if (o instanceof byte[]) { + } else if (o instanceof byte[]) { String value = new String((byte[]) o); System.out.printf("%s%s", value, lastColumn ? 
"\n" : delimiter); } else { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/JsonReader.java b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/JsonReader.java index 27ca5150cd..4aaa66b9f6 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/JsonReader.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/JsonReader.java @@ -101,12 +101,15 @@ public class JsonReader extends BaseJsonProcessor { private Map> structWriterToFieldMap = new HashMap<>(); - public JsonReader(ArrowBuf managedBuf, int maxFieldSize, int maxLeafLimit, boolean allTextMode, boolean skipOuterList, boolean readNumbersAsDouble) { - this(managedBuf, GroupScan.ALL_COLUMNS, maxFieldSize, maxLeafLimit, allTextMode, skipOuterList, readNumbersAsDouble, false, null, null, null); + public JsonReader(ArrowBuf managedBuf, int maxFieldSize, int maxLeafLimit, boolean allTextMode, boolean skipOuterList, boolean readNumbersAsDouble, + boolean enforceValidJsonDateFormat) { + this(managedBuf, GroupScan.ALL_COLUMNS, maxFieldSize, maxLeafLimit, allTextMode, skipOuterList, readNumbersAsDouble, false, null, null, null, + enforceValidJsonDateFormat); } public JsonReader(ArrowBuf managedBuf, List columns, int maxFieldSize, int maxLeafLimit, boolean allTextMode, - boolean skipOuterList, boolean readNumbersAsDouble, boolean schemaImposedMode, ExtendedFormatOptions extendedFormatOptions, OperatorContext context, BatchSchema targetSchema) { + boolean skipOuterList, boolean readNumbersAsDouble, boolean schemaImposedMode, ExtendedFormatOptions extendedFormatOptions, OperatorContext context, BatchSchema targetSchema, + boolean enforceValidJsonDateFormat) { assert Preconditions.checkNotNull(columns).size() > 0 : "JSON record reader requires at least one column"; this.targetSchema = targetSchema; this.selection = FieldSelection.getFieldSelection(columns); @@ -114,8 +117,8 @@ public JsonReader(ArrowBuf managedBuf, List columns, int maxFieldSiz this.skipOuterList = skipOuterList; this.allTextMode = allTextMode; this.columns = columns; - this.mapOutput = new MapVectorOutput(workingBuffer); - this.listOutput = new ListVectorOutput(workingBuffer); + this.mapOutput = new MapVectorOutput(workingBuffer, enforceValidJsonDateFormat); + this.listOutput = new ListVectorOutput(workingBuffer, enforceValidJsonDateFormat); this.currentFieldName=""; this.readNumbersAsDouble = readNumbersAsDouble; this.maxFieldSize = maxFieldSize; @@ -235,6 +238,7 @@ public void setSource(String data) throws IOException { setSource(data.getBytes(UTF_8)); } + @Override public void setSource(byte[] bytes) throws IOException { setSource(new SeekableBAIS(bytes)); } @@ -345,6 +349,7 @@ private ReadState writeToVector(ComplexWriter writer, JsonToken t) throws IOExce addNullValueForStruct(writer); break; } + // fall through default: throw getExceptionWithContext( diff --git a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/TransformationException.java b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/TransformationException.java index 9de8bf7d1d..075c6edf21 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/TransformationException.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/TransformationException.java @@ -22,17 +22,13 @@ */ public class TransformationException extends IOException { - int lineNumber; + private final int lineNumber; public TransformationException(String message, int lineNumber) { super(message); this.lineNumber = lineNumber; } - 
public TransformationException(String message) { - super(message); - } - public int getLineNumber() { return lineNumber; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/VectorOutput.java b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/VectorOutput.java index e77a0cee92..0deabc7d9b 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/VectorOutput.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/vector/complex/fn/VectorOutput.java @@ -30,6 +30,7 @@ import org.apache.arrow.vector.holders.TimeStampMilliHolder; import org.apache.arrow.vector.holders.VarBinaryHolder; import org.apache.arrow.vector.holders.VarCharHolder; +import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.joda.time.LocalDateTime; import org.joda.time.format.DateTimeFormatter; @@ -56,9 +57,11 @@ abstract class VectorOutput { protected final WorkingBuffer work; protected JsonParser parser; + private final boolean enforceValidJsonDateFormat; - public VectorOutput(WorkingBuffer work){ + public VectorOutput(WorkingBuffer work, boolean enforceValidJsonDateFormat){ this.work = work; + this.enforceValidJsonDateFormat = enforceValidJsonDateFormat; } public void setParser(JsonParser parser){ @@ -96,7 +99,12 @@ protected boolean innerRun() throws IOException{ checkNextToken(JsonToken.END_OBJECT); return true; case ExtendedTypeName.TIMESTAMP: - writeTimestamp(checkNextToken(JsonToken.VALUE_STRING, JsonToken.VALUE_NUMBER_INT)); + parser.nextToken(); + if (enforceValidJsonDateFormat) { + parseExtendedTimestamp(parser); + } else { + writeTimestamp(checkCurrentToken(JsonToken.VALUE_STRING, JsonToken.VALUE_NUMBER_INT)); + } checkNextToken(JsonToken.END_OBJECT); return true; case ExtendedTypeName.INTERVAL: @@ -112,6 +120,34 @@ protected boolean innerRun() throws IOException{ return false; } + private void parseExtendedTimestamp(JsonParser parser) throws IOException { + // We may have $date:{$numberLong: xxx}, in which case we still expect + // VALUE_STRING token for the xxx, but first we need to skip over the + // $numberLong token. + if (parser.getCurrentToken() == JsonToken.START_OBJECT) { + parser.nextToken(); + String parserValue = parser.getValueAsString(); + if (!ExtendedTypeName.INTEGER.equals(parserValue)) { + throw UserException.validationError() + .message(String.format("Invalid token %s", parserValue)) + .build(LOG); + } + writeTimestamp(checkNextToken(JsonToken.VALUE_STRING)); + // If we had a nested token, there is an extra end object (that of the outer token) that we need to skip over. + checkNextToken(JsonToken.END_OBJECT); + } else if (StringUtils.isNumeric(parser.getValueAsString())) { + // The fact that we have entered this method means ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED flag is enabled. + // In this case, we don't support the invalid format of a numerical value for $date which we supported before. + throw UserException.unsupportedError() + .message("Invalid format for " + ExtendedTypeName.TIMESTAMP + ". 
Must be wrapped by $numberLong or " + + "follow any other valid ISO-8601 format.") + .build(LOG); + } else { + // Handle non-nested format + writeTimestamp(checkCurrentToken(JsonToken.VALUE_STRING)); + } + } + public boolean checkNextToken(final JsonToken expected) throws IOException{ return checkNextToken(expected, expected); } @@ -170,9 +206,11 @@ public boolean checkToken(final JsonToken t, final JsonToken expected1, final Js static class ListVectorOutput extends VectorOutput{ private ListWriter writer; + private boolean enforceValidJsonDateFormat; - public ListVectorOutput(WorkingBuffer work) { - super(work); + public ListVectorOutput(WorkingBuffer work, boolean enforceValidJsonDateFormat) { + super(work, enforceValidJsonDateFormat); + this.enforceValidJsonDateFormat = enforceValidJsonDateFormat; } public boolean run(ListWriter writer) throws IOException{ @@ -223,6 +261,12 @@ public void writeTimestamp(boolean isNull) throws IOException { if(!isNull){ switch (parser.getCurrentToken()) { case VALUE_NUMBER_INT: + if (enforceValidJsonDateFormat) { + throw UserException.unsupportedError() + .message("Invalid format for " + ExtendedTypeName.TIMESTAMP + ". Must be wrapped by $numberLong or " + + "follow any other valid ISO-8601 format.") + .build(LOG); + } LocalDateTime dt = new LocalDateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC); ts.writeTimeStampMilli(com.dremio.common.util.DateTimes.toMillis(dt)); break; @@ -252,9 +296,11 @@ static class MapVectorOutput extends VectorOutput { private StructWriter writer; private String fieldName; + boolean enforceValidJsonDateFormat; - public MapVectorOutput(WorkingBuffer work) { - super(work); + public MapVectorOutput(WorkingBuffer work, boolean enforceValidJsonDateFormat) { + super(work, enforceValidJsonDateFormat); + this.enforceValidJsonDateFormat = enforceValidJsonDateFormat; } public boolean run(StructWriter writer, String fieldName) throws IOException{ @@ -307,6 +353,12 @@ public void writeTimestamp(boolean isNull) throws IOException { if(!isNull){ switch (parser.getCurrentToken()) { case VALUE_NUMBER_INT: + if (enforceValidJsonDateFormat) { + throw UserException.unsupportedError() + .message("Invalid format for " + ExtendedTypeName.TIMESTAMP + ". Must be wrapped by $numberLong or " + + "follow any other valid ISO-8601 format.") + .build(LOG); + } LocalDateTime dt = new LocalDateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC); ts.writeTimeStampMilli(com.dremio.common.util.DateTimes.toMillis(dt)); break; @@ -323,8 +375,8 @@ public void writeTimestamp(boolean isNull) throws IOException { } } timestampLiteral = timestampLiteral.replace(' ', 'T'); // Support formats like 2020-039 09 that have a space between date and time - DateTime dateTime = new DateTime(timestampLiteral); - ts.writeTimeStampMilli(dateTime.getMillis()); + long millis = StringUtils.isNumeric(timestampLiteral) ? 
Long.parseLong(timestampLiteral) : new DateTime(timestampLiteral).getMillis(); + ts.writeTimeStampMilli(millis); break; default: throw UserException.unsupportedError()
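The VectorOutput changes just above distinguish three shapes for an extended-type $date value: a nested {"$numberLong": "..."} wrapper, a bare integer literal (now rejected when the strict flag is set), and a date string. A self-contained Jackson sketch of that token walk follows; the class name, helper, and messages are illustrative only, and where the production code leniently accepts Joda-Time-parsable strings such as "2020-039 09", this sketch accepts only strict ISO-8601 via java.time:

import java.io.IOException;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public final class ExtendedDateSketch {
  // Parses the value of a "$date" field and returns epoch millis.
  // Assumes the parser is positioned on the token after the "$date" field name.
  static long readDateValue(JsonParser p) throws IOException {
    JsonToken t = p.getCurrentToken();
    if (t == JsonToken.START_OBJECT) {
      // Nested form: {"$numberLong": "1674475994560"}
      p.nextToken(); // FIELD_NAME
      if (!"$numberLong".equals(p.getCurrentName())) {
        throw new IOException("Invalid token " + p.getCurrentName());
      }
      p.nextToken(); // VALUE_STRING with the millis
      long millis = Long.parseLong(p.getText());
      p.nextToken(); // END_OBJECT of the inner wrapper; the outer one follows
      return millis;
    }
    if (t == JsonToken.VALUE_NUMBER_INT) {
      // Bare numeric $date: rejected under the strict setting.
      throw new IOException("$date must be wrapped by $numberLong or be a valid date string");
    }
    // Plain string form, e.g. "2023-01-23T12:13:14.560Z"
    return java.time.Instant.parse(p.getText()).toEpochMilli();
  }

  public static void main(String[] args) throws IOException {
    JsonFactory f = new JsonFactory();
    for (String doc : new String[] {
        "{\"$date\":{\"$numberLong\":\"1674475994560\"}}",
        "{\"$date\":\"2023-01-23T12:13:14.560Z\"}"}) {
      try (JsonParser p = f.createParser(doc)) {
        p.nextToken(); // START_OBJECT
        p.nextToken(); // FIELD_NAME "$date"
        p.nextToken(); // value token
        System.out.println(readDateValue(p)); // both documents print 1674475994560
      }
    }
  }
}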
diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptManager.java b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptManager.java index 7ef4b34065..886b2650e8 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptManager.java @@ -30,6 +30,7 @@ import com.dremio.common.utils.protos.QueryIdHelper; import com.dremio.common.utils.protos.QueryWritableBatch; import com.dremio.exec.ExecConstants; +import com.dremio.exec.maestro.MaestroObserver; import com.dremio.exec.maestro.MaestroService; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.planner.observer.AttemptObserver; @@ -97,7 +98,7 @@ * messages are sent to running fragments to terminate * - when all fragments complete, state change messages drive the state to COMPLETED */ -public class AttemptManager implements Runnable { +public class AttemptManager implements Runnable, MaestroObserver.ExecutionStageChangeListener { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AttemptManager.class); private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(AttemptManager.class); @@ -147,6 +148,12 @@ public class AttemptManager implements Runnable { @VisibleForTesting public static final String INJECTOR_DURING_PLANNING_PAUSE = "during-planning-pause"; + @VisibleForTesting + public static final String INJECTOR_COMMIT_FAILURE = "commit-failure"; + + @VisibleForTesting + public static final String INJECTOR_CLEANING_FAILURE = "cleaning-failure"; + private final AttemptId attemptId; private final QueryId queryId; private RuleBasedEngineSelector ruleBasedEngineSelector; @@ -163,13 +170,22 @@ public class AttemptManager implements Runnable { private final AttemptResult foremanResult = new AttemptResult(); private Object extraResultData; private final AttemptProfileTracker profileTracker; - private final AttemptObserver observer; + private final AttemptObserver profileObserver; private final Pointer prepareId; private final CommandPool commandPool; + // Two threads (the REST API thread and the foreman) touch this field, so make it volatile. The move to + // currentExecutionStage can race with an incoming cancel REST API request; because of this, whenever the foreman + // moves execution to the next stage, the cancel flag is checked again and a cancellation exception is thrown. + // See the clientCancelled flag above. + private volatile AttemptEvent.State currentExecutionStage; private CommandRunner command; private Optional committer = Optional.empty(); private Optional queryCleaner = Optional.empty(); + // set to true if the query failed due to an engine timeout + private boolean timedoutWaitingForEngine = false; + private boolean runTimeExceeded = false; + /** * if set to true, query is not going to be scheduled on a separate thread */ @@ -185,7 +201,7 @@ public AttemptManager( final SabotContext sabotContext, final AttemptId attemptId, final UserRequest queryRequest, - final AttemptObserver observer, + final AttemptObserver attemptObserver, final OptionProvider options, final Cache preparedPlans, final QueryContext queryContext, @@ -206,6 +222,7 @@ public AttemptManager( this.commandPool = commandPool; this.maestroService = maestroService; this.runInSameThread = runInSameThread; + this.currentExecutionStage = AttemptEvent.State.INVALID_STATE; prepareId = new Pointer<>(); final OptionManager optionManager = this.queryContext.getOptions(); @@ -215,8 +232,8 @@ public AttemptManager( profileTracker = new AttemptProfileTracker(queryId, queryContext, queryRequest.getDescription(), () -> state, - observer, jobTelemetryClient); - this.observer = profileTracker.getObserver(); + attemptObserver, jobTelemetryClient); + this.profileObserver = profileTracker.getObserver(); RUN_15M.increment(); RUN_1D.increment(); @@ -225,6 +242,18 @@ public AttemptManager( injector.injectUnchecked(queryContext.getExecutionControls(), INJECTOR_CONSTRUCTOR_ERROR); } + @Override + public void moveToNextStage(AttemptEvent.State nextStage) { + this.currentExecutionStage = nextStage; + if (clientCancelled && !isTerminalStage(nextStage)) { + // double-check whether the client cancelled before we moved to the next stage. + // this removes the window where the system is just entering a stage and therefore + // fails to get interrupted from its wait states as it is just entering it. + // for now, do this only when the client has issued a cancel. + throw new UserCancellationException(profileTracker.getCancelReason()); + } + } + private class CompletionListenerImpl implements CompletionListener { @Override @@ -255,9 +284,9 @@ public void dataFromScreenArrived(QueryData header, ResponseSender sender, ByteB Arrays.stream(data).filter(d -> d != null).count() > 0) { // we're going to send this some place, we need increment to ensure this is around long enough to send. Arrays.stream(data).filter(d -> d != null).forEach(d->d.retain()); - observer.execDataArrived(new ScreenShuttle(sender), new QueryWritableBatch(header, data)); + profileObserver.execDataArrived(new ScreenShuttle(sender), new QueryWritableBatch(header, data)); } else { - observer.execDataArrived(new ScreenShuttle(sender), new QueryWritableBatch(header)); + profileObserver.execDataArrived(new ScreenShuttle(sender), new QueryWritableBatch(header)); } } @@ -341,18 +370,29 @@ public ListenableFuture sendPlanningProfile() { * @param clientCancelled true if the client application explicitly issued a cancellation (via end user action), or * false otherwise (i.e.
when pushing the cancellation notification to the end user) */ - public void cancel(String reason, boolean clientCancelled, String cancelContext, boolean isCancelledByHeapMonitor) { + public void cancel(String reason, boolean clientCancelled, String cancelContext, boolean isCancelledByHeapMonitor, boolean runTimeExceeded) { // Note this can be called from outside of run() on another thread, or after run() completes - this.clientCancelled = clientCancelled; profileTracker.setCancelReason(reason); + this.clientCancelled = clientCancelled; + this.runTimeExceeded = runTimeExceeded; // Set the cancelFlag, so that a query in the planning phase will be canceled // by super.checkCancel() in DremioVolcanoPlanner and DremioHepPlanner queryContext.getPlannerSettings().cancelPlanning(reason, queryContext.getCurrentEndpoint(), cancelContext, isCancelledByHeapMonitor); + // interrupt execution immediately if the query is blocked in any of the stages where it can get blocked. + // For instance, in the ENGINE START stage the query could be blocked waiting for the engine to start, or in the QUEUED stage + // the query could be blocked waiting for a slot to become available when the number of concurrent queries exceeds the number of + // available slots (max concurrency). + maestroService.interruptExecutionInWaitStates(queryId, currentExecutionStage); // Do not cancel queries in running state when canceled by coordinator heap monitor if (!isCancelledByHeapMonitor) { + // Put the cancel in the event queue: + // Note: Since the event processor only processes events after the attempt manager has completed all coordinator + // stages (including maestro's executeQuery), it is assumed that the interruptions done above will make the + // maestro end the executeQuery prematurely so that the state machine gets started and all pending events, + // including this cancel, get processed.
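To make that ordering concrete, a rough self-contained sketch (invented names, not Dremio APIs): the interrupt is what wakes a stage parked in a blocking wait, and only after the foreman unwinds to its event loop does the queued cancel get handled.

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    // Sketch: interrupt first so the blocked stage wakes up, then enqueue the cancel.
    class CancelFlowSketch {
      private final BlockingQueue<String> events = new LinkedBlockingQueue<>();
      private volatile Thread foreman;

      void cancel() {
        foreman.interrupt();     // unblocks ENGINE_START / QUEUED style waits
        events.add("CANCELED");  // drained once the foreman reaches its event loop
      }

      void runForeman() {
        foreman = Thread.currentThread();
        try {
          Thread.sleep(Long.MAX_VALUE);   // stand-in for a blocking stage
        } catch (InterruptedException e) {
          // interrupted by cancel(); fall through to the event loop
        }
        String event = events.poll();     // sees "CANCELED"
        // ... drive the state machine with the event ...
      }
    }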
addToEventQueue(QueryState.CANCELED, null); } } @@ -387,15 +427,15 @@ public void run() { final Thread currentThread = Thread.currentThread(); final String originalName = currentThread.getName(); currentThread.setName(queryIdString + ":foreman"); - + final MaestroObserverWrapper maestroObserver = new MaestroObserverWrapper(this.profileObserver, this); try { injector.injectChecked(queryContext.getExecutionControls(), INJECTOR_TRY_BEGINNING_ERROR, ForemanException.class); - observer.beginState(AttemptObserver.toEvent(AttemptEvent.State.PENDING)); + maestroObserver.beginState(AttemptObserver.toEvent(AttemptEvent.State.PENDING)); - observer.queryStarted(queryRequest, queryContext.getSession().getCredentials().getUserName()); + profileObserver.queryStarted(queryRequest, queryContext.getSession().getCredentials().getUserName()); String ruleSetEngine = ruleBasedEngineSelector.resolveAndUpdateEngine(queryContext); ResourceSchedulingProperties resourceSchedulingProperties = new ResourceSchedulingProperties(); @@ -413,7 +453,7 @@ public void run() { attemptId.toString() + ":foreman-planning", "foreman-planning", (waitInMillis) -> { - observer.commandPoolWait(waitInMillis); + profileObserver.commandPoolWait(waitInMillis); injector.injectPause(queryContext.getExecutionControls(), INJECTOR_PENDING_PAUSE, logger); injector.injectChecked(queryContext.getExecutionControls(), INJECTOR_PENDING_ERROR, @@ -426,7 +466,6 @@ public void run() { return null; }, runInSameThread).get(); - if (command.getCommandType() == CommandType.ASYNC_QUERY) { AsyncCommand asyncCommand = (AsyncCommand) command; committer = asyncCommand.getPhysicalPlan().getCommitter(); @@ -434,20 +473,21 @@ public void run() { moveToState(QueryState.STARTING, null); maestroService.executeQuery(queryId, queryContext, asyncCommand.getPhysicalPlan(), runInSameThread, - new MaestroObserverWrapper(observer), new CompletionListenerImpl()); + maestroObserver, new CompletionListenerImpl()); asyncCommand.executionStarted(); } - observer.beginState(AttemptObserver.toEvent(AttemptEvent.State.RUNNING)); + maestroObserver.beginState(AttemptObserver.toEvent(AttemptEvent.State.RUNNING)); moveToState(QueryState.RUNNING, null); injector.injectChecked(queryContext.getExecutionControls(), INJECTOR_TRY_END_ERROR, ForemanException.class); } catch (ResourceUnavailableException e) { + timedoutWaitingForEngine = true; // resource allocation failure is treated as a cancellation and not a failure try { // the caller (JobEventCollatingObserver) expects metadata event before a cancel/complete event. - observer.planCompleted(null); + profileObserver.planCompleted(null); } catch (Exception ignore) { } profileTracker.setCancelReason(e.getMessage()); @@ -510,9 +550,10 @@ public void run() { private void plan() throws Exception { // query parsing and dataset retrieval (both from source and kvstore). 
- observer.beginState(AttemptObserver.toEvent(AttemptEvent.State.METADATA_RETRIEVAL)); + profileObserver.beginState(AttemptObserver.toEvent(AttemptEvent.State.METADATA_RETRIEVAL)); + moveToNextStage(AttemptEvent.State.METADATA_RETRIEVAL); - CommandCreator creator = newCommandCreator(queryContext, observer, prepareId); + CommandCreator creator = newCommandCreator(queryContext, profileObserver, prepareId); command = creator.toCommand(); logger.debug("Using command: {}.", command); @@ -678,17 +719,14 @@ public void close() { currentThread.setName(queryIdString + ":foreman"); try { - injector.injectChecked(queryContext.getExecutionControls(), "commit-failure", UnsupportedOperationException.class); + injector.injectChecked(queryContext.getExecutionControls(), INJECTOR_CLEANING_FAILURE, UnsupportedOperationException.class); - if (resultState == QueryState.COMPLETED) { - // The commit handler can internally invoke other grpcs. So, forking the context here. - Context.current().fork().run(() -> committer.ifPresent(Runnable::run)); - } else if (resultState == QueryState.CANCELED || resultState == QueryState.FAILED) { + if (resultState == QueryState.CANCELED || resultState == QueryState.FAILED) { Context.current().fork().run(() -> queryCleaner.ifPresent(Runnable::run)); } } catch (Exception e) { addException(e); - logger.warn("Exception during commit after attempt completion", resultException); + logger.warn("Exception during cleaning after attempt completion", resultException); recordNewState(QueryState.FAILED); foremanResult.setForceFailure(e); } @@ -714,7 +752,9 @@ public void close() { if (resultState != state) { recordNewState(resultState); } - observer.beginState(AttemptObserver.toEvent(convertTerminalToAttemptState(resultState))); + AttemptEvent.State terminalStage = convertTerminalToAttemptState(resultState); + profileObserver.beginState(AttemptObserver.toEvent(terminalStage)); + moveToNextStage(terminalStage); UserException uex; if (resultException != null) { @@ -786,9 +826,10 @@ public void close() { } try { - final UserResult result = new UserResult(extraResultData, queryId, resultState, - queryProfile, uex, profileTracker.getCancelReason(), clientCancelled); - observer.attemptCompletion(result); + UserResult result = new UserResult(extraResultData, queryId, resultState, + queryProfile, uex, profileTracker.getCancelReason(), clientCancelled, + timedoutWaitingForEngine, runTimeExceeded); + profileObserver.attemptCompletion(result); } catch (final Exception e) { addException(e); logger.warn("Exception sending result to client", resultException); @@ -867,7 +908,7 @@ private void moveToState(final QueryState newState, final Exception exception) { case RUNNING: { recordNewState(QueryState.RUNNING); try { - observer.execStarted(profileTracker.getPlanningProfile()); + profileObserver.execStarted(profileTracker.getPlanningProfile()); } catch (UserCancellationException ucx) { //Ignore this exception here. } @@ -914,6 +955,16 @@ private void moveToState(final QueryState newState, final Exception exception) { case COMPLETED: { assert exception == null; + + try { + injector.injectChecked(queryContext.getExecutionControls(), INJECTOR_COMMIT_FAILURE, ForemanException.class); + // The commit handler can internally invoke other grpcs. So, forking the context here. 
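// Aside, assuming this Context is io.grpc.Context: fork() returns a child Context
// that keeps the parent's values but does not inherit its cancellation or deadline,
// roughly:
//
//   Context detached = Context.current().fork();
//   detached.run(() -> committer.run());   // not torn down when the query context ends
//
// which is why the commit handler's internal gRPC calls are run under a fork.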
+ Context.current().fork().run(() -> committer.ifPresent(Runnable::run)); + } catch (ForemanException e) { + moveToState(QueryState.FAILED, e); + return; + } + recordNewState(QueryState.COMPLETED); foremanResult.setCompleted(QueryState.COMPLETED); foremanResult.close(); @@ -990,6 +1041,18 @@ private void recordNewState(final QueryState newState) { state = newState; } + private boolean isTerminalStage(AttemptEvent.State stage) { + switch(stage) { + case CANCELED: + case COMPLETED: + case FAILED: + case INVALID_STATE: + return true; + default: + return false; + } + } + private AttemptEvent.State convertTerminalToAttemptState(final QueryState state) { Preconditions.checkArgument((state == QueryState.COMPLETED || state == QueryState.CANCELED diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptProfileTracker.java b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptProfileTracker.java index 0701b448b8..c811efdbf2 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptProfileTracker.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/AttemptProfileTracker.java @@ -272,6 +272,12 @@ private void addPlanningDetails(UserBitShared.QueryProfile.Builder builder) thro builder.setTotalFragments(capturer.getNumFragments()); builder.addAllDatasetProfile(capturer.getDatasets()); builder.setNumPlanCacheUsed(capturer.getNumPlanCacheUses()); + if (capturer.getNumJoinsInUserQuery() != null) { + builder.setNumJoinsInUserQuery(capturer.getNumJoinsInUserQuery()); + } + if (capturer.getNumJoinsInFinalPrel() != null) { + builder.setNumJoinsInFinalPrel(capturer.getNumJoinsInFinalPrel()); + } final String planText = capturer.getText(); if (planText != null) { diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/MaestroObserverWrapper.java b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/MaestroObserverWrapper.java index 2b5e0d70c9..febf67e873 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/MaestroObserverWrapper.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/MaestroObserverWrapper.java @@ -25,13 +25,16 @@ public class MaestroObserverWrapper implements MaestroObserver { private final AttemptObserver observer; + private final ExecutionStageChangeListener stageChangeListener; - public MaestroObserverWrapper(final AttemptObserver observer) { + public MaestroObserverWrapper(final AttemptObserver observer, ExecutionStageChangeListener listener) { this.observer = observer; + this.stageChangeListener = listener; } @Override public void beginState(AttemptEvent event) { + this.stageChangeListener.moveToNextStage(event.getState()); observer.beginState(event); } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/SqlUnsupportedException.java b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/SqlUnsupportedException.java index d4132643cb..824e7b4a5f 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/SqlUnsupportedException.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/foreman/SqlUnsupportedException.java @@ -25,7 +25,7 @@ public static enum ExceptionType { DATA_TYPE(UnsupportedDataTypeException.class.getSimpleName()), FUNCTION(UnsupportedFunctionException.class.getSimpleName()); - private String exceptionType; + private final String exceptionType; ExceptionType(String exceptionType) { this.exceptionType = exceptionType; } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/AttemptAnalyser.java 
b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/AttemptAnalyser.java new file mode 100644 index 0000000000..fbc215fa3d --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/AttemptAnalyser.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.work.protector; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType; +import com.google.common.annotations.VisibleForTesting; + +import io.opentracing.Span; + +/** + * This class analyses a query attempt + */ +public class AttemptAnalyser { + private static final Logger logger = LoggerFactory.getLogger(AttemptAnalyser.class); + private static final String ATTEMPT_STATUS_ATTRIBUTE = "dremio.attempt.status"; + + // Used in unit tests to verify the state of the last attempt + @VisibleForTesting + public static String LAST_ATTEMPT_COMPLETION_STATE = null; + + // attempt span + private final Span attemptSpan; + + AttemptAnalyser(Span attemptSpan) { + this.attemptSpan = attemptSpan; + } + + private void setAttemptCompletionAttribute(String attemptCompletionState) { + LAST_ATTEMPT_COMPLETION_STATE = attemptCompletionState; + attemptSpan.setTag(ATTEMPT_STATUS_ATTRIBUTE, attemptCompletionState); + } + + protected void analyseAttemptCompletion(final UserResult result) { + UserException.AttemptCompletionState attemptCompletionState = result.getAttemptCompletionState(); + if (attemptCompletionState != UserException.AttemptCompletionState.DREMIO_PB_ERROR) { + setAttemptCompletionAttribute(attemptCompletionState.toString()); + return; + } + + UserException userException = result.getException(); + ErrorType errorType = userException.getErrorType(); + setAttemptCompletionAttribute("pb_" + errorType.toString()); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/Foreman.java b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/Foreman.java index d55a3af78f..d680a27122 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/Foreman.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/Foreman.java @@ -18,6 +18,7 @@ import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.Executor; @@ -29,8 +30,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.common.concurrent.ContextMigratingExecutorService; import com.dremio.common.exceptions.InvalidMetadataErrorContext; import com.dremio.common.exceptions.UserException; +import com.dremio.common.tracing.TracingUtils; import com.dremio.common.util.DremioVersionInfo; import com.dremio.common.utils.protos.AttemptId; import com.dremio.common.utils.protos.QueryIdHelper; @@ -75,6 +78,7 @@ import com.dremio.service.jobtelemetry.PutTailProfileRequest; import 
com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.telemetry.utils.TracerFacade; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Predicate; import com.google.common.base.Predicates; @@ -84,7 +88,7 @@ import com.google.protobuf.Empty; import io.netty.buffer.ByteBuf; -import io.opentelemetry.api.trace.Span; +import io.opentracing.Span; /** * Can re-run a query if needed/possible without the user noticing. @@ -118,6 +122,7 @@ public class Foreman { private RuleBasedEngineSelector ruleBasedEngineSelector; private AttemptId attemptId; // id of last attempt + private Span attemptSpan; // span of the last attempt private volatile AttemptManager attemptManager; // last running query @@ -168,7 +173,8 @@ public void start() { } private void newAttempt(AttemptReason reason, Predicate datasetValidityChecker) { - Span.current().setAttribute("attemptId", attemptId.toString()); + attemptSpan = TracingUtils.buildChildSpan(TracerFacade.INSTANCE, "job-attempt"); + attemptSpan.setTag("attemptId", attemptId.toString()); try { // we should ideally check if the query wasn't cancelled before starting a new attempt but this will over-complicate // things as the observer expects a query profile at completion and this may not be available if the cancellation @@ -211,7 +217,7 @@ private void newAttempt(AttemptReason reason, Predicate datasetVa } final UserResult result = new UserResult(null, attemptId.toQueryId(), QueryState.FAILED, - profileBuilder.build(), uex, null, false); + profileBuilder.build(), uex, null, false, false, false); observer.execCompletion(result); throw t; } @@ -219,7 +225,7 @@ private void newAttempt(AttemptReason reason, Predicate datasetVa if (request.runInSameThread()) { attemptManager.run(); } else { - executor.execute(attemptManager); + executor.execute(ContextMigratingExecutorService.makeContextMigratingTask(attemptManager, "AttemptManager.run")); } } @@ -249,7 +255,6 @@ public void dataFromScreenArrived(QueryData header, ResponseSender sender, ByteB private boolean recoverFromFailure(AttemptReason reason, Predicate datasetValidityChecker) { // request a new attemptId attemptId = attemptId.nextAttempt(); - logger.info("{}: Starting new attempt because of {}", attemptId, reason); synchronized (this) { @@ -314,17 +319,17 @@ public ExternalId getExternalId() { return attemptId.getExternalId(); } - public synchronized void cancel(String reason, boolean clientCancelled) { - cancel(reason, clientCancelled, null, false); + public synchronized void cancel(String reason, boolean clientCancelled, boolean runTimeExceeded) { + cancel(reason, clientCancelled, null, false, runTimeExceeded); } public synchronized void cancel(String reason, boolean clientCancelled, String cancelContext, - boolean isCancelledByHeapMonitor) { + boolean isCancelledByHeapMonitor, boolean runTimeExceeded) { if (!canceled) { canceled = true; if (attemptManager != null) { - attemptManager.cancel(reason, clientCancelled, cancelContext, isCancelledByHeapMonitor); + attemptManager.cancel(reason, clientCancelled, cancelContext, isCancelledByHeapMonitor, runTimeExceeded); } } else { logger.debug("Cancel of queryId:{} was already attempted before. 
Ignoring cancelling request now.", @@ -354,8 +359,7 @@ protected RuleBasedEngineSelector getRuleBasedEngineSelector() { private static boolean containsHashAggregate(final RelNode relNode) { if (relNode instanceof HashAggPrel) { return true; - } - else { + } else { for (final RelNode child : relNode.getInputs()) { if (containsHashAggregate(child)) { return true; @@ -397,11 +401,12 @@ public void planValidated(RelDataType rowType, SqlNode node, long millisTaken) { } @Override - public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) { + public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, + long millisTaken, Map timeBreakdownPerRule) { if (phase == PlannerPhase.PHYSICAL) { containsHashAgg = containsHashAggregate(after); } - super.planRelTransform(phase, planner, before, after, millisTaken); + super.planRelTransform(phase, planner, before, after, millisTaken, timeBreakdownPerRule); } private UserException handleSchemaChangeException(UserException schemaChange) { @@ -465,6 +470,13 @@ public void attemptCompletion(UserResult result) { // TODO(DX-10101): Define the guarantee of #attemptCompletion or rework #attemptCompletion attemptManager = null; // make sure we don't pass cancellation requests to this attemptManager anymore + AttemptAnalyser attemptAnalyser = new AttemptAnalyser(attemptSpan); + try { + attemptAnalyser.analyseAttemptCompletion(result); + } finally { + attemptSpan.finish(); + attemptAnalyser = null; + } final QueryState queryState = result.getState(); final boolean queryFailed = queryState == QueryState.FAILED; diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenTool.java b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenTool.java index 869213871d..3bdf98c218 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenTool.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenTool.java @@ -24,6 +24,7 @@ * Tool for interacting with the foremen manager. 
*/ public interface ForemenTool { + // Used by REST APIs to cancel running queries boolean cancel(ExternalId id, String reason); Optional getProfile(ExternalId id); diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenWorkManager.java b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenWorkManager.java index 3206789d7b..1711284564 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenWorkManager.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/ForemenWorkManager.java @@ -40,6 +40,7 @@ import com.dremio.common.utils.protos.ExternalIdHelper; import com.dremio.common.utils.protos.QueryIdHelper; import com.dremio.config.DremioConfig; +import com.dremio.context.RequestContext; import com.dremio.exec.ExecConstants; import com.dremio.exec.maestro.MaestroForwarder; import com.dremio.exec.maestro.MaestroService; @@ -152,6 +153,7 @@ public class ForemenWorkManager implements Service, SafeExit { private final CloseableSchedulerThreadPool profileSender; private Cache cachedPlans; private PlanCache planCache; + private final Provider requestContextProvider; public ForemenWorkManager( final Provider fabric, @@ -162,7 +164,8 @@ public ForemenWorkManager( final Provider forwarder, final Tracer tracer, final Provider ruleBasedEngineSelector, - final BufferAllocator jobResultsAllocator) { + final BufferAllocator jobResultsAllocator, + final Provider requestContextProvider) { this.dbContext = dbContext; this.fabric = fabric; this.commandPool = commandPool; @@ -177,6 +180,7 @@ public ForemenWorkManager( this.queryCancelTool = new QueryCancelToolImpl(); this.profileSender = new CloseableSchedulerThreadPool("profile-sender", 1); this.jobResultsAllocator = jobResultsAllocator; + this.requestContextProvider = requestContextProvider; } public ExecToCoordResultsHandler getExecToCoordResultsHandler() { @@ -329,7 +333,7 @@ public ConnectionClosedListener() { @Override public void operationComplete(Future future) throws Exception { - foreman.cancel("User - Connection closed", false); + foreman.cancel("User - Connection closed", false, false); } } @@ -354,11 +358,12 @@ public void completed() { * @param reason description of the cancellation * @param clientCancelled true if the client application explicitly issued a cancellation (via end user action), or * false otherwise (i.e. when pushing the cancellation notification to the end user) + * @param runTimeExceeded true if the query is being cancelled because the max runtime has been exceeded */ - public boolean cancel(ExternalId externalId, String reason, boolean clientCancelled) { + public boolean cancel(ExternalId externalId, String reason, boolean clientCancelled, boolean runTimeExceeded) { final ManagedForeman managed = externalIdToForeman.get(externalId); if (managed != null) { - managed.foreman.cancel(reason, clientCancelled); + managed.foreman.cancel(reason, clientCancelled, runTimeExceeded); return true; } @@ -375,9 +380,10 @@ public void cancel(CancelQueryContext cancelQueryContext) { .stream() .filter(mf->mf.foreman.canCancelByHeapMonitor()) .forEach(mf->mf.foreman.cancel(cancelQueryContext.getCancelReason(), - false, - cancelQueryContext.getCancelContext(), - cancelQueryContext.isCancelledByHeapMonitor())); + false, + cancelQueryContext.getCancelContext(), + cancelQueryContext.isCancelledByHeapMonitor(), + false)); } public boolean resume(ExternalId externalId) { @@ -494,6 +500,7 @@ protected ByteBuf getData(JobResultsRequest request, ArrowBuf buf) { * *

        This is intended to be used by com.dremio.exec.server.SabotNode#close().

        */ + @Override public void waitToExit() { synchronized(this) { if (externalIdToForeman.isEmpty()) { @@ -551,7 +558,7 @@ public void submitLocalQuery( UserSession userSession) { try{ // make sure we keep a local observer out of band. - final QueryObserver oobJobObserver = new OutOfBandQueryObserver(observer, executor); + final QueryObserver oobJobObserver = new OutOfBandQueryObserver(observer, executor, requestContextProvider); if (userSession == null) { userSession = UserSession.Builder.newBuilder() @@ -604,6 +611,8 @@ void submitWork(ExternalId externalId, profile, exception, null, + false, + false, false); responseHandler.completed(result); } @@ -631,7 +640,7 @@ Void submitWorkCommand(ExternalId externalId, session.incrementQueryCount(); final QueryObserver observer = dbContext.get().getQueryObserverFactory().get().createNewQueryObserver( externalId, session, responseHandler); - final QueryObserver oobObserver = new OutOfBandQueryObserver(observer, executor); + final QueryObserver oobObserver = new OutOfBandQueryObserver(observer, executor, requestContextProvider); final ReAttemptHandler attemptHandler = newExternalAttemptHandler(session.getOptions()); submit(externalId, oobObserver, session, request, registry, null, attemptHandler); return null; @@ -661,7 +670,7 @@ public void submitWork(ExternalId externalId, UserSession session, @Override public Ack cancelQuery(ExternalId query, String username) { - cancel(query, String.format("Query cancelled by user '%s'", username), true); + cancel(query, String.format("Query cancelled by user '%s'", username), true, false); return Acks.OK; } @@ -680,9 +689,10 @@ public OptionManager getSystemOptions() { private class ForemenToolImpl implements ForemenTool { + // Used by REST APIs to cancel running queries @Override public boolean cancel(ExternalId id, String reason) { - return ForemenWorkManager.this.cancel(id, reason, false); + return ForemenWorkManager.this.cancel(id, reason, true, false); } @Override @@ -698,9 +708,11 @@ public Optional getProfile(ExternalId id) { private class QueryCancelToolImpl implements QueryCancelTool { + // Not used when REST APIs are used to cancel queries + // Hence, clientCancelled is false @Override - public boolean cancel(ExternalId id, String reason) { - return ForemenWorkManager.this.cancel(id, reason, false); + public boolean cancel(ExternalId id, String reason, boolean runTimeExceeded) { + return ForemenWorkManager.this.cancel(id, reason, false, runTimeExceeded); } } diff --git a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/UserResult.java b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/UserResult.java index ee085c1215..76a8b17701 100644 --- a/sabot/kernel/src/main/java/com/dremio/exec/work/protector/UserResult.java +++ b/sabot/kernel/src/main/java/com/dremio/exec/work/protector/UserResult.java @@ -20,6 +20,7 @@ import com.dremio.exec.proto.UserBitShared.QueryProfile; import com.dremio.exec.proto.UserBitShared.QueryResult; import com.dremio.exec.proto.UserBitShared.QueryResult.QueryState; +import com.dremio.sabot.exec.AbstractHeapClawBackStrategy; import com.google.common.base.Preconditions; /** @@ -36,11 +37,14 @@ public class UserResult { private final UserException exception; private final String cancelReason; private final boolean clientCancelled; + private final boolean timedoutWaitingForEngine; + private final boolean runTimeExceeded; private QueryResult result; public UserResult(Object extraValue, QueryId queryId, QueryState state, QueryProfile profile, - UserException 
exception, String cancelReason, boolean clientCancelled) { + UserException exception, String cancelReason, boolean clientCancelled, + boolean timedoutWaitingForEngine, boolean runTimeExceeded) { this.extraValue = extraValue; this.queryId = queryId; this.state = state; @@ -48,6 +52,44 @@ public UserResult(Object extraValue, QueryId queryId, QueryState state, QueryPro this.exception = exception; this.cancelReason = cancelReason; this.clientCancelled = clientCancelled; + this.timedoutWaitingForEngine = timedoutWaitingForEngine; + this.runTimeExceeded = runTimeExceeded; + } + + public UserException.AttemptCompletionState getAttemptCompletionState() { + if (state == QueryState.COMPLETED) { + // Attempt completed successfully + return UserException.AttemptCompletionState.SUCCESS; + } + + if (this.clientCancelled) { + return UserException.AttemptCompletionState.CLIENT_CANCELLED; + } + + // Query did not complete successfully and was not cancelled + if (this.timedoutWaitingForEngine) { + return UserException.AttemptCompletionState.ENGINE_TIMEOUT; + } + + if (this.runTimeExceeded) { + return UserException.AttemptCompletionState.RUNTIME_EXCEEDED; + } + + if (exception == null) { + return UserException.AttemptCompletionState.UNKNOWN; + } + + UserException.AttemptCompletionState attemptCompletionState = exception.getAttemptCompletionState(); + if (attemptCompletionState != UserException.AttemptCompletionState.UNKNOWN) { + return attemptCompletionState; + } + + String errMessage = exception.getMessage(); + if ((errMessage != null) && (errMessage.indexOf(AbstractHeapClawBackStrategy.FAIL_CONTEXT) >= 0)) { + return UserException.AttemptCompletionState.HEAP_MONITOR_E; + } + + return UserException.AttemptCompletionState.DREMIO_PB_ERROR; } public QueryResult toQueryResult() { @@ -104,7 +146,7 @@ public boolean hasException() { } public UserResult withNewQueryId(QueryId newQueryId) { - return new UserResult(extraValue, newQueryId, state, profile, exception, cancelReason, clientCancelled); + return new UserResult(extraValue, newQueryId, state, profile, exception, cancelReason, clientCancelled, timedoutWaitingForEngine, runTimeExceeded); } public UserResult withException(Exception ex) { @@ -125,7 +167,7 @@ public UserResult withException(Exception ex) { profile = addError(exception, builder).build(); } - return new UserResult(extraValue, queryId, QueryState.FAILED, profile, exception, cancelReason, clientCancelled); + return new UserResult(extraValue, queryId, QueryState.FAILED, profile, exception, cancelReason, clientCancelled, timedoutWaitingForEngine, runTimeExceeded); } public static QueryProfile.Builder addError(UserException ex, QueryProfile.Builder profileBuilder) { @@ -143,6 +185,6 @@ public static QueryProfile.Builder addError(UserException ex, QueryProfile.Build public UserResult replaceException(UserException e) { UserException exception = UserException.systemError(e) .build(logger); - return new UserResult(extraValue, queryId, QueryState.FAILED, profile, exception, cancelReason, clientCancelled); + return new UserResult(extraValue, queryId, QueryState.FAILED, profile, exception, cancelReason, clientCancelled, timedoutWaitingForEngine, runTimeExceeded); } } diff --git a/sabot/kernel/src/main/java/com/dremio/extra/exec/store/dfs/parquet/pushdownfilter/FilterExtractor.java b/sabot/kernel/src/main/java/com/dremio/extra/exec/store/dfs/parquet/pushdownfilter/FilterExtractor.java new file mode 100644 index 0000000000..6607a99222 --- /dev/null +++ 
b/sabot/kernel/src/main/java/com/dremio/extra/exec/store/dfs/parquet/pushdownfilter/FilterExtractor.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.extra.exec.store.dfs.parquet.pushdownfilter; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexUtil; + +/** + * Extract a sub filter from a tree such that the resulting filter will return the minimal superset + * of results for the input filter. + */ +public class FilterExtractor { + private final RexBuilder rexBuilder; + private final Predicate<RexNode> leafAcceptor; + + private FilterExtractor(RexBuilder rexBuilder, Predicate<RexNode> leafAcceptor) { + this.rexBuilder = rexBuilder; + this.leafAcceptor = leafAcceptor; + } + + private RexNode extract(RexNode rexNode) { + if (rexNode instanceof RexCall) { + RexCall rexCall = (RexCall) rexNode; + switch (rexCall.getOperator().getKind()) { + case AND: { + boolean changed = false; + List<RexNode> nodeList = new ArrayList<>(); + for (RexNode sub : rexCall.getOperands()) { + RexNode subRewritten = extract(sub); + changed |= subRewritten != sub; + if (!subRewritten.isAlwaysTrue()) { + nodeList.add(subRewritten); + } + } + if (changed) { + return RexUtil.composeConjunction(rexBuilder, nodeList, false); + } else { + return rexCall; + } + + } + case OR: { + boolean changed = false; + List<RexNode> nodeList = new ArrayList<>(); + for (RexNode sub : rexCall.getOperands()) { + RexNode subRewritten = extract(sub); + changed |= subRewritten != sub; + if (subRewritten.isAlwaysTrue()) { + return rexBuilder.makeLiteral(true); + } else { + nodeList.add(subRewritten); + } + } + if (changed) { + return RexUtil.composeDisjunction(rexBuilder, nodeList, false); + } else { + return rexCall; + } + } + } + } + + if (leafAcceptor.test(rexNode)) { + return rexNode; + } else { + return rexBuilder.makeLiteral(true); + } + } + + public static RexNode extractFilter( + RexBuilder rexBuilder, + RexNode filter, + Predicate<RexNode> filterPredicate) { + FilterExtractor filterExtractor = new FilterExtractor(rexBuilder, filterPredicate); + return filterExtractor.extract(filter); + } + +} diff --git a/sabot/kernel/src/main/java/com/dremio/plugins/ExternalNamespaceEntry.java b/sabot/kernel/src/main/java/com/dremio/plugins/ExternalNamespaceEntry.java index 42f2d57767..efdefd312c 100644 --- a/sabot/kernel/src/main/java/com/dremio/plugins/ExternalNamespaceEntry.java +++ b/sabot/kernel/src/main/java/com/dremio/plugins/ExternalNamespaceEntry.java @@ -17,6 +17,11 @@ import java.util.List; +import javax.annotation.Nullable; + +import org.projectnessie.model.Content; + +import com.dremio.exec.catalog.TableVersionContext; import com.google.common.base.Preconditions; public final class ExternalNamespaceEntry { @@ -24,23 +29,47 @@ public enum Type { UNKNOWN, FOLDER, ICEBERG_TABLE, -
ICEBERG_VIEW + ICEBERG_VIEW; + public Content.Type toNessieContentType() { + switch (this) { + case ICEBERG_TABLE: + return Content.Type.ICEBERG_TABLE; + case ICEBERG_VIEW: + return Content.Type.ICEBERG_VIEW; + case FOLDER: + return Content.Type.NAMESPACE; + default: + throw new IllegalArgumentException("toNessieContentType failed: " + this); + } + } } - private Type type; - private List nameElements; + private final Type type; + private final List nameElements; + private final String id; + private final TableVersionContext tableVersionContext; - private ExternalNamespaceEntry(Type type, List nameElements) { + private ExternalNamespaceEntry( + Type type, List nameElements, String id, TableVersionContext tableVersionContext) { Preconditions.checkNotNull(nameElements); Preconditions.checkArgument(nameElements.size() >= 1); + this.type = type; this.nameElements = nameElements; + this.id = id; + this.tableVersionContext = tableVersionContext; } public static ExternalNamespaceEntry of(String type, List nameElements) { Preconditions.checkNotNull(type); - return new ExternalNamespaceEntry(mapType(type), nameElements); + return new ExternalNamespaceEntry(mapType(type), nameElements, null, null); + } + + public static ExternalNamespaceEntry of( + String type, List nameElements, String id, TableVersionContext tableVersionContext) { + Preconditions.checkNotNull(type); + return new ExternalNamespaceEntry(mapType(type), nameElements, id, tableVersionContext); } public Type getType() { @@ -59,6 +88,16 @@ public String getName() { return nameElements.get(nameElements.size() - 1); } + @Nullable + public String getId() { + return id; + } + + @Nullable + public TableVersionContext getTableVersionContext() { + return tableVersionContext; + } + private static Type mapType(String type) { switch(type) { case "NAMESPACE": diff --git a/sabot/kernel/src/main/java/com/dremio/plugins/NessieClient.java b/sabot/kernel/src/main/java/com/dremio/plugins/NessieClient.java index 8f07d19799..3c06e48c2e 100644 --- a/sabot/kernel/src/main/java/com/dremio/plugins/NessieClient.java +++ b/sabot/kernel/src/main/java/com/dremio/plugins/NessieClient.java @@ -17,9 +17,13 @@ import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.stream.Stream; +import javax.annotation.Nullable; + import org.apache.iceberg.view.ViewVersionMetadata; +import org.projectnessie.client.api.NessieApi; import org.projectnessie.model.IcebergView; import com.dremio.common.exceptions.UserException; @@ -38,7 +42,7 @@ /** * Client interface to communicate with Nessie. */ -public interface NessieClient { +public interface NessieClient extends AutoCloseable { /** * Get the default branch. @@ -56,6 +60,11 @@ public interface NessieClient { */ ResolvedVersionContext resolveVersionContext(VersionContext versionContext); + /** + * Executor enabled method for retrieving resolveVersionContext. JobID is used for referencing the context. + */ + ResolvedVersionContext resolveVersionContext(VersionContext versionContext, String jobId); + /** * Checks that a commit hash exists in Nessie. */ @@ -71,6 +80,11 @@ public interface NessieClient { */ Stream listTags(); + /** + * List all references (both branches and tags). + */ + Stream listReferences(); + /** * List all changes for the given version. * @@ -82,29 +96,31 @@ public interface NessieClient { */ Stream listChanges(VersionContext version); - /** - * List only entries under the given path for the given version. - * - * @param catalogPath Acts as the namespace filter. 
It will scope entries to this namespace. - * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists). - * - * @throws ReferenceNotFoundException If the given reference cannot be found. - * @throws NoDefaultBranchException If the Nessie server does not have a default branch set. - * @throws ReferenceTypeConflictException If the requested version does not match the server. - */ - Stream listEntries(List catalogPath, VersionContext version); + enum NestingMode { + INCLUDE_NESTED, + SAME_DEPTH_ONLY + } /** * List all entries under the given path and subpaths for the given version. * * @param catalogPath Acts as the namespace filter. It will act as the root namespace. - * @param version If the version is NOT_SPECIFIED, the default branch is used (if it exists). + * @param resolvedVersion If the version is NOT_SPECIFIED, the default branch is used (if it exists). + * @param nestingMode whether to include nested elements + * @param contentTypeFilter optional content type to filter for (null or empty means no filtering) + * @param celFilter optional CEL filter * * @throws ReferenceNotFoundException If the given reference cannot be found. * @throws NoDefaultBranchException If the Nessie server does not have a default branch set. * @throws ReferenceTypeConflictException If the requested version does not match the server. */ - Stream listEntriesIncludeNested(List catalogPath, VersionContext version, String celFilter); + Stream listEntries( + @Nullable List catalogPath, + ResolvedVersionContext resolvedVersion, + NestingMode nestingMode, + @Nullable Set contentTypeFilter, + @Nullable String celFilter + ); /** * Create a namespace by the given path for the given version. @@ -231,6 +247,7 @@ public interface NessieClient { * @param version The source reference name * @param baseContentId The content id of the object that we started the commit operation on * @param jobId The JobId of the query + * @param userName The username executing the query * Note : JobId param is only used when Executor calls this API. It sends the jobId to the controlplane * to lookup the userId . 
* @throws ReferenceConflictException If the tag hash or source reference hash changes during update @@ -242,7 +259,8 @@ void commitTable( NessieClientTableMetadata nessieClientTableMetadata, ResolvedVersionContext version, String baseContentId, - String jobId); + String jobId, + String userName); void commitView( List catalogKey, @@ -251,9 +269,10 @@ void commitView( ViewVersionMetadata metadata, String dialect, ResolvedVersionContext version, - String baseContentId); + String baseContentId, + String userName); - void deleteCatalogEntry(List catalogKey, ResolvedVersionContext version); + void deleteCatalogEntry(List catalogKey, ResolvedVersionContext version, String userName); /** * @@ -277,4 +296,10 @@ void commitView( * @throws ReferenceNotFoundException If the given tag or source reference cannot be found* */ String getContentId(List tableKey, ResolvedVersionContext version, String jobId); + + NessieApi getNessieApi(); + + // Overridden to remove 'throws Exception' as per NessieApi interface + @Override + void close(); } diff --git a/sabot/kernel/src/main/java/com/dremio/plugins/NessieClientImpl.java b/sabot/kernel/src/main/java/com/dremio/plugins/NessieClientImpl.java index b7c0831b07..0bfa23ee72 100644 --- a/sabot/kernel/src/main/java/com/dremio/plugins/NessieClientImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/plugins/NessieClientImpl.java @@ -16,17 +16,25 @@ package com.dremio.plugins; +import java.net.ConnectException; +import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nullable; + import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.iceberg.exceptions.CommitFailedException; import org.apache.iceberg.view.ViewVersionMetadata; -import org.projectnessie.api.params.FetchOption; +import org.projectnessie.client.api.GetAllReferencesBuilder; import org.projectnessie.client.api.GetEntriesBuilder; +import org.projectnessie.client.api.NessieApi; import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.http.HttpClientException; import org.projectnessie.client.rest.NessieNotAuthorizedException; import org.projectnessie.error.ErrorCode; import org.projectnessie.error.NessieBadRequestException; @@ -35,27 +43,39 @@ import org.projectnessie.error.NessieNamespaceNotFoundException; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.error.NessieReferenceAlreadyExistsException; +import org.projectnessie.error.NessieReferenceConflictException; import org.projectnessie.error.NessieReferenceNotFoundException; import org.projectnessie.model.Branch; import org.projectnessie.model.CommitMeta; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; +import org.projectnessie.model.Detached; +import org.projectnessie.model.EntriesResponse.Entry; +import org.projectnessie.model.FetchOption; import org.projectnessie.model.IcebergTable; import org.projectnessie.model.IcebergView; import org.projectnessie.model.ImmutableIcebergTable; import org.projectnessie.model.ImmutableIcebergView; +import org.projectnessie.model.LogResponse.LogEntry; import org.projectnessie.model.Namespace; import org.projectnessie.model.Operation; import org.projectnessie.model.Reference; +import org.projectnessie.model.Reference.ReferenceType; import org.projectnessie.model.Tag; +import org.projectnessie.model.Validation; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import com.dremio.common.exceptions.UserException; +import com.dremio.context.RequestContext; +import com.dremio.context.UsernameContext; import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.catalog.VersionContext; import com.dremio.exec.catalog.VersionedPlugin; import com.dremio.exec.store.ChangeInfo; +import com.dremio.exec.store.ConnectionRefusedException; +import com.dremio.exec.store.HttpClientRequestException; import com.dremio.exec.store.NessieNamespaceAlreadyExistsException; import com.dremio.exec.store.NoDefaultBranchException; import com.dremio.exec.store.ReferenceAlreadyExistsException; @@ -65,16 +85,13 @@ import com.dremio.exec.store.ReferenceTypeConflictException; import com.dremio.exec.store.UnAuthenticatedException; import com.dremio.telemetry.api.metrics.MetricsInstrumenter; +import com.github.benmanes.caffeine.cache.CacheLoader; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.LoadingCache; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.google.common.base.Throwables; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.Lists; -import com.google.common.util.concurrent.UncheckedExecutionException; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Implementation of the NessieClient interface for REST. @@ -82,21 +99,44 @@ public class NessieClientImpl implements NessieClient { private static final Logger logger = LoggerFactory.getLogger(NessieClientImpl.class); - private static final String DETACHED = "DETACHED"; private static final String SQL_TEXT = "N/A"; + static final String BRANCH_REFERENCE = "Branch"; + static final String TAG_REFERENCE = "Tag"; private final NessieApiV1 nessieApi; + private final boolean produceImplicitNamespaces; + private static final MetricsInstrumenter metrics = new MetricsInstrumenter(NessieClient.class); - private final LoadingCache, Content> nessieContentsCache = CacheBuilder - .newBuilder() - .maximumSize(1000) // items - .softValues() - .expireAfterAccess(1, TimeUnit.HOURS) - .build(new NessieContentsCacheLoader()); + private final LoadingCache, Content> nessieContentCache; public NessieClientImpl(NessieApiV1 nessieApi) { + this(nessieApi, true); + } + + public NessieClientImpl(NessieApiV1 nessieApi, boolean produceImplicitNamespaces) { this.nessieApi = nessieApi; + this.produceImplicitNamespaces = produceImplicitNamespaces; + this.nessieContentCache = Caffeine + .newBuilder() + .maximumSize(1000) // items + .softValues() + .expireAfterAccess(1, TimeUnit.HOURS) + .build(new NessieContentCacheLoader()); + } + + private class NessieContentCacheLoader implements CacheLoader, Content> { + @Override + public Content load(ImmutablePair pair) throws Exception { + return metrics.log("loadNessieContent", + () -> loadNessieContent(pair.left, pair.right)).orElse(null); + } + } + + @Nullable + private Content getContent(ContentKey contentKey, ResolvedVersionContext version) { + return metrics.log("getNessieContent", + () -> nessieContentCache.get(ImmutablePair.of(contentKey, version))); } @Override @@ -109,17 +149,30 @@ public ResolvedVersionContext getDefaultBranch() { throw new NoDefaultBranchException(e); } catch (NessieNotAuthorizedException e) { throw new 
UnAuthenticatedException(e, "Unable to authenticate to the Nessie server. Make sure that the token is valid and not expired"); + } catch (HttpClientException e) { + logger.error("Unable to get the default branch from the Nessie", e); + if (e.getCause() instanceof ConnectException) { + throw new ConnectionRefusedException(e, "Connection refused while connecting to the Nessie Server."); + } + + throw new HttpClientRequestException(e, "Failed to get the default branch from Nessie"); } } @Override @WithSpan public ResolvedVersionContext resolveVersionContext(VersionContext versionContext) { - Preconditions.checkNotNull(versionContext); + return metrics.log("resolveVersionContext", () -> resolveVersionContextHelper(versionContext)); + } + + @Override + @WithSpan + public ResolvedVersionContext resolveVersionContext(VersionContext versionContext, String jobId) { return metrics.log("resolveVersionContext", () -> resolveVersionContextHelper(versionContext)); } private ResolvedVersionContext resolveVersionContextHelper(VersionContext versionContext) { + Preconditions.checkNotNull(versionContext); switch (versionContext.getType()) { case UNSPECIFIED: return getDefaultBranch(); @@ -154,23 +207,13 @@ private ResolvedVersionContext resolveVersionContextHelper(VersionContext versio } } - /** - * Note: Nessie does not provide a published specification for their commit hashes, so this - * function is based only on implementation details and may be subject to change. - * - * See model/src/main/java/org/projectnessie/model/Validation.java in Nessie codebase. - */ - private boolean matchesCommitPattern(String commitHash) { + private static boolean matchesCommitPattern(String commitHash) { if (Strings.isNullOrEmpty(commitHash)) { logger.debug("Null or empty string provided when trying to match Nessie commit pattern."); return false; // Defensive, shouldn't be possible } - if (commitHash.length() < 8 || commitHash.length() > 64) { - logger.debug("Provided string {} does not match Nessie commit pattern (wrong length).", commitHash); - return false; - } - if (!Lists.charactersOf(commitHash).stream().allMatch(c -> Character.digit(c, 16) >= 0)) { - logger.debug("Provided string {} does not match Nessie commit pattern (not hexadecimal).", commitHash); + if (!Validation.isValidHash(commitHash)) { + logger.debug("Provided string {} does not match Nessie commit pattern.", commitHash); return false; } logger.debug("Provided string {} matches Nessie commit pattern.", commitHash); @@ -182,8 +225,7 @@ private boolean matchesCommitPattern(String commitHash) { public boolean commitExists(String commitHash) { try { nessieApi.getCommitLog() - .refName(DETACHED) - .hashOnRef(commitHash) + .reference(Detached.of(commitHash)) .fetch(FetchOption.MINIMAL) // Might be slightly faster .maxRecords(1) // Might be slightly faster .get(); @@ -197,23 +239,39 @@ public boolean commitExists(String commitHash) { @Override @WithSpan public Stream listBranches() { - return nessieApi.getAllReferences() - .get() - .getReferences() - .stream() - .filter(ref -> ref instanceof Branch) - .map(ref -> new ReferenceInfo("Branch", ref.getName(), ref.getHash())); + return listReferences(ReferenceType.BRANCH); } @Override @WithSpan public Stream listTags() { - return nessieApi.getAllReferences() - .get() - .getReferences() + return listReferences(ReferenceType.TAG); + } + + @Override + @WithSpan + public Stream listReferences() { + return listReferences(null); + } + + private Stream listReferences(@Nullable ReferenceType typeFilter) { + 
GetAllReferencesBuilder builder = nessieApi.getAllReferences(); + if (typeFilter != null) { + // i.e. refType == 'BRANCH' + builder.filter(String.format("refType == '%s'", typeFilter.name())); + } + return builder.get().getReferences() .stream() - .filter(ref -> ref instanceof Tag) - .map(ref -> new ReferenceInfo("Tag", ref.getName(), ref.getHash())); + .map(ref -> toReferenceInfo(ref, typeFilter)); + } + + private static ReferenceInfo toReferenceInfo(Reference ref, @Nullable ReferenceType typeFilter) { + if (typeFilter != null && ref.getType() != typeFilter) { + throw new IllegalStateException("Nessie responded with wrong reference type: " + + ref + " expected: " + typeFilter); + } + String type = ref.getType() == ReferenceType.BRANCH ? BRANCH_REFERENCE : TAG_REFERENCE; + return new ReferenceInfo(type, ref.getName(), ref.getHash()); } @Override @@ -226,62 +284,124 @@ public Stream listChanges(VersionContext version) { .get() .getLogEntries() .stream() - .map(log -> new ChangeInfo( - log.getCommitMeta().getHash(), - log.getCommitMeta().getAuthor(), - (log.getCommitMeta().getAuthorTime() != null) ? log.getCommitMeta().getAuthorTime().toString() : "", - log.getCommitMeta().getMessage())); + .map(NessieClientImpl::toChangeInfo); } catch (NessieNotFoundException e) { throw new ReferenceNotFoundException(e); } } - @Override - @WithSpan - public Stream listEntries(List catalogPath, VersionContext version) { - return listEntries(catalogPath, version, false, null); + private static ChangeInfo toChangeInfo(LogEntry log) { + CommitMeta commitMeta = log.getCommitMeta(); + String authorTime = commitMeta.getAuthorTime() != null + ? commitMeta.getAuthorTime().toString() + : ""; + return new ChangeInfo( + commitMeta.getHash(), + commitMeta.getAuthor(), + authorTime, + commitMeta.getMessage() + ); } @Override @WithSpan - public Stream listEntriesIncludeNested(List catalogPath, VersionContext version, String celFilter) { - return listEntries(catalogPath, version, true, celFilter); - } - - private Stream listEntries(List catalogPath, VersionContext version, boolean shouldIncludeNestedTables, String celFilter) { + public Stream listEntries( + @Nullable List catalogPath, + ResolvedVersionContext version, + NestingMode nestingMode, + @Nullable Set contentTypeFilter, + @Nullable String celFilter) { + return metrics.log("listEntries", + () -> listEntriesHelper(catalogPath, version, nestingMode, contentTypeFilter, celFilter)); + } + + private Stream listEntriesHelper( + @Nullable List catalogPath, + ResolvedVersionContext resolvedVersion, + NestingMode nestingMode, + @Nullable Set contentTypeFilter, + @Nullable String celFilter) { try { - ResolvedVersionContext resolvedVersion = resolveVersionContext(version); - final GetEntriesBuilder requestBuilder = nessieApi.getEntries() .reference(toRef(resolvedVersion)); - int depth = (catalogPath != null && !catalogPath.isEmpty()) - ? 
catalogPath.size() + 1 - : 1; - - if (!shouldIncludeNestedTables) { - requestBuilder.namespaceDepth(depth); + List filterTerms = new ArrayList<>(); + int depth = 1; + if (catalogPath != null) { + depth += catalogPath.size(); + if (depth > 1) { + filterTerms.add(String.format("entry.encodedKey.startsWith('%s.')", Namespace.of(catalogPath).name())); + } } - - if (depth > 1) { - // TODO: Escape "."s within individual path names - requestBuilder.filter(String.format("entry.namespace.matches('%s(\\\\.|$)')", String.join("\\\\.", catalogPath))); + if (nestingMode == NestingMode.SAME_DEPTH_ONLY) { + if (produceImplicitNamespaces) { + // namespaceDepth causes implicit namespaces to be returned + // namespaceDepth is not supported in Nessie REST API V2 + requestBuilder.namespaceDepth(depth); + } else { + filterTerms.add(String.format("size(entry.keyElements) == %d", depth)); + } + } + if (contentTypeFilter != null && !contentTypeFilter.isEmpty()) { + // build filter string i.e. entry.contentType in ['ICEBERG_TABLE', 'DELTA_LAKE_TABLE'] + String setElements = contentTypeFilter.stream() + .map(ExternalNamespaceEntry.Type::toNessieContentType) + .map(Content.Type::name) + .map(typeName -> String.format("'%s'", typeName)) + .collect(Collectors.joining(", ")); + filterTerms.add(String.format("entry.contentType in [%s]", setElements)); } - if (celFilter != null) { - requestBuilder.filter(celFilter); + filterTerms.add(celFilter); + } + if (!filterTerms.isEmpty()) { + String combinedFilter = filterTerms.stream() + .map(term -> String.format("(%s)", term)) + .collect(Collectors.joining(" && ")); + requestBuilder.filter(combinedFilter); } - return requestBuilder - .get() - .getEntries() - .stream() - .map(entry -> ExternalNamespaceEntry.of(entry.getType().toString(), entry.getName().getElements())); + final List externalNamespaceEntries = new ArrayList<>(); + // Don't switch to stream().map! Due to the thread it may use, proper RequestContext may not be present! 
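// For contrast (hypothetical): a lazy pipeline such as
//
//   return requestBuilder.stream().map(e -> toExternalNamespaceEntry(e, resolvedVersion));
//
// would defer the conversion until a caller consumes the stream, possibly on a thread
// without the RequestContext attached; collecting into a list first forces every
// conversion to run here, on the calling thread, before the stream is handed out.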
+ requestBuilder.stream().forEach(entry -> externalNamespaceEntries.add(toExternalNamespaceEntry(entry, resolvedVersion))); + return externalNamespaceEntries.stream(); } catch (NessieNotFoundException e) { throw UserException.dataReadError(e).buildSilently(); } } + private ExternalNamespaceEntry toExternalNamespaceEntry(Entry entry, + ResolvedVersionContext resolvedVersion) { + List catalogKey = entry.getName().getElements(); + String contentId = entry.getContentId(); + if (contentId == null && !Content.Type.NAMESPACE.equals(entry.getType())) { + // use content from response if available, otherwise try loading + // note: content is not available unless explicity requested + // note: implicit namespaces have no contentId, so there is no need to try loading + Content content = entry.getContent(); + if (content == null) { + content = getContent(ContentKey.of(catalogKey), resolvedVersion); + if (logger.isWarnEnabled()) { + String contentInfo = "null"; + if (content != null) { + contentInfo = content.getType() + " - " + content.getId(); + } + logger.warn("Slow nessie listEntries content load (catalogKey: {}, version: {}): {}", + catalogKey, resolvedVersion, contentInfo); + } + } + if (content != null) { + contentId = content.getId(); + } + } + String type = entry.getType().toString(); + if (contentId == null) { + return ExternalNamespaceEntry.of(type, catalogKey); + } + TableVersionContext tableVersionContext = TableVersionContext.of(resolvedVersion); + return ExternalNamespaceEntry.of(type, catalogKey, contentId, tableVersionContext); + } + @Override @WithSpan public void createNamespace(List namespacePathList, VersionContext version) { @@ -291,21 +411,44 @@ public void createNamespace(List namespacePathList, VersionContext versi private void createNamespaceHelper(List namespacePathList, VersionContext version) { try { ResolvedVersionContext resolvedVersion = resolveVersionContext(version); + if (!resolvedVersion.isBranch()) { + throw UserException.validationError() + .message("Cannot create folders for non-branch references.") + .buildSilently(); + } + final UsernameContext usernameContext = RequestContext.current().get(UsernameContext.CTX_KEY); + final String authorName = usernameContext != null ? usernameContext.getUserName() : null; + + // we are checking if the namespace already exists in nessie. + // if we already have the content, we are creating duplicate namespace so we are throwing an error. 
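// In essence (sketch, assuming the NessieApiV1 client used in this file): creating a
// folder is a plain Nessie commit that puts a Namespace at the key, e.g.
//
//   nessieApi.commitMultipleOperations()
//       .branch(branch)
//       .operation(Operation.Put.of(contentKey, Namespace.of(namespacePathList)))
//       .commitMeta(CommitMeta.fromMessage("Create namespace " + contentKey))
//       .commit();
//
// so the existence check below is what turns a silent overwrite into an
// "already exists" error.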
+ ContentKey contentKey = ContentKey.of(namespacePathList); + Content content = getContent(contentKey, resolvedVersion); + if (content != null) { + throw new NessieNamespaceAlreadyExistsException(String.format("Folder %s already exists", contentKey.toPathString())); + } nessieApi - .createNamespace() - .reference(toRef(resolvedVersion)) - .namespace(Namespace.of(namespacePathList)) - .create(); - } catch (IllegalStateException - | ReferenceTypeConflictException - | ReferenceNotFoundException - | NoDefaultBranchException e) { - throw e; + .commitMultipleOperations() + .branch((Branch) toRef(resolvedVersion)) + .operation(Operation.Put.of(contentKey, Namespace.of(namespacePathList))) + .commitMeta(CommitMeta.builder() + .author(authorName) + .message("Create namespace key: " + contentKey) + .build()) + .commit(); } catch (org.projectnessie.error.NessieNamespaceAlreadyExistsException e) { + logger.error("Failed to create namespace as Namespace already exists", e); throw new NessieNamespaceAlreadyExistsException(e); } catch (NessieReferenceNotFoundException e) { + logger.error("Failed to create namespace due to Reference not found", e); throw new ReferenceNotFoundException(e); - } catch (Exception e) { + } catch (NessieConflictException e) { + if (e instanceof NessieReferenceConflictException) { + throw UserException.validationError().message(e.getMessage()).buildSilently(); + } + logger.error("Failed to create namespace due to Nessie conflict", e); + throw new RuntimeException(e); + } catch (NessieNotFoundException e) { + logger.error("Failed to create namespace due to Nessie not found", e); throw new RuntimeException(e); } } @@ -317,21 +460,50 @@ public void deleteNamespace(List namespacePathList, VersionContext versi } private void deleteNamespaceHelper(List namespacePathList, VersionContext version) { + ContentKey contentKey = ContentKey.of(namespacePathList); try { ResolvedVersionContext resolvedVersion = resolveVersionContext(version); + if (!resolvedVersion.isBranch()) { + throw UserException.validationError() + .message("Cannot delete folders for non-branch references.") + .buildSilently(); + } + final UsernameContext usernameContext = RequestContext.current().get(UsernameContext.CTX_KEY); + final String authorName = usernameContext != null ? 
usernameContext.getUserName() : null; + boolean isNamespaceNotEmpty = listEntries(namespacePathList, resolvedVersion, + NestingMode.SAME_DEPTH_ONLY, null, null) + .findAny().isPresent(); + + if (isNamespaceNotEmpty) { + throw UserException.validationError().message("Folder '%s' is not empty", contentKey.toPathString()) + .buildSilently(); + } + nessieApi - .deleteNamespace() - .reference(toRef(resolvedVersion)) - .namespace(Namespace.of(namespacePathList)) - .delete(); + .commitMultipleOperations() + .branch((Branch) toRef(resolvedVersion)) + .operation(Operation.Delete.of(contentKey)) + .commitMeta(CommitMeta.builder() + .author(authorName) + .message("Delete namespace key: " + contentKey) + .build()) + .commit(); } catch (NessieNamespaceNotFoundException e) { logger.warn("NessieNamespaceNotFound from path {}.", namespacePathList); return; } catch (NessieReferenceNotFoundException e) { + logger.error("Failed to delete namespace as Reference not found", e); throw new ReferenceNotFoundException(e); } catch (NessieNamespaceNotEmptyException e) { - throw UserException.validationError().message("Folder is not empty") + logger.error("Failed to delete namespace as Namespace not empty", e); + throw UserException.validationError().message("Folder '%s' is not empty", contentKey.toPathString()) .buildSilently(); + } catch (NessieConflictException e) { + logger.error("Failed to delete namespace due to Nessie conflict", e); + throw new RuntimeException(e); + } catch (NessieNotFoundException e) { + logger.error("Failed to delete namespace due to Nessie not found", e); + throw new RuntimeException(e); } } @@ -511,7 +683,8 @@ public void assignTag(String tagName, VersionContext sourceVersion) { @Override @WithSpan public String getMetadataLocation(List catalogKey, ResolvedVersionContext version, String jobId) { - return metrics.log("nessieGetContents", () -> getMetadataLocationHelper(catalogKey, version)); + return metrics.log("nessieGetMetadataLocation", + () -> getMetadataLocationHelper(catalogKey, version)); } private String getMetadataLocationHelper(List catalogKey, ResolvedVersionContext version) { @@ -545,62 +718,48 @@ private Optional getViewDialectHelper(List catalogKey, ResolvedV if (!(content instanceof IcebergView)) { return Optional.empty(); } - final String dialect = ((IcebergView) content).getDialect(); - - return (dialect == null) ?
Optional.empty() : Optional.of(dialect); - } - - private Content getContent(ContentKey contentKey, ResolvedVersionContext version) { - Content content = null; - try { - content = nessieContentsCache.getUnchecked(ImmutablePair.of(contentKey, version)); - if (content != null && !(content instanceof IcebergTable) && !(content instanceof IcebergView)) { - logger.warn( - "Unexpected content type from Nessie for key {} : type : {} ", - contentKey, - content.getType()); - } - } catch (UncheckedExecutionException e) { - if (e.getCause() instanceof NullMetadataException) { - return null; - } - Throwables.throwIfInstanceOf(e.getCause(), UserException.class); - throw e; - } - return content; + return Optional.ofNullable(dialect); } - private Optional getIcebergContentsHelper(ContentKey contentKey, ResolvedVersionContext version) { + @WithSpan + private Optional loadNessieContent(ContentKey contentKey, ResolvedVersionContext version) { + String logPrefix = String.format("Load of Nessie content (key: %s, version: %s)", + contentKey, version); + Content content; try { - Content content = nessieApi.getContent() + content = nessieApi.getContent() .key(contentKey) .reference(toRef(version)) .get() .get(contentKey); - logger.debug("Content for key '{}' at '{}': Content type :{} content {}", - contentKey, - version, - content == null ? "null" : content.getType(), - content == null ? "null" : content); - - if (content == null) { - logger.warn("Content from Nessie for key {} return null ", contentKey); - return Optional.empty(); - } - if (!(content instanceof IcebergTable) && !(content instanceof IcebergView)) { - logger.warn("Unexpected content type from Nessie for key {} : type : {} ", contentKey, content.getType()); - } - return Optional.of(content); } catch (NessieNotFoundException e) { - logger.error("Failed to get metadata location for table: {}", contentKey, e); - if (e.getErrorCode() == ErrorCode.REFERENCE_NOT_FOUND // TODO: Cleanup - || e.getErrorCode() != ErrorCode.CONTENT_NOT_FOUND) { - throw UserException.dataReadError(e).buildSilently(); + if (e.getErrorCode() == ErrorCode.CONTENT_NOT_FOUND) { + logger.warn("{} returned CONTENT_NOT_FOUND", logPrefix); + return Optional.empty(); } + logger.error("{} failed", logPrefix, e); + throw UserException.dataReadError(e).buildSilently(); + } + if (content == null) { + logger.warn("{} returned null", logPrefix); + return Optional.empty(); } + logger.debug("{} returned content type: {}, content: {}", + logPrefix, content.getType(), content); + if (!(content instanceof IcebergTable + || content instanceof IcebergView + || content instanceof Namespace)) { + logger.warn("{} returned unexpected content type: {} ", logPrefix, content.getType()); + } + return Optional.of(content); + } - return Optional.empty(); + private void ensureOperationOnBranch(ResolvedVersionContext version) { + if (!version.isBranch()) { + throw new IllegalArgumentException( + "Requested operation is not supported for non-branch reference: " + version); + } } @Override @@ -611,10 +770,11 @@ public void commitTable( NessieClientTableMetadata nessieClientTableMetadata, ResolvedVersionContext version, String baseContentId, - String jobId) { + String jobId, + String userName) { metrics.log( "commitTable", - () -> commitTableHelper(catalogKey, newMetadataLocation, nessieClientTableMetadata, version, baseContentId)); + () -> commitTableHelper(catalogKey, newMetadataLocation, nessieClientTableMetadata, version, baseContentId, userName)); } private void commitTableHelper( @@ -622,8 +782,9 @@ private 
void commitTableHelper( String newMetadataLocation, NessieClientTableMetadata nessieClientTableMetadata, ResolvedVersionContext version, - String baseContentId) { - Preconditions.checkArgument(version.isBranch()); + String baseContentId, + String userName) { + ensureOperationOnBranch(version); ContentKey contentKey = ContentKey.of(catalogKey); logger.debug("Committing new metadatalocation {} snapshotId {} currentSchemaId {} defaultSpecId {} sortOrder {} for key {} and id {}", @@ -644,21 +805,33 @@ private void commitTableHelper( if (baseContentId != null) { newTableBuilder.id(baseContentId); } - commitOperationHelper(contentKey, newTableBuilder.build(), version); + commitOperationHelper(contentKey, newTableBuilder.build(), version, userName); } - //TODO (DX-57285) : Pass expected/base content to this method. + private void commitOperationHelper( - ContentKey contentKey, - Content content, - ResolvedVersionContext version) { + ContentKey contentKey, + Content content, + ResolvedVersionContext version, + String userName) { + + //TODO (DX-59840): Remove the UsernameContext and get the info from userName in DCS after testing there + final UsernameContext usernameContext = RequestContext.current().get(UsernameContext.CTX_KEY); + final String authorName = usernameContext != null ? usernameContext.getUserName() : userName; + try { nessieApi .commitMultipleOperations() .branch((Branch) toRef(version)) .operation(Operation.Put.of(contentKey, content)) - .commitMeta(CommitMeta.fromMessage("Put key: " + contentKey)) + .commitMeta(CommitMeta.builder() + .author(authorName) + .message("Put key: " + contentKey) + .build()) .commit(); } catch (NessieConflictException e) { + if (e instanceof NessieReferenceConflictException) { + throw UserException.validationError().message(e.getMessage()).buildSilently(); + } throw new CommitFailedException(e, "Failed to commit operation"); } catch (NessieNotFoundException e) { throw UserException.dataReadError(e).buildSilently(); @@ -674,12 +847,13 @@ public void commitView( ViewVersionMetadata metadata, String dialect, ResolvedVersionContext version, - String baseContentId) { + String baseContentId, + String userName) { metrics.log( "commitView", () -> commitViewHelper( - catalogKey, newMetadataLocation, icebergView, metadata, dialect, version, baseContentId)); + catalogKey, newMetadataLocation, icebergView, metadata, dialect, version, baseContentId, userName)); } private void commitViewHelper( @@ -689,8 +863,9 @@ private void commitViewHelper( ViewVersionMetadata metadata, String dialect, ResolvedVersionContext version, - String baseContentId) { - Preconditions.checkArgument(version.isBranch()); + String baseContentId, + String userName) { + ensureOperationOnBranch(version); ContentKey contentKey = ContentKey.of(catalogKey); logger.debug( "Committing new metadatalocation {} versionId {} schemaId {} dialect {} sqlText {} for key {} id {}", @@ -703,7 +878,7 @@ private void commitViewHelper( ((baseContentId == null) ? 
"new object (null id) " : baseContentId)); ImmutableIcebergView.Builder viewBuilder = ImmutableIcebergView.builder(); - if (icebergView != null) { + if (icebergView != null && icebergView.getId() != null) { viewBuilder.id(icebergView.getId()); logger.debug("The view id {} for key {}", icebergView.getId(), contentKey); } @@ -717,23 +892,21 @@ private void commitViewHelper( if (baseContentId != null) { newViewBuilder.id(baseContentId); } - commitOperationHelper(contentKey, newViewBuilder.build(), version); + commitOperationHelper(contentKey, newViewBuilder.build(), version, userName); } @Override @WithSpan - public void deleteCatalogEntry(List catalogKey, ResolvedVersionContext version) { - Preconditions.checkArgument(version.isBranch()); - metrics.log("deleteCatalogEntry", () -> deleteCatalogEntryHelper(catalogKey, version)); + public void deleteCatalogEntry(List catalogKey, ResolvedVersionContext version, String userName) { + metrics.log("deleteCatalogEntry", () -> deleteCatalogEntryHelper(catalogKey, version, userName)); } - private void deleteCatalogEntryHelper(List catalogKey, ResolvedVersionContext version) { + private void deleteCatalogEntryHelper(List catalogKey, ResolvedVersionContext version, String userName) { + ensureOperationOnBranch(version); final Reference versionRef = toRef(version); final ContentKey contentKey = ContentKey.of(catalogKey); logger.debug("Deleting entry in Nessie for key {} ", contentKey); // Check if reference exists to give back a proper error - // TODO(DX-44309): Get the expected commit from the getContents and provide that to the commitMultipleOperations - // So the deleteKey is atomic. String metadataLocation = getMetadataLocation(catalogKey, version, null); if(metadataLocation == null){ logger.debug("Tried to delete key : {} but it was not found in nessie ", catalogKey); @@ -742,19 +915,25 @@ private void deleteCatalogEntryHelper(List catalogKey, ResolvedVersionCo .buildSilently(); } + //TODO (DX-59840): Remove the UsernameContext and get the info from userName in DCS after testing there + final UsernameContext usernameContext = RequestContext.current().get(UsernameContext.CTX_KEY); + final String authorName = usernameContext != null ? 
usernameContext.getUserName() : userName; + try { nessieApi .commitMultipleOperations() .branchName(versionRef.getName()) .hash(versionRef.getHash()) .operation(Operation.Delete.of(contentKey)) - .commitMeta(CommitMeta.fromMessage("Deleting key: " + contentKey)) + .commitMeta(CommitMeta.builder() + .author(authorName) + .message("Deleting key: " + contentKey) + .build()) .commit(); } catch (NessieNotFoundException e) { - // TODO: DX-46713 Cleanup and add doc for the nessie client logger.debug("Tried to delete key : {} but it was not found in nessie ", catalogKey); throw UserException.validationError(e) - .message(String.format("Version reference not found in nessie for %s", catalogKey)) + .message(String.format("Version reference not found in nessie for %s", catalogKey)) .buildSilently(); } catch (NessieConflictException e) { logger.debug("The catalog entry {} could not be removed from Nessie", catalogKey); @@ -765,8 +944,10 @@ private void deleteCatalogEntryHelper(List catalogKey, ResolvedVersionCo } @Override + @WithSpan public VersionedPlugin.EntityType getVersionedEntityType(List tableKey, ResolvedVersionContext version) { - return metrics.log("IcebergGetContents", () -> getVersionedEntityTypeHelper(tableKey, version)); + return metrics.log("nessieGetVersionedEntityType", + () -> getVersionedEntityTypeHelper(tableKey, version)); } private VersionedPlugin.EntityType getVersionedEntityTypeHelper(List catalogKey, ResolvedVersionContext version) { @@ -795,8 +976,9 @@ private Reference getReference(VersionContext versionContext) { .refName(versionContext.getValue()) .get(); } catch (NessieNotFoundException e) { - logger.error(e.getMessage()); - throw new ReferenceNotFoundException("Reference " + versionContext.getType().toString().toLowerCase() + " " + versionContext.getValue() + " not found"); + String error = versionContext.toStringFirstLetterCapitalized() + " is not found"; + logger.error(error, e); + throw new ReferenceNotFoundException(error); } return reference; @@ -810,29 +992,12 @@ private Reference toRef(ResolvedVersionContext resolvedVersionContext) { case TAG: return Tag.of(resolvedVersionContext.getRefName(), resolvedVersionContext.getCommitHash()); case BARE_COMMIT: - return Branch.of(DETACHED, resolvedVersionContext.getCommitHash()); + return Detached.of(resolvedVersionContext.getCommitHash()); default: throw new IllegalStateException("Unexpected value: " + resolvedVersionContext.getType()); } } - private class NessieContentsCacheLoader extends CacheLoader, Content> { - @Override - public Content load(ImmutablePair pair) { - ContentKey contentkey = pair.left; - ResolvedVersionContext version = pair.right; - - Optional icebergContent = getIcebergContentsHelper(contentkey, version); - if (icebergContent == null || !icebergContent.isPresent()) { - throw new NullMetadataException(); - } - return icebergContent.get(); - } - } - - public static final class NullMetadataException extends RuntimeException { - } - @Override @WithSpan public String getContentId(List tableKey, ResolvedVersionContext version, String jobId) { @@ -845,4 +1010,13 @@ public String getContentId(List tableKey, ResolvedVersionContext version return content.getId(); } + @Override + public NessieApi getNessieApi() { + return nessieApi; + } + + @Override + public void close() { + nessieApi.close(); + } } diff --git a/sabot/kernel/src/main/java/com/dremio/plugins/UsernameAwareNessieClientImpl.java b/sabot/kernel/src/main/java/com/dremio/plugins/UsernameAwareNessieClientImpl.java new file mode 100644 index 
0000000000..70823fe7c8 --- /dev/null +++ b/sabot/kernel/src/main/java/com/dremio/plugins/UsernameAwareNessieClientImpl.java @@ -0,0 +1,254 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.plugins; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.stream.Stream; + +import javax.annotation.Nullable; + +import org.apache.iceberg.view.ViewVersionMetadata; +import org.projectnessie.client.api.NessieApi; +import org.projectnessie.model.IcebergView; + +import com.dremio.context.RequestContext; +import com.dremio.context.UserContext; +import com.dremio.context.UsernameContext; +import com.dremio.exec.catalog.ResolvedVersionContext; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.store.ChangeInfo; +import com.dremio.exec.store.ReferenceInfo; +import com.dremio.service.users.User; +import com.dremio.service.users.UserNotFoundException; +import com.dremio.service.users.UserService; +import com.dremio.service.users.proto.UID; +import com.google.common.base.Preconditions; + +/** + * This class acts as a decorator and resolves the uuid to the corresponding username string. + * The username string can be used by referencing the UsernameContext. 
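The new class below is a decorator: it resolves the caller's user id to a username once per request, attaches it to the ambient RequestContext, and then delegates every NessieClient call inside that enriched context. A minimal, hypothetical sketch of that decorate-then-delegate idea (ContextDecoratorSketch, UserLookup, and the thread-local map are illustrative stand-ins, not Dremio APIs):

import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;

public class ContextDecoratorSketch {
  // Stand-in for an ambient request context (assumption: the real one is richer).
  static final ThreadLocal<Map<String, String>> CONTEXT =
      ThreadLocal.withInitial(ConcurrentHashMap::new);

  interface UserLookup { String userNameById(String userId); }

  static <T> T callWithUsername(String userId, UserLookup lookup, Callable<T> delegate)
      throws Exception {
    Map<String, String> ctx = CONTEXT.get();
    // Resolve and attach the username only if it is not already present.
    ctx.computeIfAbsent("username", k -> lookup.userNameById(userId));
    try {
      return delegate.call();
    } finally {
      ctx.remove("username"); // keep the thread-local clean for reuse
    }
  }

  public static void main(String[] args) throws Exception {
    String commitAuthor = callWithUsername("uid-42", id -> "alice",
        () -> "author=" + CONTEXT.get().get("username"));
    System.out.println(commitAuthor); // author=alice
  }
}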
+ * + * TODO: + * Once the ticket DX-64013 [Refactoring of "user_group_ctx_key"] is completed, this class is + * unnecessary and should be removed; the context key can then be extracted directly + * in the createNamespace/deleteNamespace methods, where the author name only needs to be passed while committing to Nessie. + * Refer to the epic DX-64087: Remove UsernameAwareNessieClientImpl class + */ +public class UsernameAwareNessieClientImpl implements NessieClient { + + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UsernameAwareNessieClientImpl.class); + private final NessieClient nessieClient; + private final UserService userService; + + public UsernameAwareNessieClientImpl(NessieClient nessieClient, UserService userService) { + this.nessieClient = nessieClient; + logger.debug("UserService is not null: {}", !Objects.isNull(userService)); + Preconditions.checkArgument(!Objects.isNull(userService), "User Service is required"); + this.userService = userService; + } + + private RequestContext getRequestContextWithUsernameContext() { + RequestContext currentRequestContext = RequestContext.current(); + UserContext userContext = currentRequestContext.get(UserContext.CTX_KEY); + boolean hasUsernameContext = Objects.nonNull(currentRequestContext.get(UsernameContext.CTX_KEY)); + if (userContext != null && !hasUsernameContext) { + try { + User user = userService.getUser(new UID(userContext.getUserId())); + UsernameContext usernameContext = new UsernameContext(user.getUserName()); + currentRequestContext = currentRequestContext.with(UsernameContext.CTX_KEY, usernameContext); + } catch (UserNotFoundException e) { // User not found. Skip adding UsernameContext. + logger.debug("User not found: {}", e.getMessage()); + } + } + return currentRequestContext; + } + + private <T> T callWithUsernameContext(Callable<T> callable) { + try { + return getRequestContextWithUsernameContext().call(callable); + } catch (RuntimeException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public ResolvedVersionContext getDefaultBranch() { + return callWithUsernameContext(nessieClient::getDefaultBranch); + } + + @Override + public ResolvedVersionContext resolveVersionContext(VersionContext versionContext) { + return callWithUsernameContext(() -> nessieClient.resolveVersionContext(versionContext)); + } + + @Override + public ResolvedVersionContext resolveVersionContext(VersionContext versionContext, String jobId) { + return callWithUsernameContext(() -> nessieClient.resolveVersionContext(versionContext, jobId)); + } + + @Override + public boolean commitExists(String commitHash) { + return callWithUsernameContext(() -> nessieClient.commitExists(commitHash)); + } + + @Override + public Stream<ReferenceInfo> listBranches() { + return callWithUsernameContext(nessieClient::listBranches); + } + + @Override + public Stream<ReferenceInfo> listTags() { + return callWithUsernameContext(nessieClient::listTags); + } + + @Override + public Stream<ReferenceInfo> listReferences() { + return callWithUsernameContext(nessieClient::listReferences); + } + + @Override + public Stream<ChangeInfo> listChanges(VersionContext version) { + return callWithUsernameContext(() -> nessieClient.listChanges(version)); + } + + @Override + public Stream<ExternalNamespaceEntry> listEntries( + @Nullable List<String> catalogPath, + ResolvedVersionContext resolvedVersion, + NestingMode nestingMode, + @Nullable Set<ExternalNamespaceEntry.Type> contentTypeFilter,
@Nullable String celFilter) { + return callWithUsernameContext(() -> + nessieClient.listEntries(catalogPath, resolvedVersion, nestingMode, contentTypeFilter, celFilter)); + } + + @Override + public void createNamespace(List namespacePathList, VersionContext version) { + getRequestContextWithUsernameContext().run(() -> nessieClient.createNamespace(namespacePathList, version)); + } + + @Override + public void deleteNamespace(List namespacePathList, VersionContext version) { + getRequestContextWithUsernameContext().run(() -> nessieClient.deleteNamespace(namespacePathList, version)); + } + + @Override + public void createBranch(String branchName, VersionContext sourceVersion) { + getRequestContextWithUsernameContext().run(() -> nessieClient.createBranch(branchName, sourceVersion)); + } + + @Override + public void createTag(String tagName, VersionContext sourceVersion) { + getRequestContextWithUsernameContext().run(() -> nessieClient.createTag(tagName, sourceVersion)); + } + + @Override + public void dropBranch(String branchName, String branchHash) { + getRequestContextWithUsernameContext().run(() -> nessieClient.dropBranch(branchName, branchHash)); + } + + @Override + public void dropTag(String tagName, String tagHash) { + getRequestContextWithUsernameContext().run(() -> nessieClient.dropTag(tagName, tagHash)); + } + + @Override + public void mergeBranch(String sourceBranchName, String targetBranchName) { + getRequestContextWithUsernameContext().run(() -> nessieClient.mergeBranch(sourceBranchName, targetBranchName)); + } + + @Override + public void assignBranch(String branchName, VersionContext sourceVersion) { + getRequestContextWithUsernameContext().run(() -> nessieClient.assignBranch(branchName, sourceVersion)); + } + + @Override + public void assignTag(String tagName, VersionContext sourceVersion) { + getRequestContextWithUsernameContext().run(() -> nessieClient.assignTag(tagName, sourceVersion)); + } + + @Override + public String getMetadataLocation(List catalogKey, ResolvedVersionContext version, String jobId) { + return callWithUsernameContext(() -> nessieClient.getMetadataLocation(catalogKey, version, jobId)); + } + + @Override + public Optional getViewDialect(List catalogKey, ResolvedVersionContext version) { + return callWithUsernameContext(() -> nessieClient.getViewDialect(catalogKey, version)); + } + + @Override + public void commitTable(List catalogKey, + String newMetadataLocation, + NessieClientTableMetadata nessieClientTableMetadata, + ResolvedVersionContext version, + String baseContent, + String jobId, + String userName) { + getRequestContextWithUsernameContext().run(() -> nessieClient.commitTable(catalogKey, newMetadataLocation, nessieClientTableMetadata, version, baseContent, jobId, userName)); + } + + @Override + public void commitView(List catalogKey, + String newMetadataLocation, + IcebergView icebergView, + ViewVersionMetadata metadata, + String dialect, + ResolvedVersionContext version, + String baseContentId, + String userName) { + getRequestContextWithUsernameContext().run(() -> nessieClient.commitView(catalogKey, newMetadataLocation, icebergView, metadata, dialect, version, baseContentId, userName)); + } + + @Override + public void deleteCatalogEntry(List catalogKey, ResolvedVersionContext version, String userName) { + getRequestContextWithUsernameContext().run(() -> nessieClient.deleteCatalogEntry(catalogKey, version, userName)); + } + + @Override + public VersionedPlugin.EntityType getVersionedEntityType(List tableKey, ResolvedVersionContext version) { + return 
callWithUsernameContext(() -> nessieClient.getVersionedEntityType(tableKey, version)); + } + + @Override + public String getContentId(List tableKey, ResolvedVersionContext version, String jobId) { + return callWithUsernameContext(() -> nessieClient.getContentId(tableKey, version, jobId)); + } + + @Override + public NessieApi getNessieApi() { + return nessieClient.getNessieApi(); + } + + @Override + public void close() { + getRequestContextWithUsernameContext().run(nessieClient::close); + } +} diff --git a/sabot/kernel/src/main/java/com/dremio/plugins/Version.java b/sabot/kernel/src/main/java/com/dremio/plugins/Version.java index ff91021b2d..51d79ffb76 100644 --- a/sabot/kernel/src/main/java/com/dremio/plugins/Version.java +++ b/sabot/kernel/src/main/java/com/dremio/plugins/Version.java @@ -51,8 +51,10 @@ public static Version parse(String s) { switch(components.length) { case 3: patch = Integer.parseInt(components[2]); + // fall through case 2: minor = Integer.parseInt(components[1]); + // fall through case 1: major = Integer.parseInt(components[0]); break; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipe.java b/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipe.java index b3dfc68403..5318a15719 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipe.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipe.java @@ -57,6 +57,7 @@ public interface Visitor { UP visitStraightPipe(StraightPipe pipe, DOWN down) throws EXCEP; } + @Override public abstract String toString(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipeline.java b/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipeline.java index c8087866b4..10adf35dec 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipeline.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/driver/Pipeline.java @@ -181,6 +181,10 @@ private State doPump() throws Exception { } } + public int numOperators() { + return operators.size(); + } + @Override public String toString(){ StringBuilder sb = new StringBuilder(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/driver/SmartOp.java b/sabot/kernel/src/main/java/com/dremio/sabot/driver/SmartOp.java index c1a77f4d30..521ca27b0f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/driver/SmartOp.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/driver/SmartOp.java @@ -82,6 +82,7 @@ void checkSchema(BatchSchema initialSchema) { )); } + @Override public int getOperatorId() { return popConfig.getProps().getOperatorId(); } @@ -109,10 +110,12 @@ public boolean shrinkMemory(long memoryUsed) throws Exception { } } + @Override public T getInner(){ return inner; } + @Override public OperatorContext getContext() { return context; } @@ -609,6 +612,7 @@ public State getState() { } + @Override public String toString(){ return inner.toString(); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/driver/WyePipe.java b/sabot/kernel/src/main/java/com/dremio/sabot/driver/WyePipe.java index 9bf85cdff7..d479121f44 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/driver/WyePipe.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/driver/WyePipe.java @@ -47,6 +47,7 @@ public WyePipe(DualInputOperator sink, OpPipe left, OpPipe right) { this.right = (Producer) rightOp; } + @Override public Result pump() { try { switch (downstream.getState()) { @@ -110,6 +111,7 @@ public UP accept(Visitor vi return visitor.visitWyePipe(this, down); } + @Override public Pipe getRequiredUpstream() { switch (downstream.getState()) { 
case CAN_CONSUME_L: diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/AbstractHeapClawBackStrategy.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/AbstractHeapClawBackStrategy.java index 88db8ad76e..229d351544 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/AbstractHeapClawBackStrategy.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/AbstractHeapClawBackStrategy.java @@ -26,11 +26,11 @@ * Abstract strategy for reducing heap usage. */ public abstract class AbstractHeapClawBackStrategy implements HeapClawBackStrategy { + public static final String FAIL_CONTEXT = "Query canceled by executor heap monitor"; + protected FragmentExecutors fragmentExecutors; protected QueriesClerk queriesClerk; - private final String failContext = "Query canceled by executor heap monitor"; - public AbstractHeapClawBackStrategy(FragmentExecutors fragmentExecutors, QueriesClerk queriesClerk) { this.queriesClerk = queriesClerk; this.fragmentExecutors = fragmentExecutors; @@ -73,7 +73,7 @@ protected void failQueries(List queries) { for (QueryId queryId : queries) { fragmentExecutors.failFragments(queryId, queriesClerk, new OutOfHeapMemoryException("heap monitor detected that the heap is almost full"), - failContext); + FAIL_CONTEXT); } } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/CoordinatorHeapClawBackStrategy.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/CoordinatorHeapClawBackStrategy.java index 79bdcda05e..96bb777c14 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/CoordinatorHeapClawBackStrategy.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/CoordinatorHeapClawBackStrategy.java @@ -27,8 +27,8 @@ public class CoordinatorHeapClawBackStrategy implements HeapClawBackStrategy { // CancelQueryContext for cancelling queries in planning phase. private static final CancelQueryContext CANCEL_QUERY_CONTEXT = new CancelQueryContext(UserException.OOM_MSG, - "Query cancelled by coordinator heap monitor", - true); + "Query cancelled by coordinator heap monitor", + true); public CoordinatorHeapClawBackStrategy(Consumer cancelConsumer) { this.cancelConsumer = cancelConsumer; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentExecutors.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentExecutors.java index 67061e8ee0..6b78c8380d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentExecutors.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentExecutors.java @@ -425,6 +425,10 @@ public void buildAndStartQuery(final QueryTicket queryTicket) { } Map priorityToWeightMap = buildPriorityToWeightMap(); + double memorySetAsidePct = options.getOption(ExecConstants.PCT_MEMORY_SET_ASIDE); + if (useMemoryArbiter) { + memoryArbiter.setMemorySetAsidePct(memorySetAsidePct); + } for (PlanFragmentFull fragment : fullFragments) { FragmentExecutor fe = buildFragment(queryTicket, fragment, priorityToWeightMap.getOrDefault(fragment.getMajor().getFragmentExecWeight(), 1), schedulingInfo); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentWorkManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentWorkManager.java index 935850a1af..b919ae6954 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentWorkManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/FragmentWorkManager.java @@ -143,6 +143,7 @@ public FragmentWorkManager( * *

This is intended to be used by {@link SabotNode#close()}.
        */ + @Override public void waitToExit() { synchronized(this) { if (fragmentExecutors == null || fragmentExecutors.size() == 0) { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorManager.java index 98836b531c..b63c700f1f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorManager.java @@ -100,6 +100,7 @@ public void addLowMemListener(HeapLowMemListener lowMemListener) { } } + @Override public void start() { OptionManager optionManager = optionManagerProvider.get(); startHeapMonitorThread(optionManager.getOption(enableHeapMonitoringOption), diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorThread.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorThread.java index 5c6104f5e3..239d358b20 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorThread.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/HeapMonitorThread.java @@ -81,6 +81,7 @@ public HeapMonitorThread(HeapClawBackStrategy strategy, long clawbackThreshold, private class LowMemListener implements javax.management.NotificationListener { + @Override public void handleNotification(Notification notification, Object handback) { if (notification.getType().equals(MemoryNotificationInfo.MEMORY_COLLECTION_THRESHOLD_EXCEEDED)) { // wakeup the main thread. diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/MaestroProxyQueryTracker.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/MaestroProxyQueryTracker.java index 8b54c5802e..442b4417a1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/MaestroProxyQueryTracker.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/MaestroProxyQueryTracker.java @@ -152,6 +152,7 @@ public synchronized boolean tryStart(QueryTicket queryTicket, return true; } + @Override public NodeEndpoint getForeman() { return foreman; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/ThreadsStatsCollector.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/ThreadsStatsCollector.java index 1fb2475add..fdf83bae9e 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/ThreadsStatsCollector.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/ThreadsStatsCollector.java @@ -155,6 +155,7 @@ private Integer getTrailingAverage(Deque> list, int seconds) { } } + @Override public void close(){ this.interrupt(); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/WorkloadTicketDepotService.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/WorkloadTicketDepotService.java index 75cc9c9738..79b7f0c020 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/WorkloadTicketDepotService.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/WorkloadTicketDepotService.java @@ -47,10 +47,12 @@ public WorkloadTicketDepotService(final Provider allocator, fin ticketDepot = null; } + @Override public void start() throws Exception { ticketDepot = newTicketDepot(allocator.get(), dremioConfig.get().getSabotConfig()); } + @Override public void close() throws Exception { AutoCloseables.close(ticketDepot); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/BufferManagerImpl.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/BufferManagerImpl.java index 173ea2c661..b1926167f8 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/BufferManagerImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/BufferManagerImpl.java @@ -63,6 +63,7 @@ public boolean apply(long key, ArrowBuf value) { managedBuffers.clear(); } + @Override public ArrowBuf replace(ArrowBuf old, long newSize) { if (managedBuffers.remove(old.memoryAddress()) == null) { throw new IllegalStateException("Tried to remove unmanaged buffer."); @@ -71,16 +72,19 @@ public ArrowBuf replace(ArrowBuf old, long newSize) { return getManagedBuffer(newSize); } + @Override public ArrowBuf getManagedBuffer() { return getManagedBuffer(256); } + @Override public ArrowBuf getManagedBuffer(long size) { ArrowBuf newBuf = allocator.buffer(size, this); managedBuffers.put(newBuf.memoryAddress(), newBuf); return newBuf; } + @Override public ArrowBuf getManagedBufferSliced(long size) { if (size >= largeBufCapacity) { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/FunctionContext.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/FunctionContext.java index 0efba87ad9..3366fe0f03 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/FunctionContext.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/FunctionContext.java @@ -148,5 +148,5 @@ public interface FunctionContext { */ ValueHolder getConstantValueHolder(String value, MinorType type, Function holderInitializer); - Pair getSurvivingRowCountWithPruneFilter(ScanRelBase scan, PruneFilterCondition pruneCondition); + Pair getSurvivingRowCountWithPruneFilter(ScanRelBase scan, PruneFilterCondition pruneCondition) throws Exception; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorContextImpl.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorContextImpl.java index 0f5a8b4a7f..ff1cb5c8fc 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorContextImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorContextImpl.java @@ -203,6 +203,7 @@ public ExecutorService getExecutor() { return executor; } + @Override public ExpressionSplitCache getExpressionSplitCache() { return expressionSplitCache; } @@ -215,6 +216,7 @@ public QueryId getQueryIdForLocalQuery() { return fragmentExecutorBuilder.getFragmentExecutors().getQueryIdForLocalQuery(); } + @Override public LogicalPlanPersistence getLpPersistence() { Preconditions.checkNotNull(fragmentExecutorBuilder, "Cannot get LogicalPlanPersistence without initializing FragmentExecutorBuilder"); return fragmentExecutorBuilder.getPlanReader().getLpPersistance(); @@ -242,14 +244,17 @@ public BufferAllocator getAllocator() { return allocator; } + @Override public TunnelProvider getTunnelProvider() { return tunnelProvider; } + @Override public List getAssignments() { return assignments; } + @Override public EndpointsIndex getEndpointsIndex() { return endpointsIndex; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorStats.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorStats.java index c22bb1ccfd..fdff1d394c 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorStats.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/context/OperatorStats.java @@ -71,6 +71,10 @@ public class OperatorStats { private long numberOfBatches = 0; private long outputSizeInBytes = 0; + // DML specific stats + private long addedFilesCount = 0; + private long removedFilesCount = 0; + private boolean 
recordOutput = false; enum State { @@ -363,7 +367,9 @@ public OperatorProfile getProfile(boolean withDetails) { .setWaitNanos(getWaitNanos()) .setOperatorSubtype(operatorSubType) .setOutputRecords(outputRecords) - .setOutputBytes(outputSizeInBytes); + .setOutputBytes(outputSizeInBytes) + .setAddedFiles(addedFilesCount) + .setRemovedFiles(removedFilesCount); if (allocator != null) { b.setPeakLocalMemoryAllocated(Long.max(allocator.getPeakMemoryAllocation(), allocator.getInitReservation())); @@ -384,8 +390,7 @@ public void addAllMetrics(OperatorProfile.Builder builder) { public long getRecordsProcessed() { if (recordOutput) { return outputRecords; - } - else { + } else { long recordsProcessed = 0; for(int i = 0; i < recordsReceivedByInput.length; i++) { recordsProcessed += recordsReceivedByInput[i]; @@ -413,6 +418,14 @@ public void addStreamProfile(OperatorProfile.Builder builder) { } } + public void recordAddedFiles(long addedFilesCount) { + this.addedFilesCount += addedFilesCount; + } + + public void recordRemovedFiles(long removedFilesCount) { + this.removedFilesCount += removedFilesCount; + } + private class LongProc implements IntLongProcedure { private final OperatorProfile.Builder builder; @@ -585,8 +598,7 @@ public static WaitRecorder getWaitRecorder(OperatorStats operatorStats) { public static WaitRecorder getMetadataWaitRecorder(OperatorStats operatorStats, Path path) { if(operatorStats == null || path == null) { return NO_OP_RECORDER; - } - else { + } else { return operatorStats.createMetadataWaitRecorder(path.toString(), OperatorStats.getWaitRecorder(operatorStats)); } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FlushableSendingAccountor.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FlushableSendingAccountor.java index 0ea2607edc..6c4e48c337 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FlushableSendingAccountor.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FlushableSendingAccountor.java @@ -36,6 +36,7 @@ public SendingAccountor getAccountor(){ return sendingAccountor; } + @Override public boolean flushMessages() { return sendingAccountor.markBlockingWhenMessagesOutstanding(resourceGroup); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FragmentExecutor.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FragmentExecutor.java index d43588d7f9..c4db96c62b 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FragmentExecutor.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/FragmentExecutor.java @@ -21,6 +21,7 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -108,6 +109,7 @@ public class FragmentExecutor implements MemoryArbiterTask { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FragmentExecutor.class); private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(FragmentExecutor.class); public static final long MB = 1024 * 1024; + private static final long STARTING_GRANT = 16 * MB; @VisibleForTesting public static final String INJECTOR_DO_WORK = "injectOOMOnRun"; @@ -179,7 +181,11 @@ public class FragmentExecutor implements MemoryArbiterTask { // This is used to keep track of fragments that use the memory arbiter private final MemoryArbiter memoryArbiter; private long memoryGrantInBytes = 0; - 
private long maxMemoryUsedPerPump = 16 * MB; + // Memory used per pump. We start with 16 MB and reserve the max used so far, up to an upper bound. + private long memoryRequiredForNextPump = STARTING_GRANT; + private final long maxMemoryUsedPerPump; + private final boolean dynamicallyTrackAllocations; + private final Deque<Long> lastNAllocations = new ArrayDeque<>(); private final List shrinkableOperators = new ArrayList<>(); // This is the list of operators that have been asked to spill private final Map spillingOperators = new HashMap<>(); @@ -255,6 +261,8 @@ public FragmentExecutor( this.executionControls = executionControls; this.allocatorLock = sharedResources.getGroup(PIPELINE_RES_GRP).createResource("frag-allocator", SharedResourceType.UNKNOWN); this.memoryResource = sharedResources.getGroup(PIPELINE_RES_GRP).createResource("blocked-on-memory", SharedResourceType.WAIT_FOR_MEMORY); + this.maxMemoryUsedPerPump = fragmentOptions.getOption(ExecConstants.MAX_MEMORY_GRANT_SIZE); + this.dynamicallyTrackAllocations = fragmentOptions.getOption(ExecConstants.DYNAMICALLY_TRACK_ALLOCATIONS); } @Override @@ -272,10 +280,8 @@ public void blockOnMemory() { @Override public void unblockOnMemory() { - if (this.taskState == State.BLOCKED_ON_MEMORY) { - logger.debug("Fragment {}:{} was blocked on memory, unblocked now", fragment.getHandle().getMajorFragmentId(), fragment.getHandle().getMinorFragmentId()); - this.memoryResource.markAvailable(); - } + logger.debug("Fragment {}:{} was blocked on memory, unblocked now", fragment.getHandle().getMajorFragmentId(), fragment.getHandle().getMinorFragmentId()); + this.memoryResource.markAvailable(); } @Override @@ -298,9 +304,8 @@ public List getShrinkableOperators() { return shrinkableOperators; } - // TODO: Improve this based on actual usage private long getMemoryToAcquire() { - return maxMemoryUsedPerPump; + return memoryRequiredForNextPump; } private String preRunUpdate(int load) { @@ -470,12 +475,22 @@ private void run(){ // pump the pipeline taskState = pumper.run(); - long memoryUsedAfterPump = getUsedMemory(); - if (memoryUsedAfterPump > memoryUsedBeforePump) { - long diff = memoryUsedAfterPump - memoryUsedBeforePump; - if (diff > maxMemoryUsedPerPump) { - logger.debug("Used {} more memory than granted {}", diff, maxMemoryUsedPerPump); - maxMemoryUsedPerPump = diff; + if (memoryArbiter != null) { + long memoryUsedAfterPump = getUsedMemory(); + long extraMem = Math.max(memoryUsedAfterPump - memoryUsedBeforePump, 0); + if (extraMem > maxMemoryUsedPerPump) { + logger.debug("Used {} more memory than max configured memory {}", extraMem, maxMemoryUsedPerPump); + extraMem = maxMemoryUsedPerPump; + } + if (dynamicallyTrackAllocations) { + lastNAllocations.addLast(extraMem); + if (lastNAllocations.size() > 2 * pipeline.numOperators()) { + lastNAllocations.removeFirst(); + } + memoryRequiredForNextPump = lastNAllocations.stream().max(Long::compareTo).get(); + memoryRequiredForNextPump = memoryRequiredForNextPump == 0 ? STARTING_GRANT : memoryRequiredForNextPump; + } else { + memoryRequiredForNextPump = Math.max(memoryRequiredForNextPump, extraMem); } } @@ -625,9 +640,11 @@ private void finishRun() { case FINISHED: case CANCELLED: retire(); + break; default: // noop + break; } } finally { @@ -849,6 +866,7 @@ public TaskDescriptor getTaskDescriptor() {
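The tracking added above keeps a sliding window of per-pump allocations (bounded by twice the operator count) and requests the window maximum as the next memory grant, falling back to the 16 MB starting grant when the window maximum is zero. A self-contained sketch of that policy (PumpGrantTracker and its parameters are illustrative, not part of this change):

import java.util.ArrayDeque;
import java.util.Deque;

public class PumpGrantTracker {
  private static final long STARTING_GRANT = 16L * 1024 * 1024; // 16 MB
  private final Deque<Long> lastNAllocations = new ArrayDeque<>();
  private final int windowSize;
  private final long maxGrant;

  PumpGrantTracker(int windowSize, long maxGrant) {
    this.windowSize = windowSize;
    this.maxGrant = maxGrant;
  }

  // Called after each pump with the memory growth observed during the pump.
  long recordAndGetNextGrant(long extraMem) {
    extraMem = Math.min(Math.max(extraMem, 0), maxGrant); // clamp to [0, maxGrant]
    lastNAllocations.addLast(extraMem);
    if (lastNAllocations.size() > windowSize) {
      lastNAllocations.removeFirst(); // drop the oldest observation
    }
    long next = lastNAllocations.stream().max(Long::compareTo).orElse(STARTING_GRANT);
    return next == 0 ? STARTING_GRANT : next; // never shrink to a zero grant
  }

  public static void main(String[] args) {
    PumpGrantTracker t = new PumpGrantTracker(4, 256L * 1024 * 1024);
    System.out.println(t.recordAndGetNextGrant(8L * 1024 * 1024));  // 8 MB
    System.out.println(t.recordAndGetNextGrant(64L * 1024 * 1024)); // 64 MB
    System.out.println(t.recordAndGetNextGrant(1L * 1024 * 1024));  // still 64 MB (window max)
  }
}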
// This fragment got a shrink memory request. Add this to the list of spilling operators. private void handleShrinkMemoryRequest(OutOfBandMessage message) { unblockOnMemory(); + memoryArbiter.removeFromBlocked(this); ExecProtos.ShrinkMemoryUsage shrinkMemoryUsage = message.getPayload(ExecProtos.ShrinkMemoryUsage.parser()); Long prevValue = spillingOperators.put(message.getOperatorId(), shrinkMemoryUsage.getMemoryInBytes()); if (prevValue != null) { @@ -924,6 +942,9 @@ public void handle(OutOfBandMessage message) { } catch (IllegalStateException e) { logger.warn("Failure while handling OOB message. {}", finalMessage, e); throw e; + } catch (OutOfMemoryException e) { + logger.warn("Failure while handling OOB message. {}", finalMessage, e); + throw e; } catch (Exception e) { //propagate the exception logger.warn("Failure while handling OOB message. {}", finalMessage, e); @@ -1016,7 +1037,6 @@ public void updateBlockedOnUpstreamDuration(long duration) { @Override public void updateBlockedOnMemoryDuration(long duration) { stats.setBlockedOnMemoryDuration(duration); - memoryArbiter.removeFromBlocked(FragmentExecutor.this); } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/TunnelProviderImpl.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/TunnelProviderImpl.java index f7bdf36fa7..7c3069f90b 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/TunnelProviderImpl.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/fragment/TunnelProviderImpl.java @@ -78,6 +78,7 @@ public AccountingExecToCoordTunnel getCoordTunnel() { return coordTunnel; } + @Override public AccountingExecTunnel getExecTunnel(final NodeEndpoint endpoint) { AccountingExecTunnel tunnel = tunnels.get(endpoint); if (tunnel == null) { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecProtocol.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecProtocol.java index 31f69ecdcf..a2e9d7387f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecProtocol.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecProtocol.java @@ -69,6 +69,7 @@ public ExecProtocol(SabotConfig config, BufferAllocator allocator, FragmentExecu this.fragmentsManager = fragmentsManager; } + @Override public void handle(PhysicalConnection connection, int rpcType, ByteString pBody, ByteBuf body, ResponseSender sender) throws RpcException { switch(rpcType){ case RpcType.REQ_RECORD_BATCH_VALUE: { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecToCoordTunnel.java b/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecToCoordTunnel.java index 9f1513ce5f..1e1c66d8bd 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecToCoordTunnel.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/exec/rpc/ExecToCoordTunnel.java @@ -46,6 +46,7 @@ public ExecToCoordTunnel(NodeEndpoint endpoint, FabricCommandRunner runner) { this.endpoint = endpoint; } + @Override public void sendData(RpcOutcomeListener outcomeListener, QueryWritableBatch data) { manager.runCommand(new SendBatch(outcomeListener, data)); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/memory/MemoryArbiter.java b/sabot/kernel/src/main/java/com/dremio/sabot/memory/MemoryArbiter.java index aca53e1f70..e89e1a8d08 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/memory/MemoryArbiter.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/memory/MemoryArbiter.java @@ -54,4 +54,12 @@ default void startTask(MemoryArbiterTask memoryArbiterTask) { default boolean
removeFromBlocked(MemoryArbiterTask memoryArbiterTask) { return false; } + + /** + * Sets the memory set-aside by the MemoryArbiter + * + * @param memorySetAsidePct: percentage of memory that needs to be set-aside + */ + default void setMemorySetAsidePct(double memorySetAsidePct) { + } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/hash/HashAggOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/hash/HashAggOperator.java index 654c6d45e0..b8f84738d9 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/hash/HashAggOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/hash/HashAggOperator.java @@ -272,12 +272,13 @@ private boolean isCardinalityLimitNeeded(LogicalExpression expr, CompleteType ex return false; } String functionName = ((FunctionHolderExpr)expr).getName(); - if (!functionName.equals("min") && !functionName.equals("max")) { + if (!"min".equals(functionName) && !"max".equals(functionName)) { return false; } return !exprType.isFixedWidthScalar(); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") private void setupUpdateAggrValues(ClassGenerator cg) { final GeneratorMapping UPDATE_AGGR_INSIDE = GeneratorMapping.create("setupInterior", "updateAggrValuesInternal", "resetValues", "cleanup"); final GeneratorMapping UPDATE_AGGR_OUTSIDE = GeneratorMapping.create("setupInterior", "outputRecordValues", "resetValues", "cleanup"); @@ -309,9 +310,9 @@ private void setupGetIndex(ClassGenerator cg) { cg.getBlock("getVectorIndex")._return(var.invoke("getIndex").arg(JExpr.direct("recordIndex"))); return; } - + default: + throw new IllegalStateException("Unhandled SelectionVectorMode: " + incoming.getSchema().getSelectionVectorMode()); } - } @Override diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/AccumulatorBuilder.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/AccumulatorBuilder.java index 09a881990c..7248d27fc4 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/AccumulatorBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/AccumulatorBuilder.java @@ -100,7 +100,7 @@ public static MaterializedAggExpressionsResult getAccumulatorTypesFromMaterializ ImmutableList exprCopy = ImmutableList.copyOf(expr); /* COUNT(1) */ - if (funcName.equals("count") && (exprCopy.isEmpty() || (exprCopy.size() == 1 && isCountLiteral(exprCopy.get(0))))) { + if ("count".equals(funcName) && (exprCopy.isEmpty() || (exprCopy.size() == 1 && isCountLiteral(exprCopy.get(0))))) { accumulatorTypes[i] = (byte)AccumulatorType.COUNT1.ordinal(); /* count1 doesn't need an input accumulator vector */ inputVectors.add(null); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountColumnAccumulator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountColumnAccumulator.java index 608c56f39f..00c016e925 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountColumnAccumulator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountColumnAccumulator.java @@ -33,6 +33,7 @@ public CountColumnAccumulator(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask){ final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git 
a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountOneAccumulator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountOneAccumulator.java index 3c28f2fa49..fb3b33b15d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountOneAccumulator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/CountOneAccumulator.java @@ -33,6 +33,7 @@ public CountOneAccumulator(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask){ final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MaxAccumulators.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MaxAccumulators.java index b0aec2b3fe..82975b9ce0 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MaxAccumulators.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MaxAccumulators.java @@ -56,6 +56,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -105,6 +106,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -153,6 +155,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, Long.MIN_VALUE); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -252,6 +255,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -301,6 +305,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -351,6 +356,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -415,6 +421,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { FieldVector inputVector = getInput(); @@ -488,6 +495,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -544,6 +552,7 @@ public VarLenMaxAccumulator(FieldVector input, 
FieldVector transferVector, int m maxVarWidthVecUsagePercent, accumIndex, tempAccumulatorHolder, varLenVectorResizer); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MinAccumulators.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MinAccumulators.java index d3e09ae590..2784cd35ea 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MinAccumulators.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/MinAccumulators.java @@ -56,6 +56,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -105,6 +106,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -153,6 +155,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, Long.MAX_VALUE); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -253,6 +256,7 @@ void initialize(FieldVector vector) { } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -304,6 +308,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -354,6 +359,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -419,6 +425,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { FieldVector inputVector = getInput(); @@ -493,6 +500,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxMemAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -549,6 +557,7 @@ public VarLenMinAccumulator(FieldVector input, FieldVector transferVector, int m maxVarWidthVecUsagePercent, accumIndex, tempAccumulator, varLenVectorResizer); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumAccumulators.java 
b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumAccumulators.java index 341a03a44a..ed14b87c31 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumAccumulators.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumAccumulators.java @@ -48,6 +48,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -95,6 +96,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -209,6 +211,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -327,6 +330,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -374,6 +378,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -423,6 +428,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumZeroAccumulators.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumZeroAccumulators.java index b79ede3a48..c68e33076f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumZeroAccumulators.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/SumZeroAccumulators.java @@ -42,6 +42,7 @@ public IntSumZeroAccumulator(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -79,6 +80,7 @@ public FloatSumZeroAccumulator(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -140,6 +142,7 @@ private BigIntSumZeroAccumulator(final FieldVector input, final FieldVector outp intSumZeroAccumulator.getAccumulators()); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -247,6 +250,7 @@ private DoubleSumZeroAccumulator(final FieldVector input, final FieldVector outp ); } + 
@Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -286,6 +290,7 @@ public DecimalSumZeroAccumulator(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; @@ -327,6 +332,7 @@ public DecimalSumZeroAccumulatorV2(FieldVector input, FieldVector output, computationVectorAllocator); } + @Override public void accumulate(final long memoryAddr, final int count, final int bitsInChunk, final int chunkOffsetMask) { final long maxAddr = memoryAddr + count * PARTITIONINDEX_HTORDINAL_WIDTH; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggOperator.java index 45d667a38c..d31c3673a6 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggOperator.java @@ -917,7 +917,11 @@ public boolean shrinkMemory(long size) throws Exception { // TODO: Change this to true to get sibling fragments to spill as well boolean spilled = spillPartition(false); if (!spilled) { - throw UserException.memoryError(new Exception("Unable to find a partition to spill")).build(logger); + // did not find a victim partition that can be spilled + // this can happen in rare race conditions. Returning true + // to declare that this operator cannot shrink any more + logger.info("Unable to find a victim partition to spill"); + return true; } // if micro-spilling is enabled, the state changes to CAN_PRODUCE @@ -1197,8 +1201,7 @@ private long insertIntoPartitions(final int records, final int recordsPivoted, f inputCount = recordInfo.getNumOfElements(); } - } - else { + } else { final BaseVariableWidthVector inVec = (BaseVariableWidthVector) varLenAccums.get(i).getInput(); final int recordLen = VectorHelper.getVariableWidthVectorValueLength(inVec, keyIndex + recordsConsumed); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggPartitionSpillHandler.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggPartitionSpillHandler.java index 7c848ff412..f7cd5198ff 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggPartitionSpillHandler.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/VectorizedHashAggPartitionSpillHandler.java @@ -923,8 +923,7 @@ public void close() throws Exception { } try { AutoCloseables.close(activeSpilledPartitions); - } - catch (IOException ignored) { + } catch (IOException ignored) { /* Enter this catch block when disk is already full, and therefore cannot flush (write data) to disk * Making sure to catch this exception to allow this close() to complete * This ensures calling AutoCloseables.close(spillManager), hence clearing spill files @@ -932,8 +931,7 @@ public void close() throws Exception { } try { AutoCloseables.close(spilledPartitions); - } - catch (IOException ignored) { + } catch (IOException ignored) { /* Enter this catch block when disk is already full, and therefore cannot flush (write data) to disk * Making 
sure to catch this exception to allow this close() to complete * This ensures calling AutoCloseables.close(spillManager), hence clearing spill files diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountColumnAccumulatorNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountColumnAccumulatorNoSpill.java index d7d11abd8a..1eec8fc22e 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountColumnAccumulatorNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountColumnAccumulatorNoSpill.java @@ -27,6 +27,7 @@ public CountColumnAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long offsetAddr, final int count){ final long maxAddr = offsetAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountOneAccumulatorNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountOneAccumulatorNoSpill.java index 9c020d3be5..00e041abfc 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountOneAccumulatorNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/CountOneAccumulatorNoSpill.java @@ -27,6 +27,7 @@ public CountOneAccumulatorNoSpill(FieldVector output) { super(null, output); } + @Override public void accumulate(final long offsetAddr, final int count){ final long maxAddr = offsetAddr + count * 4; for(long ordinalAddr = offsetAddr; ordinalAddr < maxAddr; ordinalAddr += 4){ diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MaxAccumulatorsNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MaxAccumulatorsNoSpill.java index 586e671e31..8286e80c37 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MaxAccumulatorsNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MaxAccumulatorsNoSpill.java @@ -49,6 +49,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxMemAddr = memoryAddr + count * 4; final long incomaxgBit = getInput().getValidityBufferAddress(); @@ -77,6 +78,7 @@ public VarLenMaxAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -140,6 +142,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxMemAddr = memoryAddr + count * 4; final long incomaxgBit = getInput().getValidityBufferAddress(); @@ -174,6 +177,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, Long.MIN_VALUE); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxMemAddr = memoryAddr + count * 4; final long incomaxgBit = getInput().getValidityBufferAddress(); @@ -210,6 +214,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { 
final long maxMemAddr = memoryAddr + count * 4; final long incomaxgBit = getInput().getValidityBufferAddress(); @@ -247,6 +252,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxMemAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -285,6 +291,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxMemAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -339,6 +346,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { List buffers = getInput().getFieldBuffers(); final long incomingBit = buffers.get(0).memoryAddress(); @@ -410,6 +418,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; List buffers = getInput().getFieldBuffers(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MinAccumulatorsNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MinAccumulatorsNoSpill.java index dd4b2cbed7..563eda4cb1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MinAccumulatorsNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/MinAccumulatorsNoSpill.java @@ -48,6 +48,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -76,6 +77,7 @@ public VarLenMinAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -141,6 +143,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -175,6 +178,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, Long.MAX_VALUE); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -211,6 +215,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -249,6 +254,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -288,6 +294,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 
WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -341,6 +348,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { FieldVector inputVector = getInput(); final long incomingBit = inputVector.getValidityBufferAddress(); @@ -415,6 +423,7 @@ void initialize(FieldVector vector) { setNullAndValue(vector, INIT); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumAccumulatorsNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumAccumulatorsNoSpill.java index 19a537da81..627f2f2a47 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumAccumulatorsNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumAccumulatorsNoSpill.java @@ -43,6 +43,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -77,6 +78,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -111,6 +113,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -146,6 +149,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -182,6 +186,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -220,6 +225,7 @@ void initialize(FieldVector vector) { setNullAndZero(vector); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumZeroAccumulatorsNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumZeroAccumulatorsNoSpill.java index 2d6e1cdbe7..65a6de5423 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumZeroAccumulatorsNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/aggregate/vectorized/nospill/SumZeroAccumulatorsNoSpill.java @@ -38,6 +38,7 @@ public IntSumZeroAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -63,6 +64,7 @@ public FloatSumZeroAccumulatorNoSpill(FieldVector input, FieldVector output) { 
super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -87,6 +89,7 @@ public BigIntSumZeroAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -112,6 +115,7 @@ public DoubleSumZeroAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * 4; final long incomingBit = getInput().getValidityBufferAddress(); @@ -139,6 +143,7 @@ public DecimalSumZeroAccumulatorNoSpill(FieldVector input, FieldVector output) { super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); @@ -169,6 +174,7 @@ public DecimalSumZeroAccumulatorNoSpillV2(FieldVector input, FieldVector output) super(input, output); } + @Override public void accumulate(final long memoryAddr, final int count) { final long maxAddr = memoryAddr + count * WIDTH_ORDINAL; FieldVector inputVector = getInput(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/boost/BoostOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/boost/BoostOperator.java index d03071daf1..b5185f3170 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/boost/BoostOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/boost/BoostOperator.java @@ -125,8 +125,8 @@ public int outputData() throws Exception { state.is(State.CAN_PRODUCE); currentReader.allocate(fieldVectorMap); - int recordCount; - if ((recordCount = currentReader.next()) == 0) { + int recordCount = currentReader.next(); + if (recordCount == 0) { if (!readers.hasNext()) { // We're on the last reader, and it has no (more) rows. 
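
An aside on the BoostOperator.outputData hunk above: hoisting the assignment out of the if-condition is the same cleanup that the checkstyle:InnerAssignment suppressions elsewhere in this patch opt out of for hotter code paths. A minimal sketch of the two shapes, using a hypothetical Reader interface rather than Dremio's actual RecordReader API:

interface Reader {
  // returns the number of records read; 0 signals end of data
  int next();
}

class InnerAssignmentExample {
  static int before(Reader reader) {
    int recordCount;
    if ((recordCount = reader.next()) == 0) { // assignment buried inside the condition
      return -1;
    }
    return recordCount;
  }

  static int after(Reader reader) {
    int recordCount = reader.next(); // the side effect now stands on its own line
    if (recordCount == 0) {
      return -1;
    }
    return recordCount;
  }
}

Both variants behave identically; the second simply makes the side effect visible at a glance, which is why style checkers flag the first.
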
// no need to close the reader (will be done when closing the operator) diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/hashtable/HashTableTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/hashtable/HashTableTemplate.java index c59702b8ef..a5037ff1d4 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/hashtable/HashTableTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/hashtable/HashTableTemplate.java @@ -394,6 +394,7 @@ private void dump(int idx) { } } + @Override public void close() throws Exception { AutoCloseables.close(Arrays.asList(htContainer, links, hashValues)); } @@ -499,10 +500,12 @@ private int numBuckets() { return startIndices.getValueCount(); } + @Override public int size() { return numEntries; } + @Override public void getStats(HashTableStats stats) { assert stats != null; stats.numBuckets = numBuckets(); @@ -511,10 +514,12 @@ public void getStats(HashTableStats stats) { stats.resizingTime = resizingTime.elapsed(TimeUnit.NANOSECONDS); } + @Override public boolean isEmpty() { return numEntries == 0; } + @Override public void close() throws Exception { List closeables = new ArrayList<>(); if (batchHolders != null) { @@ -530,6 +535,7 @@ private static int getBucketIndex(int hash, int numBuckets) { return hash & (numBuckets - 1); } + @SuppressWarnings("checkstyle:InnerAssignment") private static int roundUpToPowerOf2(int number) { int rounded = number >= MAXIMUM_CAPACITY ? MAXIMUM_CAPACITY @@ -540,6 +546,7 @@ private static int roundUpToPowerOf2(int number) { return rounded; } + @Override public int put(int incomingRowIdx) { final int hash = getHashBuild(incomingRowIdx); final int i = getBucketIndex(hash, numBuckets()); @@ -738,6 +745,7 @@ private void resizeAndRehashIfNeeded() { numResizing++; } + @Override public void outputKeys(int batchIdx, VectorContainer outContainer) { assert batchIdx < batchHolders.size(); batchHolders.get(batchIdx).outputKeys(outContainer); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableFilterUtil.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableFilterUtil.java index 766a9da4ae..155d7d90cb 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableFilterUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableFilterUtil.java @@ -21,7 +21,7 @@ import java.util.Optional; import org.apache.arrow.memory.ArrowBuf; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableKeyReader.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableKeyReader.java index aef4da94cd..0c8e981f71 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableKeyReader.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/HashTableKeyReader.java @@ -29,7 +29,7 @@ import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.types.pojo.Field; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.AutoCloseables; import com.dremio.exec.util.KeyFairSliceCalculator; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTable.java 
b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTable.java index ec47eab3d1..229f904c8b 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTable.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTable.java @@ -1270,14 +1270,17 @@ private static boolean memEqual(final long laddr, final long raddr, int len) { return true; } + @Override public int hashCode() { return System.identityHashCode(this); } + @Override public String toString() { return "BlockHashTable"; } + @Override public boolean equals(Object obj) { return this == obj; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTableNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTableNoSpill.java index cc1da9fe95..4240f0a4f4 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTableNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/LBlockHashTableNoSpill.java @@ -458,14 +458,17 @@ private static final boolean memEqual(final long laddr, final long raddr, int le return true; } + @Override public int hashCode() { return System.identityHashCode(this); } + @Override public String toString() { return "BlockHashTable"; } + @Override public boolean equals(Object obj) { return this == obj; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/QBlockHashTable.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/QBlockHashTable.java index d60c89a6ad..761475923f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/QBlockHashTable.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/QBlockHashTable.java @@ -107,6 +107,7 @@ public final int find(final long keyFixedVectorAddr, final long keyVarVectorAddr return getOrInsert(keyFixedVectorAddr, keyVarVectorAddr, keyIndex, false); } + @SuppressWarnings("checkstyle:InnerAssignment") // complex legacy code private final int getOrInsert(final long keyFixedVectorAddr, final long keyVarVectorAddr, final int keyIndex, boolean insertNew) { final int blockWidth = pivot.getBlockWidth(); final int capacity = this.capacity; @@ -305,6 +306,7 @@ private void addDataBlocks() { } } + @SuppressWarnings("checkstyle:InnerAssignment") // complex legacy code private final void rehash(int newCapacity) { // grab old references. 
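
The hashCode/toString/equals methods that gain @Override annotations in LBlockHashTable and LBlockHashTableNoSpill above, and in the QBlockHashTable hunk that follows, all implement reference-identity semantics. The pairing is the point: once equals is this == obj, System.identityHashCode(this) is exactly what keeps the equals/hashCode contract intact (it is the value Object.hashCode would return anyway). A minimal self-contained illustration with a hypothetical class, not Dremio code:

final class IdentityKeyed {
  @Override
  public boolean equals(Object obj) {
    return this == obj; // equal only to itself, regardless of contents
  }

  @Override
  public int hashCode() {
    return System.identityHashCode(this); // consistent with identity equals
  }

  public static void main(String[] args) {
    IdentityKeyed a = new IdentityKeyed();
    IdentityKeyed b = new IdentityKeyed();
    System.out.println(a.equals(a)); // true
    System.out.println(a.equals(b)); // false
    System.out.println(a.hashCode() == System.identityHashCode(a)); // true
  }
}

Without @Override, the compiler would silently accept a mistyped signature such as equals(IdentityKeyed obj), which overloads rather than overrides; the annotation turns that slip into a compile error, which is the motivation behind most of the additions in this patch.
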
@@ -444,14 +446,17 @@ private static final boolean memEqual(final long laddr, final long raddr, int le return true; } + @Override public int hashCode() { return System.identityHashCode(this); } + @Override public String toString() { return "BlockHashTable"; } + @Override public boolean equals(Object obj) { return this == obj; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/ResizeListenerNoSpill.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/ResizeListenerNoSpill.java index 15e6640dcc..e3d5cdbac5 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/ResizeListenerNoSpill.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/common/ht2/ResizeListenerNoSpill.java @@ -16,7 +16,7 @@ package com.dremio.sabot.op.common.ht2; public interface ResizeListenerNoSpill { - public static ResizeListenerNoSpill NO_OP = new ResizeListenerNoSpill() {public void resized(int newCapacity) {}}; + public static ResizeListenerNoSpill NO_OP = newCapacity -> {}; void resized(int newCapacity); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/ConditionalFieldBufferCopier6Util.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/ConditionalFieldBufferCopier6Util.java index 8c76c55f6f..fa79a6128e 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/ConditionalFieldBufferCopier6Util.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/ConditionalFieldBufferCopier6Util.java @@ -82,6 +82,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -240,6 +241,7 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void allocate(int records){ targetAlt.allocateNew(records * AVG_VAR_WIDTH, records); } @@ -316,6 +318,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ if(targetAlt != null){ targetAlt.allocateNew(records); @@ -370,6 +373,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ AllocationHelper.allocate(dst, records, 10); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier2Util.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier2Util.java index 9199e0081b..7c42e602c5 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier2Util.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier2Util.java @@ -80,10 +80,12 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { copy(offsetAddr, count, cursor); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -298,6 +300,7 @@ public void copy(long sv2, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { copy(offsetAddr, count, cursor); } @@ -362,6 +365,7 @@ public void copyInnerList(long 
listOffsetBufAddr, int count, int seekTo) { realloc.setCount(seekTo + offsetCount); } + @Override public void allocate(int records){ targetAlt.allocateNew(records * AVG_VAR_WIDTH, records); } @@ -411,8 +415,7 @@ private void seekAndCopy(long offsetAddr, int count, int seekTo) { final long addr = dstAddr + (targetIndex >>> 3); PlatformDependent.putByte(addr, (byte) (PlatformDependent.getByte(addr) | bitVal)); } - } - else { + } else { for (; offsetAddr < maxAddr; offsetAddr += STEP_SIZE, targetIndex++) { final int recordIndex = Short.toUnsignedInt(PlatformDependent.getShort(offsetAddr)); final int byteValue = PlatformDependent.getByte(srcAddr + (recordIndex >>> 3)); @@ -470,8 +473,7 @@ public void seekAndCopy(long offsetAddr, int count, long nullAddr, int nullCount final long addr = dstAddr + (targetIndex >>> 3); PlatformDependent.putByte(addr, (byte) (PlatformDependent.getByte(addr) | bitVal)); } - } - else { + } else { for (; offsetAddr < maxAddr; offsetAddr += STEP_SIZE, targetIndex++) { final int recordIndex = Short.toUnsignedInt(PlatformDependent.getShort(offsetAddr)); final int byteValue = PlatformDependent.getByte(srcAddr + (recordIndex >>> 3)); @@ -502,6 +504,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { seekAndCopy(offsetAddr, count, nullAddr, nullCount, 0); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { int targetIndex = cursor.getTargetIndex(); if (allocateAsFixed) { @@ -586,6 +589,7 @@ public void copyInnerList(long listOffsetBufAddr, int count, int seekTo) { } } + @Override public void allocate(int records){ if(targetAlt != null){ targetAlt.allocateNew(records); @@ -628,6 +632,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { copy(offsetAddr, count); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { copy(offsetAddr, count, cursor); } @@ -742,6 +747,7 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { copy(offsetAddr, count, cursor); } @@ -849,6 +855,7 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void copy(long offsetAddr, int count, long nullAddr, int nullCount, Cursor cursor) { copy(offsetAddr, count, cursor); } @@ -888,6 +895,7 @@ public void copyInnerList(long listOffsetBufAddr, int count, int seekTo) { childCopiers.forEach(c -> c.copyInnerList(listOffsetBufAddr, count, seekTo)); } + @Override public void allocate(int records){ AllocationHelper.allocate(dst, records, 10); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier4Util.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier4Util.java index 6323f1a6dc..eaf41f825d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier4Util.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier4Util.java @@ -74,6 +74,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -112,6 +113,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not 
supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -150,6 +152,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -217,6 +220,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records * 15, records); } @@ -276,6 +280,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ if(targetAlt != null){ targetAlt.allocateNew(records); @@ -313,6 +318,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ AllocationHelper.allocate(dst, records, 10); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier6Util.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier6Util.java index 2be3a2dbf3..fc5187926a 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier6Util.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferCopier6Util.java @@ -76,6 +76,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ targetAlt.allocateNew(records); } @@ -226,6 +227,7 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(targetIndex + count); } + @Override public void allocate(int records){ targetAlt.allocateNew(records * 15, records); } @@ -301,6 +303,7 @@ public void copy(long offsetAddr, int count, long nullAddr, int nullCount) { throw new UnsupportedOperationException("set null not supported"); } + @Override public void allocate(int records){ if(targetAlt != null){ targetAlt.allocateNew(records); @@ -354,6 +357,7 @@ public void copy(long offsetAddr, int count, Cursor cursor) { cursor.setTargetIndex(curTargetIndex + count); } + @Override public void allocate(int records){ AllocationHelper.allocate(dst, records, 10); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferPreAllocedCopier.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferPreAllocedCopier.java index 054c5d9938..d452ee680d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferPreAllocedCopier.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/copier/FieldBufferPreAllocedCopier.java @@ -182,7 +182,8 @@ private Cursor seekAndCopy(long sv2Addr, int count, Cursor cursor) { int targetIndex; int targetDataIndex; if (cursor == null) { - targetIndex = targetDataIndex = 0; + targetIndex = 0; + targetDataIndex = 0; } else { targetIndex = cursor.targetIndex; targetDataIndex = cursor.targetDataIndex; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/filter/FilterTemplate2.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/filter/FilterTemplate2.java index 3434e2279f..39192ca9bb 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/filter/FilterTemplate2.java +++ 
b/sabot/kernel/src/main/java/com/dremio/sabot/op/filter/FilterTemplate2.java @@ -48,6 +48,7 @@ public void setup(FunctionContext context, VectorAccessible incoming, VectorAcce doSetup(context, incoming, outgoing); } + @Override public int filterBatch(int recordCount){ if (recordCount == 0) { return 0; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/flatten/FlattenTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/flatten/FlattenTemplate.java index b01af0ff60..708dc143e8 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/flatten/FlattenTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/flatten/FlattenTemplate.java @@ -62,6 +62,7 @@ public void setFlattenField(RepeatedValueVector flattenField) { this.fieldToFlatten = flattenField; } + @Override public RepeatedValueVector getFlattenField() { return fieldToFlatten; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonConverter.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonConverter.java index b5ef11b660..3a235f1455 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonConverter.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonConverter.java @@ -50,6 +50,7 @@ import com.dremio.exec.catalog.CatalogUser; import com.dremio.exec.catalog.MetadataRequestOptions; import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.physical.Prel; import com.dremio.exec.planner.physical.ProjectPrel; import com.dremio.exec.planner.physical.visitor.BasePrelVisitor; @@ -212,7 +213,8 @@ private static CompleteType getLiteralSchema(QueryContext context, byte[] bytes) final int sizeLimit = Math.toIntExact(context.getOptions().getOption(ExecConstants.LIMIT_FIELD_SIZE_BYTES)); final int maxLeafLimit = Math.toIntExact(context.getOptions().getOption(CatalogOptions.METADATA_LEAF_COLUMN_MAX)); JsonReader jsonReader = new JsonReader(bufferManager.getManagedBuffer(), sizeLimit, maxLeafLimit, - context.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR), false, false); + context.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR), false, false, + context.getOptions().getOption(PlannerSettings.ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED)); jsonReader.setSource(bytes); ComplexWriter writer = new ComplexWriterImpl("dummy", vc); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonOperator.java index 677b0dba3b..a49f11ea86 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/fromjson/ConvertFromJsonOperator.java @@ -38,6 +38,7 @@ import com.dremio.exec.catalog.CatalogOptions; import com.dremio.exec.exception.JsonFieldChangeExceptionContext; import com.dremio.exec.exception.SetupException; +import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.record.TypedFieldId; import com.dremio.exec.record.VectorAccessible; import com.dremio.exec.record.VectorAccessibleComplexWriter; @@ -207,7 +208,8 @@ public JsonConverter(ConversionColumn column, int sizeLimit, int maxLeafLimit, T this.column = column; this.vector = vector; this.writer = VectorAccessibleComplexWriter.getWriter(column.getInputField(), outgoing); - this.reader = new JsonReader(context.getManagedBuffer(), 
sizeLimit, maxLeafLimit, context.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR), false, false); + this.reader = new JsonReader(context.getManagedBuffer(), sizeLimit, maxLeafLimit, context.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR), false, false, + context.getOptions().getOption(PlannerSettings.ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED)); this.outgoingVector = outgoingVector; this.sizeLimit = sizeLimit; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/hash/HashJoinProbeTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/hash/HashJoinProbeTemplate.java index e7b6a646c1..47c4b8e135 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/hash/HashJoinProbeTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/hash/HashJoinProbeTemplate.java @@ -210,6 +210,7 @@ public int projectBuildNonMatches() { * @return Negative if partial batch complete. Otherwise, all of probe batch * is complete. */ + @Override public int probeBatch() { final int targetRecordsPerBatch = this.targetRecordsPerBatch; final boolean projectUnmatchedProbe = this.projectUnmatchedProbe; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/merge/MergeJoinOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/merge/MergeJoinOperator.java index 10becdf457..3dddc33fdb 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/merge/MergeJoinOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/merge/MergeJoinOperator.java @@ -397,19 +397,19 @@ private void generateComparator() { g.setMappingSet(mainMappingSet); - final boolean nulls_equal = + final boolean nullsEqual = JoinUtils.checkAndReturnSupportedJoinComparator(conditions.get(i)) == Comparator.IS_NOT_DISTINCT_FROM; - final boolean nulls_high = true; /* TODO null_high, should use upstream config */ - final boolean asc_sorted = true; // TODO: ASC or DESC order sorted? + final boolean nullsHigh = true; /* TODO null_high, should use upstream config */ + final boolean ascSorted = true; // TODO: ASC or DESC order sorted? 
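
The snake_case-to-camelCase renames above are cosmetic, but the flag they carry is semantic: nullsEqual is true only when the join condition is IS NOT DISTINCT FROM, where two NULL keys compare as equal; for a plain = join, NULL never matches NULL. A rough plain-Java sketch of the decision the generated comparator makes, with hypothetical names (the real comparator is emitted via codegen, and the -1 mirrors the hunk below, where the ordering of two NULLs is irrelevant):

class NullableKeyComparison {
  // nullsEqual - true for IS NOT DISTINCT FROM semantics
  // nullsHigh  - whether NULL sorts after all non-NULL values
  static int compare(Integer left, Integer right, boolean nullsEqual, boolean nullsHigh) {
    if (left == null && right == null) {
      // Equality joins: NULL != NULL, so report a mismatch; any non-zero
      // value works because the ordering of two NULLs never matters here.
      return nullsEqual ? 0 : -1;
    }
    if (left == null) {
      return nullsHigh ? 1 : -1;
    }
    if (right == null) {
      return nullsHigh ? -1 : 1;
    }
    return Integer.compare(left, right);
  }
}

Note that nullsHigh and ascSorted are both hard-coded to true in this hunk, with TODOs to read them from the upstream sort configuration instead.
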
// handle case when null != null - if (!nulls_equal) { + if (!nullsEqual) { JConditional jc; jc = g.getEvalBlock()._if(left.getIsSet().eq(JExpr.lit(0)).cand(right.getIsSet().eq(JExpr.lit(0)))); jc._then()._return(JExpr.lit(-1)); // ordering does not really matter in null comparison } - LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(nulls_high, + LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(nullsHigh, left, right, producer); HoldingContainer out = g.addExpr(fh, ClassGenerator.BlockCreateMode.MERGE); @@ -418,7 +418,7 @@ private void generateComparator() { JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0))); // Not equal case - if (asc_sorted) { + if (ascSorted) { jc._then()._return(out.getValue()); } else { jc._then()._return(out.getValue().minus()); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlj/NLJWorkerTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlj/NLJWorkerTemplate.java index 4b1bc43c52..0653e5791f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlj/NLJWorkerTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlj/NLJWorkerTemplate.java @@ -49,6 +49,7 @@ public abstract class NLJWorkerTemplate implements NLJWorker { * @param rightContainer Hyper container * @param outgoing Output batch */ + @Override public void setupNestedLoopJoin( FunctionContext context, VectorAccessible left, @@ -67,6 +68,7 @@ public void setupNestedLoopJoin( * @param targetTotalOutput * @return */ + @Override public int emitRecords(int outputIndex, int targetTotalOutput) { assert outputIndex < targetTotalOutput; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/EvaluatingJoinMatcher.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/EvaluatingJoinMatcher.java index 865ce7aacf..429d230424 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/EvaluatingJoinMatcher.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/EvaluatingJoinMatcher.java @@ -161,10 +161,12 @@ private int outputNonMatches() { } } + @Override public long getCopyNanos() { return copyWatch.elapsed(TimeUnit.NANOSECONDS); } + @Override public long getMatchNanos() { return matchWatch.elapsed(TimeUnit.NANOSECONDS); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/IndexRange.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/IndexRange.java index 143e687ecf..e4031c3783 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/IndexRange.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/IndexRange.java @@ -55,6 +55,7 @@ public IndexRange( this.buildBatchSizes = buildBatchSizes; } + @Override public boolean hasNext() { return hasRemainingProbe() || hasRemainingBuild(); } @@ -72,6 +73,7 @@ public boolean isEmpty() { return currentProbeRange.isEmpty(); } + @Override public IndexRange nextOutput() { if (hasRemainingProbe()) { final IntRange nextProbe = IntRange.of(currentProbeRange.end, Math.min(totalProbeRange.end, currentProbeRange.end + probeBatchSize)); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/JoinMatcher.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/JoinMatcher.java index d674b9c7b5..7152650be4 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/JoinMatcher.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/JoinMatcher.java @@ -45,6 +45,7 @@ void setup(LogicalExpression expr, ClassProducer 
classProducer, VectorAccessible */ boolean needNextInput(); + @Override void close() throws Exception; /** diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/NLJEOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/NLJEOperator.java index ac3edad3e0..04f0e66666 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/NLJEOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/NLJEOperator.java @@ -29,6 +29,8 @@ import com.dremio.common.AutoCloseables; import com.dremio.common.exceptions.UserException; import com.dremio.common.expression.FunctionCall; +import com.dremio.common.expression.LogicalExpression; +import com.dremio.common.expression.ValueExpressions.BooleanExpression; import com.dremio.exec.physical.config.NestedLoopJoinPOP; import com.dremio.exec.planner.physical.NestedLoopJoinPrel; import com.dremio.exec.record.ExpandableHyperContainer; @@ -93,8 +95,15 @@ public NLJEOperator(OperatorContext context, NestedLoopJoinPOP config) { case INNER: case LEFT: break; // supported. + case FULL: + LogicalExpression condition = config.getCondition(); + if(condition instanceof BooleanExpression && ((BooleanExpression)condition).getBoolean()){ + // DX-59222 support FOJ with true condition + break; + } + throw UserException.unsupportedError().message("When using NLJ, we only support full outer joins with a 'true' condition.").buildSilently(); default: - throw UserException.unsupportedError().message("Joins of type %s using NLJ are not currenlty supported.", joinType.name()).buildSilently(); + throw UserException.unsupportedError().message("Joins of type %s using NLJ are not currently supported.", joinType.name()).buildSilently(); } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/StraightThroughMatcher.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/StraightThroughMatcher.java index 5aada75757..ed341820ff 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/StraightThroughMatcher.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/nlje/StraightThroughMatcher.java @@ -57,10 +57,12 @@ public int output() { return 0; } + @Override public long getCopyNanos() { return 0; } + @Override public long getMatchNanos() { return 0; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/EightByteInnerLeftProbeOff.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/EightByteInnerLeftProbeOff.java index 4349d0b888..f9f51cd9a0 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/EightByteInnerLeftProbeOff.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/EightByteInnerLeftProbeOff.java @@ -25,7 +25,7 @@ import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.SimpleBigIntVector; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/NonPartitionColFilters.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/NonPartitionColFilters.java index bcc242cc2a..8a14929742 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/NonPartitionColFilters.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/NonPartitionColFilters.java @@ -23,7 +23,7 @@ import org.apache.arrow.memory.BufferAllocator; import 
org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.ArrowType; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import com.dremio.common.AutoCloseables; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/PartitionColFilters.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/PartitionColFilters.java index d6a0b5d404..c448fd678b 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/PartitionColFilters.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/PartitionColFilters.java @@ -21,7 +21,7 @@ import java.util.Optional; import org.apache.arrow.memory.BufferAllocator; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.AutoCloseables; import com.dremio.exec.physical.config.RuntimeFilterProbeTarget; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/RuntimeFilterUtil.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/RuntimeFilterUtil.java index 09f182eba3..f0f8dbc639 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/RuntimeFilterUtil.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/RuntimeFilterUtil.java @@ -26,7 +26,7 @@ import java.util.stream.Collectors; import org.apache.arrow.memory.ArrowBuf; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.exec.ExecConstants; import com.dremio.exec.physical.config.HashJoinPOP; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperator.java index 3db7e73f36..1e7a952020 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperator.java @@ -168,6 +168,7 @@ public State getState() { return state; } + @Override public VectorAccessible setup(VectorAccessible left, VectorAccessible right) throws Exception { state.is(State.NEEDS_SETUP); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/VectorizedSpillingHashJoinOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/VectorizedSpillingHashJoinOperator.java index 652f2c4e7b..9a9c03c02d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/VectorizedSpillingHashJoinOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/VectorizedSpillingHashJoinOperator.java @@ -178,6 +178,7 @@ public State getState() { return state; } + @Override public VectorAccessible setup(VectorAccessible left, VectorAccessible right) throws Exception { state.is(State.NEEDS_SETUP); @@ -892,7 +893,8 @@ private static class ProbePivotCursor { void init(int batchSize) { this.batchSize = batchSize; - startPivotIdx = numPivoted = 0; + this.startPivotIdx = 0; + this.numPivoted = 0; } void update(int startPivotIdx, int numPivoted) { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/io/SpillSerializable.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/io/SpillSerializable.java index d26dabb95b..ced18ef5dd 100644 --- 
a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/io/SpillSerializable.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/io/SpillSerializable.java @@ -17,19 +17,18 @@ import java.io.IOException; -import org.apache.arrow.memory.ArrowBuf; -import org.apache.arrow.memory.util.LargeMemoryUtil; import org.apache.arrow.vector.FieldVector; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.record.VectorAccessible; import com.dremio.exec.record.VectorWrapper; -import com.dremio.exec.util.RoundUtil; import com.dremio.sabot.op.join.vhash.spill.pool.PageSupplier; +import com.dremio.sabot.op.join.vhash.spill.slicer.Sizer; import com.dremio.sabot.op.sort.external.SpillManager.SpillInputStream; import com.dremio.sabot.op.sort.external.SpillManager.SpillOutputStream; public interface SpillSerializable { + /** * Serialize and write a chunk to the output stream. * @@ -54,11 +53,13 @@ public interface SpillSerializable { static int computeUnpivotedSizeRounded(VectorAccessible va) { int total = 0; for (VectorWrapper wrapper : va) { - for (ArrowBuf buf : ((FieldVector) wrapper.getValueVector()).getFieldBuffers()) { - // we do a roundup for 64-bit alignment since that is a more accurate value for the size of buffers that - // need to be allocated for unload/merge. - total += RoundUtil.round8up(LargeMemoryUtil.checkedCastToInt(buf.readableBytes())); - } + FieldVector fieldVector = (FieldVector) wrapper.getValueVector(); + //Get Sizer implementation for the current vector type + Sizer sizer = Sizer.get(fieldVector); + + + total += sizer.getSizeInBytesStartingFromOrdinal(0, fieldVector.getValueCount()); + } return total; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/PagePool.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/PagePool.java index 9ed991d281..97eca9fccf 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/PagePool.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/PagePool.java @@ -151,6 +151,7 @@ public Page newPage() { return p; } + @Override public void close() { Preconditions.checkArgument(state != State.CLOSED); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/ReusingPageSupplier.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/ReusingPageSupplier.java index 806fa3caf5..00e7bd8a14 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/ReusingPageSupplier.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/pool/ReusingPageSupplier.java @@ -37,6 +37,7 @@ public BufferAllocator getAllocator() { return pool.getAllocator(); } + @Override public Page getPage(int size) { // round-up so that vector/buffer allocations happen at 8-byte alignments. 
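
The rounding comment above is a recurring theme in these hunks: page requests and vector buffers are sized at 8-byte boundaries so allocations stay aligned, and the removed computeUnpivotedSizeRounded body in SpillSerializable did the same per-buffer round-up before this patch delegated sizing to Sizer. Assuming RoundUtil.round8up is the conventional power-of-two round-up (an assumption; the actual implementation is not shown in this diff), it reduces to a mask trick:

class Alignment {
  // Round n up to the next multiple of 8. Adding 7 and clearing the low three
  // bits works because 8 is a power of two; n must be non-negative.
  static int round8up(int n) {
    return (n + 7) & ~7;
  }
  // round8up(0) == 0, round8up(1) == 8, round8up(8) == 8, round8up(13) == 16
}
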
size = RoundUtil.round8up(size); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/replay/JoinReplayer.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/replay/JoinReplayer.java index 69571c45ae..3cab1dd736 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/replay/JoinReplayer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/replay/JoinReplayer.java @@ -115,6 +115,7 @@ public JoinReplayer(JoinReplayEntry replayEntry, JoinSetupParams setupParams, Pa replayEntry.getProbeFiles(), setupParams.getBuildKeyPivot(), setupParams.getLeft().getSchema(), setupParams.getMaxInputBatchSize()); } + @Override public int run() throws Exception { logger.trace("entry replayState {}", replayState); int ret = 0; @@ -159,6 +160,7 @@ public int run() throws Exception { return ret; } + @Override public boolean isFinished() { return replayState == ReplayState.DONE; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedMerger.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedMerger.java index f95064454e..014409a509 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedMerger.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedMerger.java @@ -50,6 +50,7 @@ public class FixedMerger implements Merger { this.allocator = allocator; } + @Override public void merge(VectorContainerList srcContainers, Page dst, List vectorOutput) { List src = new ArrayList<>(); for (VectorWrapper wrapper : srcContainers.getWrappers(wrapperIdx)) { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedSizer.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedSizer.java index 1bde87e871..3ffe75f501 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedSizer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/FixedSizer.java @@ -63,8 +63,7 @@ public int getSizeInBitsStartingFromOrdinal(final int ordinal, final int numberO if (dataSizeInBits == 1 ){ //numberOfRecords number of bits to store data buffer + numberOfRecords number of bits to store validity bitmap buffer return 2 * RoundUtil.round64up(numberOfRecords); - } - else { + } else { final int dataBits = RoundUtil.round64up(dataSizeInBits * numberOfRecords); //data buffer final int validityBits = Sizer.getValidityBufferSizeInBits(numberOfRecords);//validity buffer diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/Sizer.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/Sizer.java index 6d034d2a91..1c23e0da7d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/Sizer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/Sizer.java @@ -37,7 +37,7 @@ * A tool that determines size of an operation and then provides a copier to * later do that operation. */ -interface Sizer { +public interface Sizer { int BYTE_SIZE_BITS = 8; int VALID_SIZE_BITS = 1; int OFFSET_SIZE_BYTES = 4; @@ -86,6 +86,16 @@ interface Sizer { */ int getSizeInBitsStartingFromOrdinal(int ordinal, int len); + /** + * Size in bytes required for 'len' number of records starting from 'ordinal'th record in the vector. 
+ * @param ordinal ordinal of the first record to include + * @param len number of records to include + * @return size in bytes for the given record range + */ + default int getSizeInBytesStartingFromOrdinal(int ordinal, int len) { + return getSizeInBitsStartingFromOrdinal(ordinal, len) / BYTE_SIZE_BITS; + } + /** * Bits required to store offset values for given number of records * @param numberOfRecords diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/VectorContainerList.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/VectorContainerList.java index 01c4c37d00..9b5b49fbd1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/VectorContainerList.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/join/vhash/spill/slicer/VectorContainerList.java @@ -37,11 +37,7 @@ public class VectorContainerList { int wrapperIdx = 0; for (VectorWrapper wrapper : current) { - List> oldList; - if ((oldList = wrapperIdToWrappers.get(wrapperIdx)) == null) { - oldList = new ArrayList<>(); - wrapperIdToWrappers.put(wrapperIdx, oldList); - } + List> oldList = wrapperIdToWrappers.computeIfAbsent(wrapperIdx, i -> new ArrayList<>()); oldList.add(wrapper); ++wrapperIdx; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaExpressionBuilder.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaExpressionBuilder.java index 6bc929a8c2..43097cb4d2 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaExpressionBuilder.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaExpressionBuilder.java @@ -317,6 +317,7 @@ public TreeNode visitNullConstant(TypedNullConstant constant, Void value) { return TreeBuilder.makeNull(constant.getCompleteType().getType()); } + @Override public TreeNode visitDecimalConstant(DecimalExpression decimalExpression, Void value) { BigInteger unScaledValue = decimalExpression.getDecimal().unscaledValue(); return TreeBuilder.makeDecimalLiteral(unScaledValue.toString(), diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaUtils.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaUtils.java index 0020fc6ba1..c72f96d118 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaUtils.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/llvm/GandivaUtils.java @@ -46,8 +46,7 @@ public static VectorSchemaRoot getSchemaRoot(VectorAccessible input, Set{ if(rf.isComplexType()) { return rf.getModifiedField(); - } - else { + } else { return rf.getReferencedFieldVector().getField(); } }) diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/BatchBufferFromFilesProvider.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/BatchBufferFromFilesProvider.java index d9597ac1da..fd13122f61 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/BatchBufferFromFilesProvider.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/BatchBufferFromFilesProvider.java @@ -33,6 +33,7 @@ import com.google.common.base.Preconditions; public class BatchBufferFromFilesProvider implements RawFragmentBatchProvider { + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BatchBufferFromFilesProvider.class); private final FileCursorManager cursorManager; private final FileCursorManager.Observer observer; private final BufferAllocator allocator; @@ -40,6 +41,7 @@ public class BatchBufferFromFilesProvider { private int currentFileSeq = 0; private InputStream currentInputStream; private boolean isStreamDone; + private
final String resourceName; public BatchBufferFromFilesProvider(String uniqueId, int readerMajorFragId, @@ -48,7 +50,8 @@ public BatchBufferFromFilesProvider(String uniqueId, FileCursorManagerFactory cursorManagerFactory) { this.cursorManager = cursorManagerFactory.getManager(uniqueId); - final SharedResource resource = resourceGroup.createResource("reader-" + readerMajorFragId + "-file-" + uniqueId, + this.resourceName = "reader-" + readerMajorFragId + "-file-" + uniqueId; + final SharedResource resource = resourceGroup.createResource(this.resourceName, SharedResourceType.SEND_MSG_DATA); this.observer = cursorManager.registerReader(resource); this.allocator = parentAllocator.newChildAllocator("bridgeFileReader", 0, Long.MAX_VALUE); @@ -79,6 +82,11 @@ private RawFragmentBatch getNextMessageFromStream() throws IOException { } FileExec.FileMessage fileMessage = FileExec.FileMessage.parseDelimitedFrom(currentInputStream); + if (fileMessage == null) { + String message = "Failure reading next batch of data from file - " + this.resourceName + "."; + logger.error(message); + throw new IOException(message); + } Preconditions.checkState(fileMessage.hasMsgSeq(), "invalid msg found in file"); if (!fileMessage.hasType()) { // indicates end-of-file, switch to next file diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/DataCollector.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/DataCollector.java index 4b9015758d..30159567ad 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/DataCollector.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/DataCollector.java @@ -23,5 +23,6 @@ interface DataCollector extends AutoCloseable { int getOppositeMajorFragmentId(); RawBatchBuffer[] getBuffers(); int getTotalIncomingFragments(); + @Override void close() throws Exception; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/UnlimitedRawBatchBuffer.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/UnlimitedRawBatchBuffer.java index 69c3ccc627..04a115f433 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/UnlimitedRawBatchBuffer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/UnlimitedRawBatchBuffer.java @@ -77,6 +77,7 @@ public void clear() { } } + @Override protected void enqueueInner(final RawFragmentBatch batch) { if (bufferQueue.size() < softlimit) { batch.sendOk(); @@ -84,6 +85,7 @@ protected void enqueueInner(final RawFragmentBatch batch) { bufferQueue.add(batch); } + @Override protected void upkeep(RawFragmentBatch batch) { } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/merging/MergerTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/merging/MergerTemplate.java index 333f307eb2..06f23dd2f8 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/merging/MergerTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/receiver/merging/MergerTemplate.java @@ -26,12 +26,15 @@ public abstract class MergerTemplate implements Merger { public MergerTemplate() throws SchemaChangeException { } + @Override public abstract void doSetup(@Named("context") FunctionContext context, @Named("incoming") VectorAccessible incoming, @Named("outgoing") VectorAccessible outgoing) throws SchemaChangeException; + @Override public abstract int doEval(@Named("leftIndex") int leftIndex, @Named("rightIndex") int rightIndex); + @Override public abstract void doCopy(@Named("inIndex") int inIndex, @Named("outIndex") int outIndex); } diff 
--git a/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/ScanOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/ScanOperator.java index 2186e665cb..a5f6ff068c 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/ScanOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/ScanOperator.java @@ -32,6 +32,7 @@ import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.OutOfMemoryException; import org.apache.arrow.vector.AllocationHelper; +import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; @@ -173,7 +174,10 @@ public enum Metric implements MetricDef { HIVE_FILE_FORMATS,// File Formats in hive sub scan represented as bitmap. Indices correspond to HiveFilFormat enum NUM_ZERO_SIZED_COLUMN, // Number of zero sized column NUM_EXTRA_FOOTER_READS, // Number of times footer is read for a split. - MAX_RECORD_READ_PER_READER // Maximum number of record read from Reader + MAX_RECORD_READ_PER_READER, // Maximum number of record read from Reader + ICEBERG_COMMIT_TIME, // Time to commit an operation to Iceberg table + ORPHAN_FILE_DISCOVERY_TIME, // Time to discover the orphan files + NUM_ORPHAN_FILES // Number of orphan files ; private final DisplayType displayType; @@ -485,6 +489,18 @@ public ScanMutator(VectorContainer outgoing, Map fieldVecto this.callBack = callBack; } + public ScanMutator(VectorContainer outgoing, OperatorContext context, MutatorSchemaChangeCallBack callBack) { + this.outgoing = outgoing; + this.fieldVectorMap = new HashMap<>(); + this.context = context; + this.callBack = callBack; + + outgoing.getSchema().getFields().forEach(field -> + fieldVectorMap.put(field.getName().toLowerCase(), outgoing.getValueAccessorById(FieldVector.class, + outgoing.getValueVectorId(SchemaPath.getSimplePath(field.getName())).getFieldIds()).getValueVector())); + } + + @Override public void removeField(Field field) throws SchemaChangeException { ValueVector vector = fieldVectorMap.remove(field.getName().toLowerCase()); if (vector == null) { @@ -553,6 +569,12 @@ public void allocate(int recordCount) { } } + public void allocate() { + for (final ValueVector v : fieldVectorMap.values()) { + v.allocateNew(); + } + } + @Override public ArrowBuf getManagedBuffer() { return context.getManagedBuffer(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/VectorContainerMutator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/VectorContainerMutator.java index 5b7df861d1..4e08efb086 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/VectorContainerMutator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/scan/VectorContainerMutator.java @@ -29,7 +29,7 @@ public class VectorContainerMutator implements OutputMutator { - private static CallBack NOOP = new CallBack(){public void doWork(){}}; + private static CallBack NOOP = () -> {}; private final VectorContainer container; private final BufferManager manager; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/screen/VectorRecordMaterializer.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/screen/VectorRecordMaterializer.java index 64addbb8cf..a4d70ac686 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/screen/VectorRecordMaterializer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/screen/VectorRecordMaterializer.java @@ -38,6 +38,7 @@ public VectorRecordMaterializer(OperatorContext context, 
VectorAccessible incomi assert incoming.getSchema() != null : "Schema must be defined."; } + @Override public QueryWritableBatch convertNext(int count) { final WritableBatch w = WritableBatch.get(incoming).transfer(allocator); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerDecorator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerDecorator.java index c8787b6179..9bf03168fc 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerDecorator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerDecorator.java @@ -78,6 +78,7 @@ public void finishWork() throws IOException { /** * decorator method to call multiple Partitioners close() */ + @Override public void close() throws Exception { AutoCloseables.close(partitioners); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerTemplate.java index 24451d4a14..f90301e68c 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sender/partition/PartitionerTemplate.java @@ -419,6 +419,7 @@ public SelectionVector4 getSelectionVector4() { throw new UnsupportedOperationException(); } + @Override public void close(){ vectorContainer.close(); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/DiskRunManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/DiskRunManager.java index d834d9fc3a..e1008d11a0 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/DiskRunManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/DiskRunManager.java @@ -651,8 +651,7 @@ private int spillBatch(VectorContainer outgoing, int records, SpillOutputStream * data. 
*/ outputBatch = new VectorAccessibleSerializable(batch, null, compressSpilledBatchAllocator, true); - } - else { + } else { /* no need for an allocator on the spill path if compression is not enabled */ outputBatch = new VectorAccessibleSerializable(batch, null, null, false); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/MemoryRun.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/MemoryRun.java index 29afb88abf..672d42b415 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/MemoryRun.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/MemoryRun.java @@ -199,7 +199,8 @@ public boolean addBatch(VectorAccessible incoming) throws Exception { final boolean first = size == 0; if (first) { maxBatchSize = rbi.getMemorySize(); - head = tail = rbi; + head = rbi; + tail = rbi; sorter.setup(rbi.data.getContainer()); } else { maxBatchSize = Math.max(maxBatchSize, batchSize); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/PriorityQueueCopier.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/PriorityQueueCopier.java index 29a4bbb38d..9efc29a26d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/PriorityQueueCopier.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/PriorityQueueCopier.java @@ -39,5 +39,6 @@ void setup( VectorAccessible incoming, VectorContainer outgoing) throws SchemaChangeException, IOException ; + @Override int copy(int targetRecordCount); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorter.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorter.java index b8484cbedf..7727972b3f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorter.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorter.java @@ -56,6 +56,7 @@ public QuickSorter(ExternalSort sortConfig, ClassProducer classProducer, Schema quickSorterBuffer = new SimpleIntVector("QuickSorterSimpleIntVector", allocator); } + @Override public boolean expandMemoryIfNecessary(int newRequiredSize) { try { // Realloc QuickSorter SimpleIntVector, doubles size each time. @@ -69,6 +70,7 @@ public boolean expandMemoryIfNecessary(int newRequiredSize) { return true; } + @Override public void setup(VectorAccessible batch) throws ClassTransformationException, SchemaChangeException, IOException { // Compile sorting classes. CodeGenerator cg = classProducer.createGenerator(QuickSorterInterface.TEMPLATE_DEFINITION); @@ -80,12 +82,14 @@ public void setup(VectorAccessible batch) throws ClassTransformationException, S quickSorter.setDataBuffer(quickSorterBuffer); } + @Override public void addBatch(RecordBatchData data, BufferAllocator copyTargetAllocator) throws SchemaChangeException { // No need to sort individual batches here, will sort all at end, just insert the values into the // quick sorter implementation here. 
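// [Editorial note] The comment above is the core of the QuickSorter design: batches are only
// buffered on arrival, and a single sort runs once all input is consumed, instead of sorting
// every batch and merging. A toy, self-contained illustration of that trade-off (hypothetical
// names; the real code sorts an SV4 of hyper-batch indices, not a bare int array):
final class DeferredSortSketch {
  private int[] keys = new int[0];

  // Per incoming batch: append only, no per-batch sort work.
  void addBatch(int[] batchKeys) {
    final int[] merged = java.util.Arrays.copyOf(keys, keys.length + batchKeys.length);
    System.arraycopy(batchKeys, 0, merged, keys.length, batchKeys.length);
    keys = merged;
  }

  // Once, at end of input: a single O(n log n) sort over the whole run.
  int[] finalSort() {
    java.util.Arrays.sort(keys);
    return keys;
  }
}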
quickSorter.add(data); } + @Override public ExpandableHyperContainer getHyperBatch() { if (quickSorter != null) { return quickSorter.getHyperBatch(); @@ -94,6 +98,7 @@ public ExpandableHyperContainer getHyperBatch() { } } + @Override public int getHyperBatchSize() { if (quickSorter != null) { return quickSorter.getHyperBatch().size(); @@ -102,10 +107,12 @@ public int getHyperBatchSize() { } } + @Override public SelectionVector4 getFinalSort(BufferAllocator copyTargetAllocator, int targetBatchSize) { return quickSorter.getFinalSort(copyTargetAllocator, targetBatchSize); } + @Override public void close() throws Exception { final List closeables = Lists.newArrayList(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterInterface.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterInterface.java index e27a5bc33c..61c4a7ce74 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterInterface.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterInterface.java @@ -34,5 +34,6 @@ public interface QuickSorterInterface extends AutoCloseable { SelectionVector4 getFinalSort(BufferAllocator allocator, int targetBatchSize); ExpandableHyperContainer getHyperBatch(); void setDataBuffer(SimpleIntVector intVectorBuffer); + @Override void close() throws Exception; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterTemplate.java index f022e13800..672feaa005 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/QuickSorterTemplate.java @@ -49,6 +49,7 @@ public void init(FunctionContext context, ExpandableHyperContainer hyperContaine doSetup(context, hyperContainer, null); } + @Override public void setDataBuffer(SimpleIntVector intVectorBuffer) { intVector = intVectorBuffer; totalCount = 0; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SingleBatchSorterTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SingleBatchSorterTemplate.java index 4295367915..89db490652 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SingleBatchSorterTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SingleBatchSorterTemplate.java @@ -34,6 +34,7 @@ public abstract class SingleBatchSorterTemplate implements SingleBatchSorterInte private SelectionVector2 vector2; + @Override public void setup(FunctionContext context, SelectionVector2 vector2, VectorAccessible incoming) throws SchemaChangeException{ Preconditions.checkNotNull(vector2); this.vector2 = vector2; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/Sorter.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/Sorter.java index ffbaaf4721..744f5c1a5d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/Sorter.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/Sorter.java @@ -33,5 +33,6 @@ public interface Sorter extends AutoCloseable { public ExpandableHyperContainer getHyperBatch(); public int getHyperBatchSize(); public SelectionVector4 getFinalSort(BufferAllocator copyTargetAllocator, int targetBatchSize); + @Override public void close() throws Exception; } diff --git 
a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SpillManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SpillManager.java index a0d5ab0049..1b794fab1c 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SpillManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SpillManager.java @@ -179,6 +179,7 @@ public ABOutputStreamWithStats(OutputStream out) { super(out); } + @Override public int write(ArrowBuf buf) throws IOException { write.start(); try { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorter.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorter.java index 61558dc60b..51a55555e1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorter.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorter.java @@ -62,6 +62,7 @@ public SplaySorter(ExternalSort sortConfig, ClassProducer classProducer, Schema this.splayTreeBuffer = allocator.getEmpty(); } + @Override public boolean expandMemoryIfNecessary(int newRequiredSize) { try { // Expand the SplayTree buffer, double size each time. @@ -83,6 +84,7 @@ public boolean expandMemoryIfNecessary(int newRequiredSize) { return true; } + @Override public void setup(VectorAccessible batch) throws ClassTransformationException, SchemaChangeException, IOException { // Compile sorting classes. { // Local (single batch) sorter @@ -104,6 +106,7 @@ public void setup(VectorAccessible batch) throws ClassTransformationException, S } } + @Override public void addBatch(RecordBatchData data, BufferAllocator copyTargetAllocator) throws SchemaChangeException { // We now generate an sv2 for the local sort. We do this even if the incoming // batch has an sv2. This is because we need to treat that one as immutable. 
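// [Editorial note] On "treat that one as immutable": sorting permutes the selection vector in
// place, so reusing a caller-owned sv2 would corrupt ordering the caller may still rely on.
// Generating (or defensively copying) a local one avoids that. A tiny sketch of the hazard:
final class Sv2CopySketch {
  public static void main(String[] args) {
    final int[] incomingSv2 = {2, 0, 1};        // caller-owned record ordering
    final int[] localSv2 = incomingSv2.clone(); // our own copy; safe to reorder
    java.util.Arrays.sort(localSv2);
    // incomingSv2 is untouched and still prints [2, 0, 1]
    System.out.println(java.util.Arrays.toString(incomingSv2));
  }
}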
@@ -137,6 +140,7 @@ public void addBatch(RecordBatchData data, BufferAllocator copyTargetAllocator) } } + @Override public ExpandableHyperContainer getHyperBatch() { if (treeManager != null) { return treeManager.getHyperBatch(); @@ -145,6 +149,7 @@ public ExpandableHyperContainer getHyperBatch() { } } + @Override public int getHyperBatchSize() { if (treeManager != null) { return treeManager.getHyperBatch().size(); @@ -153,10 +158,12 @@ public int getHyperBatchSize() { } } + @Override public SelectionVector4 getFinalSort(BufferAllocator copyTargetAllocator, int targetBatchSize) { return treeManager.getFinalSort(copyTargetAllocator, targetBatchSize); } + @Override public void close() throws Exception { final List closeables = Lists.newArrayList(); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterInterface.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterInterface.java index 1caff0f5f0..531bd89dd1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterInterface.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterInterface.java @@ -35,5 +35,6 @@ public interface SplaySorterInterface extends AutoCloseable { SelectionVector4 getFinalSort(BufferAllocator allocator, int targetBatchSize); ExpandableHyperContainer getHyperBatch(); void setDataBuffer(ArrowBuf data); + @Override void close() throws Exception; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterTemplate.java index c26c49291f..723b6772e5 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/external/SplaySorterTemplate.java @@ -76,6 +76,7 @@ public void add(final SelectionVector2 sv2, final RecordBatchData batch) throws logger.debug("Took {} us to add {} records", watch.elapsed(TimeUnit.MICROSECONDS), batch.getRecordCount()); } + @Override public void setDataBuffer(ArrowBuf data){ tree.setData(data); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/topn/PriorityQueueTemplate.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/topn/PriorityQueueTemplate.java index 7562601bbb..e46ac539ad 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/topn/PriorityQueueTemplate.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/sort/topn/PriorityQueueTemplate.java @@ -54,8 +54,8 @@ public void init(Sv4HyperContainer hyperBatch, int limit, FunctionContext contex this.limit = limit; this.context = context; this.allocator = allocator; - final ArrowBuf ArrowBuf = allocator.buffer(4 * (limit + 1)); - this.heapSv4 = new SelectionVector4(ArrowBuf, limit, Character.MAX_VALUE); + ArrowBuf buffer = allocator.buffer(4 * (limit + 1)); + this.heapSv4 = new SelectionVector4(buffer, limit, Character.MAX_VALUE); this.hasSv2 = hasSv2; this.hyperBatch = hyperBatch; this.maxSize = maxSize; @@ -73,8 +73,8 @@ public void resetQueue(final VectorContainer newQueue, final SelectionVector4 ol hyperBatch = new Sv4HyperContainer(allocator, schema); hyperBatch.addBatch(newQueue); batchCount = hyperBatch.iterator().next().getValueVectors().length; - final ArrowBuf ArrowBuf = allocator.buffer(4 * (limit + 1)); - heapSv4 = new SelectionVector4(ArrowBuf, limit, Character.MAX_VALUE); + ArrowBuf buffer = allocator.buffer(4 * (limit + 1)); + heapSv4 = new SelectionVector4(buffer, limit, 
Character.MAX_VALUE); // Reset queue size (most likely to be set to limit). queueSize = 0; for (int i = 0; i < oldHeap.getTotalCount(); i++) { @@ -120,8 +120,8 @@ public void add(RecordBatchData batch) throws SchemaChangeException{ @Override public void generate() throws SchemaChangeException { Stopwatch watch = Stopwatch.createStarted(); - final ArrowBuf ArrowBuf = allocator.buffer(4 * queueSize); - finalSv4 = new SelectionVector4(ArrowBuf, queueSize, maxSize); + ArrowBuf buffer = allocator.buffer(4 * queueSize); + finalSv4 = new SelectionVector4(buffer, queueSize, maxSize); for (int i = queueSize - 1; i >= 0; i--) { finalSv4.set(i, pop()); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/DualInputOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/DualInputOperator.java index 174a477811..5d4e4678bb 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/DualInputOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/DualInputOperator.java @@ -53,6 +53,7 @@ public void is(State expected) { * Returns the current state of the operator. * @return current operator state. */ + @Override DualInputOperator.State getState(); void noMoreToConsumeLeft() throws Exception; void noMoreToConsumeRight() throws Exception; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/ProducerOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/ProducerOperator.java index 6a75b7027f..b520a24fa1 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/ProducerOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/ProducerOperator.java @@ -49,6 +49,7 @@ public MasterState getMasterState() { } } + @Override ProducerOperator.State getState(); /** diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/SingleInputOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/SingleInputOperator.java index 3937e739c0..8c5e9da12d 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/SingleInputOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/SingleInputOperator.java @@ -62,6 +62,7 @@ public MasterState getMasterState() { * * @return current operator state. 
*/ + @Override SingleInputOperator.State getState(); interface Creator { diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/TerminalOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/TerminalOperator.java index 6e1c0ffea4..1f144ffe37 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/TerminalOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/spi/TerminalOperator.java @@ -50,6 +50,7 @@ public void is(State expected) { }; + @Override State getState(); void setup(VectorAccessible incoming) throws Exception; void receivingFragmentFinished(FragmentHandle handle) throws Exception; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/InternalTableFunctionFactory.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/InternalTableFunctionFactory.java index 3627e672f0..8f9f773ef0 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/InternalTableFunctionFactory.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/InternalTableFunctionFactory.java @@ -15,26 +15,40 @@ */ package com.dremio.sabot.op.tablefunction; +import java.util.Map; + import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.common.expression.SchemaPath; import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.ManifestListScanTableFunctionContext; +import com.dremio.exec.physical.config.ManifestScanTableFunctionContext; import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.store.SplitIdentity; +import com.dremio.exec.store.SystemSchemas; import com.dremio.exec.store.dfs.EasySplitGenTableFunction; import com.dremio.exec.store.dfs.SplitAssignmentTableFunction; import com.dremio.exec.store.dfs.SplitGenTableFunction; import com.dremio.exec.store.easy.EasyScanTableFunction; -import com.dremio.exec.store.iceberg.DeletedDataFilesMetadataTableFunction; +import com.dremio.exec.store.iceberg.DeletedFilesMetadataTableFunction; import com.dremio.exec.store.iceberg.IcebergDeleteFileAggTableFunction; import com.dremio.exec.store.iceberg.IcebergDmlMergeDuplicateCheckTableFunction; +import com.dremio.exec.store.iceberg.IcebergFileType; +import com.dremio.exec.store.iceberg.IcebergOrphanFileDeleteTableFunction; import com.dremio.exec.store.iceberg.IcebergPartitionTransformTableFunction; import com.dremio.exec.store.iceberg.IcebergSplitGenTableFunction; import com.dremio.exec.store.iceberg.IcebergUtils; +import com.dremio.exec.store.iceberg.InputCarryForwardTableFunctionDecorator; import com.dremio.exec.store.iceberg.ManifestFileProcessor; +import com.dremio.exec.store.iceberg.ManifestListScanTableFunction; import com.dremio.exec.store.iceberg.ManifestScanTableFunction; +import com.dremio.exec.store.iceberg.OptimizeManifestsTableFunction; +import com.dremio.exec.store.iceberg.PartitionStatsScanTableFunction; import com.dremio.exec.store.iceberg.SupportsInternalIcebergTable; import com.dremio.exec.store.metadatarefresh.schemaagg.SchemaAggTableFunction; import com.dremio.sabot.exec.context.OperatorContext; import com.dremio.sabot.exec.fragment.FragmentExecutionContext; import com.dremio.sabot.op.boost.BoostTableFunction; +import com.google.common.collect.ImmutableMap; /** * Internal factory that creates various scan table functions @@ -55,7 +69,22 @@ public TableFunction createTableFunction(FragmentExecutionContext fec, case METADATA_MANIFEST_FILE_SCAN: case ICEBERG_MANIFEST_SCAN: ManifestFileProcessor manifestFileProcessor = new 
ManifestFileProcessor(fec, context, props, functionConfig); - return new ManifestScanTableFunction(context, functionConfig, manifestFileProcessor); + TableFunction manifestScanTF = new ManifestScanTableFunction(context, functionConfig, manifestFileProcessor); + if (((ManifestScanTableFunctionContext) functionConfig.getFunctionContext()).isCarryForwardEnabled()) { + Map inputMapping = ImmutableMap.of(SchemaPath.getCompoundPath(SystemSchemas.SPLIT_IDENTITY, SplitIdentity.PATH), SchemaPath.getSimplePath(SystemSchemas.FILE_PATH)); + return new InputCarryForwardTableFunctionDecorator(manifestScanTF, SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_COLS, + inputMapping, SystemSchemas.FILE_TYPE, IcebergFileType.MANIFEST.name()); + } + return manifestScanTF; + case ICEBERG_MANIFEST_LIST_SCAN: + TableFunction manifestListScanTF = new ManifestListScanTableFunction(fec, context, props, functionConfig); + if (((ManifestListScanTableFunctionContext) functionConfig.getFunctionContext()).isCarryForwardEnabled()) { + Map inputMapping = ImmutableMap.of(SchemaPath.getSimplePath(SystemSchemas.MANIFEST_LIST_PATH), SchemaPath.getSimplePath(SystemSchemas.FILE_PATH)); + return new InputCarryForwardTableFunctionDecorator(manifestListScanTF, SystemSchemas.CARRY_FORWARD_FILE_PATH_TYPE_COLS, inputMapping, SystemSchemas.FILE_TYPE, IcebergFileType.MANIFEST_LIST.name()); + } + return manifestListScanTF; + case ICEBERG_PARTITION_STATS_SCAN: + return new PartitionStatsScanTableFunction(fec, context, props, functionConfig); case SPLIT_GENERATION: return new SplitGenTableFunction(fec, context, functionConfig); case EASY_SPLIT_GENERATION: @@ -71,14 +100,18 @@ public TableFunction createTableFunction(FragmentExecutionContext fec, return new BoostTableFunction(fec, context, props, functionConfig); case ICEBERG_PARTITION_TRANSFORM: return new IcebergPartitionTransformTableFunction(context, functionConfig); - case DELETED_DATA_FILES_METADATA: - return new DeletedDataFilesMetadataTableFunction(context, functionConfig); + case DELETED_FILES_METADATA: + return new DeletedFilesMetadataTableFunction(context, functionConfig); case ICEBERG_SPLIT_GEN: return new IcebergSplitGenTableFunction(fec, context, functionConfig); case ICEBERG_DELETE_FILE_AGG: return new IcebergDeleteFileAggTableFunction(context, functionConfig); case ICEBERG_DML_MERGE_DUPLICATE_CHECK: return new IcebergDmlMergeDuplicateCheckTableFunction(context, functionConfig); + case ICEBERG_OPTIMIZE_MANIFESTS: + return new OptimizeManifestsTableFunction(fec, context, props, functionConfig); + case ICEBERG_ORPHAN_FILE_DELETE: + return new IcebergOrphanFileDeleteTableFunction(fec, context, props, functionConfig); case UNKNOWN: default: throw new UnsupportedOperationException("Unknown table function type " + functionConfig.getType()); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunction.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunction.java index b17534f8b2..6982dd97a7 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunction.java @@ -35,7 +35,7 @@ public interface TableFunction extends AutoCloseable { * Start processing an input record batch. * @param records Number of records in the input batch. 
*/ - default void startBatch(int records) { + default void startBatch(int records) throws Exception { // do nothing } @@ -84,4 +84,11 @@ default void workOnOOB(OutOfBandMessage message) { default long getFirstRowSize() { return -1L; } + + /** + * Signal to the table function that there are no more records to consume. + */ + default void noMoreToConsume() throws Exception { + // Do nothing + } } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunctionOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunctionOperator.java index 4b6c437ebd..d1110edd88 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunctionOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/tablefunction/TableFunctionOperator.java @@ -50,7 +50,12 @@ public enum Metric implements MetricDef { NUM_DELETE_FILE_READERS, PARQUET_DELETE_FILE_BYTES_READ, NUM_POS_DELETED_ROWS, - NUM_EQ_DELETED_ROWS; + NUM_EQ_DELETED_ROWS, + SNAPSHOT_COMMIT_STATUS, + NUM_SNAPSHOT_IDS, // Number of snapshot ids + DELETE_ORPHAN_FILES_TIME, // Time taken to delete orphan files + NUM_ORPHAN_FILES_DELETED, // Number of orphan files deleted + NUM_ORPHAN_FILES_FAIL_TO_DELETE; // Number of orphan files not deleted successfully @Override public int metricId() { @@ -159,6 +164,8 @@ public int outputData() throws Exception { @Override public void noMoreToConsume() throws Exception { + tableFunction.noMoreToConsume(); + state.is(State.CAN_CONSUME); // if there are any buffered records remaining, we transition back to CAN_PRODUCE state else we are done state = tableFunction.hasBufferedRemaining() ? State.CAN_PRODUCE : State.DONE; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFrameOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFrameOperator.java index 16a402c4ce..b5e76d6d21 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFrameOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFrameOperator.java @@ -276,6 +276,7 @@ private WindowFramer generateFramer(final List keyExprs, fina { // generating framer.isSamePartition() + @SuppressWarnings("checkstyle:LocalFinalVariableName") final GeneratorMapping IS_SAME_PARTITION_READ = GeneratorMapping.create("isSamePartition", "isSamePartition", null, null); final MappingSet isaB1 = new MappingSet("b1Index", null, "b1", null, IS_SAME_PARTITION_READ, IS_SAME_PARTITION_READ); final MappingSet isaB2 = new MappingSet("b2Index", null, "b2", null, IS_SAME_PARTITION_READ, IS_SAME_PARTITION_READ); @@ -284,6 +285,7 @@ private WindowFramer generateFramer(final List keyExprs, fina { // generating framer.isPeer() + @SuppressWarnings("checkstyle:LocalFinalVariableName") final GeneratorMapping IS_SAME_PEER_READ = GeneratorMapping.create("isPeer", "isPeer", null, null); final MappingSet isaP1 = new MappingSet("b1Index", null, "b1", null, IS_SAME_PEER_READ, IS_SAME_PEER_READ); final MappingSet isaP2 = new MappingSet("b2Index", null, "b2", null, IS_SAME_PEER_READ, IS_SAME_PEER_READ); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFunction.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFunction.java index 99eaa43eaa..31cb02cb74 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFunction.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/windowframe/WindowFunction.java @@ -165,6 +165,7 @@ public Field materialize(final
NamedExpression ne, final BatchSchema schema, Err return aggregate.getCompleteType().toField(ne.getRef()); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") @Override void generateCode(ClassGenerator cg) { final GeneratorMapping EVAL_INSIDE = GeneratorMapping.create("setupEvaluatePeer", "evaluatePeer", null, null); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterCommitterOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterCommitterOperator.java index 1da4628f84..3c4d34aa2f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterCommitterOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterCommitterOperator.java @@ -16,12 +16,9 @@ package com.dremio.sabot.op.writer; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; -import org.apache.arrow.vector.ValueVector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,7 +28,6 @@ import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.record.VectorAccessible; -import com.dremio.exec.record.VectorWrapper; import com.dremio.exec.store.RecordWriter; import com.dremio.exec.store.iceberg.manifestwriter.IcebergCommitOpHelper; import com.dremio.exec.store.iceberg.model.IcebergCommandType; @@ -114,17 +110,14 @@ public long value() { private final ExecutionControls executionControls; private boolean success = false; - private final List vectors = new ArrayList<>(); - private final IcebergCommitOpHelper icebergCommitHelper; + private IcebergCommitOpHelper icebergCommitHelper; private WriterCommitterOutputHandler outputHandler; public WriterCommitterOperator(OperatorContext context, WriterCommitterPOP config) { this.config = config; this.context = context; - this.icebergCommitHelper = IcebergCommitOpHelper.getInstance(context, config); this.executionControls = context.getExecutionControls(); - this.outputHandler = WriterCommitterOutputHandler.getInstance(context, config, this.icebergCommitHelper.hasCustomOutput()); } @Override @@ -134,9 +127,6 @@ public State getState() { @Override public VectorAccessible setup(VectorAccessible accessible) throws Exception { - for (VectorWrapper vectorWrapper : accessible) { - vectors.add(vectorWrapper.getValueVector()); - } final BatchSchema schema = accessible.getSchema(); if (!schema.equals(RecordWriter.SCHEMA)) { throw new IllegalStateException(String.format("Incoming record writer schema doesn't match intended. 
Expected %s, received %s", RecordWriter.SCHEMA, schema)); @@ -147,8 +137,10 @@ public VectorAccessible setup(VectorAccessible accessible) throws Exception { config.getProps().getUserName(), context); addMetricStat(Metric.FILE_SYSTEM_CREATE_TIME, stopwatch.elapsed(TimeUnit.MILLISECONDS)); - this.icebergCommitHelper.setup(accessible); - return this.outputHandler.setup(accessible); + icebergCommitHelper = IcebergCommitOpHelper.getInstance(context, config, fs); + outputHandler = WriterCommitterOutputHandler.getInstance(context, config, icebergCommitHelper.hasCustomOutput()); + icebergCommitHelper.setup(accessible); + return outputHandler.setup(accessible); } @Override @@ -168,8 +160,7 @@ public void close() throws Exception { cleanUpFiles(); } catch (Exception e) { logger.warn("Cleanup of files in writer committer failed.", e); - } - finally { + } finally { final OperatorStats operatorStats = context.getStats(); AutoCloseables.close(outputHandler, fs, icebergCommitHelper); diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterOperator.java b/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterOperator.java index e2ba6a9878..4b10d5b986 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterOperator.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/op/writer/WriterOperator.java @@ -30,7 +30,7 @@ import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.impl.UnionListWriter; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.AutoCloseables; import com.dremio.exec.physical.base.WriterOptions; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/rpc/user/UserRPCServer.java b/sabot/kernel/src/main/java/com/dremio/sabot/rpc/user/UserRPCServer.java index 146e8e5d08..c5dd2cf08c 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/rpc/user/UserRPCServer.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/rpc/user/UserRPCServer.java @@ -33,6 +33,8 @@ import com.carrotsearch.hppc.IntHashSet; import com.dremio.common.utils.protos.ExternalIdHelper; import com.dremio.common.utils.protos.QueryWritableBatch; +import com.dremio.context.RequestContext; +import com.dremio.context.UserContext; import com.dremio.exec.ExecConstants; import com.dremio.exec.planner.sql.handlers.commands.MetadataProvider; import com.dremio.exec.planner.sql.handlers.commands.PreparedStatementProvider; @@ -83,6 +85,7 @@ import com.dremio.options.OptionValue.OptionType; import com.dremio.service.users.AuthResult; import com.dremio.service.users.UserLoginException; +import com.dremio.service.users.UserNotFoundException; import com.dremio.service.users.UserService; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; @@ -206,8 +209,7 @@ protected void handle( final Span span; if(connection.getSession().isTracingEnabled()) { span = tracer.buildSpan("user_rpc_request").start(); - } - else { + } else { span = NoopSpan.INSTANCE; } @@ -233,7 +235,30 @@ public void sendFailure(UserRpcException e) { protected void feedWork(UserClientConnectionImpl connection, int rpcType, byte[] pBody, ByteBuf dBody, RequestHandle requestHandle, Span span, ResponseSender wrappedSender) throws RpcException { - feedWorkHelper(connection, rpcType, pBody, dBody, requestHandle, span, wrappedSender); + String userUUID; + final String userName = connection.getSession().getCredentials().getUserName(); + try { 
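// [Editorial note] The new code in this hunk resolves the caller's user UUID and then runs
// feedWorkHelper inside RequestContext.current().with(UserContext.CTX_KEY, ...).call(...),
// scoping the user identity to this request's execution. A generic, self-contained analogue
// of that scoping pattern (ScopedUser and its ThreadLocal mechanics are assumptions for
// illustration, not Dremio's actual RequestContext implementation):
final class ScopedUser {
  private static final ThreadLocal<String> CURRENT = new ThreadLocal<>();

  // Installs userId for the duration of body.call(), then restores the previous value so
  // pooled threads are not left polluted.
  static <T> T callAs(String userId, java.util.concurrent.Callable<T> body) throws Exception {
    final String previous = CURRENT.get();
    CURRENT.set(userId);
    try {
      return body.call(); // code inside sees ScopedUser.current() == userId
    } finally {
      if (previous == null) {
        CURRENT.remove();
      } else {
        CURRENT.set(previous);
      }
    }
  }

  static String current() {
    return CURRENT.get();
  }
}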
+ // The connection always carries a user session; without valid user credentials we must fail the request if the user cannot be found + userUUID = getUserServiceProvider().get().getUser(userName).getUID().getId(); + } catch (UserNotFoundException e) { + throw new RpcException("Could not find user UUID for userName: " + userName, e); + } + + try { + RequestContext.current() + .with(UserContext.CTX_KEY, new UserContext(userUUID)) + .call(() -> { + feedWorkHelper(connection, rpcType, pBody, dBody, requestHandle, span, wrappedSender); + return null; + }); + } catch (Exception e) { + if (e instanceof RpcException) { + throw (RpcException) e; + } else { + throw new RpcException(e); + } + } + } protected void feedWorkHelper(UserClientConnectionImpl connection, int rpcType, byte[] pBody, ByteBuf dBody, diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/task/AsyncTaskWrapper.java b/sabot/kernel/src/main/java/com/dremio/sabot/task/AsyncTaskWrapper.java index 0243c23c3a..2d96075f3f 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/task/AsyncTaskWrapper.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/task/AsyncTaskWrapper.java @@ -107,6 +107,7 @@ public SchedulingGroup getSchedulingGroup() { return schedulingGroup; } + @Override public long getTaskWeight() { return taskWeight; } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/task/TaskManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/task/TaskManager.java index 81561099b5..a4e046fdfb 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/task/TaskManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/task/TaskManager.java @@ -23,7 +23,7 @@ * In practice, each scheduler selects the best task from its own runqueue, and the load balancer moves * tasks between the threads whenever there is an imbalance between their runqueues.
*/ -public interface TaskManager extends GroupManager { +public interface TaskManager extends GroupManager, AutoCloseable { long MAX_WEIGHT = 1000; // using higher values that MAX_WEIGHT will cause vRuntime overflow to occur sooner than can be handled by the system @@ -137,4 +137,7 @@ interface TaskProvider { SchedulerStats getSchedulerStats(); void changeObserver(Observer.Type observerType); + + @Override + void close(); } diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceGroup.java b/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceGroup.java index b07bb53f41..5f2c907f86 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceGroup.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceGroup.java @@ -87,6 +87,7 @@ public SharedResourceType getFirstBlockedResource() { return null; } + @Override public String toString(){ StringBuilder sb = new StringBuilder(); int unavailable = 0; diff --git a/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceManager.java b/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceManager.java index 480ecff3c4..927793eac2 100644 --- a/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceManager.java +++ b/sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceManager.java @@ -99,6 +99,7 @@ public boolean isAvailable(){ return availableGroups.get() > 0; } + @Override public String toString(){ StringBuilder sb = new StringBuilder(); int unavailable = 0; diff --git a/sabot/kernel/src/main/java/org/apache/arrow/vector/AbstractVector.java b/sabot/kernel/src/main/java/org/apache/arrow/vector/AbstractVector.java index 56886ddbe6..f92119d8ff 100644 --- a/sabot/kernel/src/main/java/org/apache/arrow/vector/AbstractVector.java +++ b/sabot/kernel/src/main/java/org/apache/arrow/vector/AbstractVector.java @@ -94,6 +94,7 @@ public void clear() { valueCount = 0; } + @Override public void close() { clear(); } diff --git a/sabot/kernel/src/main/java/org/apache/arrow/vector/FixedListVarcharVector.java b/sabot/kernel/src/main/java/org/apache/arrow/vector/FixedListVarcharVector.java index e6ef08cedb..8f2c17bfb8 100644 --- a/sabot/kernel/src/main/java/org/apache/arrow/vector/FixedListVarcharVector.java +++ b/sabot/kernel/src/main/java/org/apache/arrow/vector/FixedListVarcharVector.java @@ -369,6 +369,7 @@ public byte[] get(int index) { * @param index position of element to get * @return Text object for non-null element, null otherwise */ + @Override public Text getObject(int index) { Text result = new Text(); byte[] b; diff --git a/sabot/kernel/src/main/java/org/apache/arrow/vector/MutableVarcharVector.java b/sabot/kernel/src/main/java/org/apache/arrow/vector/MutableVarcharVector.java index 3fd2e44ba9..8a8666d1e6 100644 --- a/sabot/kernel/src/main/java/org/apache/arrow/vector/MutableVarcharVector.java +++ b/sabot/kernel/src/main/java/org/apache/arrow/vector/MutableVarcharVector.java @@ -273,6 +273,7 @@ public byte[] get(int index) { * @param index position of element to get * @return Text object for non-null element, null otherwise */ + @Override public Text getObject(int index) { Text result = new Text(); byte[] b; @@ -623,6 +624,7 @@ public void setSafe(int index, Text text) { * @param index position of the element to set * @param value array of bytes to write */ + @Override public void set(int index, byte[] value) { assert index >= 0; @@ -645,6 +647,7 @@ 
public void set(int index, byte[] value) { * @param index position of the element to set * @param value array of bytes to write */ + @Override public void setSafe(int index, byte[] value) { assert index >= 0; @@ -669,6 +672,7 @@ public void setSafe(int index, byte[] value) { * @param start start index in array of bytes * @param length length of data in array of bytes */ + @Override public void set(int index, byte[] value, int start, int length) { assert index >= 0; @@ -693,6 +697,7 @@ public void set(int index, byte[] value, int start, int length) { * @param start start index in array of bytes * @param length length of data in array of bytes */ + @Override public void setSafe(int index, byte[] value, int start, int length) { assert index >= 0; @@ -717,6 +722,7 @@ public void setSafe(int index, byte[] value, int start, int length) { * @param start start index in ByteBuffer * @param length length of data in ByteBuffer */ + @Override public void set(int index, ByteBuffer value, int start, int length) { assert index >= 0; @@ -741,6 +747,7 @@ public void set(int index, ByteBuffer value, int start, int length) { * @param start start index in ByteBuffer * @param length length of data in ByteBuffer */ + @Override public void setSafe(int index, ByteBuffer value, int start, int length) { assert index >= 0; @@ -768,6 +775,7 @@ public void setSafe(int index, ByteBuffer value, int start, int length) { * @param buffer data buffer containing the variable width element to be stored * in the vector */ + @Override public void set(int index, int isSet, int start, int end, ArrowBuf buffer) { assert index >= 0; @@ -794,6 +802,7 @@ public void set(int index, int isSet, int start, int end, ArrowBuf buffer) { * @param buffer data buffer containing the variable width element to be stored * in the vector */ + @Override public void setSafe(int index, int isSet, int start, int end, ArrowBuf buffer) { assert index >= 0; @@ -819,6 +828,7 @@ public void setSafe(int index, int isSet, int start, int end, ArrowBuf buffer) { * @param buffer data buffer containing the variable width element to be stored * in the vector */ + @Override public void set(int index, int start, int length, ArrowBuf buffer) { assert index >= 0; @@ -844,6 +854,7 @@ public void set(int index, int start, int length, ArrowBuf buffer) { * @param buffer data buffer containing the variable width element to be stored * in the vector */ + @Override public void setSafe(int index, int start, int length, ArrowBuf buffer) { assert index >= 0; diff --git a/sabot/kernel/src/main/java/org/apache/calcite/rel/rules/DremioLoptOptimizeJoinRule.java b/sabot/kernel/src/main/java/org/apache/calcite/rel/rules/DremioLoptOptimizeJoinRule.java index 0f27e142f8..90eb46f64a 100644 --- a/sabot/kernel/src/main/java/org/apache/calcite/rel/rules/DremioLoptOptimizeJoinRule.java +++ b/sabot/kernel/src/main/java/org/apache/calcite/rel/rules/DremioLoptOptimizeJoinRule.java @@ -93,6 +93,7 @@ public DremioLoptOptimizeJoinRule(Config config) { //~ Methods ---------------------------------------------------------------- + @Override public void onMatch(RelOptRuleCall call) { final MultiJoin multiJoinRel = call.rel(0); final LoptMultiJoin multiJoin = new LoptMultiJoin(multiJoinRel); diff --git a/sabot/kernel/src/main/java/org/apache/calcite/sql/validate/DremioParameterScope.java b/sabot/kernel/src/main/java/org/apache/calcite/sql/validate/DremioParameterScope.java index 04d10b5ed2..f7bf50f585 100644 --- a/sabot/kernel/src/main/java/org/apache/calcite/sql/validate/DremioParameterScope.java 
+++ b/sabot/kernel/src/main/java/org/apache/calcite/sql/validate/DremioParameterScope.java @@ -45,15 +45,18 @@ public DremioParameterScope( //~ Methods ---------------------------------------------------------------- + @Override public SqlQualified fullyQualify(SqlIdentifier identifier) { return SqlQualified.create(this, 1, null, identifier); } + @Override public SqlValidatorScope getOperandScope(SqlCall call) { return this; } - @Override public RelDataType resolveColumn(String name, SqlNode ctx) { + @Override + public RelDataType resolveColumn(String name, SqlNode ctx) { return nameToTypeMap.get(name); } } diff --git a/sabot/kernel/src/main/java/org/apache/iceberg/DremioManifestReaderUtils.java b/sabot/kernel/src/main/java/org/apache/iceberg/DremioManifestReaderUtils.java index e7bfa2d240..539c9b6153 100644 --- a/sabot/kernel/src/main/java/org/apache/iceberg/DremioManifestReaderUtils.java +++ b/sabot/kernel/src/main/java/org/apache/iceberg/DremioManifestReaderUtils.java @@ -31,11 +31,13 @@ public static class ManifestEntryWrapper> { } public ManifestEntryWrapper(F file, long sequenceNumber) { - this.entry = new GenericManifestEntry((Schema) null).wrapExisting(0L, sequenceNumber, file); + // TODO: add a constructor with 'file sequence number'. + // This method is only used for test code simulation. Also, 'data sequence number' is always passed as 0. + this.entry = new GenericManifestEntry((Schema) null).wrapExisting(0L, sequenceNumber, sequenceNumber, file); } public Long sequenceNumber() { - return entry.sequenceNumber(); + return entry.dataSequenceNumber(); } public F file() { diff --git a/sabot/kernel/src/main/java/org/apache/iceberg/hadoop/DremioOutputFile.java b/sabot/kernel/src/main/java/org/apache/iceberg/hadoop/DremioOutputFile.java deleted file mode 100644 index 13e894bfe7..0000000000 --- a/sabot/kernel/src/main/java/org/apache/iceberg/hadoop/DremioOutputFile.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.iceberg.hadoop; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.iceberg.io.InputFile; -import org.apache.iceberg.io.OutputFile; -import org.apache.iceberg.io.PositionOutputStream; - -import com.dremio.exec.store.iceberg.IcebergUtils; - -/** - * DremioOutputFile. used in DremioFileIO to output iceberg metadata file. wrapper for HadoopOutputFile. 
- */ -public class DremioOutputFile implements OutputFile { - - private final Path path; - private final String locationWithScheme; - private final OutputFile hadoopOutputFile; - - public DremioOutputFile(String path, Configuration conf, org.apache.hadoop.fs.FileSystem hadoopFs) { - hadoopOutputFile = HadoopOutputFile.fromPath(new org.apache.hadoop.fs.Path(path), hadoopFs, conf); - this.path = new Path(path); - this.locationWithScheme = IcebergUtils.getValidIcebergPath(this.path, conf, hadoopFs.getScheme()); - } - - @Override - public PositionOutputStream create() { - return hadoopOutputFile.create(); - } - - @Override - public PositionOutputStream createOrOverwrite() { - return hadoopOutputFile.createOrOverwrite(); - } - - @Override - public String location() { - return locationWithScheme; - } - - @Override - public InputFile toInputFile() { - return hadoopOutputFile.toInputFile(); - } - - @Override - public String toString() { - return location(); - } -} diff --git a/sabot/kernel/src/main/protobuf/iceberg.proto b/sabot/kernel/src/main/protobuf/iceberg.proto index 22aff786e7..0ec1688787 100644 --- a/sabot/kernel/src/main/protobuf/iceberg.proto +++ b/sabot/kernel/src/main/protobuf/iceberg.proto @@ -32,9 +32,10 @@ message IcebergDatasetXAttr { optional com.dremio.sabot.exec.store.parquet.proto.ParquetDatasetXAttr parquetDatasetXAttr = 1; repeated IcebergSchemaField column_ids = 2; optional string partition_stats_file = 3; // path to partition stats file for the latest snapshot + optional int64 partition_stats_file_size = 4; // size of the partition stats file } message IcebergDatasetSplitXAttr { optional string path = 1; optional bytes extended_property = 2; // source specific information about this split -} \ No newline at end of file +} diff --git a/sabot/kernel/src/test/java/com/dremio/BaseTestQuery.java b/sabot/kernel/src/test/java/com/dremio/BaseTestQuery.java index eccb5bbe1b..2ff4c95c49 100644 --- a/sabot/kernel/src/test/java/com/dremio/BaseTestQuery.java +++ b/sabot/kernel/src/test/java/com/dremio/BaseTestQuery.java @@ -16,7 +16,6 @@ package com.dremio; import static com.dremio.exec.rpc.user.security.testing.UserServiceTestImpl.DEFAULT_PASSWORD; -import static com.dremio.exec.store.iceberg.IcebergModelCreator.DREMIO_NESSIE_DEFAULT_NAMESPACE; import static com.dremio.exec.store.parquet.ParquetFormatDatasetAccessor.PARQUET_SCHEMA_FALLBACK_DISABLED; import static java.lang.String.format; import static java.nio.charset.StandardCharsets.UTF_8; @@ -38,6 +37,7 @@ import java.net.URISyntaxException; import java.net.URL; import java.nio.file.DirectoryStream; +import java.nio.file.FileSystems; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.Arrays; @@ -51,7 +51,6 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; import java.util.stream.Stream; import org.apache.arrow.vector.ValueVector; @@ -59,14 +58,15 @@ import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.Table; +import org.apache.iceberg.hadoop.HadoopFileIO; +import org.apache.iceberg.hadoop.HadoopTableOperations; +import org.apache.iceberg.util.LockManagers; import org.assertj.core.api.InstanceOfAssertFactories; import org.assertj.core.api.ObjectAssert; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.rules.ExternalResource; -import org.mockito.invocation.InvocationOnMock; -import 
org.mockito.stubbing.Answer; import com.dremio.common.AutoCloseables; import com.dremio.common.config.SabotConfig; @@ -79,8 +79,8 @@ import com.dremio.exec.catalog.CatalogOptions; import com.dremio.exec.client.DremioClient; import com.dremio.exec.exception.SchemaChangeException; -import com.dremio.exec.hadoop.DremioHadoopUtils; import com.dremio.exec.hadoop.HadoopFileSystem; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.proto.CoordinationProtos; import com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType; @@ -94,12 +94,12 @@ import com.dremio.exec.server.SabotContext; import com.dremio.exec.server.SabotNode; import com.dremio.exec.server.SimpleJobRunner; +import com.dremio.exec.store.StoragePlugin; import com.dremio.exec.store.dfs.FileSystemPlugin; -import com.dremio.exec.store.iceberg.hadoop.IcebergHadoopModel; +import com.dremio.exec.store.iceberg.DremioFileIO; +import com.dremio.exec.store.iceberg.SupportsIcebergMutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; -import com.dremio.exec.store.iceberg.nessie.IcebergNessieModel; -import com.dremio.exec.store.metadatarefresh.committer.DatasetCatalogGrpcClient; import com.dremio.exec.util.TestUtilities; import com.dremio.exec.util.VectorUtil; import com.dremio.exec.work.user.LocalQueryExecutor; @@ -122,7 +122,6 @@ import com.dremio.service.BindingProvider; import com.dremio.service.coordinator.ClusterCoordinator; import com.dremio.service.coordinator.local.LocalClusterCoordinator; -import com.dremio.service.users.SystemUser; import com.dremio.services.credentials.CredentialsService; import com.google.common.base.Function; import com.google.common.base.Preconditions; @@ -359,7 +358,7 @@ protected static void openClient() throws Exception { setEnableReAttempts(false); } - protected static void setEnableReAttempts(boolean enabled) throws Exception { + public static void setEnableReAttempts(boolean enabled) throws Exception { runSQL("ALTER SYSTEM SET " + SqlUtils.QUOTE + ExecConstants.ENABLE_REATTEMPTS.getOptionName() + SqlUtils.QUOTE + " = " + enabled); } @@ -734,9 +733,23 @@ protected static void copy(java.nio.file.Path source, java.nio.file.Path dest) { protected static void copyFromJar(String sourceElement, final java.nio.file.Path target) throws URISyntaxException, IOException { URI resource = Resources.getResource(sourceElement).toURI(); - java.nio.file.Path srcDir = java.nio.file.Paths.get(resource); - try (Stream stream = java.nio.file.Files.walk(srcDir)) { - stream.forEach(source -> copy(source, target.resolve(srcDir.relativize(source)))); + + if (resource.getScheme().equals("jar")) { + try (java.nio.file.FileSystem fileSystem = FileSystems.newFileSystem(resource, Collections.emptyMap())) { + sourceElement = !sourceElement.startsWith("/") ? 
"/" + sourceElement : sourceElement; + java.nio.file.Path srcDir = fileSystem.getPath(sourceElement); + try (Stream stream = java.nio.file.Files.walk(srcDir)) { + stream.forEach(source -> { + java.nio.file.Path dest = target.resolve(Paths.get(srcDir.relativize(source).toString())); + copy(source, dest); + }); + } + } + } else { + java.nio.file.Path srcDir = java.nio.file.Paths.get(resource); + try (Stream stream = java.nio.file.Files.walk(srcDir)) { + stream.forEach(source -> copy(source, target.resolve(srcDir.relativize(source)))); + } } } @@ -921,20 +934,6 @@ protected static AutoCloseable disableExchanges() { PlannerSettings.EXCHANGE.getDefault().getBoolVal().toString()); } - protected static AutoCloseable enableINPushDown() { - setSystemOption(PlannerSettings.ENABLE_PARQUET_IN_EXPRESSION_PUSH_DOWN, "true"); - return () -> - setSystemOption(PlannerSettings.ENABLE_PARQUET_IN_EXPRESSION_PUSH_DOWN, - PlannerSettings.ENABLE_PARQUET_IN_EXPRESSION_PUSH_DOWN.getDefault().getBoolVal().toString()); - } - - protected static AutoCloseable enableMultipleConditionPushDown() { - setSystemOption(PlannerSettings.ENABLE_PARQUET_MULTI_COLUMN_FILTER_PUSH_DOWN, "true"); - return () -> - setSystemOption(PlannerSettings.ENABLE_PARQUET_MULTI_COLUMN_FILTER_PUSH_DOWN, - PlannerSettings.ENABLE_PARQUET_MULTI_COLUMN_FILTER_PUSH_DOWN.getDefault().getBoolVal().toString()); - } - protected static AutoCloseable treatScanAsBoost() { setSystemOption(ExecConstants.ENABLE_BOOSTING, "true"); return () -> @@ -960,6 +959,13 @@ protected static AutoCloseable enableMapDataType() { ExecConstants.ENABLE_MAP_DATA_TYPE.getDefault().getBoolVal().toString()); } + protected static AutoCloseable enableComplexHiveType() { + setSystemOption(ExecConstants.ENABLE_COMPLEX_HIVE_DATA_TYPE, "true"); + return () -> + setSystemOption(ExecConstants.ENABLE_COMPLEX_HIVE_DATA_TYPE, + ExecConstants.ENABLE_COMPLEX_HIVE_DATA_TYPE.getDefault().getBoolVal().toString()); + } + protected static AutoCloseable disableHiveParquetComplexTypes() { return setHiveParquetComplexTypes("false"); } @@ -1215,35 +1221,29 @@ public static void checkFirstRecordContains(String query, String column, String assertThat(getValueInFirstRecord(query, column)).contains(expected); } - protected static IcebergModel getIcebergModel(File tableRoot, IcebergCatalogType catalogType) { - FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergModel icebergModel = null; - switch (catalogType) { - case UNKNOWN: - break; - case NESSIE: - icebergModel = new IcebergNessieModel(DREMIO_NESSIE_DEFAULT_NAMESPACE, new Configuration(), - getSabotContext().getNessieClientProvider(), - null, null, new DatasetCatalogGrpcClient(getSabotContext().getDatasetCatalogBlockingStub().get()), fileSystemPlugin); - - when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergModel); - break; - case HADOOP: - - icebergModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); - when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergModel); - break; + protected static IcebergModel getIcebergModel(String pluginName) { + StoragePlugin plugin = getSabotContext().getCatalogService().getSource(pluginName); + if (plugin instanceof SupportsIcebergMutablePlugin) { + SupportsIcebergMutablePlugin icebergMutablePlugin = (SupportsIcebergMutablePlugin) plugin; + return icebergMutablePlugin.getIcebergModel(null, null, null, localFs); + } else { + throw new UnsupportedOperationException( + String.format("Plugin %s does not implement SupportsIcebergMutablePlugin", pluginName)); } 
- return icebergModel; } - public static Table getIcebergTable(File tableRoot, IcebergCatalogType catalogType) { - IcebergModel icebergModel = getIcebergModel(tableRoot, catalogType); + public static Table getIcebergTable(IcebergModel icebergModel, File tableRoot) { return icebergModel.getIcebergTable(icebergModel.getTableIdentifier(tableRoot.getPath())); } + public static Table getIcebergTable(File tableRoot, IcebergCatalogType catalogType) { + IcebergModel icebergModel = + getIcebergModel(catalogType == IcebergCatalogType.NESSIE ? TEMP_SCHEMA : TEMP_SCHEMA_HADOOP); + return getIcebergTable(icebergModel, tableRoot); + } + public static Table getIcebergTable(File tableRoot) { - return getIcebergTable(tableRoot, IcebergCatalogType.NESSIE); + return getIcebergTable(getIcebergModel(TEMP_SCHEMA), tableRoot); } protected static String getDfsTestTmpDefaultCtasFormat(String pluginName) { @@ -1252,40 +1252,19 @@ protected static String getDfsTestTmpDefaultCtasFormat(String pluginName) { } public static FileSystemPlugin getMockedFileSystemPlugin() { - FileSystemPlugin fileSystemPlugin = mock(FileSystemPlugin.class); - when(fileSystemPlugin.getHadoopFsSupplier(any(String.class), any(Configuration.class), any(String.class))). - thenAnswer( - new Answer() { - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - Object[] args = invocation.getArguments(); - Supplier fileSystemSupplier = getFileSystemSupplier(DremioHadoopUtils.toHadoopPath((String) args[0]).toUri(), (Configuration) args[1], (String) args[2]); - return fileSystemSupplier; - } - }); - - when(fileSystemPlugin.getHadoopFsSupplier(any(String.class), any(Configuration.class))). - thenAnswer( - new Answer() { - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - Object[] args = invocation.getArguments(); - Supplier fileSystemSupplier = getFileSystemSupplier(DremioHadoopUtils.toHadoopPath((String) args[0]).toUri(), (Configuration) args[1], SystemUser.SYSTEM_USERNAME); - - return fileSystemSupplier; - } - }); - return fileSystemPlugin; - } + try { - private static Supplier getFileSystemSupplier(final URI uri, final Configuration conf, String user) { - return () -> { - try { - return org.apache.hadoop.fs.FileSystem.get(uri, conf, user); - } catch (IOException | InterruptedException e) { - throw new RuntimeException(); - } - }; + FileSystemPlugin fileSystemPlugin = mock(FileSystemPlugin.class); + FileSystem fs = HadoopFileSystem.getLocal(new Configuration()); + when(fileSystemPlugin.getSystemUserFS()).thenReturn(fs); + when(fileSystemPlugin.getFsConfCopy()).thenReturn(new Configuration()); + when(fileSystemPlugin.createIcebergFileIO(any(), any(), any(), any(), any())) + .thenReturn(new DremioFileIO(fs, null, null, null, null, + new HadoopFileSystemConfigurationAdapter(new Configuration()))); + return fileSystemPlugin; + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } } public static boolean areAzureStorageG1CredentialsNull(){ @@ -1322,4 +1301,20 @@ public static boolean areAzureStorageG2V2NonHierCredentialsNull(){ } return false; } + + protected static org.apache.hadoop.fs.FileSystem setupLocalFS() throws IOException { + Configuration conf = new Configuration(); + conf.set("fs.default.name", "local"); + return org.apache.hadoop.fs.FileSystem.get(conf); + } + + protected static void refresh(String table) throws Exception { + runSQL(String.format("alter table %s refresh metadata", table)); + } + + protected static final class TestHadoopTableOperations extends 
HadoopTableOperations { + public TestHadoopTableOperations(org.apache.hadoop.fs.Path location, Configuration conf) { + super(location, new HadoopFileIO(conf), conf, LockManagers.defaultLockManager()); + } + } } diff --git a/sabot/kernel/src/test/java/com/dremio/DremioTestWrapper.java b/sabot/kernel/src/test/java/com/dremio/DremioTestWrapper.java index 6d4599fd05..c14bfdf507 100644 --- a/sabot/kernel/src/test/java/com/dremio/DremioTestWrapper.java +++ b/sabot/kernel/src/test/java/com/dremio/DremioTestWrapper.java @@ -400,8 +400,7 @@ public static Map> addToCombinedVectorResults(Iterable 0; i--) { - final int region_idx = (i >= regions.length) ? 0 : i; - sb.append("WHEN ").append(i).append(" THEN ").append((getNextClause(1, regions[region_idx]))) + final int regionIdx = (i >= regions.length) ? 0 : i; + sb.append("WHEN ").append(i).append(" THEN ").append((getNextClause(1, regions[regionIdx]))) .append(System.lineSeparator()); } sb.append("ELSE lower(concat(n_name, '_UNKNOWN')) END").append(System.lineSeparator()); diff --git a/sabot/kernel/src/test/java/com/dremio/TestAltSortQueries.java b/sabot/kernel/src/test/java/com/dremio/TestAltSortQueries.java index 81a45f2625..0e4eb63d9e 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestAltSortQueries.java +++ b/sabot/kernel/src/test/java/com/dremio/TestAltSortQueries.java @@ -66,4 +66,24 @@ public void testJoinWithLimit() throws Exception{ " limit 5"); } + /** + * Test for DX-48015 a column aliased as the same name + * as the original column in a group by expression + * returns an error that it's not being grouped. + * @throws Exception + */ + @Test + public void testColumnAliasWithGroupBy() throws Exception{ + test("SELECT\n" + + " DATE_TRUNC('day', hire_date) AS hire_date,\n" + + " position_id AS position_id,\n" + + " sum(salary) AS salary\n" + + "FROM cp.\"employee.json\"\n" + + "WHERE hire_date >= TO_DATE('1987-06-17', 'YYYY-MM-DD')\n" + + " AND hire_date < TO_DATE('1989-09-21', 'YYYY-MM-DD')\n" + + "GROUP BY DATE_TRUNC('day', hire_date),\n" + + " position_id\n" + + "ORDER BY salary"); + } + } diff --git a/sabot/kernel/src/test/java/com/dremio/TestDropTable.java b/sabot/kernel/src/test/java/com/dremio/TestDropTable.java index 93bc09c98b..c06a980d04 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestDropTable.java +++ b/sabot/kernel/src/test/java/com/dremio/TestDropTable.java @@ -174,6 +174,7 @@ public void testNonHomogenousDrop() throws Exception { .go(); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") @Test public void testIsHomogenousShouldNotBeCalledForIcebergTables() throws Exception { test("use dfs_test_hadoop"); diff --git a/sabot/kernel/src/test/java/com/dremio/TestExampleQueries.java b/sabot/kernel/src/test/java/com/dremio/TestExampleQueries.java index e2d91e2171..ac152f30d7 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestExampleQueries.java +++ b/sabot/kernel/src/test/java/com/dremio/TestExampleQueries.java @@ -53,8 +53,7 @@ public class TestExampleQueries extends PlanTestBase { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class); - final String WORKING_PATH = TestTools.getWorkingPath(); - final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources"; + private static final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources"; @Rule public TemporarySystemProperties properties = new TemporarySystemProperties(); @@ -321,13 +320,13 @@ public void testPrimaryCacheInCodeCompilerFlow() throws Exception { public void 
testCacheFlowWithAQueryWithSplitsInGandivaAndJava() throws Exception { try { test(String.format("alter session set %s = true", ExecConstants.PARQUET_AUTO_CORRECT_DATES)); - final String SqlQuery = "SELECT o_orderkey, extractYear(castDate(o_orderdate)) as y1, " + + final String sqlQuery = "SELECT o_orderkey, extractYear(castDate(o_orderdate)) as y1, " + "extractYear(TO_DATE(o_orderdate, 'yyyy-mm-dd')) as y2 " + "FROM " + "cp.\"tpch/orders.parquet\" " + "ORDER BY o_orderkey limit 1"; testBuilder() - .sqlQuery(SqlQuery) + .sqlQuery(sqlQuery) .unOrdered() .baselineColumns("o_orderkey", "y1", "y2") .baselineValues(1, 1996L, 1996L) @@ -1154,6 +1153,15 @@ public void testJoin() throws Exception { } } + @Test + public void testFullOuterJoinTrueCondition() throws Exception { + try(AutoCloseable ac = withOption(PlannerSettings.ENABLE_REDUCE_JOIN, true)){ + test("with n as (SELECT * FROM cp.\"tpch/nation.parquet\" nations WHERE nations.N_REGIONKEY = 1),\n" + + "r as (SELECT * FROM cp.\"tpch/region.parquet\" regions WHERE regions.R_REGIONKEY = 1)\n" + + "SELECT count(*) FROM n FULL OUTER JOIN r\n" + + "on n.N_REGIONKEY = r.R_REGIONKEY"); + } + } @Test public void testWhere() throws Exception { @@ -1522,9 +1530,6 @@ public void testAggExpressionWithGroupByHaving() throws Exception { @Test public void testExchangeRemoveForJoinPlan() throws Exception { - final String WORKING_PATH = TestTools.getWorkingPath(); - final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources"; - String sql = String.format("select t2.n_nationkey from dfs.\"%s/tpchmulti/region\" t1 join dfs.\"%s/tpchmulti/nation\" t2 on t2.n_regionkey = t1.r_regionkey", TEST_RES_PATH, TEST_RES_PATH); testBuilder() @@ -2424,7 +2429,7 @@ public void testCopier4() throws Exception { } @Test - public void TestCopier5() throws Exception { + public void testCopier5() throws Exception { String query = "SELECT * FROM cp.\"parquet/52864-1.parquet\" t where id0 = '2' or id0 = '4' or id0 = '6' or id0 = '7' or id0 = '8'"; testBuilder() @@ -2458,4 +2463,24 @@ public void TestCoalesceOnSameColJava() throws Exception { .build() .run(); } + + @Test + public void TestColLike() throws Exception { + String query = "select " + + "b.pat, col_like(a.term, replace(b.pat, '*', '%')) as c1 " + + "from cp.\"parquet/like_test.parquet\" as a " + + "JOIN cp.\"parquet/like_test_2.parquet\" as b " + + "ON a.id = b.id"; + + testBuilder() + .unOrdered() + .sqlQuery(query) + .baselineColumns("pat", "c1") + .baselineValues("*hp*", true) + .baselineValues("*hp*", true) + .baselineValues("*zinni*", true) + .baselineValues("*rugged shark*", true) + .build() + .run(); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/TestFunctionsQuery.java b/sabot/kernel/src/test/java/com/dremio/TestFunctionsQuery.java index 0aded062bf..647743c5f9 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestFunctionsQuery.java +++ b/sabot/kernel/src/test/java/com/dremio/TestFunctionsQuery.java @@ -1086,8 +1086,8 @@ public void timestampAddMicroSecond() { @Test // TODO (DX-11268): Fix TIMESTAMPADD(SQL_TSI_FRAC_SECOND, ..., ...) 
function public void timestampAddNanoSecond() { UserExceptionAssert.assertThatThrownBy(() -> test("select timestampadd(NANOSECOND, 2, timestamp '2015-03-30 20:49:59.000') as ts from (values(1))")) - .hasErrorType(ErrorType.PARSE) - .hasMessageContaining("Failure parsing the query."); + .hasErrorType(ErrorType.UNSUPPORTED_OPERATION) + .hasMessageContaining("TIMESTAMPADD function supports the following time units: YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND"); } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/TestFunctionsWithTypeExpoQueries.java b/sabot/kernel/src/test/java/com/dremio/TestFunctionsWithTypeExpoQueries.java index 58995f3a09..c13459a7aa 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestFunctionsWithTypeExpoQueries.java +++ b/sabot/kernel/src/test/java/com/dremio/TestFunctionsWithTypeExpoQueries.java @@ -464,6 +464,7 @@ public void testWindowSumAvg() throws Exception { .run(); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") @Test public void testWindowRanking() throws Exception { final String queryCUME_DIST = "select CUME_DIST() over(order by n_nationkey) as col \n" + diff --git a/sabot/kernel/src/test/java/com/dremio/TestHints.java b/sabot/kernel/src/test/java/com/dremio/TestHints.java index 1b801d894e..dd1e4a27db 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestHints.java +++ b/sabot/kernel/src/test/java/com/dremio/TestHints.java @@ -15,8 +15,12 @@ */ package com.dremio; +import static com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType.PARSE; + import org.junit.Test; +import com.dremio.test.UserExceptionAssert; + /** * Tests for query hints. */ @@ -34,7 +38,9 @@ public void invalidHintPlacement() throws Exception { }; for (String query : testQueries) { - this.errorMsgTestHelper(query, "Failure parsing the query"); + UserExceptionAssert + .assertThatThrownBy(() -> test(query)) + .hasErrorType(PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/TestMfsBlockLoader.java b/sabot/kernel/src/test/java/com/dremio/TestMfsBlockLoader.java index 6e177f342a..f188476119 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestMfsBlockLoader.java +++ b/sabot/kernel/src/test/java/com/dremio/TestMfsBlockLoader.java @@ -35,7 +35,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.math3.stat.descriptive.moment.Mean; -import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import org.apache.hadoop.fs.BlockLocation; import org.junit.Test; @@ -123,15 +122,6 @@ private static long max(List data) { return max; } - private static double stdDev(List data) { - double[] asDouble = new double[data.size()]; - int i = 0; - for (Long l : data) { - asDouble[i++] = (double) l; - } - return new StandardDeviation().evaluate(asDouble); - } - private static double mean(List data) { double[] asDouble = new double[data.size()]; int i = 0; diff --git a/sabot/kernel/src/test/java/com/dremio/TestMultipleCountDistinct.java b/sabot/kernel/src/test/java/com/dremio/TestMultipleCountDistinct.java new file mode 100644 index 0000000000..ca8d9f1934 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/TestMultipleCountDistinct.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio; + +import org.junit.Test; + +import com.dremio.exec.planner.physical.PlannerSettings; + +public class TestMultipleCountDistinct extends PlanTestBase { + @Test + public void testPlan() throws Exception { + try (AutoCloseable option = withOption(PlannerSettings.ENABLE_DISTINCT_AGG_WITH_GROUPING_SETS, true)) { + String sql = "select n_regionkey, count(distinct n_name) dist_name, count(distinct n_nationkey) dist_key from cp.\"tpch/nation.parquet\" group by n_regionkey"; + testPlanMatchingPatterns(sql, new String[]{"NestedLoopJoin"}, "HashJoin"); + } + } + + @Test + public void testExecution() throws Exception { + try (AutoCloseable option = withOption(PlannerSettings.ENABLE_DISTINCT_AGG_WITH_GROUPING_SETS, true)) { + String sql = "select n_regionkey, count(distinct n_name) dist_name, count(distinct n_nationkey) dist_key from cp.\"tpch/nation.parquet\" group by n_regionkey"; + testBuilder() + .sqlQuery(sql) + .unOrdered() + .baselineColumns("n_regionkey", "dist_name", "dist_key") + .baselineValues(0, 5L, 5L) + .baselineValues(1, 5L, 5L) + .baselineValues(2, 5L, 5L) + .baselineValues(3, 5L, 5L) + .baselineValues(4, 5L, 5L) + .go(); + } + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/TestQueryExceptionHandling.java b/sabot/kernel/src/test/java/com/dremio/TestQueryExceptionHandling.java new file mode 100644 index 0000000000..2d8ed1ff67 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/TestQueryExceptionHandling.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import org.apache.calcite.sql.SqlNode; +import org.junit.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.ExecTest; +import com.dremio.exec.PassthroughQueryObserver; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.observer.AttemptObserver; +import com.dremio.exec.planner.sql.SqlConverter; +import com.dremio.exec.planner.sql.SqlExceptionHelper; +import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.handlers.query.NormalHandler; +import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.proto.UserProtos; +import com.dremio.exec.rpc.user.security.testing.UserServiceTestImpl; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.server.options.SessionOptionManagerImpl; +import com.dremio.sabot.rpc.user.UserSession; + +public class TestQueryExceptionHandling extends PlanTestBase { + + private static UserSession session() { + return UserSession.Builder.newBuilder() + .withSessionOptionManager( + new SessionOptionManagerImpl(getSabotContext().getOptionValidatorListing()), + getSabotContext().getOptionManager()) + .withUserProperties(UserProtos.UserProperties.getDefaultInstance()) + .withCredentials(UserBitShared.UserCredentials.newBuilder().setUserName(UserServiceTestImpl.ANONYMOUS).build()) + .setSupportComplexTypes(true) + .build(); + } + + /** + * Verifies StackOverflowError thrown during planning is caught and re-thrown as PLAN ERROR UserException. + */ + @Test + public void testStackOverflowErrorDuringPlanning() { + final String sql = "select 1"; + + final SabotContext context = getSabotContext(); + final QueryContext queryContext = new QueryContext(session(), context, UserBitShared.QueryId.getDefaultInstance()); + final AttemptObserver observer = new PassthroughQueryObserver(ExecTest.mockUserClientConnection(null)); + final SqlConverter converter = spy(new SqlConverter( + queryContext.getPlannerSettings(), + queryContext.getOperatorTable(), + queryContext, + queryContext.getMaterializationProvider(), + queryContext.getFunctionRegistry(), + queryContext.getSession(), + observer, + queryContext.getCatalog(), + queryContext.getSubstitutionProviderFactory(), + queryContext.getConfig(), + queryContext.getScanResult(), + queryContext.getRelMetadataQuerySupplier())); + final SqlNode node = converter.parse(sql); + final SqlHandlerConfig config = new SqlHandlerConfig(queryContext, converter, observer, null); + final SqlToPlanHandler handler = new NormalHandler(); + + // Simulates unexpected StackOverflowError is thrown during planning. + when(converter.getConvertletTableNotes()).thenThrow(new StackOverflowError()); + + // Verifies when unexpected StackOverflowError is thrown during planning, it is caught and re-thrown as PLAN ERROR + // UserException with the error message of SqlExceptionHelper.PLANNING_STACK_OVERFLOW_ERROR. 
+ UserException exception = assertThrows(UserException.class, () -> handler.getPlan(config, sql, node)); + assertThat(exception.getErrorType().name()).isEqualTo("PLAN"); + assertThat(exception.getMessage()).isEqualTo(SqlExceptionHelper.PLANNING_STACK_OVERFLOW_ERROR); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/TestSelectWithOption.java b/sabot/kernel/src/test/java/com/dremio/TestSelectWithOption.java index 677436ff3d..6a909fc182 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestSelectWithOption.java +++ b/sabot/kernel/src/test/java/com/dremio/TestSelectWithOption.java @@ -154,7 +154,17 @@ public void testMultiByteLineDelimiter() throws Exception { testWithResult(format("select columns from table(%s(type=>'TeXT', lineDelimiter => 'abc'))", tableName), listOf("1"), listOf("2"), - listOf("3")); + listOf("3"), + null); + } + + @Test + public void testExtendedCharDelimiters() throws Exception { + String tableName = genCSVTable("testExtendedCharDelimiters", + "1¦22¦333∆x¦yy¦zzz"); + testWithResult(format("select columns from table(%s(type=>'TeXT', fieldDelimiter => '¦', lineDelimiter => '∆'))", tableName), + listOf("1", "22", "333"), + listOf("x", "yy", "zzz")); } @Test @@ -164,7 +174,8 @@ public void testDataWithPartOfMultiByteLineDelimiter() throws Exception { testWithResult(format("select columns from table(%s(type=>'TeXT', lineDelimiter => 'abc'))", tableName), listOf("ab1"), listOf("2"), - listOf("3")); + listOf("3"), + null); } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/TestTruncateTable.java b/sabot/kernel/src/test/java/com/dremio/TestTruncateTable.java index f6ac713ba2..97e40080d4 100644 --- a/sabot/kernel/src/test/java/com/dremio/TestTruncateTable.java +++ b/sabot/kernel/src/test/java/com/dremio/TestTruncateTable.java @@ -15,18 +15,17 @@ */ package com.dremio; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - import java.io.File; import org.apache.commons.io.FileUtils; import org.junit.Before; import org.junit.Test; -import com.dremio.common.exceptions.UserException; import com.dremio.config.DremioConfig; import com.dremio.exec.planner.sql.DmlQueryTestUtils; +import com.dremio.exec.proto.UserBitShared; import com.dremio.test.TemporarySystemProperties; +import com.dremio.test.UserExceptionAssert; public class TestTruncateTable extends PlanTestBase { @@ -41,9 +40,9 @@ public void before() throws Exception { @Test public void truncateInvalidSQL() { String truncSql = "TRUNCATE"; - assertThatThrownBy(() -> test(truncSql)) - .isInstanceOf(UserException.class) - .hasMessageContaining("PARSE ERROR: Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(truncSql)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } @Test @@ -54,8 +53,8 @@ public void icebergNotEnabledShouldThrowError() throws Exception { test(ctas); try (AutoCloseable ignoredAgain = disableIcebergFlag()) { String truncSql = String.format("TRUNCATE TABLE %s.%s", TEMP_SCHEMA_HADOOP, tableName); - assertThatThrownBy(() -> test(truncSql)) - .isInstanceOf(UserException.class) + UserExceptionAssert + .assertThatThrownBy(() -> test(truncSql)) .hasMessageContaining("Please contact customer support for steps to enable the iceberg tables feature."); } } finally { @@ -69,8 +68,8 @@ public void tableDoesNotExistShouldThrowError() throws Exception { for (String testSchema: SCHEMAS_FOR_TEST) { String truncSql = "TRUNCATE TABLE " + testSchema + ".truncTable6"; try (AutoCloseable c = enableIcebergTables()) { - assertThatThrownBy(() -> test(truncSql)) - 
.isInstanceOf(UserException.class) + UserExceptionAssert + .assertThatThrownBy(() -> test(truncSql)) .hasMessageContaining("Table [" + testSchema + ".truncTable6] does not exist."); } } @@ -99,8 +98,8 @@ public void nonIcebergTableShouldThrowError() throws Exception { test(ctas); String truncSql = "TRUNCATE TABLE " + TEMP_SCHEMA + ".truncTable5"; try (AutoCloseable c = enableIcebergTables()) { - assertThatThrownBy(() -> test(truncSql)) - .isInstanceOf(UserException.class) + UserExceptionAssert + .assertThatThrownBy(() -> test(truncSql)) .hasMessageContaining("Table [" + TEMP_SCHEMA + ".truncTable5] is not configured to support DML operations"); } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), "truncTable5")); @@ -115,8 +114,8 @@ public void truncateView() throws Exception { String name = DmlQueryTestUtils.createRandomId(); test("CREATE VIEW %s.%s AS SELECT * FROM INFORMATION_SCHEMA.CATALOGS", TEMP_SCHEMA, name); - assertThatThrownBy(() -> test("TRUNCATE TABLE %s.%s", TEMP_SCHEMA, name)) - .isInstanceOf(UserException.class) + UserExceptionAssert + .assertThatThrownBy(() -> test("TRUNCATE TABLE %s.%s", TEMP_SCHEMA, name)) .hasMessageContaining("TRUNCATE TABLE is not supported on this VIEW at [%s.%s].", TEMP_SCHEMA, name); test("DROP VIEW %s.%s", TEMP_SCHEMA, name); @@ -162,8 +161,8 @@ public void truncateEmptyWithPathTableWithWrongContext() throws Exception { String ctas = String.format("CREATE TABLE %s.%s.%s(id INT)", TEMP_SCHEMA_HADOOP, path, tableName); test(ctas); String truncSql = String.format("TRUNCATE TABLE %s.%s", path, tableName); - assertThatThrownBy(() -> test(truncSql)) - .isInstanceOf(UserException.class) + UserExceptionAssert + .assertThatThrownBy(() -> test(truncSql)) .hasMessageContaining("Table [%s.%s] does not exist.", path, tableName); } finally { test("DROP TABLE %s.%s.%s", TEMP_SCHEMA_HADOOP, path, tableName); @@ -258,5 +257,4 @@ public void truncateInsertSelect() throws Exception { } } } - } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/TestGlobalDictionaryPlan.java b/sabot/kernel/src/test/java/com/dremio/exec/TestGlobalDictionaryPlan.java index 5627929b3d..809222903e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/TestGlobalDictionaryPlan.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/TestGlobalDictionaryPlan.java @@ -62,14 +62,17 @@ public static void setup() throws Exception { testNoResult("CREATE TABLE dfs_test.places AS SELECT * FROM cp.\"places.json\""); final Configuration conf = new Configuration(); codec = CodecFactory.createDirectCodecFactory(conf, new ParquetDirectByteBufferAllocator(testAllocator), 0); + try { + fs = HadoopFileSystem.getLocal(conf); - fs = HadoopFileSystem.getLocal(conf); + tableDirPath1 = Path.of(getDfsTestTmpSchemaLocation() + "/globaldictionary"); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath1, testAllocator); - tableDirPath1 = Path.of(getDfsTestTmpSchemaLocation() + "/globaldictionary"); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath1, testAllocator); - - tableDirPath2 = Path.of(getDfsTestTmpSchemaLocation() + "/places"); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath2, testAllocator); + tableDirPath2 = Path.of(getDfsTestTmpSchemaLocation() + "/places"); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath2, testAllocator); + } finally { + codec.release(); + } } @AfterClass diff --git a/sabot/kernel/src/test/java/com/dremio/exec/TestTpchDistributedWithGlobalDictionaries.java 
b/sabot/kernel/src/test/java/com/dremio/exec/TestTpchDistributedWithGlobalDictionaries.java index f9d681a259..58490f7bc5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/TestTpchDistributedWithGlobalDictionaries.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/TestTpchDistributedWithGlobalDictionaries.java @@ -63,35 +63,38 @@ public static void setup() throws Exception { testNoResult("alter session set \"store.parquet.enable_dictionary_encoding_binary_type\"=true"); final Configuration conf = new Configuration(); final CompressionCodecFactory codec = CodecFactory.createDirectCodecFactory(conf, new ParquetDirectByteBufferAllocator(testAllocator), 0); - - fs = HadoopFileSystem.getLocal(conf); - testNoResult("CREATE TABLE dfs_test.tpch_lineitem_gd AS SELECT * FROM cp.\"tpch/lineitem.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_customer_gd AS SELECT * FROM cp.\"tpch/customer.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_part_gd AS SELECT * FROM cp.\"tpch/part.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_partsupp_gd AS SELECT * FROM cp.\"tpch/partsupp.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_region_gd AS SELECT * FROM cp.\"tpch/region.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_nation_gd AS SELECT * FROM cp.\"tpch/nation.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_supplier_gd AS SELECT * FROM cp.\"tpch/supplier.parquet\""); - testNoResult("CREATE TABLE dfs_test.tpch_orders_gd AS SELECT * FROM cp.\"tpch/orders.parquet\""); - - lineitem = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_lineitem_gd"); - customer = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_customer_gd"); - part = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_part_gd"); - partsupp = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_partsupp_gd"); - region = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_region_gd"); - nation = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_nation_gd"); - supplier = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_supplier_gd"); - orders = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_orders_gd"); - - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, lineitem, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, customer, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, part, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, partsupp, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, region, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, nation, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, supplier, testAllocator); - GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, orders, testAllocator); - disableGlobalDictionary(); + try { + fs = HadoopFileSystem.getLocal(conf); + testNoResult("CREATE TABLE dfs_test.tpch_lineitem_gd AS SELECT * FROM cp.\"tpch/lineitem.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_customer_gd AS SELECT * FROM cp.\"tpch/customer.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_part_gd AS SELECT * FROM cp.\"tpch/part.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_partsupp_gd AS SELECT * FROM cp.\"tpch/partsupp.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_region_gd AS SELECT * FROM cp.\"tpch/region.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_nation_gd AS SELECT * FROM cp.\"tpch/nation.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_supplier_gd AS SELECT * 
FROM cp.\"tpch/supplier.parquet\""); + testNoResult("CREATE TABLE dfs_test.tpch_orders_gd AS SELECT * FROM cp.\"tpch/orders.parquet\""); + + lineitem = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_lineitem_gd"); + customer = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_customer_gd"); + part = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_part_gd"); + partsupp = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_partsupp_gd"); + region = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_region_gd"); + nation = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_nation_gd"); + supplier = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_supplier_gd"); + orders = Path.of(getDfsTestTmpSchemaLocation() + "/tpch_orders_gd"); + + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, lineitem, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, customer, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, part, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, partsupp, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, region, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, nation, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, supplier, testAllocator); + GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, orders, testAllocator); + disableGlobalDictionary(); + } finally { + codec.release(); + } } @AfterClass diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogEntityKey.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogEntityKey.java new file mode 100644 index 0000000000..37ac0d1598 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogEntityKey.java @@ -0,0 +1,211 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog; + +import static com.dremio.common.utils.ReservedCharacters.getInformationSeparatorOne; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; + +import java.util.Arrays; +import java.util.List; + +import org.junit.After; +import org.junit.Before; +import org.junit.jupiter.api.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.common.utils.PathUtils; +import com.dremio.service.namespace.NamespaceKey; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; + +public class TestCatalogEntityKey { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void serdeTestWithNullTableVersionContext() throws JsonProcessingException { + + List key1 = ImmutableList.of("x","y","z"); + final CatalogEntityKey catalogEntityKey = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(null) + .build(); + + ObjectMapper objectMapper = new ObjectMapper(); + String json = objectMapper.writeValueAsString(catalogEntityKey); + + assertThat(json).isNotNull(); + + CatalogEntityKey keyFromJson = objectMapper.readValue(json, CatalogEntityKey.class); + assertThat(keyFromJson.equals(catalogEntityKey)).isTrue(); + } + + @Test + public void serdeTestWithTableVersionContext() throws JsonProcessingException { + + List key1 = ImmutableList.of("x","y","z"); + TableVersionContext tableVersionContext = TableVersionContext.LATEST_VERSION; + final CatalogEntityKey catalogEntityKey = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContext) + .build(); + ObjectMapper objectMapper = new ObjectMapper(); + String json = objectMapper.writeValueAsString(catalogEntityKey); + assertThat(json).isNotNull(); + CatalogEntityKey keyFromJson = objectMapper.readValue(json, CatalogEntityKey.class); + assertThat(keyFromJson.equals(catalogEntityKey)).isTrue(); + } + + @Test + public void toStringTestWithNamespaceKey() { + List key1 = ImmutableList.of("x","y","z"); + NamespaceKey namespaceKey = new NamespaceKey(key1); + final CatalogEntityKey catalogEntityKey = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(null) + .build(); + assertThat(namespaceKey.toString().equals(catalogEntityKey.toString())).isTrue(); + } + + @Test + public void nullKeyComponents() { + // Test constructing key with invalid table version context + TableVersionContext tableVersionContext = TableVersionContext.LATEST_VERSION; + assertThatThrownBy(() -> + CatalogEntityKey.newBuilder() + .keyComponents(null) + .tableVersionContext(tableVersionContext) + .build()).isInstanceOf(NullPointerException.class); + } + + @Test + public void testSerialize() { + String serializedKey = null; + TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.BRANCH, "testBranch"); + List keyComponents = Arrays.asList("catalog", "schema", "folder", "tname"); + CatalogEntityKey catalogEntityKey = CatalogEntityKey.newBuilder() + .keyComponents(keyComponents) + .tableVersionContext(tableVersionContext) + .build(); + + serializedKey = catalogEntityKey.toString(); + assertThat(serializedKey.contains(CatalogEntityKey.KEY_DELIMITER)).isTrue(); + + } + + @Test + public void testDeserialize() { + TableVersionContext tableVersionContext = new 
TableVersionContext(TableVersionType.BRANCH, "testBranch"); + List keyComponents = Arrays.asList("catalog", "schema", "folder", "tname"); + String serializedKeyComponents = PathUtils.constructFullPath(keyComponents); + String serializedTableVersionContext = tableVersionContext.serialize(); + String stringKey = new StringBuilder() + .append(serializedKeyComponents) + .append(CatalogEntityKey.KEY_DELIMITER) + .append(serializedTableVersionContext) + .toString(); + CatalogEntityKey catalogEntityKey = new CatalogEntityKey(stringKey); + assertThat(catalogEntityKey.getKeyComponents()).isEqualTo(keyComponents); + assertThat(catalogEntityKey.getTableVersionContext()).isEqualTo(tableVersionContext); + + } + + @Test + public void testDeserializeNegative() { + TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.BRANCH, "testBranch"); + String BAD_KEY_DELIMITER = new String("xyxy"); + List keyComponents = Arrays.asList("catalog", "schema", "folder", "tname"); + String serializedKeyComponents = PathUtils.constructFullPath(keyComponents); + String serializedTableVersionContext = tableVersionContext.serialize(); + String stringKey = new StringBuilder() + .append(serializedKeyComponents) + .append(BAD_KEY_DELIMITER) + .append(serializedTableVersionContext) + .toString(); + + CatalogEntityKey catalogEntityKey = new CatalogEntityKey(stringKey); + assertThat(catalogEntityKey.getKeyComponents()).isNotEqualTo(keyComponents); + } + + @Test + public void testKeyWithInvalidName() { + TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.BRANCH, "testBranch"); + String tablename = new StringBuilder().append("tableWithInvalidCharacter").append(getInformationSeparatorOne()).toString(); + List keyComponents = Arrays.asList("catalog", "schema", "folder", tablename); + CatalogEntityKey.Builder catalogEntityKeyBuilder = CatalogEntityKey.newBuilder(); + assertThatThrownBy(() -> catalogEntityKeyBuilder.keyComponents(keyComponents) + .tableVersionContext(tableVersionContext).build()).isInstanceOf(UserException.class); + assertThatThrownBy(() -> catalogEntityKeyBuilder.keyComponents(keyComponents) + .tableVersionContext(tableVersionContext).build()).hasMessageContaining("Invalid CatalogEntityKey format "); + } + + @Test + public void testKeyWithInvalidFolder() { + TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.BRANCH, "testBranch"); + String foldername = new StringBuilder().append("folderWithInvalidCharacter").append(getInformationSeparatorOne()).toString(); + List keyComponents = Arrays.asList("catalog", "schema", foldername, "tname"); + CatalogEntityKey.Builder catalogEntityKeyBuilder = CatalogEntityKey.newBuilder(); + assertThatThrownBy(() -> catalogEntityKeyBuilder.keyComponents(keyComponents) + .tableVersionContext(tableVersionContext).build()).isInstanceOf(UserException.class); + assertThatThrownBy(() -> catalogEntityKeyBuilder.keyComponents(keyComponents) + .tableVersionContext(tableVersionContext).build()).hasMessageContaining("Invalid CatalogEntityKey format "); + } + + @Test + public void testKeyForImmutableEntity() { + List key1 = ImmutableList.of("x","y","z"); + TableVersionContext tableVersionContextImmutable = new TableVersionContext(TableVersionType.SNAPSHOT_ID, "1234564"); + final CatalogEntityKey catalogEntityKeyImmutable = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContextImmutable) + .build(); + TableVersionContext tableVersionContextImmutable2 = new 
TableVersionContext(TableVersionType.TIMESTAMP, 1000L); + final CatalogEntityKey catalogEntityKeyImmutable2 = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContextImmutable2) + .build(); + assertThat(catalogEntityKeyImmutable2.isKeyForImmutableEntity()).isTrue(); + TableVersionContext tableVersionContextImmutable3 = new TableVersionContext(TableVersionType.COMMIT_HASH_ONLY, "1234564"); + final CatalogEntityKey catalogEntityKeyImmutable3 = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContextImmutable3) + .build(); + assertThat(catalogEntityKeyImmutable3.isKeyForImmutableEntity()).isTrue(); + + TableVersionContext tableVersionContext1 = new TableVersionContext(TableVersionType.BRANCH, "foo"); + final CatalogEntityKey catalogEntityKey1 = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContext1) + .build(); + assertThat(catalogEntityKey1.isKeyForImmutableEntity()).isFalse(); + + TableVersionContext tableVersionContext2 = new TableVersionContext(TableVersionType.TAG, "foo"); + final CatalogEntityKey catalogEntityKey2 = CatalogEntityKey.newBuilder() + .keyComponents(key1) + .tableVersionContext(tableVersionContext2) + .build(); + assertThat(catalogEntityKey2.isKeyForImmutableEntity()).isFalse(); + } + +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogFeatures.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogFeatures.java new file mode 100644 index 0000000000..8cfe5e42ab --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogFeatures.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.catalog; + +import static com.dremio.exec.catalog.CatalogFeatures.Feature.ARS; +import static com.dremio.exec.catalog.CatalogFeatures.Feature.DATA_GRAPH; +import static com.dremio.exec.catalog.CatalogFeatures.Feature.HOME; +import static com.dremio.exec.catalog.CatalogFeatures.Feature.SEARCH; +import static com.dremio.exec.catalog.CatalogFeatures.Feature.SPACE; +import static com.dremio.exec.catalog.CatalogFeatures.Feature.STARRING; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.junit.Test; + +import com.dremio.options.OptionManager; + +public class TestCatalogFeatures { + @Test + public void testCatalogARSFeatureEnabled() { + OptionManager mockOptions = mock(OptionManager.class); + when(mockOptions.getOption(CatalogOptions.CATALOG_ARS_ENABLED)).thenReturn(true); + CatalogFeatures catalogFeatures = CatalogFeatures.get(mockOptions); + assertEquals(true, catalogFeatures.isFeatureEnabled(ARS)); + assertEquals(false, catalogFeatures.isFeatureEnabled(DATA_GRAPH)); + assertEquals(false, catalogFeatures.isFeatureEnabled(HOME)); + assertEquals(false, catalogFeatures.isFeatureEnabled(SEARCH)); + assertEquals(false, catalogFeatures.isFeatureEnabled(SPACE)); + assertEquals(false, catalogFeatures.isFeatureEnabled(STARRING)); + } + + @Test + public void testCatalogARSFeatureDisabled() { + OptionManager mockOptions = mock(OptionManager.class); + when(mockOptions.getOption(CatalogOptions.CATALOG_ARS_ENABLED)).thenReturn(false); + CatalogFeatures catalogFeatures = CatalogFeatures.get(mockOptions); + assertEquals(false, catalogFeatures.isFeatureEnabled(ARS)); + assertEquals(true, catalogFeatures.isFeatureEnabled(DATA_GRAPH)); + assertEquals(true, catalogFeatures.isFeatureEnabled(HOME)); + assertEquals(true, catalogFeatures.isFeatureEnabled(SEARCH)); + assertEquals(true, catalogFeatures.isFeatureEnabled(SPACE)); + assertEquals(true, catalogFeatures.isFeatureEnabled(STARRING)); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogServiceImpl.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogServiceImpl.java index 7985a8676f..a56e970108 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogServiceImpl.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestCatalogServiceImpl.java @@ -18,7 +18,6 @@ import static com.dremio.test.DremioTest.CLASSPATH_SCAN_RESULT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; @@ -45,6 +44,7 @@ import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.arrow.vector.types.pojo.Schema; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -557,9 +557,7 @@ public void testDeleteMissingPlugin() { catalogService.getSystemUserCatalog().createSource(missingConfig); catalogService.deleteSource(MISSING_CONFIG_NAME); - // Check nullity of the state to confirm it's been deleted. - assertNull(catalogService.getSourceState(MISSING_CONFIG_NAME)); - + Assert.assertEquals(catalogService.getSourceState(MISSING_CONFIG_NAME), SourceState.badState(String.format("Source %s could not be found. 
Please verify the source name.", MISSING_CONFIG_NAME), "Unable to find source.")); } @Test @@ -577,7 +575,7 @@ public void refreshMissingPlugin() throws Exception { } @Test - public void badSourceShouldNotBlockStorageRules() throws Exception { + public void badSourceShouldNotBlockStorageRules_UNSAFE_WAIT() throws Exception { OptimizerRulesContext mock = mock(OptimizerRulesContext.class); ManagedStoragePlugin managedStoragePlugin = catalogService.getPlugins().get(MOCK_UP_BAD); @@ -594,7 +592,7 @@ public void badSourceShouldNotBlockStorageRules() throws Exception { // we get the writelock and run code in a different thread that should not use a readlock try (AutoCloseableLock unused = managedStoragePlugin.writeLock()) { thread.start(); - thread.join(1000); + thread.join(2000); } thread.interrupt(); assertTrue(test.get()); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestDatasetManager.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestDatasetManager.java index 516febdaaa..9617dc4f23 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestDatasetManager.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestDatasetManager.java @@ -17,7 +17,9 @@ import static com.dremio.exec.planner.physical.PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT; import static com.dremio.exec.store.Views.isComplexType; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Fail.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; @@ -41,9 +43,13 @@ import org.apache.calcite.sql.type.SqlTypeName; import org.junit.Assert; import org.junit.Test; +import org.mockito.ArgumentMatchers; import com.dremio.common.exceptions.UserException; import com.dremio.connector.impersonation.extensions.SupportsImpersonation; +import com.dremio.connector.metadata.DatasetHandle; +import com.dremio.connector.metadata.EntityPath; +import com.dremio.connector.metadata.GetDatasetOption; import com.dremio.exec.catalog.CatalogImpl.IdentityResolver; import com.dremio.exec.catalog.conf.ConnectionConf; import com.dremio.exec.dotfile.View; @@ -69,6 +75,7 @@ import com.dremio.service.namespace.dataset.proto.VirtualDataset; import com.dremio.service.namespace.proto.EntityId; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; /** * Tests for DatasetManager @@ -356,4 +363,155 @@ public String getAccessUserName(String delegatedUser, String queryUserName) { .isInstanceOf(UserException.class) .hasCauseInstanceOf(InvalidImpersonationTargetException.class); } + + @Test + public void checkNeverPromoteWithNullConfig() throws Exception { + final NamespaceKey namespaceKey = new NamespaceKey("test"); + + + final SchemaConfig schemaConfig = mock(SchemaConfig.class); + when(schemaConfig.getUserName()).thenReturn("username"); + + final MetadataRequestOptions metadataRequestOptions = MetadataRequestOptions.newBuilder() + .setSchemaConfig(schemaConfig) + .setCheckValidity(false) + .setNeverPromote(true) + .build(); + + ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); + + DatasetHandle handle = () -> new EntityPath(Lists.newArrayList("test")); + when(sp.getDatasetHandle(any(), ArgumentMatchers.any())) + .thenReturn(Optional.of(handle)); + + final ManagedStoragePlugin managedStoragePlugin = mock(ManagedStoragePlugin.class); + 
when(managedStoragePlugin.getId()).thenReturn(mock(StoragePluginId.class)); + when(managedStoragePlugin.getDefaultRetrievalOptions()).thenReturn(DatasetRetrievalOptions.DEFAULT); + when(managedStoragePlugin.getDatasetHandle(any(), any(),any())) + .thenReturn(Optional.of(handle)); + + final PluginRetriever pluginRetriever = mock(PluginRetriever.class); + when(pluginRetriever.getPlugin(namespaceKey.getRoot(), false)).thenReturn(managedStoragePlugin); + + final NamespaceService namespaceService = mock(NamespaceService.class); + when(namespaceService.getDataset(namespaceKey)).thenReturn(null); + + final OptionManager optionManager = mock(OptionManager.class); + + final DatasetManager datasetManager = new DatasetManager(pluginRetriever, namespaceService, optionManager, "username", + null, null); + DremioTable table = datasetManager.getTable(namespaceKey, metadataRequestOptions, true); + assertThat(table).isNull(); + } + + @Test + public void checkNeverPromoteWithShallowConfig() throws Exception { + final NamespaceKey namespaceKey = new NamespaceKey("test"); + + + final SchemaConfig schemaConfig = mock(SchemaConfig.class); + when(schemaConfig.getUserName()).thenReturn("username"); + + + final MetadataRequestOptions metadataRequestOptions = MetadataRequestOptions.newBuilder() + .setSchemaConfig(schemaConfig) + .setCheckValidity(false) + .setNeverPromote(true) + .build(); + + final ReadDefinition readDefinition = new ReadDefinition(); + readDefinition.setSplitVersion(0L); + + final DatasetConfig shallowDatasetConfig = new DatasetConfig(); + shallowDatasetConfig.setType(DatasetType.PHYSICAL_DATASET); + shallowDatasetConfig.setId(new EntityId("test")); + shallowDatasetConfig.setFullPathList(ImmutableList.of("test", "file", "foobar")); + shallowDatasetConfig.setTotalNumSplits(0); + + ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); + + DatasetHandle handle = () -> new EntityPath(Lists.newArrayList("test")); + when(sp.getDatasetHandle(any(), ArgumentMatchers.any())) + .thenReturn(Optional.of(handle)); + + final ManagedStoragePlugin managedStoragePlugin = mock(ManagedStoragePlugin.class); + when(managedStoragePlugin.getId()).thenReturn(mock(StoragePluginId.class)); + when(managedStoragePlugin.getDefaultRetrievalOptions()).thenReturn(DatasetRetrievalOptions.DEFAULT); + when(managedStoragePlugin.getDatasetHandle(any(), any(),any())) + .thenReturn(Optional.of(handle)); + + final PluginRetriever pluginRetriever = mock(PluginRetriever.class); + when(pluginRetriever.getPlugin(namespaceKey.getRoot(), false)).thenReturn(managedStoragePlugin); + + final NamespaceService namespaceService = mock(NamespaceService.class); + when(namespaceService.getDataset(namespaceKey)).thenReturn(shallowDatasetConfig); + + final OptionManager optionManager = mock(OptionManager.class); + + final DatasetManager datasetManager = new DatasetManager(pluginRetriever, namespaceService, optionManager, "username", + null, null); + DremioTable table = datasetManager.getTable(namespaceKey, metadataRequestOptions, true); + assertThat(table).isNull(); + } + + /** + * Validates metadata request option that is used to throw an exception when a table's version context is resolved + * using the default source version mapping (instead of via AT syntax or via session's source version mapping) + * + * The main use case for this option is with the REFRESH REFLECTION job where we need to validate + * that any cross Arctic catalog joins do not resolve using the default source version mapping. 
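+ * For example (illustrative names): a refresh joining ArcticCatalogA.t1, pinned via AT BRANCH main, with an unpinned ArcticCatalogB.t2 should fail fast rather than silently read t2 at its default branch.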
+ * + * @throws Exception + */ + @Test + public void checkErrorOnUnspecifiedSourceVersion() throws Exception { + final NamespaceKey sourceKey = new NamespaceKey("ArcticCatalog"); + + final SchemaConfig schemaConfig = mock(SchemaConfig.class); + when(schemaConfig.getUserName()).thenReturn("username"); + + // Same options as what the planner would use for a REFRESH REFLECTION job + final MetadataRequestOptions metadataRequestOptions = MetadataRequestOptions.newBuilder() + .setSchemaConfig(schemaConfig) + .setCheckValidity(true) + .setNeverPromote(false) + .setErrorOnUnspecifiedSourceVersion(true) + .build(); + + final DatasetConfig shallowDatasetConfig = new DatasetConfig(); + shallowDatasetConfig.setType(DatasetType.PHYSICAL_DATASET); + shallowDatasetConfig.setFullPathList(ImmutableList.of("ArcticCatalog", "Table")); + + FakeVersionedPlugin sp = mock(FakeVersionedPlugin.class); + + final ManagedStoragePlugin managedStoragePlugin = mock(ManagedStoragePlugin.class); + when(managedStoragePlugin.getId()).thenReturn(mock(StoragePluginId.class)); + when(managedStoragePlugin.getDefaultRetrievalOptions()).thenReturn(DatasetRetrievalOptions.DEFAULT); + when(managedStoragePlugin.getPlugin()).thenReturn(sp); + when(managedStoragePlugin.getName()).thenReturn(sourceKey); + + final PluginRetriever pluginRetriever = mock(PluginRetriever.class); + when(pluginRetriever.getPlugin(sourceKey.getRoot(), false)).thenReturn(managedStoragePlugin); + + final NamespaceService namespaceService = mock(NamespaceService.class); + when(namespaceService.getDataset(sourceKey)).thenReturn(shallowDatasetConfig); + + final OptionManager optionManager = mock(OptionManager.class); + + final DatasetManager datasetManager = new DatasetManager(pluginRetriever, namespaceService, optionManager, "username", + null, null); + try { + datasetManager.getTable(sourceKey, metadataRequestOptions, true); + } catch (UserException e) { + assertThat(e.getMessage()).contains("Version context for table ArcticCatalog.\"Table\" must be specified using AT SQL syntax"); + return; + } + fail("getTable should have thrown exception"); + } + + /** + * Fake Versioned Plugin interface for test + */ + private interface FakeVersionedPlugin extends VersionedPlugin, StoragePlugin { + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestPluginsManager.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestPluginsManager.java index 201ca4a924..7764450745 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestPluginsManager.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestPluginsManager.java @@ -170,6 +170,7 @@ public void setup() throws Exception { PositiveLongValidator option = ExecConstants.MAX_CONCURRENT_METADATA_REFRESHES; modifiableSchedulerService = new ModifiableLocalSchedulerService(1, "modifiable-scheduler-", option, () -> optionManager) { + @Override public Cancellable schedule(Schedule schedule, Runnable task) { Cancellable wakeupTask = super.schedule(schedule, task); scheduledTasks.add(wakeupTask); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestResolvedVersionContext.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestResolvedVersionContext.java index f843aeb78d..d5f0c6e838 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestResolvedVersionContext.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestResolvedVersionContext.java @@ -60,7 +60,7 @@ public void bareCommit() { assertTrue(version.isBareCommit()); 
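// A bare commit is pinned to a commit hash only, so it resolves to the shared detached ref-name constant rather than a branch or tag name.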
assertEquals(ResolvedVersionContext.Type.BARE_COMMIT, version.getType());
- assertEquals("DETACHED", version.getRefName());
+ assertEquals(ResolvedVersionContext.DETACHED_REF_NAME, version.getRefName());
assertEquals(REASONABLE_HASH, version.getCommitHash()); }
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestVersionedDatasetId.java b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestVersionedDatasetId.java index 6f06669536..e0e495c3d2 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestVersionedDatasetId.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/catalog/TestVersionedDatasetId.java @@ -29,14 +29,9 @@ import com.fasterxml.jackson.core.JsonProcessingException; public class TestVersionedDatasetId {
- private static final String REF_NAME = "refName";
- private static final String BRANCH_NAME = "branchName";
- private static final String TAG_NAME = "tagName";
- final String folderName = "folder";
final String tableName = "table"; final String branchName = "branchName";
- final String DATAPLANE_PLUGIN_NAME = "dataplane";
List<String> tableKey = Arrays.asList(tableName); VersionContext sourceVersion = VersionContext.ofBranch(branchName); final String contentId ="contentId"; @@ -90,7 +85,7 @@ public void testFromString() throws JsonProcessingException { } @Test
- public void testFromStringInvalid() throws JsonProcessingException {
+ public void testFromStringInvalid() {
//Setup TableVersionContext sourceVersion = new TableVersionContext(TableVersionType.BRANCH,branchName); VersionedDatasetId versionedDatasetId = VersionedDatasetId.newBuilder() @@ -105,4 +100,42 @@ assertThatThrownBy( ()->VersionedDatasetId.fromString(invalidDatasetId)).hasMessageContaining("Unrecognized field "); }
+ @Test
+ public void testTimeTravelId() throws JsonProcessingException {
+ //Setup
+ long timestamp = System.currentTimeMillis();
+ TableVersionContext timeTravelVersion = new TableVersionContext(TableVersionType.TIMESTAMP, timestamp);
+
+ VersionedDatasetId versionedDatasetId = VersionedDatasetId.newBuilder()
+ .setTableKey(tableKey)
+ .setContentId(null)
+ .setTableVersionContext(timeTravelVersion)
+ .build();
+ //Act
+ String convertedDatasetId = versionedDatasetId.asString();
+
+ //Assert
+ assertThat(versionedDatasetId.getContentId()).isNull();
+ assertThat(VersionedDatasetId.fromString(convertedDatasetId)).isEqualTo(versionedDatasetId);
+ }
+
+ @Test
+ public void testSnapshotId() throws JsonProcessingException {
+ //Setup
+ String snapshotId = "1000";
+ TableVersionContext timeTravelVersion = new TableVersionContext(TableVersionType.SNAPSHOT_ID, snapshotId);
+
+ VersionedDatasetId versionedDatasetId = VersionedDatasetId.newBuilder()
+ .setTableKey(tableKey)
+ .setContentId(null)
+ .setTableVersionContext(timeTravelVersion)
+ .build();
+ //Act
+ String convertedDatasetId = versionedDatasetId.asString();
+
+ //Assert
+ assertThat(versionedDatasetId.getContentId()).isNull();
+ assertThat(VersionedDatasetId.fromString(convertedDatasetId)).isEqualTo(versionedDatasetId);
+ }
+ }
+ }
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/compile/ExampleTemplateWithInner.java b/sabot/kernel/src/test/java/com/dremio/exec/compile/ExampleTemplateWithInner.java index d86c195af4..fc05739577 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/compile/ExampleTemplateWithInner.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/compile/ExampleTemplateWithInner.java @@ -20,6 +20,7 @@
public abstract class ExampleTemplateWithInner implements ExampleInner{ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExampleTemplateWithInner.class); + @Override public abstract void doOutside(); public class TheInnerClass{ @@ -39,6 +40,7 @@ public class DoubleInner{ } + @Override public void doInsideOutside(){ TheInnerClass inner = new TheInnerClass(); inner.doInside(); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestConcatFunctions.java b/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestConcatFunctions.java index 7bbacaf210..4067106380 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestConcatFunctions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestConcatFunctions.java @@ -15,31 +15,159 @@ */ package com.dremio.exec.expr.fn.impl; +import java.util.List; + import org.junit.Test; import com.dremio.BaseTestQuery; +import com.dremio.common.util.DremioGetObject; +import com.dremio.exec.record.RecordBatchLoader; +import com.dremio.exec.record.VectorWrapper; +import com.dremio.sabot.rpc.user.QueryDataBatch; +/** + * Test the CONCAT function. + */ public class TestConcatFunctions extends BaseTestQuery { - + /** + * Test concat with different numbers of arguments of type VARCHAR. + * @throws Exception + */ @Test public void testConcat() throws Exception { - helper("s1s2", "s1", "s2"); - helper("s1", "s1", null); - helper("s1s2s3s4", "s1", "s2", "s3", "s4"); - helper("s1s2s3s4s5s6s7s8s9s10", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9", "s10"); - helper("s1s2s4s5s6s8s9s10", "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10"); - helper("s1s2s4s5s6s8s9s10s11", "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", "s11"); - helper("s1s2s4s5s6s8s9s10s11s12s13s14s15s16s17s18s19", - "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", - "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19"); - helper("s1s2s4s5s6s8s9s10s11s12s13s14s15s16s17s18s19s20s21", - "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", - "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19", "s20", "s21"); - helper("", null, null, null, null, null, null, null, null, null, null); - helper("", null, null, null, null, null, null, null, null, null, null, null, null, null); + concatVarcharHelper("s1s2", "s1", "s2"); + concatVarcharHelper("s1", "s1", null); + concatVarcharHelper("s1s2s3s4", "s1", "s2", "s3", "s4"); + concatVarcharHelper("s1s2s3s4s5s6s7s8s9s10", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9", "s10"); + concatVarcharHelper("s1s2s4s5s6s8s9s10", "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10"); + concatVarcharHelper("s1s2s4s5s6s8s9s10s11", "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", "s11"); + concatVarcharHelper("s1s2s4s5s6s8s9s10s11s12s13s14s15s16s17s18s19", + "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", + "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19"); + concatVarcharHelper("s1s2s4s5s6s8s9s10s11s12s13s14s15s16s17s18s19s20s21", + "s1", "s2", null, "s4", "s5", "s6", null, "s8", "s9", "s10", + "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19", "s20", "s21"); + concatVarcharHelper("", null, null, null, null, null, null, null, null, null, null); + concatVarcharHelper("", null, null, null, null, null, null, null, null, null, null, null, null, null); + } + + @Test + public void testConcatWithInts() throws Exception { + concatHelper("-1000000000", + "-1000000000"); + 
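+ // CONCAT implicitly casts non-VARCHAR arguments, so the INT literal concatenates as its decimal string form.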
concatHelper("hello-1000000000world", + "'hello'", "-1000000000", "'world'"); + } + + @Test + public void testConcatWithDate() throws Exception { + concatHelper("Date: 1995-02-20", + "'Date: '", "CAST('1995-02-20' AS DATE)"); + } + + @Test + public void testConcatWithDecimal() throws Exception { + concatHelper("Decimal: 123456.789", + "'Decimal: '", "CAST(123456.789 AS DECIMAL(9,3))"); + } + + @Test + public void testConcatWithDouble() throws Exception { + concatHelper("Double: 1.1234567890123457", + "'Double: '", "CAST(1.1234567890123457 AS DOUBLE)"); + concatHelper("Double: {1.1234567890123457}", + "'Double: {'", "CAST(1.1234567890123457 AS DOUBLE)", "'}'"); + } + + /** + * Test case using the parquet/alltypes.json data set to give a little more variation in our testing. + * We do not query the json data directly, because in that path we do not enforce the result of our type inference, + * and instead just default to a max-width VARCHAR. + * + * We instead read the test data, and generate queries that concatenate the literals. For the baseline query we + * cast to varchar, and for the test query we concatenate using the proper type. + * @throws Exception + */ + @Test + public void testConcatAllTypes() throws Exception { + + // These two arrays need to be kept aligned with each other, and must match the column in alltypes.json. + final String[] colNames = {"timestamp_col", "intervalday_col", "time_col", "bigint_col", "varbinary_col", + "int_col", "bit_col", "float8_col", "date_col", "float4_col", "varchar_col"}; + final String[] typeNames = {"TIMESTAMP", "INTERVAL DAY", "TIME", "BIGINT", "VARBINARY", + "INT", "BOOLEAN", "DOUBLE", "DATE", "FLOAT", "VARCHAR"}; + + // First, read the test data. + List allTypesStringData = + testSqlWithResults(String.format("SELECT %s FROM cp.\"/parquet/alltypes.json\"", String.join(", ", colNames))); + RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); + for(QueryDataBatch batch : allTypesStringData) { + if(batch.getData() != null) { + loader.load(batch.getHeader().getDef(), batch.getData()); + for (int i = 0; i < loader.getRecordCount(); ++i) { + // For each row in the file, we generate a query that will return a single row with all the columns in + // alltypes.json. + int colIdx = 0; + StringBuilder baselineConcatQuery = new StringBuilder("SELECT "); + StringBuilder testConcatQuery = new StringBuilder("SELECT "); + for (VectorWrapper w : loader) { + Object value = DremioGetObject.getObject(w.getValueVector(), i); + String valueAsStringLiteral = value != null ? "'" + value.toString() + "'" : "null"; + + if (colIdx > 0) { + baselineConcatQuery.append(','); + testConcatQuery.append(','); + } + + baselineConcatQuery + .append(String.format("CONCAT('{', CAST(CAST(%s AS %s) AS VARCHAR), '}') %s", + valueAsStringLiteral, + typeNames[colIdx], + colNames[colIdx])); + testConcatQuery + .append(String.format("CONCAT('{', CAST(%s AS %s), '}') %s", + valueAsStringLiteral, + typeNames[colIdx], + colNames[colIdx])); + ++colIdx; + } + + testBuilder() + .sqlQuery(testConcatQuery.toString()) + .unOrdered() + .sqlBaselineQuery(baselineConcatQuery.toString()) + .build().run(); + } + } + loader.clear(); + batch.release(); + } + } + + /** + * Execute a SELECT CONCAT query using the passed in arguments and compare it to the expected string. + * @param expected The expected result of the query. + * @param args A list of strings that can be used directly in a CONCAT call. Must be a valid SQL literal/expression. 
+ * @throws Exception + */ + private void concatHelper(String expected, String... args) throws Exception { + String query = "SELECT concat("+ String.join(", ", args) +") c1"; + testBuilder() + .sqlQuery(query) + .unOrdered() + .baselineColumns("c1") + .baselineValues(expected) + .go(); } - private void helper(String expected, String... args) throws Exception { + /** + * Execute a SELECT CONCAT query using the passed in arguments as string literals and compare it to the expected + * string. + * @param expected The expected result of the query. + * @param args A list of strings that will be represented as string literals in the CONCAT query. Nulls allowed. + * @throws Exception + */ + private void concatVarcharHelper(String expected, String... args) throws Exception { final StringBuilder queryBuilder = new StringBuilder(); queryBuilder.append("SELECT concat("); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestStringFunctions.java b/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestStringFunctions.java index 9bab9696e3..e2430959d4 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestStringFunctions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/expr/fn/impl/TestStringFunctions.java @@ -21,7 +21,7 @@ import java.io.File; import java.io.PrintWriter; -import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -31,8 +31,8 @@ public class TestStringFunctions extends BaseTestQuery { - @ClassRule - public static final TemporaryFolder tempDir = new TemporaryFolder(); + @Rule + public final TemporaryFolder tempDir = new TemporaryFolder(); @Test public void testStrPosMultiByte() throws Exception { @@ -573,96 +573,96 @@ public void testSubstr() throws Exception { @Test public void testLpadTwoArgConvergeToLpad() throws Exception { - final String query_1 = "SELECT lpad(r_name, 25) \n" + + final String query1 = "SELECT lpad(r_name, 25) \n" + "FROM cp.\"tpch/region.parquet\""; - final String query_2 = "SELECT lpad(r_name, 25, ' ') \n" + + final String query2 = "SELECT lpad(r_name, 25, ' ') \n" + "FROM cp.\"tpch/region.parquet\""; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() - .sqlBaselineQuery(query_2) + .sqlBaselineQuery(query2) .build() .run(); } @Test public void testRpadTwoArgConvergeToRpad() throws Exception { - final String query_1 = "SELECT rpad(r_name, 25) \n" + + final String query1 = "SELECT rpad(r_name, 25) \n" + "FROM cp.\"tpch/region.parquet\""; - final String query_2 = "SELECT rpad(r_name, 25, ' ') \n" + + final String query2 = "SELECT rpad(r_name, 25, ' ') \n" + "FROM cp.\"tpch/region.parquet\""; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() - .sqlBaselineQuery(query_2) + .sqlBaselineQuery(query2) .build() .run(); } @Test public void testLtrimOneArgConvergeToLtrim() throws Exception { - final String query_1 = "SELECT ltrim(concat(' ', r_name, ' ')) \n" + + final String query1 = "SELECT ltrim(concat(' ', r_name, ' ')) \n" + "FROM cp.\"tpch/region.parquet\""; - final String query_2 = "SELECT ltrim(concat(' ', r_name, ' '), ' ') \n" + + final String query2 = "SELECT ltrim(concat(' ', r_name, ' '), ' ') \n" + "FROM cp.\"tpch/region.parquet\""; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() - .sqlBaselineQuery(query_2) + .sqlBaselineQuery(query2) .build() .run(); } @Test public void testRtrimOneArgConvergeToRtrim() throws Exception { - final String query_1 = "SELECT rtrim(concat(' ', r_name, ' ')) \n" + + final 
String query1 = "SELECT rtrim(concat(' ', r_name, ' ')) \n" + "FROM cp.\"tpch/region.parquet\""; - final String query_2 = "SELECT rtrim(concat(' ', r_name, ' '), ' ') \n" + + final String query2 = "SELECT rtrim(concat(' ', r_name, ' '), ' ') \n" + "FROM cp.\"tpch/region.parquet\""; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() - .sqlBaselineQuery(query_2) + .sqlBaselineQuery(query2) .build() .run(); } @Test public void testBtrimOneArgConvergeToBtrim() throws Exception { - final String query_1 = "SELECT btrim(concat(' ', r_name, ' ')) \n" + + final String query1 = "SELECT btrim(concat(' ', r_name, ' ')) \n" + "FROM cp.\"tpch/region.parquet\""; - final String query_2 = "SELECT btrim(concat(' ', r_name, ' '), ' ') \n" + + final String query2 = "SELECT btrim(concat(' ', r_name, ' '), ' ') \n" + "FROM cp.\"tpch/region.parquet\""; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() - .sqlBaselineQuery(query_2) + .sqlBaselineQuery(query2) .build() .run(); } @Test public void testInitCap() throws Exception { - final String query_1 = "SELECT x, initcap(x) as y FROM (VALUES ('abc'), ('ABC'), ('12ABC')) as t1(x)"; + final String query1 = "SELECT x, initcap(x) as y FROM (VALUES ('abc'), ('ABC'), ('12ABC')) as t1(x)"; final String expected = "SELECT x, y FROM (VALUES ('abc', 'Abc'), ('ABC', 'Abc'), ('12ABC', '12abc')) as t1(x, y)"; testBuilder() - .sqlQuery(query_1) + .sqlQuery(query1) .unOrdered() .sqlBaselineQuery(expected) .build() @@ -679,4 +679,30 @@ public void testReverse() throws Exception { .baselineValues("Sheri Nowmer", " ireh") .go(); } + + @Test + public void testRegexpColLike() throws Exception { + final String query = "select *, regexp_col_like(columns[0], columns[1]) as col_matches " + + "from cp.\"csv/regexp_col_like_test.csv\" " + + "where columns[2] != col_matches"; + + testBuilder() + .unOrdered() + .sqlQuery(query) + .expectsEmptyResultSet() + .go(); + } + + @Test + public void testRegexpColMatches() throws Exception { + final String query = "select *, regexp_col_matches(columns[0], columns[1]) as col_matches " + + "from cp.\"csv/regexp_col_like_test.csv\" " + + "where columns[2] != col_matches"; + + testBuilder() + .unOrdered() + .sqlQuery(query) + .expectsEmptyResultSet() + .go(); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/TestMathFunctions.java b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/TestMathFunctions.java index d72317d3b7..0d6ad6c197 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/TestMathFunctions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/TestMathFunctions.java @@ -49,7 +49,8 @@ public void testJavaFloatingPointDivide() throws Exception { Assert.fail(); } catch (Exception exception) { Assert.assertEquals("divide by zero", - exception.getCause().getCause().getMessage()); } + exception.getCause().getCause().getMessage()); + } try { testFunctions(new Object[][]{ {"c0/c1", 1.1d, 0.0d, Double.NaN} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/GeneratorFunctions.java b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/GeneratorFunctions.java index 70a3767dad..081342e83f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/GeneratorFunctions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/GeneratorFunctions.java @@ -39,10 +39,12 @@ public static class IncreasingBigInt implements SimpleFunction { @Workspace long current; @Output NullableBigIntHolder out; + @Override public void setup() { current = 
0; } + @Override public void eval() { out.isSet = 1; out.value = start.value + current++; @@ -55,9 +57,11 @@ public static class RandomBigIntGauss implements SimpleFunction { @Param BigIntHolder range; @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = (long)(com.dremio.exec.fn.impl.testing.GeneratorFunctions.random.nextGaussian() * range.value); @@ -71,9 +75,11 @@ public static class RandomBigInt implements SimpleFunction { @Param BigIntHolder max; @Output NullableBigIntHolder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = (long)(com.dremio.exec.fn.impl.testing.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value); @@ -86,9 +92,11 @@ public static class RandomFloat8Gauss implements SimpleFunction { @Param BigIntHolder range; @Output NullableFloat8Holder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = com.dremio.exec.fn.impl.testing.GeneratorFunctions.random.nextGaussian() * range.value; @@ -102,9 +110,11 @@ public static class RandomFloat8 implements SimpleFunction { @Param BigIntHolder max; @Output NullableFloat8Holder out; + @Override public void setup() { } + @Override public void eval() { out.isSet = 1; out.value = com.dremio.exec.fn.impl.testing.GeneratorFunctions.random.nextFloat() * (max.value - min.value) + min.value; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/TestCardinality.java b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/TestCardinality.java new file mode 100644 index 0000000000..69769267c1 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/fn/impl/testing/TestCardinality.java @@ -0,0 +1,210 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dremio.exec.fn.impl.testing; + +import org.junit.Assert; +import org.junit.Test; + +import com.dremio.BaseTestQuery; + + +public class TestCardinality extends BaseTestQuery { + @Test + public void testNonNullArray() throws Exception { + //[1,2,3,4,5] + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(int_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 1") + .baselineColumns("c1") + .baselineValues(5) + .go(); + //[a,b,c,d,e] + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(str_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 1") + .baselineColumns("c1") + .baselineValues(5) + .go(); + } + + @Test + public void testMixedArray() throws Exception { + // [1,2,null,3,null,5] + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(int_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 2") + .baselineColumns("c1") + .baselineValues(6) + .go(); + //[a, null, c, null, e] + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(str_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 2") + .baselineColumns("c1") + .baselineValues(5) + .go(); + } + + @Test + public void testNullElementsArray() throws Exception { + // [null,null,null,null,null,null] in LIST field + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(int_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 3") + .baselineColumns("c1") + .baselineValues(6) + .go(); + // [null,null,null,null,null] in LIST field + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(str_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 3") + .baselineColumns("c1") + .baselineValues(5) + .go(); + } + + @Test + public void testNullArray() throws Exception { + // null value in LIST + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(int_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 4") + .baselineColumns("c1") + .baselineValues(null) + .go(); + // null value in LIST + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(str_array) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 4") + .baselineColumns("c1") + .baselineValues(null) + .go(); + } + + @Test + public void testMapCardinality() throws Exception { + // {1:'a', 2:'b'} + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(mapdata) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 1") + .baselineColumns("c1") + .baselineValues(2) + .go(); + } + + @Test + public void testMapCardinalityWithNullValues() throws Exception { + // {1:null, 2:'b'} + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(mapdata) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 2") + .baselineColumns("c1") + .baselineValues(2) + .go(); + // {1:null, 2:null} + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(mapdata) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 3") + .baselineColumns("c1") + .baselineValues(2) + .go(); + } + + @Test + public void testNullMap() throws Exception { + // null value in MAP + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(mapdata) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 4") + .baselineColumns("c1") + .baselineValues(null) + .go(); + } + + @Test + public void testCardinalityNestedArray() throws Exception { + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(CONVERT_FROM('[[1,2], [3,4], {5:\"a\",6:\"b\"}]', 'json')) c1") + .baselineColumns("c1") + 
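+ // only the three top-level elements (two nested arrays and one map) are counted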
.baselineValues(3) + .go(); + } + + @Test + public void testCardinalityStringArray() throws Exception { + testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(CONVERT_FROM('[\"a\", \"b\", \"c\", \"d\"]', 'json')) c1") + .baselineColumns("c1") + .baselineValues(4) + .go(); + } + + @Test + public void testCardinalityWithNullElements() throws Exception { + testBuilder() + .unOrdered() + .optionSettingQueriesForTestQuery("alter session set \"store.json.all_text_mode\"= true") + .sqlQuery("SELECT CARDINALITY(CONVERT_FROM('[\"a\", null, \"b\"]', 'json')) c1") + .baselineColumns("c1") + .baselineValues(3) + .go(); + } + + @Test + public void testIncompatibleTypes() { + Exception ex = Assert.assertThrows(Exception.class, + () -> testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY('1') c1") + .baselineColumns("c1") + .baselineValues(1) + .go() + ); + Assert.assertTrue(ex.getMessage().contains( + "VALIDATION ERROR: Cannot apply 'CARDINALITY' to arguments of type 'CARDINALITY()'")); + } + + @Test + public void testStructInput() { + // STRUCT{1:null, 2:'b'} + Exception ex = Assert.assertThrows(Exception.class, + () -> testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(struct_data) c1 FROM cp.\"null_str_int_array_map.parquet\" WHERE case_id = 1") + .baselineColumns("c1") + .baselineValues(1) + .go() + ); + Assert.assertTrue(ex.getMessage().contains( + "VALIDATION ERROR: Cannot apply 'CARDINALITY' to arguments of type 'CARDINALITY()'")); + } + + @Test + public void testRuntimeTypeChecking() { + Exception ex = Assert.assertThrows(Exception.class, + () -> testBuilder() + .unOrdered() + .sqlQuery("SELECT CARDINALITY(CONVERT_FROM('{\"name\":\"Gnarly\", \"age\":7}', 'json'))") + .baselineColumns("c1") + .baselineValues(1) + .go() + ); + Assert.assertTrue(ex.getMessage().contains("Cannot apply 'CARDINALITY' to arguments of type 'CARDINALITY()'")); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/fn/interp/TestConstantFolding.java b/sabot/kernel/src/test/java/com/dremio/exec/fn/interp/TestConstantFolding.java index 6c519ecd86..bf25c601df 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/fn/interp/TestConstantFolding.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/fn/interp/TestConstantFolding.java @@ -58,9 +58,9 @@ public SmallFileCreator setRecord(String record) { public void createFiles(int smallFileLines, int bigFileLines, String extension, String delimiter) throws Exception{ if (record == null) { - if (extension.equals("csv") || extension.equals("tsv")) { + if ("csv".equals(extension) || "tsv".equals(extension)) { record = Joiner.on(delimiter).join(values); - } else if (extension.equals("json") ){ + } else if ("json".equals(extension)) { record = jsonRecord; } else { throw new UnsupportedOperationException( @@ -92,11 +92,11 @@ record = jsonRecord; public void createFiles(int smallFileLines, int bigFileLines, String extension) throws Exception{ String delimiter; - if (extension.equals("json")) { + if ("json".equals(extension)) { delimiter = null; - } else if (extension.equals("csv")) { + } else if ("csv".equals(extension)) { delimiter = ","; - } else if (extension.equals("tsv")) { + } else if ("tsv".equals(extension)) { delimiter = "\t"; } else { throw new UnsupportedOperationException("Extension not recognized, please explicitly provide a delimiter."); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/maestro/TestQueuePosition.java b/sabot/kernel/src/test/java/com/dremio/exec/maestro/TestQueuePosition.java index ea647eb4ee..084b73fcf9 100644 --- 
a/sabot/kernel/src/test/java/com/dremio/exec/maestro/TestQueuePosition.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/maestro/TestQueuePosition.java @@ -79,7 +79,8 @@ public void testQueue() throws Exception { }).when(observer).resourcesScheduled(Mockito.any()); BasicResourceAllocator ra = new BasicResourceAllocator(() -> clusterCoordinator, null); - ResourceTracker resourceTracker = new ResourceTracker(plan, context, ra, observer); + ResourceTracker resourceTracker = new ResourceTracker(context, ra); + resourceTracker.allocate(plan, observer); } /** diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/CaseExpressionSplitterNestedTest.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/CaseExpressionSplitterNestedTest.java index fc72fe1065..10d274e40d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/CaseExpressionSplitterNestedTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/CaseExpressionSplitterNestedTest.java @@ -87,7 +87,7 @@ public class CaseExpressionSplitterNestedTest extends BaseExpressionSplitterTest tr(100, 300, 250, 99, 200) // 500, 1, 500 ); - Fixtures.Table nestedCaseOutput = t( + private static final Fixtures.Table nestedCaseOutput = t( th("out"), tr(80), tr(26), @@ -100,7 +100,7 @@ public class CaseExpressionSplitterNestedTest extends BaseExpressionSplitterTest tr(500) ); - Fixtures.Table nestedCaseSimpleOutput = t( + private static final Fixtures.Table nestedCaseSimpleOutput = t( th("out"), tr(-10), tr(13), @@ -113,7 +113,7 @@ public class CaseExpressionSplitterNestedTest extends BaseExpressionSplitterTest tr(1) ); - Fixtures.Table nestedCaseWithConstantsOutput = t( + private static final Fixtures.Table nestedCaseWithConstantsOutput = t( th("out"), tr(80), tr(12), @@ -145,6 +145,7 @@ public void testNestedNoSplitsGandiva() throws Exception { splitAndVerifyCase(nestedCaseQuery, nestedCaseInput, nestedCaseOutput, expSplits, annotator); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") @Test public void testNestedCaseQuerySimple() throws Exception { final String _xxx0g = "(case when (greater_than_or_equal_to(c0, c1)) then (0i) else (1i) end)"; @@ -177,6 +178,7 @@ public void testNestedCaseQuerySimple() throws Exception { splitAndVerifyCase(nestedCaseQuerySimple, nestedCaseInput, nestedCaseSimpleOutput, expSplits, annotator); } + @SuppressWarnings("checkstyle:LocalFinalVariableName") @Test public void testNestedMixedSplits() throws Exception { final String _xxx0g = "(case when (greater_than_or_equal_to(c0, c1)) then (0i) else (-1i) end)"; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestConvertFunctions.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestConvertFunctions.java index 9b7204bda8..40a2eb45c5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestConvertFunctions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestConvertFunctions.java @@ -332,7 +332,9 @@ public void testFixedInts4SQL_to() throws Throwable { @Test public void testHadooopVInt() throws Exception { + @SuppressWarnings("checkstyle:LocalFinalVariableName") final int _0 = 0; + @SuppressWarnings("checkstyle:LocalFinalVariableName") final int _9 = 9; final ArrowBuf buffer = getAllocator().buffer(_9); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestGeoHash.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestGeoHash.java new file mode 100644 index 0000000000..bdd3f07f4a --- /dev/null +++ 
b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/TestGeoHash.java @@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.physical.impl;
+
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import java.util.ArrayList;
+import java.util.Optional;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import com.dremio.BaseTestQuery;
+import com.dremio.common.exceptions.UserRemoteException;
+
+/**
+ * Test for GeoHash Decode and Encode.
+ */
+public class TestGeoHash extends BaseTestQuery {
+ class TestInput {
+ String hash;
+ double lat;
+ double lon;
+ Optional<Long> precision;
+ TestInput(String newHash, double newLat, double newLon) {
+ hash = newHash;
+ lon = newLon;
+ lat = newLat;
+ precision = Optional.empty();
+ }
+ TestInput(String newHash, double newLat, double newLon, long p) {
+ hash = newHash;
+ lon = newLon;
+ lat = newLat;
+ precision = Optional.of(p);
+ }
+ }
+ final ArrayList<TestInput> testData = new ArrayList<>();
+ final ArrayList<TestInput> testErrorData = new ArrayList<>();
+ final ArrayList<TestInput> testDataDecodeOnly = new ArrayList<>();
+ final ArrayList<TestInput> testLonRangeErrorData = new ArrayList<>();
+ final ArrayList<TestInput> testLatRangeErrorData = new ArrayList<>();
+ @Before
+ public void setupTestData() {
+ testData.add(new TestInput("ezs427zzzzzz7zzzzzzz", 42.60498038493089, -5.603027511388223));
+ testData.add(new TestInput("ezs42", 42.60498046875, -5.60302734375, 5));
+ testData.add(new TestInput("e", 22.5, -22.5, 1));
+ testData.add(new TestInput("ezs427zzzzzz", 42.60498038493097, -5.603027511388063, 12));
+ testData.add(new TestInput("ezs42ebpbpbj1n92syp0", 42.60498, -5.6030273));
+ testData.add(new TestInput("u4pruydqqvj8pr9yc27r", 57.64911, 10.40744));
+ testData.add(new TestInput("zzzzzzzzzzzzzzzzzzzz", 90.0, 180.0));
+ testData.add(new TestInput("00000000000000000000", -90.0, -180.0));
+
+
+ testDataDecodeOnly.add(new TestInput("", 0.0, 0.0));
+ testDataDecodeOnly.add(new TestInput("aa", 0.0, 0.0));
+ testDataDecodeOnly.add(new TestInput("eae", 0.0, 0.0));
+
+ testErrorData.add(new TestInput("???", 57.64911, 10.40744, -12));
+ testErrorData.add(new TestInput("???", 57.64911, 10.40744, 0));
+ testErrorData.add(new TestInput("???", 57.64911, 10.40744, 21));
+
+ testLonRangeErrorData.add(new TestInput("???", 57.64911, 910.40744, 12));
+ testLonRangeErrorData.add(new TestInput("???", 57.64911, -710.40744));
+ testLonRangeErrorData.add(new TestInput("???", 57.64911, 180.40744));
+
+ testLatRangeErrorData.add(new TestInput("???", 97.64911, 10.40744, 12));
+ testLatRangeErrorData.add(new TestInput("???", -157.64911, -10.40744));
+ testLatRangeErrorData.add(new TestInput("???", 90.01, 80.40744));
+
+
+
+ }
+ private void testDecode(TestInput test) throws Exception {
+ {
+ final String queryKey = "select st_fromgeohash('" + test.hash + "')['Latitude'] as Latitude";
+
+ testBuilder()
+ .sqlQuery(queryKey)
+ .unOrdered()
+ .baselineColumns("Latitude")
+ .baselineValues(test.lat)
+
.go(); + } + { + final String queryKey = "select st_fromgeohash('" + test.hash + "')['Longitude'] as Longitude"; + + testBuilder() + .sqlQuery(queryKey) + .unOrdered() + .baselineColumns("Longitude") + .baselineValues(test.lon) + .go(); + } + } + @Test + public void hashDecode() throws Exception { + for (TestInput test : testData) { + testDecode(test); + } + } + @Test + public void hashEncode() throws Exception { + for (TestInput test : testData) { + String precisionOption = ""; + if (test.precision.isPresent()) { + precisionOption = ", " + test.precision.get(); + } + final String queryKey = "select st_geohash(" + test.lat + "," + test.lon + precisionOption + ") as hash"; + + testBuilder() + .sqlQuery(queryKey) + .unOrdered() + .baselineColumns("hash") + .baselineValues(test.hash) + .go(); + } + } + @Test + public void hashEncodeErrors() throws Exception { + for (TestInput test : testErrorData) { + String precisionOption = ""; + if (test.precision.isPresent()) { + precisionOption = ", " + test.precision.get(); + } + final String queryKey = "select st_geohash(" + test.lat + "," + test.lon + precisionOption + ") as hash"; + + assertThatThrownBy(() -> test(queryKey)) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("precision must be between 1 and 20"); + } + } + @Test + public void hashEncodeLonRange() throws Exception { + for (TestInput test : testLonRangeErrorData) { + String precisionOption = ""; + if (test.precision.isPresent()) { + precisionOption = ", " + test.precision.get(); + } + final String queryKey = "select st_geohash(" + test.lat + "," + test.lon + precisionOption + ") as hash"; + + assertThatThrownBy(() -> test(queryKey)) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("longitude must be between –180° and +180°"); + } + } + @Test + public void hashEncodeLatRange() throws Exception { + for (TestInput test : testLatRangeErrorData) { + String precisionOption = ""; + if (test.precision.isPresent()) { + precisionOption = ", " + test.precision.get(); + } + final String queryKey = "select st_geohash(" + test.lat + "," + test.lon + precisionOption + ") as hash"; + + assertThatThrownBy(() -> test(queryKey)) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("latitude must be between –90° and +90°"); + } + } + @Test + public void hashBadHash() throws Exception { + for (TestInput test : testDataDecodeOnly) { + assertThatThrownBy(() -> testDecode(test)) + .hasMessageContaining("geohash must be a valid, base32-encoded geohash"); + } + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlatten.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlatten.java index 77afc070ef..a4f18c45d3 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlatten.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlatten.java @@ -242,7 +242,7 @@ public void testFlatten_Drill2162_simple() throws Exception { @Test public void drill1671() throws Exception{ String query = "select * from (select count(*) as cnt from (select id, flatten(evnts1), flatten(evnts2), flatten(evnts3), flatten(evnts4), flatten(evnts5), flatten(evnts6), flatten(evnts7), flatten(evnts8), flatten(evnts9), flatten(evnts10), flatten(evnts11) from cp.\"/flatten/many-arrays-50.json\")x )y where cnt = 2048"; - testPlanSubstrPatterns(query, new String[] {"columns=[`id`, `evnts1`, `evnts2`, `evnts3`, `evnts4`, `evnts5`, `evnts6`, `evnts7`, `evnts8`, `evnts9`, `evnts10`, 
`evnts11`]"}, null); + testPlanSubstrPatterns(query, new String[] {"columns=[`evnts1`, `evnts2`, `evnts3`, `evnts4`, `evnts5`, `evnts6`, `evnts7`, `evnts8`, `evnts9`, `evnts10`, `evnts11`]"}, null); int rowCount = testSql(query); assertEquals(1, rowCount); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlattenPlanning.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlattenPlanning.java index 263b1f3fbb..7ea0301269 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlattenPlanning.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/flatten/TestFlattenPlanning.java @@ -92,7 +92,7 @@ public void dx8383_flatten_lost() throws Exception { final String onvds = "SELECT str_list_col, flatten(str_list_list_col[0]) AS A\n" + "FROM dfs_test.flatten1"; - PlanTestBase.testPlanMatchingPatterns(onvds, new String[]{"(?s)Flatten\\(flattenField=\\[\\$1\\]\\).*Flatten\\(flattenField=\\[\\$0\\]\\)"}, new String[]{}); + PlanTestBase.testPlanMatchingPatterns(onvds, new String[]{"(?s)Flatten\\(flattenField=\\[\\$0\\]\\).*Flatten\\(flattenField=\\[\\$0\\]\\)"}, new String[]{}); } finally { properties.clear(DremioConfig.LEGACY_STORE_VIEWS_ENABLED); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/join/TestMergeJoinAdvanced.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/join/TestMergeJoinAdvanced.java index 0e6784a9a1..8ee768cf7c 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/join/TestMergeJoinAdvanced.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/join/TestMergeJoinAdvanced.java @@ -102,11 +102,11 @@ public void testFix2967() throws Exception { setSessionOption(ExecConstants.SLICE_TARGET, "1"); setSessionOption(GroupResourceInformation.MAX_WIDTH_PER_NODE_KEY, "23"); - final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources"; + final String testResPath = TestTools.getWorkingPath() + "/src/test/resources"; try { test("select * from dfs.\"%s/join/j1\" j1 left outer join dfs.\"%s/join/j2\" j2 on (j1.c_varchar = j2.c_varchar)", - TEST_RES_PATH, TEST_RES_PATH); + testResPath, testResPath); } finally { setSessionOption(PlannerSettings.BROADCAST.getOptionName(), String.valueOf(PlannerSettings.BROADCAST.getDefault ().getBoolVal())); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java index f6571340cc..7df30a6fbc 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java @@ -22,8 +22,8 @@ import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; -import org.apache.commons.math.stat.descriptive.moment.Mean; -import org.apache.commons.math.stat.descriptive.moment.StandardDeviation; +import org.apache.commons.math3.stat.descriptive.moment.Mean; +import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetTimestampInt96.java 
b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetTimestampInt96.java index 378159c8a8..efa9719e05 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetTimestampInt96.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetTimestampInt96.java @@ -19,9 +19,7 @@ import org.apache.hadoop.fs.FileSystem; import org.junit.AfterClass; import org.junit.BeforeClass;
-import org.junit.Rule;
import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
import com.dremio.BaseTestQuery; import com.dremio.common.types.TypeProtos; @@ -32,9 +30,7 @@ public class TestParquetTimestampInt96 extends BaseTestQuery {
- @Rule
- public TemporaryFolder folder = new TemporaryFolder();
- static FileSystem fs;
+ private static FileSystem fs;
@BeforeClass public static void initFs() throws Exception { @@ -86,10 +82,9 @@ public void testImpalaParquetInt96() throws Exception { */ @Test public void testImpalaParquetBinaryAsTimeStamp_DictChange() throws Exception {
- final String WORKING_PATH = TestTools.getWorkingPath();
- final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
+ final String testResPath = TestTools.getWorkingPath() + "/src/test/resources";
testBuilder()
- .sqlQuery("select int96_ts from dfs.\"%s/parquet/int96_dict_change\" order by int96_ts", TEST_RES_PATH)
+ .sqlQuery("select int96_ts from dfs.\"%s/parquet/int96_dict_change\" order by int96_ts", testResPath)
.ordered() .csvBaselineFile("testframework/testParquetReader/testInt96DictChange/q1.tsv") .baselineTypes(TypeProtos.MinorType.TIMESTAMP)
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetWriter.java b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetWriter.java index c3a01c7324..84d7a7dee4 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetWriter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/physical/impl/writer/TestParquetWriter.java @@ -23,6 +23,7 @@ import java.io.File; import java.io.FileWriter;
+import java.io.IOException;
import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; @@ -32,7 +33,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
import org.apache.parquet.hadoop.ParquetFileReader; import org.apache.parquet.hadoop.metadata.ParquetMetadata; import org.apache.parquet.hadoop.util.PageHeaderUtil; @@ -92,9 +95,9 @@ public class TestParquetWriter extends BaseTestQuery { List<String> allTypeSelectsAndCasts = new ArrayList<>(); for (String s : allTypes.keySet()) { // don't need to cast a varchar, just add the column reference
- if (s.equals("varchar")) {
+ if ("varchar".equals(s)) {
allTypeSelectsAndCasts.add(String.format("\"%s_col\"", allTypes.get(s)));
- } else if (s.equals("varbinary")) {
+ } else if ("varbinary".equals(s)) {
allTypeSelectsAndCasts.add(String.format("convert_to(\"%s_col\", 'UTF8') \"%s_col\"", s, allTypes.get(s))); } else { allTypeSelectsAndCasts.add(String.format("cast(\"%s_col\" AS %S) \"%s_col\"", allTypes.get(s), s, allTypes.get(s))); @@ -358,6 +361,44 @@ public void testTPCHReadWriteDictGzip() throws Exception { } }
+ @Test
+ public void testTPCHReadWriteDictZstd() throws Exception {
+ String outputTableMinLevel = "supplier_parquet_dict_zstd_minlevel";
+ String outputTableMaxLevel =
"supplier_parquet_dict_zstd_maxlevel"; + try { + test(String.format("ALTER SESSION SET \"%s\" = 'zstd'", ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE)); + String inputTable = "cp.\"tpch/supplier.parquet\""; + + test(String.format("ALTER SESSION SET \"%s\" = %d", ExecConstants.PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL, Integer.MIN_VALUE)); + runTestAndValidate("*", "*", inputTable, outputTableMinLevel, false, true); + + test(String.format("ALTER SESSION SET \"%s\" = %d", ExecConstants.PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL, Integer.MAX_VALUE)); + runTestAndValidate("*", "*", inputTable, outputTableMaxLevel, false, true); + + // The only way to check if the level arrives to the compressors is to check the sizes of the generated files + long minLevelSize = calculateSize(new Path(getDfsTestTmpSchemaLocation(), outputTableMinLevel)); + long maxLevelSize = calculateSize(new Path(getDfsTestTmpSchemaLocation(), outputTableMaxLevel)); + assertTrue("The parquet files generated with minimum ZSTD compression level should be bigger than the" + + " ones generated with maximum compression level", minLevelSize > maxLevelSize); + } finally { + test(String.format("ALTER SESSION SET \"%s\" = '%s'", ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, + ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR.getDefault().getStringVal())); + test(String.format("ALTER SESSION SET \"%s\" = %d", ExecConstants.PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL, + ExecConstants.PARQUET_WRITER_COMPRESSION_ZSTD_LEVEL_VALIDATOR.getDefault().getNumVal())); + deleteTableIfExists(outputTableMinLevel); + deleteTableIfExists(outputTableMaxLevel); + } + } + + private long calculateSize(Path path) throws IOException { + long size = 0L; + RemoteIterator it = path.getFileSystem(new Configuration()).listFiles(path, true); + while (it.hasNext()) { + size += it.next().getLen(); + } + return size; + } + // working to create an exhaustive test of the format for this one. 
including all convertedTypes // will not be supporting interval for Beta as of current schedule // Types left out: @@ -689,6 +730,10 @@ private String select(String selection, String input, boolean sort) { } public void runTestAndValidate(String selection, String validationSelection, String inputTable, String outputFile, boolean sort) throws Exception { + runTestAndValidate(selection, validationSelection, inputTable, outputFile, sort, false); + } + + public void runTestAndValidate(String selection, String validationSelection, String inputTable, String outputFile, boolean sort, boolean keepOutput) throws Exception { try { deleteTableIfExists(outputFile); test("use dfs_test"); @@ -715,7 +760,9 @@ public void runTestAndValidate(String selection, String validationSelection, Str PageHeaderUtil.validatePageHeaders(file.getPath(), footer); } } finally { - deleteTableIfExists(outputFile); + if (!keepOutput) { + deleteTableIfExists(outputFile); + } } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/TestDml.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/TestDml.java index 4e3c0846e3..500f859556 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/TestDml.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/TestDml.java @@ -47,7 +47,7 @@ import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.junit.After; import org.junit.Before; @@ -393,7 +393,8 @@ public void testTableModifyScanCrelSubstitutionRewriterMixedScanCrels() throws E assertThat(scanCrel.isSubstitutable()).as("Target scanCrel should not be substitutable after rewrite").isFalse(); } else { assertThat(scanCrel.isSubstitutable()).as("Source scanCrel (different from target scanCrel) should still be substitutable after rewrite").isTrue(); - }}); + } + }); nation.close(); } @@ -465,10 +466,10 @@ public void testUpdate() throws Exception { @Test public void testMerge_updateOnly() throws Exception { - final String merge_updateOnly = "merge into " + table.getTableName() + " using " + table.getTableName() + final String mergeUpdateOnly = "merge into " + table.getTableName() + " using " + table.getTableName() + " as s on " + table.getTableName() + ".order_id = s.order_id when matched then update set order_id = -1"; - testMerge(merge_updateOnly, 1, MergeType.UPDATE_ONLY); + testMerge(mergeUpdateOnly, 1, MergeType.UPDATE_ONLY); } @Test @@ -481,10 +482,10 @@ public void testMerge_insertOnly() throws Exception { @Test public void testMerge_updateWithInsert() throws Exception { - final String merge_updateWithInsert = "merge into " + table.getTableName() + " using " + table.getTableName() + final String mergeUpdateWithInsert = "merge into " + table.getTableName() + " using " + table.getTableName() + " as s on " + table.getTableName() + ".order_id = s.order_id when matched then update set order_id = -1 WHEN NOT MATCHED THEN INSERT(order_id) VALUES(-3) "; - testMerge(merge_updateWithInsert, 1, MergeType.UPDATE_INSERT); + testMerge(mergeUpdateWithInsert, 1, MergeType.UPDATE_INSERT); } @Test @@ -557,7 +558,8 @@ private String getInsertPlan(String sql) throws Exception { private Prel getDmlPlan(Catalog catalog, SqlNode sqlNode) throws Exception { DmlHandler dmlHandler = getDmlHandler(sqlNode); - return dmlHandler.getNonPhysicalPlan(catalog, config, sqlNode, 
DmlUtils.getTablePath(catalog, dmlHandler.getTargetTablePath(sqlNode))); + dmlHandler.getPlan(catalog, config, null, sqlNode, DmlUtils.getTablePath(catalog, dmlHandler.getTargetTablePath(sqlNode))); + return dmlHandler.getPrel(); } private Prel getDmlPlan(String sql) throws Exception { @@ -656,8 +658,7 @@ private void validateDeleteSpecificOperationsAfterCopyOnWriteJoin(Prel plan, Boo FilterPrel filterPrel = filterPrels.get(0); assertThat(filterPrel).as("filter is expected").isNotNull(); - } - else { + } else { assertThat(filterPrels.size()).as("no filter is expected").isEqualTo(0); } } @@ -731,20 +732,20 @@ private static void validateWriterPlan(Prel plan, JoinRelType joinType, MergeTyp List writerPrels= NodeFinder.find(unionAllPrel.getInput(0), writerDescriptor); boolean writerIsOnZeroUnionInput = CollectionUtils.isNotEmpty(writerPrels); - validateDeletedDataFilesTableFunction(unionAllPrel.getInput(writerIsOnZeroUnionInput ? 1 : 0), mergeType); + validateDeletedFilesTableFunction(unionAllPrel.getInput(writerIsOnZeroUnionInput ? 1 : 0), mergeType); validateBaseCopyOnWriteJoinPlan(unionAllPrel.getInput(writerIsOnZeroUnionInput ? 0 : 1), joinType, mergeType); } - private static void validateDeletedDataFilesTableFunction(RelNode plan, MergeType mergeType) { + private static void validateDeletedFilesTableFunction(RelNode plan, MergeType mergeType) { TargetNodeDescriptor descriptor = new TargetNodeDescriptor(TableFunctionPrel.class, null); List tableFunctionPrels= NodeFinder.find(plan, descriptor); assertThat(tableFunctionPrels).isNotNull(); - TableFunctionPrel deletedDataFilesTableFunctionPre = tableFunctionPrels.get(0); - assertThat(TableFunctionConfig.FunctionType.DELETED_DATA_FILES_METADATA).isEqualTo(deletedDataFilesTableFunctionPre.getTableFunctionConfig().getType()); + TableFunctionPrel deletedFilesTableFunctionPre = tableFunctionPrels.get(0); + assertThat(TableFunctionConfig.FunctionType.DELETED_FILES_METADATA).isEqualTo(deletedFilesTableFunctionPre.getTableFunctionConfig().getType()); - validateDmlAgg(deletedDataFilesTableFunctionPre, mergeType); + validateDmlAgg(deletedFilesTableFunctionPre, mergeType); } private static void validateBaseCopyOnWriteJoinPlan(RelNode plan, JoinRelType joinType, MergeType mergeType) { @@ -768,7 +769,7 @@ private static void validateBaseCopyOnWriteJoinPlan(RelNode plan, JoinRelType jo private static void validateFilePathJoin(RelNode plan, MergeType mergeType) { Map attributes = ImmutableMap.of( "joinType", "inner", - "condition", "=($0, $7)" + "condition", "=($0, $8)" ); HashJoinPrel hashJoinPrel = findSingleNode(plan, HashJoinPrel.class, attributes); @@ -871,6 +872,10 @@ private void testMerge(String mergeQuery, int updatedColumns, MergeType mergeTyp break; case UPDATE_INSERT: assertThat(aggInput.getRowType().getFieldCount()).isEqualTo(userColumnCount + 2 + updatedColumns); + break; + case INVALID: + default: + throw new IllegalArgumentException("invalid merge type: " + mergeType); } // Verify column name testResultColumnName(mergeQuery); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/TestFragmentPriorityAssignment.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/TestFragmentPriorityAssignment.java deleted file mode 100644 index 1fccda10ab..0000000000 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/TestFragmentPriorityAssignment.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file 
except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; - -import com.dremio.exec.PlanOnlyTestBase; -import com.dremio.exec.ops.QueryContext; -import com.dremio.exec.physical.PhysicalPlan; -import com.dremio.exec.planner.fragment.AssignFragmentPriorityVisitor; -import com.dremio.exec.server.SabotContext; -import com.dremio.options.OptionValue; - -/** - * Tests assignment priorities of queries with single and multiple fragments. - */ -public class TestFragmentPriorityAssignment extends PlanOnlyTestBase { - @Test - public void testSingleFragmentAssignment() throws Exception { - final String sql = "SELECT city from cp.\"sample/samples-samples-dremio-com-zips-json.json\""; - final PhysicalPlan plan = createPlan(sql); - AssignFragmentPriorityVisitor priorityAssigner = new AssignFragmentPriorityVisitor(); - plan.getRoot().accept(priorityAssigner, null); - assertEquals(1, priorityAssigner.getFragmentWeight(0)); - assertEquals(1, priorityAssigner.getFragmentWeight(1)); - } - - @Test - public void testMultiFragmentAssignment() throws Exception { - final String yelpTable = TEMP_SCHEMA + ".\"yelp\""; - final String sql = "SELECT nested_0.review_id AS review_id, nested_0.user_id AS user_id, nested_0.votes AS votes," + - " nested_0.stars AS stars, join_business.business_id AS business_id0, " + - " join_business.neighborhoods AS neighborhoods, join_business.city AS city, " + - " join_business.latitude AS latitude, join_business.review_count AS review_count, " + - " join_business.full_address AS full_address, join_business.stars AS stars0, " + - " join_business.categories AS categories, join_business.state AS state, " + - " join_business.longitude AS longitude\n" + - "FROM (\n" + - " SELECT review_id, user_id, votes, stars, business_id\n" + - " FROM cp.\"yelp_review.json\" where 1 = 0\n" + - ") nested_0\n" + - " FULL JOIN " + yelpTable + " AS join_business ON nested_0.business_id = join_business.business_id"; - final PhysicalPlan plan = createPlan(sql); - AssignFragmentPriorityVisitor priorityAssigner = new AssignFragmentPriorityVisitor(); - plan.getRoot().accept(priorityAssigner, null); - assertEquals(2, priorityAssigner.getFragmentWeight(1)); - assertEquals(1, priorityAssigner.getFragmentWeight(2)); - assertEquals(2, priorityAssigner.getFragmentWeight(3)); - } - - @Test - public void testComplexMultiFragmentAssignment() throws Exception { - final String yelpTable = TEMP_SCHEMA + ".\"yelp\""; - final String sql = "SELECT * from " + - "(SELECT review_id, user_id, stars, name, sumreview," + - " rank() over (partition by user_id order by sumreview desc) rk" + - " FROM (\n" + - " SELECT x.review_id as review_id, x.user_id as user_id, x.stars as stars, u.name as name, " + - " sum(coalesce(y.review_count*y.stars,0)) sumreview" + - " FROM cp.\"yelp_review.json\" x, " + yelpTable + " y, cp.\"yelp_user_data.json\" z, " + - " cp.\"user.json\" u \n" + - " where x.business_id = y.business_id and x.user_id = z.user_id and z.name = u.name \n" + - " group by rollup(review_id, user_id, stars, name))dw1) dw2" 
+ - " where rk <= 100 " + - " order by review_id, user_id, sumreview, rk" + - " limit 100;"; - final PhysicalPlan plan = createPlan(sql); - AssignFragmentPriorityVisitor priorityAssigner = new AssignFragmentPriorityVisitor(); - plan.getRoot().accept(priorityAssigner, null); - assertEquals(5, priorityAssigner.getFragmentWeight(0)); - assertEquals(5, priorityAssigner.getFragmentWeight(7)); - assertEquals(1, priorityAssigner.getFragmentWeight(8)); - } - - private PhysicalPlan createPlan(String sql) throws Exception { - final SabotContext context = createSabotContext( - () -> OptionValue.createLong(OptionValue.OptionType.SYSTEM, "planner.slice_target", 1), - () -> OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, "planner.enable_join_optimization", false), - () -> OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, "planner.assign_priority", true) - ); - final QueryContext queryContext = createContext(context); - return createPlan(sql, queryContext); - } -} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/fragment/TestSplitNormalizer.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/fragment/TestSplitNormalizer.java index 87c42e6067..bddb32bb79 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/fragment/TestSplitNormalizer.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/fragment/TestSplitNormalizer.java @@ -104,7 +104,8 @@ public void multiNode() throws Exception { // assign alternate splits to each node. List[] writeSplitsSharded = new List[2]; - writeSplitsSharded[0] = writeSplitsSharded[1] = new ArrayList<>(); + writeSplitsSharded[0] = new ArrayList<>(); + writeSplitsSharded[1] = new ArrayList<>(); for (int i = 0; i < writeSplits.size(); i++) { if (i % 2 == 0) { writeSplitsSharded[0].add(writeSplits.get(i)); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/FilterSplitTest.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/FilterSplitTest.java index 66525078fd..587a12460a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/FilterSplitTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/FilterSplitTest.java @@ -15,156 +15,95 @@ */ package com.dremio.exec.planner.logical; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static com.dremio.test.dsl.RexDsl.and; +import static com.dremio.test.dsl.RexDsl.eq; +import static com.dremio.test.dsl.RexDsl.intInput; +import static com.dremio.test.dsl.RexDsl.literal; +import static com.dremio.test.dsl.RexDsl.lt; +import static com.dremio.test.dsl.RexDsl.or; +import static com.dremio.test.dsl.RexDsl.varcharInput; +import static com.dremio.test.scaffolding.ScaffoldingRel.TYPE_FACTORY; import java.util.BitSet; +import java.util.stream.Collectors; +import java.util.stream.IntStream; -import org.apache.calcite.adapter.java.JavaTypeFactory; -import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.fun.SqlStdOperatorTable; -import org.apache.calcite.sql.type.SqlTypeName; import org.junit.Test; import com.dremio.exec.planner.DremioRexBuilder; import com.dremio.exec.planner.logical.partition.FindPartitionConditions; -import com.dremio.exec.planner.types.JavaTypeFactoryImpl; +import com.dremio.test.GoldenFileTestBuilder; public class FilterSplitTest { - static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FilterSplitTest.class); - - final JavaTypeFactory t = 
JavaTypeFactoryImpl.INSTANCE; - final RexBuilder builder = new DremioRexBuilder(t); - final RelDataType intType = t.createSqlType(SqlTypeName.INTEGER); - final RelDataType sType = t.createSqlType(SqlTypeName.VARCHAR, 20); + private static final RexBuilder REX_BUILDER = new DremioRexBuilder(TYPE_FACTORY); @Test - public void simpleCompound() { - // a < 1 AND dir0 in (2,3) - RexNode n = and( - lt(c(0), lit(1)), + public void goldenTest() { + new GoldenFileTestBuilder<>(this::transform, rex -> GoldenFileTestBuilder.MultiLineString.create(rex.toString())) + .add("simpleCompound: (a < 1 AND dir0 in (2,3))", + and( + lt(c(0), literal(1)), or( - eq(c(1), lit(2)), - eq(c(1), lit(3)) - ) - ); - - BitSet bs = new BitSet(); - bs.set(1); - FindPartitionConditions c = new FindPartitionConditions(bs, builder); - c.analyze(n); - - RexNode partNode = c.getFinalCondition(); - assertEquals(n.toString(), "AND(<($0, 1), OR(=($1, 2), =($1, 3)))"); - assertEquals(partNode.toString(), "OR(=($1, 2), =($1, 3))"); - } - - @Test - public void twoLevelDir() { - // (dir0 = 1 and dir1 = 2) OR (dir0 = 3 and dir1 = 4) - RexNode n = or( + eq(c(1), literal(2)), + eq(c(1), literal(3))))) + .add("badFunc (dir0 || 1)", fn(cs(0), cs(1))) + .add("twoLevelDir: (dir0 = 1 and dir1 = 2) OR (dir0 = 3 and dir1 = 4)", + or( and( - eq(c(1), lit(1)), - eq(c(2), lit(2)) - ), + eq(c(1), literal(1)), + eq(c(2), literal(2))), and( - eq(c(1), lit(3)), - eq(c(2), lit(4)) - ) - - ); - - BitSet bs = new BitSet(); - bs.set(1); - bs.set(2); - FindPartitionConditions c = new FindPartitionConditions(bs, builder); - c.analyze(n); - - RexNode partNode = c.getFinalCondition(); - assertEquals("OR(AND(=($1, 1), =($2, 2)), AND(=($1, 3), =($2, 4)))", n.toString()); - assertEquals("OR(AND(=($1, 1), =($2, 2)), AND(=($1, 3), =($2, 4)))", partNode.toString()); - } - - @Test - public void badOr() { - // (dir0 = 1 and dir1 = 2) OR (a < 5) - RexNode n = or( + eq(c(1), literal(3)), + eq(c(2), literal(4))))) + .add("badOr: (dir0 = 1 and dir1 = 2) OR (a < 5)", + or( and( - eq(c(1), lit(1)), - eq(c(2), lit(2)) - ), - lt(c(0), lit(5)) - - ); - - BitSet bs = new BitSet(); - bs.set(1); - bs.set(2); - FindPartitionConditions c = new FindPartitionConditions(bs, builder); - c.analyze(n); - - RexNode partNode = c.getFinalCondition(); - assertEquals("OR(AND(=($1, 1), =($2, 2)), <($0, 5))", n.toString()); - assertTrue(partNode == null); + eq(c(1), literal(1)), + eq(c(2), literal(2))), + lt(c(0), literal(5)))) + .add("disjunctiveNormalForm (a, dir0) IN ((0, 1), (2, 3))", + or( + and( + eq(c(0), literal(0)), + eq(c(1), literal(1))), + and( + eq(c(0), literal(2)), + eq(c(1), literal(3))))) + .add("Large DNF (a, dir0) IN (....)", + or( + IntStream.range(0, 100) + .mapToObj(i -> + and(eq(c(0), literal(i)), eq(c(1), literal(i)))) + .collect(Collectors.toList()))) + .runTests(); } - - @Test - public void badFunc() { - // (dir0 = 1 and dir1 = 2) OR (a < 5) - RexNode n = fn( - cs(0), - cs(1) - ); - + public String transform(RexNode rexNode) { BitSet bs = new BitSet(); bs.set(1); bs.set(2); - FindPartitionConditions c = new FindPartitionConditions(bs, builder); - c.analyze(n); - + FindPartitionConditions c = new FindPartitionConditions(bs, REX_BUILDER); + c.analyze(rexNode); RexNode partNode = c.getFinalCondition(); - assertEquals("||($0, $1)", n.toString()); - assertTrue(partNode == null); - } - - - private RexNode and(RexNode...nodes){ - return builder.makeCall(SqlStdOperatorTable.AND, nodes); + if (partNode != null) { + return partNode.toString(); + } else { + return null; + } } -
private RexNode fn(RexNode...nodes){ - return builder.makeCall(SqlStdOperatorTable.CONCAT, nodes); + private static RexNode fn(RexNode...nodes){ + return REX_BUILDER.makeCall(SqlStdOperatorTable.CONCAT, nodes); } - private RexNode or(RexNode...nodes){ - return builder.makeCall(SqlStdOperatorTable.OR, nodes); + private static RexNode c(int index){ + return intInput(index); } - private RexNode lt(RexNode left, RexNode right){ - return builder.makeCall(SqlStdOperatorTable.LESS_THAN, left, right); - } - - private RexNode eq(RexNode left, RexNode right){ - return builder.makeCall(SqlStdOperatorTable.EQUALS, left, right); - } - - private RexNode lit(int value){ - return builder.makeLiteral(value, intType, true); - } - - private RexNode c(int index){ - return builder.makeInputRef(intType, index); - } - - private RexNode cs(int index){ - return builder.makeInputRef(sType, index); - } - - private RexNode str(String s){ - return builder.makeLiteral(s); + return varcharInput(index); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestRewriteConvertFunctionVisitor.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestRewriteConvertFunctionVisitor.java new file mode 100644 index 0000000000..1c5e7ae554 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestRewriteConvertFunctionVisitor.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.logical; + +import org.junit.Test; + +import com.dremio.PlanTestBase; + +public class TestRewriteConvertFunctionVisitor extends PlanTestBase { + @Test + public void testConvertFromRewrite() throws Exception { + final String convertFromQuery = String.format( + "SELECT account_id, convert_from(setting,'json') as setting1 \n" + + "FROM %s", + "cp.\"car-ownership.parquet\""); + + testPlanMatchingPatterns( + convertFromQuery, + new String[]{"ConvertFromJson\\(account_id=\\[\\$0], setting=\\[\\$1], " + + "CONVERT_FROM_JSON_0=\\[CONVERT\\(CONVERT_FROM_JSON_0\\)], " + + "conversions=\\[\\[originField='setting', inputField='CONVERT_FROM_JSON_0']]\\)"} + ); + } + + @Test + public void testConvertToRewrite() throws Exception { + final String convertToQuery = String.format( + "SELECT CONVERT_TO('{\"name\":\"John\", \"age\":30, \"car\":null}','json')" + ); + testPlanMatchingPatterns( + convertToQuery, + new String[]{"Project\\(EXPR\\$0\\=\\[CONVERT_TOjson\\('\\{\"name\":\"John\", \"age\":30, \"car\":null\\}'" + + ":VARCHAR\\(37\\)\\)\\]\\)"} + ); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestValuesRel.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestValuesRel.java index 81d80b36e8..b28d279c9e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestValuesRel.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/logical/TestValuesRel.java @@ -68,13 +68,13 @@ public void setup() { // Test the row type adjustment, modelling the tuples structure of an IN list. @Test public void testNumericValuesRelRowTypeAdjustment() { - final int INListLength = 20; + final int lengthOfINList = 20; // Build RowType & Tuples RelDataTypeField relDataType = new RelDataTypeFieldImpl("ROW_VALUE", 0, new BasicSqlType(RelDataTypeSystemImpl.REL_DATA_TYPE_SYSTEM, SqlTypeName.ANY)); RelDataType rowType = new RelRecordType(StructKind.FULLY_QUALIFIED, Arrays.asList(relDataType)); ImmutableList.Builder<ImmutableList<RexLiteral>> tuples = new ImmutableList.Builder<>(); - for (int i = 0; i < INListLength; i++) { + for (int i = 0; i < lengthOfINList; i++) { tuples.add(new ImmutableList.Builder<RexLiteral>().add(new RexBuilder(typeFactory).makeExactLiteral(new BigDecimal(i))).build()); } @@ -94,13 +94,13 @@ public void testNumericValuesRelRowTypeAdjustment() { // Test the row type adjustment, modelling the tuples structure of an IN list.
@Test public void testCharValuesRelRowTypeAdjustment() { - final int INListLength = 20; + final int lengthOfINList = 20; // Build RowType & Tuples RelDataTypeField relDataType = new RelDataTypeFieldImpl("ROW_VALUE", 0, new BasicSqlType(RelDataTypeSystemImpl.REL_DATA_TYPE_SYSTEM, SqlTypeName.ANY)); RelDataType rowType = new RelRecordType(StructKind.FULLY_QUALIFIED, Arrays.asList(relDataType)); ImmutableList.Builder<ImmutableList<RexLiteral>> tuples = new ImmutableList.Builder<>(); - for (int i = 0; i < INListLength; ++i) { + for (int i = 0; i < lengthOfINList; ++i) { tuples.add(new ImmutableList.Builder<RexLiteral>().add(new RexBuilder(typeFactory).makeLiteral(charLiteralBuilder(i))).build()); } @@ -115,7 +115,7 @@ public void testCharValuesRelRowTypeAdjustment() { RelDataType adjustedRowType = valuesRel.getRowType(); assertEquals(1, adjustedRowType.getFieldCount()); assertEquals(SqlTypeName.VARCHAR, adjustedRowType.getFieldList().get(0).getType().getSqlTypeName()); - assertEquals(INListLength - 1, adjustedRowType.getFieldList().get(0).getType().getPrecision()); + assertEquals(lengthOfINList - 1, adjustedRowType.getFieldList().get(0).getType().getPrecision()); } private String charLiteralBuilder(int length) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/CopyIntoTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/CopyIntoTests.java index 41d15d4788..8f775b5b04 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/CopyIntoTests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/CopyIntoTests.java @@ -48,6 +48,9 @@ import java.util.stream.Stream; import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.arrow.vector.util.JsonStringArrayList; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.commons.lang3.ArrayUtils; @@ -59,10 +62,13 @@ import com.dremio.TestBuilder; import com.dremio.common.exceptions.UserException; +import com.dremio.common.expression.CompleteType; import com.dremio.common.util.FileUtils; +import com.dremio.exec.physical.config.ExtendedFormatOptions; import com.dremio.exec.planner.CopyIntoTablePlanBuilder; import com.dremio.exec.planner.sql.handlers.query.CopyIntoTableContext; import com.dremio.exec.planner.sql.parser.SqlCopyIntoTable; +import com.dremio.exec.store.easy.EasyFormatUtils; import com.dremio.service.namespace.file.proto.FileType; import com.dremio.test.UserExceptionAssert; import com.google.common.collect.ImmutableList; @@ -123,7 +129,7 @@ private static void validateEmptyAsNull(BufferAllocator allocator, String source test(copyIntoQuery); } catch (UserException e) { exceptionThrown = true; - if(fileFormat.equals("csv")) { + if ("csv".equals(fileFormat)) { Assert.assertTrue(e.getMessage(), e.getMessage().contains("Error processing input: ")); Assert.assertTrue(e.getMessage(), e.getMessage().contains("line=2")); } else { @@ -183,6 +189,82 @@ public static void testCSVWithEscapeChar(BufferAllocator allocator, String sourc .baselineValues("7\"\"\"\"2", "3\"4", "5|\"6") .go(); } + + @Test + public void testCSVOptimizedFlowIsUsedForVarcharWithNoTransformations() { + // When running COPY INTO with CSV, we want to ensure that the optimised codepath is used only when: + // 1. target field type is VARCHAR + // 2. no string transformations like NULL_IF are needed.
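+ // (For example, a NULL_IF ('NA', 'None') list forces every incoming value to be compared against that list before it is written, so the raw byte-copy shortcut no longer applies. Conceptually the check amounts to "target type is VARCHAR and the NULL_IF list is empty" -- an illustrative reading of the assertions below, not a description of the actual EasyFormatUtils internals.)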
+ // For all other supported data types, or when string transformations are needed, we follow the normal codepath. + // Note that we can follow the optimised code path regardless of the value of TRIM_SPACE and EMPTY_AS_NULL. + + // Test all possible combinations for VARCHAR datatype. + final Field sampleStringField = new Field("sampleStringField", FieldType.nullable(ArrowType.Utf8.INSTANCE), null); + ExtendedFormatOptions options = new ExtendedFormatOptions(false, false, "YYYY-MM-DD", "HH24:MI:SS.FFF", "YYYY-MM-DD HH24:MI:SS.FFF", null); + + // TRIM_SPACE should have no effect on deciding whether we take the optimised write path. + options.setTrimSpace(false); + final boolean result1 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleStringField.getType()); + assertThat(result1).isTrue(); + + options.setTrimSpace(true); + final boolean result2 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleStringField.getType()); + assertThat(result2).isTrue(); + + // re-initialise options + options = new ExtendedFormatOptions(false, false, "YYYY-MM-DD", "HH24:MI:SS.FFF", "YYYY-MM-DD HH24:MI:SS.FFF", null); + + // EMPTY_AS_NULL should have no effect on deciding whether we take the optimised write path. + options.setEmptyAsNull(false); + boolean result3 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleStringField.getType()); + assertThat(result3).isTrue(); + + options.setEmptyAsNull(true); + final boolean result4 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleStringField.getType()); + assertThat(result4).isTrue(); + + // Passing a NULL_IF expression list means we should take the write path that involves transformations. + final List<String> nullIfExpressions = new ArrayList<>(); + nullIfExpressions.add("NA"); + nullIfExpressions.add("None"); + options.setNullIfExpressions(nullIfExpressions); + final boolean result5 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleStringField.getType()); + assertThat(result5).isFalse(); + + // For all supported data types other than VARCHAR, we should be taking the write path with transformations. + + // re-initialise options + options = new ExtendedFormatOptions(false, false, "YYYY-MM-DD", "HH24:MI:SS.FFF", "YYYY-MM-DD HH24:MI:SS.FFF", null); + + final boolean result6 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.INT.getType()); + assertThat(result6).isFalse(); + + final boolean result7 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.BIGINT.getType()); + assertThat(result7).isFalse(); + + final boolean result8 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.FLOAT.getType()); + assertThat(result8).isFalse(); + + final boolean result9 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.BIT.getType()); // BIT --> BOOLEAN + assertThat(result9).isFalse(); + + final boolean result10 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.DATE.getType()); + assertThat(result10).isFalse(); + + final boolean result11 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.TIME.getType()); + assertThat(result11).isFalse(); + + final boolean result12 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.TIMESTAMP.getType()); + assertThat(result12).isFalse(); + + final boolean result13 = EasyFormatUtils.isVarcharOptimizationPossible(options, CompleteType.DOUBLE.getType()); + assertThat(result13).isFalse(); + + // Use arbitrary precision and scale values for the sake of this test.
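+ // DECIMAL is likewise not VARCHAR, so the check below is expected to reject the optimised path regardless of the precision and scale chosen here.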
+ Field sampleDecimalField = new Field("sampleDecimalField", new FieldType(true, new ArrowType.Decimal(20, 10, 128), null), null); + final boolean result14 = EasyFormatUtils.isVarcharOptimizationPossible(options, sampleDecimalField.getType()); + assertThat(result14).isFalse(); + } @Test public void testMalformedQueries() throws Exception { try (DmlQueryTestUtils.Table targetTable = createBasicTable(SOURCE,2, 0)) { @@ -1060,7 +1142,7 @@ public static void testSource(BufferAllocator allocator, String source, String t String storageLocation = "\'@" + source + "/" + location.getName() + "\'"; String fileExtension = sourceFile.toLowerCase().endsWith("json")? "json" : "csv"; String copyIntoQuery = String.format("COPY INTO %s.%s FROM %s FILE_FORMAT \'%s\'", TEMP_SCHEMA, tableName, storageLocation, fileExtension); - if (fileExtension.equals("csv")) { + if ("csv".equals(fileExtension)) { copyIntoQuery += " (RECORD_DELIMITER '\n')"; } test(copyIntoQuery); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DeleteTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DeleteTests.java index ca82906f01..69f9b6738e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DeleteTests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DeleteTests.java @@ -92,6 +92,16 @@ public static void testDeleteById(BufferAllocator allocator, String source) thro } } + public static void testDeleteByIdWithEqualNull(BufferAllocator allocator, String source) throws Exception { + // column = null should return false and no data should be deleted + try (Tables tables = createBasicNonPartitionedAndPartitionedTables(source, 2, 10, PARTITION_COLUMN_ONE_INDEX_SET)) { + for (Table table : tables.tables) { + testDmlQuery(allocator, "DELETE FROM %s WHERE id = %s", new Object[]{table.fqn, null}, table, 0, + ArrayUtils.subarray(table.originalData, 0, table.originalData.length)); + } + } + } + public static void testDeleteTargetTableWithAndWithoutAlias(BufferAllocator allocator, String source) throws Exception { // without target table aliasing try (Tables tables = createBasicNonPartitionedAndPartitionedTables(source, 2, 10, PARTITION_COLUMN_ONE_INDEX_SET)) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DmlQueryTestUtils.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DmlQueryTestUtils.java index 3a4d73d044..da82b1cc64 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DmlQueryTestUtils.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/DmlQueryTestUtils.java @@ -16,6 +16,7 @@ package com.dremio.exec.planner.sql; import static com.dremio.BaseTestQuery.getDfsTestTmpSchemaLocation; +import static com.dremio.BaseTestQuery.runSQL; import static com.dremio.BaseTestQuery.test; import static com.dremio.BaseTestQuery.testRunAndReturn; import static org.assertj.core.api.Assertions.assertThat; @@ -167,6 +168,35 @@ public static Table createTable(String source, String[] paths, String name, Colu return new Table(name, paths, fqn, Arrays.stream(schema).map(column -> column.name).toArray(String[]::new), data); } + /** + * Creates an empty table with the given name, schema, and source + * + * @param source where the table belongs + * @param paths paths + * @param name table name + * @param columnCount total number of columns, including the leading id column + * @return the created table, with no data inserted + */ + public static Table createEmptyTable(String source, String[] paths, String name, int columnCount) throws Exception { + ColumnInfo[] schema = new ColumnInfo[columnCount]; + schema[0] = new
ColumnInfo("id", SqlTypeName.INTEGER, false); + for (int c = 0; c < columnCount - 1; c++) { + schema[c + 1] = new ColumnInfo("column_" + c, SqlTypeName.VARCHAR, false); + } + + String fullPath = String.join(".", paths); + String fqn = source + (fullPath.isEmpty() ? "" : "." + fullPath) + "." + name; + String createTableSql = getCreateTableSql(Arrays.stream(schema).filter( + columnInfo -> columnInfo.partitionColumn).map( + columnInfo -> columnInfo.name).collect(Collectors.toList())); + String schemaSql = Arrays.stream(schema).map(column -> + String.format("%s %s%s", column.name, column.typeName, Strings.isNullOrEmpty(column.extra) ? "" : " " + column.extra)) + .collect(Collectors.joining(", ")); + + test(createTableSql, fqn, schemaSql); + + return new Table(name, paths, fqn, Arrays.stream(schema).map(column -> column.name).toArray(String[]::new), null); + } + /** * Create an iceberg table, directly using the APIs. This method simulates the table creation from engines, * that use Iceberg OSS. Engines such as spark, hive etc. @@ -651,7 +681,7 @@ public static void testMalformedDmlQueries(Object[] tables, String... malformedQ UserExceptionAssert.assertThatThrownBy(() -> test(fullQuery)) .withFailMessage("Query failed to generate the expected error:\n" + fullQuery) .satisfiesAnyOf( - ex -> assertThat(ex).hasMessageContaining("Failure parsing the query."), + ex -> assertThat(ex).hasMessageContaining("PARSE ERROR:"), ex -> assertThat(ex).hasMessageContaining("VALIDATION ERROR:")); } } @@ -722,10 +752,13 @@ public static void testQueryValidateStatusSummary(BufferAllocator allocator, Str } } - public static org.apache.iceberg.Table loadTable(Table table, BufferAllocator allocator) { + public static org.apache.iceberg.Table loadTable(Table table) { String tablePath = getDfsTestTmpSchemaLocation() + "/" + table.name.replaceAll("\"", ""); - org.apache.iceberg.Table loadedTable = hadoopTables.load(tablePath); - return loadedTable; + return hadoopTables.load(tablePath); + } + + public static org.apache.iceberg.Table loadTable(String tablePath) { + return hadoopTables.load(tablePath); } public static void verifyData(BufferAllocator allocator, Table table, Object[]... 
expectedData) throws Exception { @@ -777,4 +810,57 @@ public static long waitUntilAfter(long timestampMillis) { } return current; } + + public static void addColumn(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column, String dataType) throws Exception { + new TestBuilder(allocator) + .sqlQuery("ALTER TABLE %s ADD COLUMNS (%s %s)", table.fqn, column, dataType) + .unOrdered() + .baselineColumns("ok", "summary") + .baselineValues(true, "New columns added.") + .go(); + } + + public static void addIdentityPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD IDENTITY(%s)", table.fqn, column)); + } + + public static void dropIdentityPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s drop PARTITION FIELD IDENTITY(%s)", table.fqn, column)); + } + + public static void addBucketPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD BUCKET(10,%s)", table.fqn, column)); + } + + public static void dropBucketPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s drop PARTITION FIELD BUCKET(10,%s)", table.fqn, column)); + } + + public static void addTruncate2Partition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD TRUNCATE(2,%s)", table.fqn, column)); + } + + public static void dropTruncate2Partition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s drop PARTITION FIELD TRUNCATE(2,%s)", table.fqn, column)); + } + + public static void addYearPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD YEAR(%s)", table.fqn, column)); + } + + public static void addDayPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD DAY(%s)", table.fqn, column)); + } + + public static void addMonthPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s add PARTITION FIELD MONTH(%s)", table.fqn, column)); + } + + public static void dropYearPartition(DmlQueryTestUtils.Table table, BufferAllocator allocator, String column) throws Exception { + runSQL(String.format("ALTER TABLE %s drop PARTITION FIELD YEAR(%s)", table.fqn, column)); + } + + public static void insertIntoTable(String tableName, String columns, String values) throws Exception { + runSQL(String.format("INSERT INTO %s%s VALUES%s", tableName, columns, values)); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ExplainPlanTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ExplainPlanTests.java new file mode 100644 index 0000000000..f089f05cb8 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ExplainPlanTests.java @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql; + +import static com.dremio.BaseTestQuery.test; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.Table; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static org.assertj.core.api.Assertions.assertThat; + +import com.dremio.test.UserExceptionAssert; + +/** + * Explain Plan tests. + * + * Note: Add tests used across all platforms here. + */ +public class ExplainPlanTests { + + public static void testMalformedExplainPlanQueries(String source) throws Exception { + try (Table targetTable = createBasicTable(source,2, 0)) { + testMalformedExplainQuery(targetTable.fqn, + "EXPLAIN", + "EXPLAIN PLAN", + "EXPLAIN PLAN FOR", + "EXPLAIN PLAN INCLUDING ALL ATTRIBUTES FOR %s", + "EXPLAIN PLAN INCLUDING ALL ATTRIBUTES WITHOUT IMPLEMENTATION FOR %s", + "EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR %s", + "EXPLAIN PLAN FOR %s" + ); + } + } + + // INSERT + public static void testExplainLogicalPlanOnInsert(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR insert into %s values(5, 'taco')", new Object[]{targetTable.fqn}); + } + } + + public static void testExplainPhysicalPlanOnInsert(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN FOR insert into %s values(5, 'taco')", new Object[]{targetTable.fqn}); + } + } + + public static void testExplainPlanWithDetailLevelOnInsert(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN INCLUDING ALL ATTRIBUTES FOR insert into %s values(5, 'taco')", new Object[]{targetTable.fqn}); + } + } + + // DELETE + public static void testExplainLogicalPlanOnDelete(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR DELETE FROM %s where id = %s", new Object[]{targetTable.fqn, targetTable.originalData[0][0]}); + } + } + + public static void testExplainPhysicalPlanOnDelete(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN FOR DELETE FROM %s where id = %s", new Object[]{targetTable.fqn, targetTable.originalData[0][0]}); + } + } + + public static void testExplainPlanWithDetailLevelOnDelete(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN INCLUDING ALL ATTRIBUTES FOR DELETE FROM %s where id = %s", new Object[]{targetTable.fqn, targetTable.originalData[0][0]}); + } + } + + // UPDATE + public static void testExplainLogicalPlanOnUpdate(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR UPDATE %s SET id = 0", new Object[]{targetTable.fqn}); + } + } + + public static void testExplainPhysicalPlanOnUpdate(String source) throws Exception { + try 
(Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN FOR UPDATE %s SET id = 0", new Object[]{targetTable.fqn}); + } + } + + public static void testExplainPlanWithDetailLevelOnUpdate(String source) throws Exception { + try (Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN INCLUDING ALL ATTRIBUTES FOR UPDATE %s SET id = 0", new Object[]{targetTable.fqn}); + } + } + + // MERGE + public static void testExplainLogicalPlanOnMerge(String source) throws Exception { + try (Table sourceTable = createBasicTable(source,3, 5); + Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR MERGE INTO %s USING %s ON (%s.id = %s.id)" + + " WHEN MATCHED THEN UPDATE SET column_0 = column_1", + new Object[]{targetTable.fqn, sourceTable.fqn, targetTable.fqn, sourceTable.fqn}); + } + } + + public static void testExplainPhysicalPlanOnMerge(String source) throws Exception { + try (Table sourceTable = createBasicTable(source,3, 5); + Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN FOR MERGE INTO %s USING %s ON (%s.id = %s.id)" + + " WHEN MATCHED THEN UPDATE SET column_0 = column_1", + new Object[]{targetTable.fqn, sourceTable.fqn, targetTable.fqn, sourceTable.fqn}); + } + } + + public static void testExplainPlanWithDetailLevelOnMerge(String source) throws Exception { + try (Table sourceTable = createBasicTable(source,3, 5); + Table targetTable = createBasicTable(source, 2, 5)) { + testValidExplainQuery("EXPLAIN PLAN INCLUDING ALL ATTRIBUTES FOR MERGE INTO %s USING %s ON (%s.id = %s.id)" + + " WHEN MATCHED THEN UPDATE SET column_0 = column_1", + new Object[]{targetTable.fqn, sourceTable.fqn, targetTable.fqn, sourceTable.fqn}); + } + } + + /*Private Test Functions*/ + private static void testMalformedExplainQuery(String table, String... 
malformedQueries) { + for (String malformedQuery : malformedQueries) { + String fullQuery = String.format(malformedQuery, table); + UserExceptionAssert.assertThatThrownBy(() -> test(fullQuery)) + .withFailMessage("Query failed to generate the expected error:\n" + fullQuery) + .satisfiesAnyOf( + ex -> assertThat(ex).hasMessageContaining("PARSE ERROR:"), + ex -> assertThat(ex).hasMessageContaining("VALIDATION ERROR:")); + } + } + + private static void testValidExplainQuery(String query, Object[] args) throws Exception { + // Run the Explain Plan query + String fullQuery = String.format(query, args); + test(fullQuery); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITDelete.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITDelete.java index 610a0ffa83..ab56f71a2e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITDelete.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITDelete.java @@ -47,6 +47,11 @@ public void testDeleteById() throws Exception { DeleteTests.testDeleteById(allocator, SOURCE); } + @Test + public void testDeleteByIdWithEqualNull() throws Exception { + DeleteTests.testDeleteByIdWithEqualNull(allocator, SOURCE); + } + @Test public void testDeleteTargetTableWithAndWithoutAlias() throws Exception { DeleteTests.testDeleteTargetTableWithAndWithoutAlias(allocator, SOURCE); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITExplainPlan.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITExplainPlan.java new file mode 100644 index 0000000000..1099701e5e --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITExplainPlan.java @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dremio.exec.planner.sql; + +import org.junit.Test; + +public class ITExplainPlan extends ITDmlQueryBase { + // Defining SOURCE such that you can easily copy and paste the same test across other test variations + private static final String SOURCE = TEMP_SCHEMA_HADOOP; + + @Test + public void testMalformedExplainPlanQueries() throws Exception { + ExplainPlanTests.testMalformedExplainPlanQueries(SOURCE); + } + + @Test + public void testExplainPhysicalPlanOnInsert() throws Exception { + ExplainPlanTests.testExplainPhysicalPlanOnInsert(SOURCE); + } + + @Test + public void testExplainLogicalPlanOnInsert() throws Exception { + ExplainPlanTests.testExplainLogicalPlanOnInsert(SOURCE); + } + + @Test + public void testExplainPlanWithDetailLevelOnInsert() throws Exception { + ExplainPlanTests.testExplainPlanWithDetailLevelOnInsert(SOURCE); + } + + @Test + public void testExplainLogicalPlanOnDelete() throws Exception { + ExplainPlanTests.testExplainLogicalPlanOnDelete(SOURCE); + } + + @Test + public void testExplainPhysicalPlanOnDelete() throws Exception { + ExplainPlanTests.testExplainPhysicalPlanOnDelete(SOURCE); + } + + @Test + public void testExplainPlanWithDetailLevelOnDelete() throws Exception { + ExplainPlanTests.testExplainPlanWithDetailLevelOnDelete(SOURCE); + } + + @Test + public void testExplainLogicalPlanOnUpdate() throws Exception { + ExplainPlanTests.testExplainLogicalPlanOnUpdate(SOURCE); + } + + @Test + public void testExplainPhysicalPlanOnUpdate() throws Exception { + ExplainPlanTests.testExplainPhysicalPlanOnUpdate(SOURCE); + } + + @Test + public void testExplainPlanWithDetailLevelOnUpdate() throws Exception { + ExplainPlanTests.testExplainPlanWithDetailLevelOnUpdate(SOURCE); + } + @Test + public void testExplainLogicalPlanOnMerge() throws Exception { + ExplainPlanTests.testExplainLogicalPlanOnMerge(SOURCE); + } + + @Test + public void testExplainPhysicalPlanOnMerge() throws Exception { + ExplainPlanTests.testExplainPhysicalPlanOnMerge(SOURCE); + } + + @Test + public void testExplainPlanWithDetailLevelOnMerge() throws Exception { + ExplainPlanTests.testExplainPlanWithDetailLevelOnMerge(SOURCE); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeTable.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeTable.java index aef6b9c227..1752c2996a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeTable.java @@ -15,32 +15,17 @@ */ package com.dremio.exec.planner.sql; -import org.junit.AfterClass; -import org.junit.BeforeClass; import org.junit.Test; import com.dremio.BaseTestQuery; -import com.dremio.exec.ExecConstants; -import com.dremio.options.OptionValue; /** - * Test OPTIMIZE TABLE + * Test OPTIMIZE TABLE scenarios */ public class ITOptimizeTable extends BaseTestQuery { // Defining SOURCE such that you can easily copy and paste the same test across other test variations private static final String SOURCE = TEMP_SCHEMA_HADOOP; - @BeforeClass - public static void setUp() throws Exception { - getSabotContext().getOptionManager().setOption(OptionValue.createBoolean( - OptionValue.OptionType.SYSTEM, ExecConstants.ENABLE_ICEBERG_OPTIMIZE.getOptionName(), true)); - } - - @AfterClass - public static void tearDown() throws Exception { - getSabotContext().getOptionManager().setOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE.getDefault()); - } - @Test public void testOnUnPartitioned() throws Exception { 
OptimizeTests.testOnUnPartitioned(SOURCE, allocator); @@ -52,7 +37,7 @@ public void testOnPartitioned() throws Exception { } @Test - public void testOnUnpartitionedMinInputFilesCriteria() throws Exception { + public void testOnUnPartitionedMinInputFilesCriteria() throws Exception { OptimizeTests.testOnUnpartitionedMinInputFilesCriteria(SOURCE, allocator); } @@ -95,4 +80,59 @@ public void testUnsupportedScenarios() throws Exception { public void testEvolvedPartitions() throws Exception { OptimizeTests.testEvolvedPartitions(SOURCE, allocator); } + + @Test + public void testOptimizeDataOnlyUnPartitioned() throws Exception { + OptimizeTests.testOptimizeDataFilesUnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeDataOnlyPartitioned() throws Exception { + OptimizeTests.testOptimizeDataOnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsOnlyUnPartitioned() throws Exception { + OptimizeTests.testOptimizeManifestsOnlyUnPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsOnlyPartitioned() throws Exception { + OptimizeTests.testOptimizeManifestsOnlyPartitioned(SOURCE, allocator); + } + + @Test + public void testOptimizeLargeManifests() throws Exception { + OptimizeTests.testOptimizeLargeManifests(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsModesIsolations() throws Exception { + OptimizeTests.testOptimizeManifestsModesIsolations(SOURCE, allocator); + } + + @Test + public void testOptimizeManifestsWithOptimalSize() throws Exception { + OptimizeTests.testOptimizeManifestsWithOptimalSize(SOURCE, allocator); + } + + @Test + public void testOptimizeOnEmptyTableNoSnapshots() throws Exception { + OptimizeTests.testOptimizeOnEmptyTableNoSnapshots(SOURCE, allocator); + } + + @Test + public void testOptimizeOnEmptyTableHollowSnapshot() throws Exception { + OptimizeTests.testOptimizeOnEmptyTableHollowSnapshot(SOURCE, allocator); + } + + @Test + public void testOptimizeNoopOnResidualDataManifests() throws Exception { + OptimizeTests.testOptimizeNoopOnResidualDataManifests(SOURCE, allocator); + } + + @Test + public void testRewriteManifestsForEvolvedPartitionSpec() throws Exception { + OptimizeTests.testRewriteManifestsForEvolvedPartitionSpec(SOURCE, allocator); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeV2TableWithPositionalDeletes.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeV2TableWithPositionalDeletes.java new file mode 100644 index 0000000000..577e4789c2 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITOptimizeV2TableWithPositionalDeletes.java @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.dremio.BaseTestQuery; +import com.dremio.exec.ExecConstants; + +/** + * Test OPTIMIZE TABLE scenarios for Iceberg V2 tables with row-level-delete files + */ +public class ITOptimizeV2TableWithPositionalDeletes extends BaseTestQuery { + + @BeforeClass + public static void setup() throws Exception { + OptimizeTestWithDeletes.setup(); + + // Vectorized parquet read is not available in OSS + setSystemOption(ExecConstants.PARQUET_READER_VECTORIZE, "false"); + } + + @AfterClass + public static void tearDown() throws Exception { + setSystemOption(ExecConstants.PARQUET_READER_VECTORIZE, + ExecConstants.PARQUET_READER_VECTORIZE.getDefault().getBoolVal().toString()); + } + + @Test + public void testV2OptimizePartitioned() throws Exception { + OptimizeTestWithDeletes.testV2OptimizePartitioned(allocator); + } + + @Test + public void testV2OptimizeUnpartitioned() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeUnpartitioned(allocator); + } + + @Test + public void testV2OptimizeMinInputFiles() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeMinInputFiles(allocator); + } + + @Test + public void testV2OptimizeDeleteLinkedFilesOnlyUnpartitioned() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeDeleteLinkedFilesOnlyUnpartitioned(allocator); + } + + @Test + public void testV2OptimizeDeleteLinkedFilesOnlyPartitioned() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeDeleteLinkedFilesOnlyPartitioned(allocator); + } + + @Test + public void testV2OptimizeMultipleDeleteFiles() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeMultipleDeleteFiles(allocator); + } + + @Test + public void testV2OptimizePartitionEvolution() throws Exception { + OptimizeTestWithDeletes.testV2OptimizePartitionEvolution(allocator); + } + + @Test + public void testV2OptimizeUpdateSequenceNumber() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeUpdateSequenceNumber(allocator); + } + + @Test + public void testV2OptimizeUpdateSequenceNumberWithDeleteLink() throws Exception { + OptimizeTestWithDeletes.testV2OptimizeUpdateSequenceNumberWithDeleteLink(allocator); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITRollback.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITRollback.java index a0da37d659..b156f3f3f6 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITRollback.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITRollback.java @@ -27,6 +27,7 @@ public class ITRollback extends ITDmlQueryBase { // Defining SOURCE such that you can easily copy and paste the same test across other test variations private static final String SOURCE = TEMP_SCHEMA_HADOOP; + @Override @Before public void before() throws Exception { test("USE %s", SOURCE); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITUpdate.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITUpdate.java index 682782b5fc..237b1daa6f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITUpdate.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITUpdate.java @@ -52,6 +52,11 @@ public void testUpdateById() throws Exception { UpdateTests.testUpdateById(allocator, SOURCE); } + @Test + public void testUpdateByIdWithEqualNull() throws Exception { + UpdateTests.testUpdateByIdWithEqualNull(allocator, SOURCE); + } + @Test public void 
testUpdateTargetTableWithAndWithoutAlias() throws Exception { UpdateTests.testUpdateTargetTableWithAndWithoutAlias(allocator, SOURCE); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITVacuum.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITVacuum.java index ae81a7faca..92b43fef38 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITVacuum.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/ITVacuum.java @@ -28,6 +28,7 @@ public class ITVacuum extends ITDmlQueryBase { // Defining SOURCE such that you can easily copy and paste the same test across other test variations private static final String SOURCE = TEMP_SCHEMA_HADOOP; + @Override @Before public void before() throws Exception { test("USE %s", SOURCE); @@ -38,6 +39,11 @@ public void testMalformedVacuumQueries() throws Exception { VacuumTests.testMalformedVacuumQueries(SOURCE); } + @Test + public void testSimpleExpireOlderThanRetainLastUsingEqual() throws Exception { + VacuumTests.testSimpleExpireOlderThanRetainLastUsingEqual(allocator, SOURCE); + } + @Test public void testSimpleExpireOlderThan() throws Exception { VacuumTests.testSimpleExpireOlderThan(allocator, SOURCE); @@ -68,6 +74,16 @@ public void testExpireOlderThanWithRollback() throws Exception { VacuumTests.testExpireOlderThanWithRollback(allocator, SOURCE); } + @Test + public void testExpireOnTableWithPartitions() throws Exception { + VacuumTests.testExpireOnTableWithPartitions(allocator, SOURCE); + } + + @Test + public void testExpireOnEmptyTableNoSnapshots() throws Exception { + VacuumTests.testExpireOnEmptyTableNoSnapshots(allocator, SOURCE); + } + @Test public void testRetainZeroSnapshots() throws Exception { VacuumTests.testRetainZeroSnapshots(SOURCE); @@ -92,4 +108,19 @@ public void testExpireDatasetRefreshed() throws Exception { public void testUnparseSqlVacuum() throws Exception { VacuumTests.testUnparseSqlVacuum(SOURCE); } + + @Test + public void testExpireOnTableOneSnapshot() throws Exception { + VacuumTests.testExpireOnTableOneSnapshot(SOURCE); + } + + @Test + public void testRetainMoreSnapshots() throws Exception { + VacuumTests.testRetainMoreSnapshots(SOURCE); + } + + @Test + public void testRetainAllSnapshots() throws Exception { + VacuumTests.testRetainAllSnapshots(allocator, SOURCE); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestForPartitions.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestForPartitions.java new file mode 100644 index 0000000000..78b6f567a7 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestForPartitions.java @@ -0,0 +1,425 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql; + +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addBucketPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addColumn; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addDayPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addIdentityPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addMonthPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addTruncate2Partition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addYearPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.dropBucketPartition; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.insertIntoTable; +import static com.dremio.exec.planner.sql.OptimizeTests.assertFileCount; +import static com.dremio.exec.planner.sql.OptimizeTests.insertCommits; +import static com.dremio.exec.planner.sql.OptimizeTests.testOptimizeCommand; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import org.apache.arrow.memory.BufferAllocator; + +import com.dremio.BaseTestQuery; +import com.dremio.common.exceptions.UserRemoteException; +import com.google.common.collect.ImmutableSet; + +/** + * Tests for OPTIMIZE TABLE iceberg_table FOR PARTITIONS (expr) + */ +public class OptimizeTestForPartitions extends BaseTestQuery { + + public static void testOptimizeTableForStringIdentityPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + // It should optimize only the partition where column_0='0_0', which has 5 files. + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS column_0='0_0'", table.fqn), 5L, 0L, 1L); + // 20 files for partitions (0_1, 0_2, 0_3, 0_4) and 1 file for 0_0; total = 21. + assertFileCount(table.fqn, 21L, allocator); + } + } + + public static void testOptimizeTableForIdentityPartitionsWithLikeExpression(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + // It should optimize only the partition where column_0='0_0', which has 5 files. + testOptimizeCommand(allocator, "OPTIMIZE TABLE " + table.fqn + " FOR PARTITIONS column_0 like '0_0'", 5L, 0L, 1L); + // 20 files for partitions (0_1, 0_2, 0_3, 0_4) and 1 file for 0_0; total = 21.
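+ // (The three numeric arguments to testOptimizeCommand are, in order, the expected rewritten data file count, presumably the rewritten delete file count, and the expected new data file count reported by OPTIMIZE; the middle argument's meaning is inferred from the comments in the partition-evolution tests below, not spelled out here. The arithmetic: 25 - 5 + 1 = 21.)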
+ assertFileCount(table.fqn, 21L, allocator); + } + } + + public static void testOptimizeTableForIntegerIdentityPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_int", "INTEGER"); + addIdentityPartition(table, allocator, "part_col_int"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_int)", "(1,'a', 1)"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_int=1", table.fqn), 10L, 0L, 2L); + assertFileCount(table.fqn, 2L, allocator); + } + } + + public static void testOptimizeTableForIntegerIdentityPartitionsWithExpression(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_int", "INTEGER"); + addIdentityPartition(table, allocator, "part_col_int"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_int)", "(1,'a', 2)"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_int/2=1", table.fqn), 5L, 0L, 1L); + assertFileCount(table.fqn, 6L, allocator); + } + } + + public static void testOptimizeTableForTimestampIdentityPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addIdentityPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-01-01')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_ts='2022-01-01'", table.fqn), 5L, 0L, 1L); + assertFileCount(table.fqn, 11L, allocator); + } + } + + public static void testOptimizeTableForYearIdentityPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addIdentityPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-01-01')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS year(part_col_ts)=2022", table.fqn), 5L, 0L, 1L); + assertFileCount(table.fqn, 11L, allocator); + } + } + + public static void testOptimizeTableForTwoIdentityPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_int_1", "INTEGER"); + 
addColumn(table, allocator, "part_col_int_2", "INTEGER"); + addIdentityPartition(table, allocator, "part_col_int_1"); + addIdentityPartition(table, allocator, "part_col_int_2"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_int_1, part_col_int_2)", "(1,'a', 1,11)"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_int_2-part_col_int_1=10", table.fqn), 5L, 0L, 1L); + assertFileCount(table.fqn, 6L, allocator); + } + } + + public static void testOptimizeTableForNonPartition(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 3, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + //column_1 is non partition column, column_0 is an identity partition column. + assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS column_1='0_0'", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [column_0]"); + } + } + + public static void testOptimizeTableForPartitionEvolBucketWithIdentity(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_bucket", "INTEGER"); + addBucketPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 1)"); + } + assertFileCount(table.fqn, 30L, allocator); + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn), 30L, 0L, 6L); + + addIdentityPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 1)"); + } + assertFileCount(table.fqn, 11L, allocator); + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn), 6L,0L , 1L); + + assertFileCount(table.fqn, 6L, allocator); + } + } + + public static void testOptimizeTableForPartitionEvolIdentityWithBucket(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_bucket", "INTEGER"); + addIdentityPartition(table, allocator, "part_col_bucket"); + // 5 files for part_col_bucket=1 + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 1)"); + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(2,'b', 2)"); + } + assertFileCount(table.fqn, 35L, allocator); + // rewritten_data_files_count 25(null for part_col_bucket) + 5(part_col_bucket=1) new_data_files_count = 5 for column_0+part_col_bucket=null and 1 for column_0=a and part_col_bucket=1 + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn), 30L, 0L, 6L); + + assertFileCount(table.fqn, 11L, allocator); + + addBucketPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, 
part_col_bucket)", "(1,'a', 1)"); + } + + assertFileCount(table.fqn, 16L, allocator); + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn), 6L,0L , 1L); + + assertFileCount(table.fqn, 11L, allocator); + } + } + + public static void testOptimizeTableForPartitionEvolBucketToIdentity(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_bucket", "INTEGER"); + addBucketPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 1)"); + } + + dropBucketPartition(table, allocator, "part_col_bucket"); + //part_col_bucket is not a partition column now. + assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [column_0]"); + + addIdentityPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 2)"); + } + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=2", table.fqn), 30L, 0L, 6L); + + assertFileCount(table.fqn, 11L, allocator); + } + } + + public static void testOptimizeTableForPartitionEvolBucketToIdentityWithExpression(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of(1))) { + insertCommits(table, 5); + assertFileCount(table.fqn, 25L, allocator); // 5 files per partition + + addColumn(table, allocator, "part_col_bucket", "INTEGER"); + addBucketPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 1)"); + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn), 30L, 0L, 6L); + + dropBucketPartition(table, allocator, "part_col_bucket"); + //part_col_bucket is not a partition column now. 
+ assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket=1", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [column_0]"); + + addIdentityPartition(table, allocator, "part_col_bucket"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_bucket)", "(1,'a', 2)"); + } + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_bucket/2=1", table.fqn), 5L, 0L, 1L); + + assertFileCount(table.fqn, 7L, allocator); + } + } + + public static void testOptimizeTableForPartitionWithInvalidExpression(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "col_int", "INTEGER"); + addColumn(table, allocator, "part_col_int", "INTEGER"); + addIdentityPartition(table, allocator, "part_col_int"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, col_int, part_col_int)", "(1,'a', 2,2)"); + } + + assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_int/col_int=1", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [part_col_int]"); + + assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS col_int/part_col_int=1", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [part_col_int]"); + + assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s FOR PARTITIONS column_0 like '0_0' ", table.fqn))) + .isInstanceOf(UserRemoteException.class) + .hasMessageContaining("OPTIMIZE command is only supported on the partition columns - [part_col_int]"); + + } + } + + public static void testOptimizeTableForTruncatePartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_str", "VARCHAR"); + addTruncate2Partition(table, allocator, "part_col_str"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_str)", "(1,'a', '1value')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_str)", "(1,'a', '2value')"); // will not optimize for filter part_col_str='1value' + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_str='1value'", table.fqn), 10L, 0L, 2L); + assertFileCount(table.fqn, 7L, allocator); + } + } + + public static void testOptimizeTableForTruncatePartitionWithLike(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_str", "VARCHAR"); + addTruncate2Partition(table, allocator, "part_col_str"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_str)", "(1,'a', '1value')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_str)", "(1,'a', '2value')"); // will not optimize for filter 
part_col_str='1value' + } + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_str like '1%s'", table.fqn, "%"), 15L, 0L, 3L); + assertFileCount(table.fqn, 3L, allocator); + } + } + + public static void testOptimizeTableForYearPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addYearPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-01-01')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + //Optimize all the files, since expression is not supported on transformed partition. + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS year(part_col_ts)=2022", table.fqn), 15L, 0L, 3L); + assertFileCount(table.fqn, 3L, allocator); + } + } + + public static void testOptimizeTableForYearPartitionsWithEquality(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addYearPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-01-01')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + //Optimize all the files, since expression is not supported on transformed partition. + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS part_col_ts='2022-01-01'", table.fqn), 15L, 0L, 3L); + assertFileCount(table.fqn, 3L, allocator); + } + } + + public static void testOptimizeTableForMonthPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addMonthPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-02-02')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + //Optimize all the files, since expression is not supported on transformed partition. 
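A plausible reading of the comment above: Iceberg's year/month/day transforms store partition values as ordinals since 1970 (e.g. months since 1970-01), while SQL YEAR()/MONTH() return calendar fields, so the predicate cannot be translated into partition pruning and every file is rewritten. Illustrative values only:

  int icebergMonthOrdinal = (2022 - 1970) * 12; // Iceberg's month transform for 2022-01 -> 624
  int sqlMonthOfYear = 1;                       // SQL MONTH(TIMESTAMP '2022-01-01')     -> 1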
+ testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS month(part_col_ts)=01", table.fqn), 15L, 0L, 3L); + assertFileCount(table.fqn, 3L, allocator); + } + } + + public static void testOptimizeTableForDayPartitions(String SCHEMA, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(SCHEMA, 0, 2, 0, ImmutableSet.of())) { + insertCommits(table, 5); + assertFileCount(table.fqn, 5L, allocator); + + addColumn(table, allocator, "part_col_ts", "TIMESTAMP"); + addDayPartition(table, allocator, "part_col_ts"); + + for (int i = 0; i < 5; i++) { + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2022-02-02')"); + insertIntoTable(table.fqn, "(id, column_0, part_col_ts)", "(1,'a', '2023-01-01')"); + } + //Optimize all the files, since expression is not supported on transformed partition. + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s FOR PARTITIONS month(part_col_ts)=01", table.fqn), 15L, 0L, 3L); + assertFileCount(table.fqn, 3L, allocator); + } + } + +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestWithDeletes.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestWithDeletes.java new file mode 100644 index 0000000000..77c7bbae8c --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTestWithDeletes.java @@ -0,0 +1,449 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.dremio.exec.planner.sql;
+
+import static org.apache.iceberg.Transactions.createTableTransaction;
+import static org.apache.iceberg.types.Types.NestedField.required;
+
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.arrow.memory.BufferAllocator;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.iceberg.AppendFiles;
+import org.apache.iceberg.DataFile;
+import org.apache.iceberg.DataFiles;
+import org.apache.iceberg.DeleteFile;
+import org.apache.iceberg.FileMetadata;
+import org.apache.iceberg.PartitionSpec;
+import org.apache.iceberg.RowDelta;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.TableMetadata;
+import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.Transactions;
+import org.apache.iceberg.types.Types;
+
+import com.dremio.BaseTestQuery;
+import com.dremio.TestBuilder;
+import com.dremio.exec.store.iceberg.IcebergPartitionData;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+public class OptimizeTestWithDeletes extends BaseTestQuery {
+  private static FileSystem fs;
+  private static final String SOURCE_TABLE_PATH = "iceberg/v2/multi_rowgroup_orders_with_deletes";
+  private static final String SETUP_BASE_PATH = "/tmp/iceberg-test-tables/v2/multi_rowgroup_orders_with_deletes";
+
+  private static final Schema ICEBERG_SCHEMA = new Schema(
+    required(1, "order_id", Types.IntegerType.get()),
+    required(2, "order_year", Types.IntegerType.get()),
+    required(3, "order_date", Types.TimestampType.withoutZone()),
+    required(4, "source_id", Types.IntegerType.get()),
+    required(5, "product_name", Types.StringType.get()),
+    required(6, "amount", Types.DoubleType.get())
+  );
+
+  private static final PartitionSpec UNPARTITIONED_SPEC = PartitionSpec.unpartitioned();
+
+  private static final PartitionSpec PARTITIONED_SPEC = PartitionSpec.builderFor(ICEBERG_SCHEMA)
+    .identity("order_year").build();
+  private static final DataFile DF1 = unpartitionedDataFile("2021/2021-00.parquet", 21393);
+  private static final DataFile DF2 = unpartitionedDataFile("2021/2021-01.parquet", 21412);
+  private static final DataFile DF3 = unpartitionedDataFile("2021/2021-02.parquet", 21444);
+  private static final DataFile DF4 = unpartitionedDataFile("2019/2019-00.parquet", 21361);
+  private static final DeleteFile DEL_F1 = unpartitionedDeleteFile("2021/delete-2021-00.parquet", 27919);
+  private static final DeleteFile DEL_F2 = unpartitionedDeleteFile("2021/delete-2021-01.parquet", 3841);
+
+  public static void setup() throws Exception {
+    fs = setupLocalFS();
+    setupTableData();
+  }
+
+  private static void setupTableData() throws Exception {
+    Path path = new Path(SETUP_BASE_PATH);
+    if (fs.exists(path)) {
+      fs.delete(path, true);
+    }
+    fs.mkdirs(path);
+    copyFromJar(SOURCE_TABLE_PATH + "/data", Paths.get(SETUP_BASE_PATH + "/data"));
+  }
+
+  private static TableInfo setupUnpartitionedV2Table(List<DataFile> unlinkedDataFiles,
+                                                     List<DataFile> linkedDataFiles,
+                                                     List<DeleteFile> deleteFiles) throws Exception {
+    // Setup metadata in a temporary location, so table path doesn't conflict across test-cases
+    String metadataPath = Files.createTempDirectory(Paths.get(SETUP_BASE_PATH), "optimizeV2").toAbsolutePath().toString();
+    final String[] tablePathComponents =
StringUtils.split(metadataPath, '/'); + final String tableName = tablePathComponents[tablePathComponents.length - 1]; + final String tableFqn = Arrays.stream(tablePathComponents, 2, tablePathComponents.length) + .collect(Collectors.joining("\".\"", "dfs_hadoop_mutable.\"", "\"")); + + String tablePath = "file://" + metadataPath; + final TableMetadata metadata = TableMetadata.newTableMetadata(ICEBERG_SCHEMA, UNPARTITIONED_SPEC, tablePath, + ImmutableMap.of("format-version", "2")); + TableOperations ops = new TestHadoopTableOperations(new Path(tablePath), fs.getConf()); + + Transaction createTableTransaction = createTableTransaction(tableName, ops, metadata); + createTableTransaction.commitTransaction(); + + if (!unlinkedDataFiles.isEmpty()) { + Transaction appendTransaction = Transactions.newTransaction(tableName, ops); + AppendFiles appendFiles = appendTransaction.newAppend(); + unlinkedDataFiles.forEach(appendFiles::appendFile); + appendFiles.commit(); + appendTransaction.commitTransaction(); + } + + Transaction rowDeltaTransaction = Transactions.newTransaction(tableName, ops); + RowDelta rowDelta = rowDeltaTransaction.newRowDelta(); + linkedDataFiles.forEach(rowDelta::addRows); + deleteFiles.forEach(rowDelta::addDeletes); + rowDelta.commit(); + rowDeltaTransaction.commitTransaction(); + + refresh(tableFqn); + return new TableInfo(tableFqn, metadataPath, ops); + } + + + public static TableInfo setupPartitionedV2Table() throws Exception { + // Setup metadata in a temporary location, so table path doesn't conflict across test-cases + String metadataPath = Files.createTempDirectory(Paths.get(SETUP_BASE_PATH), "optimizeV2Partitioned"). + toAbsolutePath().toString(); + final String[] tablePathComponents = StringUtils.split(metadataPath, '/'); + final String tableName = tablePathComponents[tablePathComponents.length - 1]; + final String tableFqn = Arrays.stream(tablePathComponents, 2, tablePathComponents.length) + .collect(Collectors.joining("\".\"", "dfs_hadoop_mutable.\"", "\"")); + + String tablePath = "file://" + metadataPath; + final TableMetadata metadata = TableMetadata.newTableMetadata(ICEBERG_SCHEMA, PARTITIONED_SPEC, tablePath, + ImmutableMap.of("format-version", "2")); + TableOperations ops = new TestHadoopTableOperations(new Path(tablePath), fs.getConf()); + + Transaction createTableTransaction = createTableTransaction(tableName, ops, metadata); + createTableTransaction.commitTransaction(); + + // Use same data files and delete files as in the source + Transaction appendTransaction = Transactions.newTransaction(tableName, ops); + AppendFiles appendFiles = appendTransaction.newAppend(); + appendFiles.appendFile(partitionedDataFile("2019-02.parquet", 21358, 2019)); + appendFiles.appendFile(partitionedDataFile("2020-02.parquet", 21438, 2020)); + appendFiles.appendFile(partitionedDataFile("2019-00.parquet", 21361, 2019)); + appendFiles.appendFile(partitionedDataFile("2019-01.parquet", 21434, 2019)); + appendFiles.commit(); + appendTransaction.commitTransaction(); + + Transaction rowDeltaTransaction1 = Transactions.newTransaction(tableName, ops); + RowDelta rowDelta1 = rowDeltaTransaction1.newRowDelta(); + rowDelta1.addRows(partitionedDataFile("2020-00.parquet", 21386, 2020)); + rowDelta1.addRows(partitionedDataFile("2020-01.parquet", 21430, 2020)); + rowDelta1.addDeletes(partitionedDeleteFile("delete-2020-00.parquet", 16289, 500, 2020)); + rowDelta1.commit(); + rowDeltaTransaction1.commitTransaction(); + + Transaction rowDeltaTransaction2 = Transactions.newTransaction(tableName, ops); + 
+    RowDelta rowDelta2 = rowDeltaTransaction2.newRowDelta();
+    rowDelta2.addRows(partitionedDataFile("2021-00.parquet", 21393, 2021));
+    rowDelta2.addRows(partitionedDataFile("2021-01.parquet", 21412, 2021));
+    rowDelta2.addRows(partitionedDataFile("2021-02.parquet", 21444, 2021));
+    rowDelta2.addDeletes(partitionedDeleteFile("delete-2021-02.parquet", 16283, 500, 2021));
+    rowDelta2.addDeletes(partitionedDeleteFile("delete-2021-01.parquet", 3841, 30, 2021));
+    rowDelta2.addDeletes(partitionedDeleteFile("delete-2021-00.parquet", 27919, 900, 2021));
+    rowDelta2.commit();
+    rowDeltaTransaction2.commitTransaction();
+
+    refresh(tableFqn);
+    return new TableInfo(tableFqn, metadataPath, ops);
+  }
+
+  private static void assertOptimize(BufferAllocator allocator, String table, String options,
+                                     long expectedRewrittenDataFiles, long expectedRewrittenDeleteFiles,
+                                     long expectedNewDataFiles) throws Exception {
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("optimize table %s (%s)", table, options))
+      .unOrdered()
+      .baselineColumns("rewritten_data_files_count", "rewritten_delete_files_count", "new_data_files_count")
+      .baselineValues(expectedRewrittenDataFiles, expectedRewrittenDeleteFiles, expectedNewDataFiles)
+      .build()
+      .run();
+  }
+
+  private static <T> List<T> l(T... t) {
+    return Arrays.asList(t);
+  }
+
+  public static void testV2OptimizePartitioned(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupPartitionedV2Table();
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=1", 9L, 4L, 3L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeDeleteLinkedFilesOnlyPartitioned(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupPartitionedV2Table();
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=1, MIN_FILE_SIZE_MB=0", 5L, 4L, 2L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeUnpartitioned(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupUnpartitionedV2Table(Collections.EMPTY_LIST, l(DF1, DF2, DF3), l(DEL_F1));
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=2", 3L, 1L, 1L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeMinInputFiles(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupUnpartitionedV2Table(Collections.EMPTY_LIST, l(DF1, DF2, DF3, DF4), l(DEL_F1));
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=5", 4L, 1L, 1L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeDeleteLinkedFilesOnlyUnpartitioned(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupUnpartitionedV2Table(l(DF4), l(DF1, DF2, DF3), l(DEL_F1));
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=1, MIN_FILE_SIZE_MB=0", 3L, 1L, 1L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeMultipleDeleteFiles(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupUnpartitionedV2Table(l(DF4), l(DF1), l(DEL_F1, DEL_F2));
+    assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=1, MIN_FILE_SIZE_MB=0", 1L, 2L, 1L);
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizePartitionEvolution(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo =
setupUnpartitionedV2Table(l(DF4), l(DF1, DF2, DF3), l(DEL_F1)); + runSQL(String.format("alter table %s add partition field order_year", tableInfo.getTableName())); + assertOptimize(allocator, tableInfo.getTableName(), "MIN_INPUT_FILES=1, MIN_FILE_SIZE_MB=0", 4L, 1L, 2L); + + new TestBuilder(allocator) + .sqlQuery(String.format("SELECT \"partition\" FROM TABLE(TABLE_FILES('%s'))", tableInfo.getTableName())) + .unOrdered() + .baselineColumns("partition") + .baselineValues("{order_year=2019}") + .baselineValues("{order_year=2021}") + .build() + .run(); + fs.delete(new Path(tableInfo.getMetadataLocation()), true); + } + + public static void testV2OptimizeUpdateSequenceNumber(BufferAllocator allocator) throws Exception { + final String metadataPath = Files.createTempDirectory(Paths.get(SETUP_BASE_PATH), "optimizeV2").toAbsolutePath().toString(); + final String[] tablePathComponents = StringUtils.split(metadataPath, '/'); + final String tableName = tablePathComponents[tablePathComponents.length - 1]; + final String tableFqn = Arrays.stream(tablePathComponents, 2, tablePathComponents.length) + .collect(Collectors.joining("\".\"", "dfs_hadoop_mutable.\"", "\"")); + + String tablePath = "file://" + metadataPath; + final TableMetadata metadata = TableMetadata.newTableMetadata(ICEBERG_SCHEMA, UNPARTITIONED_SPEC, tablePath, + ImmutableMap.of("format-version", "2")); + TableOperations ops = new TestHadoopTableOperations(new Path(tablePath), fs.getConf()); + + Transaction createTableTransaction = createTableTransaction(tableName, ops, metadata); + createTableTransaction.commitTransaction(); + + Transaction rowDeltaTransaction = Transactions.newTransaction(tableName, ops); + RowDelta rowDelta = rowDeltaTransaction.newRowDelta(); + l(DF1, DF2, DF3).forEach(rowDelta::addRows); + l(DEL_F1).forEach(rowDelta::addDeletes); + rowDelta.commit(); + rowDeltaTransaction.commitTransaction(); + + // Rewrite DF1 + Long snapshotId = ops.current().currentSnapshot().snapshotId(); + Transaction transaction = Transactions.newTransaction(tableName, ops); + transaction.newRewrite().rewriteFiles(ImmutableSet.of(DF1), ImmutableSet.of(DF1)) + .validateFromSnapshot(snapshotId).commit(); + transaction.commitTransaction(); + + refresh(tableFqn); + assertOptimize(allocator, tableFqn, "MIN_INPUT_FILES=2, MIN_FILE_SIZE_MB=0", 2L, 1L, 1L); + + new TestBuilder(allocator) + .sqlQuery(String.format("SELECT file_path FROM TABLE(TABLE_FILES('%s')) ORDER BY record_count LIMIT 1", tableFqn)) + .ordered() + .baselineColumns("file_path") + .baselineValues(DF1.path().toString()) + .build() + .run(); + fs.delete(new Path(metadataPath), true); + } + + public static void testV2OptimizeUpdateSequenceNumberWithDeleteLink(BufferAllocator allocator) throws Exception { + final String metadataPath = Files.createTempDirectory(Paths.get(SETUP_BASE_PATH), "optimizeV2").toAbsolutePath().toString(); + final String[] tablePathComponents = StringUtils.split(metadataPath, '/'); + final String tableName = tablePathComponents[tablePathComponents.length - 1]; + final String tableFqn = Arrays.stream(tablePathComponents, 2, tablePathComponents.length) + .collect(Collectors.joining("\".\"", "dfs_hadoop_mutable.\"", "\"")); + + String tablePath = "file://" + metadataPath; + final TableMetadata metadata = TableMetadata.newTableMetadata(ICEBERG_SCHEMA, UNPARTITIONED_SPEC, tablePath, + ImmutableMap.of("format-version", "2")); + TableOperations ops = new TestHadoopTableOperations(new Path(tablePath), fs.getConf()); + + Transaction createTableTransaction = 
createTableTransaction(tableName, ops, metadata);
+    createTableTransaction.commitTransaction();
+
+    Transaction transaction = Transactions.newTransaction(tableName, ops);
+    transaction.newAppend().appendFile(DF1).commit();
+    transaction.commitTransaction();
+
+    transaction = Transactions.newTransaction(tableName, ops);
+    transaction.newRowDelta()
+      .addRows(DF2)
+      .addDeletes(DEL_F1)
+      .commit();
+    transaction.commitTransaction();
+
+    // Rewrite DF1 in place so its data sequence number moves past DEL_F1;
+    // position deletes only apply to data files with an equal or lower sequence number.
+    Long snapshotId = ops.current().currentSnapshot().snapshotId();
+    transaction = Transactions.newTransaction(tableName, ops);
+    transaction.newRewrite().rewriteFiles(ImmutableSet.of(DF1), ImmutableSet.of(DF1))
+      .validateFromSnapshot(snapshotId).commit();
+    transaction.commitTransaction();
+
+    transaction = Transactions.newTransaction(tableName, ops);
+    transaction.newRowDelta()
+      .addDeletes(DEL_F2)
+      .commit();
+    transaction.commitTransaction();
+
+    refresh(tableFqn);
+    assertOptimize(allocator, tableFqn, "MIN_INPUT_FILES=2, MIN_FILE_SIZE_MB=0", 2L, 2L, 1L);
+
+    // delf1 has 300 deletes linked to each of df1 (1000 rows) and df2 (1000 rows), while delf2 has 10 deletes for each.
+    // Final count should be 1680: 990 from df1 [only the 10 rows from delf2 removed, since the rewrite detached delf1]
+    // and 690 from df2 [300 rows removed by delf1 plus 10 by delf2].
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("SELECT record_count FROM TABLE(TABLE_FILES('%s'))", tableFqn))
+      .unOrdered()
+      .baselineColumns("record_count")
+      .baselineValues(1680L)
+      .build()
+      .run();
+    fs.delete(new Path(metadataPath), true);
+  }
+
+  public static void testV2OptimizeForIdentityPartitions(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupPartitionedV2Table();
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("optimize table %s FOR PARTITIONS order_year=2021 (MIN_INPUT_FILES=1)", tableInfo.getTableName()))
+      .unOrdered()
+      .baselineColumns("rewritten_data_files_count", "rewritten_delete_files_count", "new_data_files_count")
+      .baselineValues(3L, 3L, 1L)
+      .build()
+      .run();
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeForIdentityPartitionsWithExpression(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupPartitionedV2Table();
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("optimize table %s FOR PARTITIONS order_year%%2=0 (MIN_INPUT_FILES=1, MIN_FILE_SIZE_MB=0)", tableInfo.getTableName()))
+      .unOrdered()
+      .baselineColumns("rewritten_data_files_count", "rewritten_delete_files_count", "new_data_files_count")
+      .baselineValues(2L, 1L, 1L)
+      .build()
+      .run();
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  public static void testV2OptimizeEmptyPartition(BufferAllocator allocator) throws Exception {
+    TableInfo tableInfo = setupPartitionedV2Table();
+    runSQL(String.format("DELETE FROM %s WHERE order_year=2021", tableInfo.getTableName()));
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("SELECT * FROM TABLE(TABLE_FILES('%s')) WHERE \"partition\"='{order_year=2021}'", tableInfo.getTableName()))
+      .unOrdered()
+      .expectsEmptyResultSet()
+      .go();
+    new TestBuilder(allocator)
+      .sqlQuery(String.format("optimize table %s FOR PARTITIONS order_year=2021 (MIN_INPUT_FILES=1)", tableInfo.getTableName()))
+      .unOrdered()
+      .baselineColumns("rewritten_data_files_count", "rewritten_delete_files_count", "new_data_files_count")
+      .baselineValues(0L, 0L, 0L)
+      .build()
+      .run();
+    fs.delete(new Path(tableInfo.getMetadataLocation()), true);
+  }
+
+  private static DataFile
unpartitionedDataFile(String relativePath, long size) { + return DataFiles.builder(UNPARTITIONED_SPEC) + .withFileSizeInBytes(size) + .withRecordCount(1) + .withPath(String.format("%s/data/%s", SETUP_BASE_PATH, relativePath)) + .build(); + } + + private static DeleteFile unpartitionedDeleteFile(String relativePath, long size) { + return FileMetadata.deleteFileBuilder(UNPARTITIONED_SPEC) + .ofPositionDeletes() + .withFileSizeInBytes(size) + .withRecordCount(1) + .withPath(String.format("%s/data/%s", SETUP_BASE_PATH, relativePath)) + .build(); + } + + private static DataFile partitionedDataFile(String relativePath, long size, int partitionOrderYr) { + IcebergPartitionData icebergPartitionData = new IcebergPartitionData(PARTITIONED_SPEC.partitionType()); + icebergPartitionData.set(0, partitionOrderYr); + + return DataFiles.builder(PARTITIONED_SPEC) + .withFileSizeInBytes(size) + .withRecordCount(1000L) + .withPath(String.format("%s/data/%d/%s", SETUP_BASE_PATH, partitionOrderYr, relativePath)) + .withPartition(icebergPartitionData) + .build(); + } + + private static DeleteFile partitionedDeleteFile(String relativePath, long size, long records, int partitionOrderYr) { + IcebergPartitionData icebergPartitionData = new IcebergPartitionData(PARTITIONED_SPEC.partitionType()); + icebergPartitionData.set(0, partitionOrderYr); + + return FileMetadata.deleteFileBuilder(PARTITIONED_SPEC) + .ofPositionDeletes() + .withFileSizeInBytes(size) + .withRecordCount(records) + .withPath(String.format("%s/data/%d/%s", SETUP_BASE_PATH, partitionOrderYr, relativePath)) + .withPartition(icebergPartitionData) + .build(); + } + + public static class TableInfo { + private final String tableName; + private final String metadataLocation; + private final TableOperations tableOps; + + TableInfo(String tableName, String metadataLocation, TableOperations tableOps) { + this.tableName = tableName; + this.metadataLocation = metadataLocation; + this.tableOps = tableOps; + } + + public String getTableName() { + return tableName; + } + + public String getMetadataLocation() { + return metadataLocation; + } + + public TableOperations getTableOps() { + return tableOps; + } + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTests.java index 877a6622bf..83308e62b3 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/OptimizeTests.java @@ -16,28 +16,58 @@ package com.dremio.exec.planner.sql; import static com.dremio.exec.planner.OptimizeOutputSchema.NEW_DATA_FILES_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.OPTIMIZE_OUTPUT_SUMMARY; import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DATA_FILE_COUNT; +import static com.dremio.exec.planner.OptimizeOutputSchema.REWRITTEN_DELETE_FILE_COUNT; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createStockIcebergTable; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.insertRows; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.loadTable; +import static org.apache.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import 
java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.util.Text; +import org.apache.iceberg.AppendFiles; +import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; import org.apache.iceberg.FileFormat; +import org.apache.iceberg.ManifestFile; +import org.apache.iceberg.ManifestFiles; +import org.apache.iceberg.ManifestWriter; import org.apache.iceberg.Snapshot; +import org.apache.iceberg.Table; +import org.apache.iceberg.io.OutputFile; import com.dremio.BaseTestQuery; import com.dremio.TestBuilder; import com.dremio.common.exceptions.UserRemoteException; +import com.dremio.common.expression.SchemaPath; import com.dremio.common.util.TestTools; import com.dremio.exec.record.RecordBatchLoader; import com.dremio.exec.record.VectorWrapper; import com.dremio.sabot.rpc.user.QueryDataBatch; import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; public class OptimizeTests extends BaseTestQuery { @@ -46,21 +76,65 @@ public static void testOnUnPartitioned(String source, BufferAllocator allocator) try (DmlQueryTestUtils.Table table = createTestTable(source, 6)) { assertFileCount(table.fqn, 6L, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(6L, 1L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s", table.fqn), 6L, 0L, 1L); assertFileCount(table.fqn, 1L, allocator); } } + public static void testOptimizeDataFilesUnPartitioned(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createTestTable(source, 6)) { + assertFileCount(table.fqn, 6L, allocator); + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s REWRITE DATA", table.fqn), 6L, 0L, 1L); + + assertFileCount(table.fqn, 1L, allocator); + } + } + + public static void testOptimizeManifestsOnlyUnPartitioned(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createTestTable(source, 6)) { + assertFileCount(table.fqn, 6L, allocator); + assertManifestCount(table.fqn, 6L, allocator); + + new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn).unOrdered() + .baselineColumns(OPTIMIZE_OUTPUT_SUMMARY) + .baselineValues("Optimize table successful").go(); + + assertFileCount(table.fqn, 6L, allocator); + assertManifestCount(table.fqn, 1L, allocator); + } + } + public static void testOnPartitioned(String source, BufferAllocator allocator) throws Exception { try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) { assertFileCount(table.fqn, 9L, allocator); // 3 files per partition - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(9L, 3L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s", table.fqn),9L, 0L, 3L); + + 
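For reference, the testOptimizeCommand helper these call sites migrate to is presumably a thin wrapper over the removed TestBuilder pattern, extended with the delete-file column this change introduces; a sketch under that assumption:

  static void testOptimizeCommand(BufferAllocator allocator, String query, long expectedRewrittenDataFiles,
                                  long expectedRewrittenDeleteFiles, long expectedNewDataFiles) throws Exception {
    new TestBuilder(allocator).sqlQuery(query).unOrdered()
      .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT)
      .baselineValues(expectedRewrittenDataFiles, expectedRewrittenDeleteFiles, expectedNewDataFiles)
      .go();
  }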
assertFileCount(table.fqn, 3L, allocator); // should be left with 1 file per partition + } + } + + public static void testOptimizeManifestsOnlyPartitioned(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) { + assertFileCount(table.fqn, 9L, allocator); // 3 files per partition + assertManifestCount(table.fqn, 3L, allocator); + + new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn).unOrdered() + .baselineColumns(OPTIMIZE_OUTPUT_SUMMARY) + .baselineValues("Optimize table successful").go(); + + assertManifestCount(table.fqn, 1L, allocator); + assertFileCount(table.fqn, 9L, allocator); // unchanged + } + } + + public static void testOptimizeDataOnPartitioned(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) { + assertFileCount(table.fqn, 9L, allocator); // 3 files per partition + + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s REWRITE DATA", table.fqn),9L, 0L, 3L); assertFileCount(table.fqn, 3L, allocator); // should be left with 1 file per partition } @@ -68,15 +142,12 @@ public static void testOnPartitioned(String source, BufferAllocator allocator) t public static void testOnUnpartitionedMinInputFilesCriteria(String source, BufferAllocator allocator) throws Exception { try (DmlQueryTestUtils.Table table = createTestTable(source, 5)) { - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 6)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(0L, 0L).go(); // NOOP because min input files > small file count + // NOOP because min input files > small file count + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 6)", table.fqn), 0L, 0L, 0L); assertFileCount(table.fqn, 5L, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 5)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(5L, 1L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 5)", table.fqn),5L, 0L, 1L ); assertFileCount(table.fqn, 1L, allocator); } @@ -84,15 +155,12 @@ public static void testOnUnpartitionedMinInputFilesCriteria(String source, Buffe public static void testOnPartitionedMinInputFilesCriteria(String source, BufferAllocator allocator) throws Exception { try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) { - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 10)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(0L, 0L).go(); // NOOP because min input files > small file count from all partitions + // NOOP because min input files > small file count from all partitions + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 10)", table.fqn),0L, 0L, 0L ); assertFileCount(table.fqn, 9, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 9)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(9L, 3L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES = 9)", table.fqn),9L, 0L, 3L ); assertFileCount(table.fqn, 3, allocator); } @@ -102,15 +170,12 @@ public static void 
testOnUnpartitionedMinFileSizeCriteria(String source, BufferA try (DmlQueryTestUtils.Table table = createTestTable(source, 5)) { assertFileCount(table.fqn, 5L, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_FILE_SIZE_MB = 0)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(0L, 0L).go(); // NOOP because files are likely to be larger than 20 bytes + // NOOP because files are likely to be larger than 20 bytes + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_FILE_SIZE_MB = 0)", table.fqn), 0L, 0L, 0L); assertFileCount(table.fqn, 5L, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_FILE_SIZE_MB = 1)", table.fqn).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(5L, 1L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_FILE_SIZE_MB = 1)", table.fqn),5L, 0L, 1L); assertFileCount(table.fqn, 1L, allocator); } @@ -118,7 +183,7 @@ public static void testOnUnpartitionedMinFileSizeCriteria(String source, BufferA public static void testWithTargetFileSizeAlreadyOptimal(String source, BufferAllocator allocator) throws Exception { try (DmlQueryTestUtils.Table table = createTestTable(source, 0)) { - org.apache.iceberg.Table loadedTable = DmlQueryTestUtils.loadTable(table, allocator); + org.apache.iceberg.Table loadedTable = DmlQueryTestUtils.loadTable(table); for (int commitId = 0; commitId < 5; commitId++) { loadedTable.newAppend().appendFile( DataFiles.builder(loadedTable.spec()) @@ -134,18 +199,15 @@ public static void testWithTargetFileSizeAlreadyOptimal(String source, BufferAll long avgFileSize = getAvgFileSize(table.fqn, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (TARGET_FILE_SIZE_MB = %d)", table.fqn, avgFileSize) - .unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(0L, 0L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (TARGET_FILE_SIZE_MB = %d)", table.fqn, avgFileSize),0L, 0L, 0L); assertFileCount(table.fqn, 5L, allocator); } } public static void testWithMixedSizes(String source, BufferAllocator allocator) throws Exception { - try (DmlQueryTestUtils.Table table = createTestTable(source,0)) { - org.apache.iceberg.Table loadedTable = DmlQueryTestUtils.loadTable(table, allocator); + try (DmlQueryTestUtils.Table table = createTestTable(source, 0)) { + org.apache.iceberg.Table loadedTable = DmlQueryTestUtils.loadTable(table); loadedTable.newAppend().appendFile( DataFiles.builder(loadedTable.spec()) .withPath("/data/fake.parquet") @@ -155,14 +217,14 @@ public static void testWithMixedSizes(String source, BufferAllocator allocator) .build() ).appendFile( DataFiles.builder(loadedTable.spec()) - .withPath(TestTools.getWorkingPath()+"/src/test/resources/iceberg/root_pointer/f1.parquet") + .withPath(TestTools.getWorkingPath() + "/src/test/resources/iceberg/root_pointer/f1.parquet") .withFormat(FileFormat.PARQUET) .withFileSizeInBytes(929) .withRecordCount(1_000) .build() ).appendFile( DataFiles.builder(loadedTable.spec()) - .withPath(TestTools.getWorkingPath()+"/src/test/resources/iceberg/root_pointer/f2.parquet") + .withPath(TestTools.getWorkingPath() + "/src/test/resources/iceberg/root_pointer/f2.parquet") .withFormat(FileFormat.PARQUET) .withFileSizeInBytes(929) .withRecordCount(1_000) @@ -171,11 +233,9 @@ public static void testWithMixedSizes(String source, BufferAllocator allocator) 
refreshTable(table, allocator); assertFileCount(table.fqn, 3L, allocator); - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (TARGET_FILE_SIZE_MB=%d, MIN_FILE_SIZE_MB=%d, MAX_FILE_SIZE_MB=%d, MIN_INPUT_FILES=%d)", - table.fqn, 2L, 1L, 3L, 2L) - .unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(2L, 1L).go(); // Only two small files (f1 and f2) are considered for rewrite but not the already optimal one (map_float_type). + // Only two small files (f1 and f2) are considered for rewrite but not the already optimal one (map_float_type). + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (TARGET_FILE_SIZE_MB=%d, MIN_FILE_SIZE_MB=%d, MAX_FILE_SIZE_MB=%d, MIN_INPUT_FILES=%d)", + table.fqn, 2L, 1L, 3L, 2L),2L, 0L, 1L ); assertFileCount(table.fqn, 2L, allocator); } @@ -185,77 +245,291 @@ public static void testWithSingleFilePartitions(String source, BufferAllocator a try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) { assertFileCount(table.fqn, 9L, allocator); // 3 files per partition - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 2).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(9L, 3L).go(); + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 2),9L, 0L, 3L); assertFileCount(table.fqn, 3L, allocator); // should be left with 1 file per partition - new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 2).unOrdered() - .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT) - .baselineValues(0L, 0L).go(); // should be NOOP + testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 2),0L, 0L, 0L); // should be NOOP + assertFileCount(table.fqn, 3L, allocator); } } public static void testWithSingleFilePartitionsAndEvolvedSpec(String source, BufferAllocator allocator) throws Exception { - try (DmlQueryTestUtils.Table table = createTestTable(source,0)) { + try (DmlQueryTestUtils.Table table = createTestTable(source, 0)) { runSQL(String.format("INSERT INTO %s VALUES (1, 1), (1, 1), (1, 1)", table.fqn)); runSQL(String.format("ALTER TABLE %s ADD PARTITION FIELD %s", table.fqn, table.columns[0])); runSQL(String.format("INSERT INTO %s VALUES (1, 1), (2, 2)", table.fqn)); runSQL(String.format("INSERT INTO %s VALUES (2, 2)", table.fqn)); // Partition column_0=1 will have 3 records from previous version of spec and 1 record from recent insert. 
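Worked count for the assertions below: the first INSERT writes one unpartitioned file holding 3 records; after the spec change, the second INSERT adds one file per partition (2 records across 2 files) and the third adds 1 more file (1 record). OPTIMIZE with MIN_INPUT_FILES=1 therefore rewrites 4 files into 2 (one per partition of the new spec), and both added-records and deleted-records come to 3 + 2 + 1 = 6.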
-      new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 1).unOrdered()
-        .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT)
-        .baselineValues(4L, 2L).go();
+      testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 1), 4L, 0L, 2L);
+
+      List<QueryDataBatch> results = testSqlWithResults(String.format("SELECT OPERATION, SUMMARY FROM TABLE(TABLE_SNAPSHOT('%s')) ORDER BY COMMITTED_AT DESC LIMIT 2", table.fqn));
+      RecordBatchLoader loader = new RecordBatchLoader(getSabotContext().getAllocator());
+      QueryDataBatch data = results.get(0);
+      loader.load(data.getHeader().getDef(), data.getData());
 
-      Snapshot snapshot = DmlQueryTestUtils.loadTable(table, allocator).currentSnapshot();
-      assertThat(snapshot.operation()).isEqualTo("replace");
-      String addedRecords = snapshot.summary().get("added-records");
-      String deletedRecords = snapshot.summary().get("deleted-records");
+      VarCharVector operationVector = loader.getValueAccessorById(VarCharVector.class,
+        loader.getValueVectorId(SchemaPath.getCompoundPath("operation")).getFieldIds()).getValueVector();
+      assertThat(operationVector.getObject(0).toString()).isEqualTo("replace");
+      assertThat(operationVector.getObject(1).toString()).isEqualTo("replace");
+      Map<String, String> rewriteDataSnapshotSummary = fetchSummary(loader, 1);
+      String addedRecords = rewriteDataSnapshotSummary.get("added-records");
+      String deletedRecords = rewriteDataSnapshotSummary.get("deleted-records");
       assertThat(addedRecords).isEqualTo(deletedRecords).isEqualTo("6");
     }
   }
 
   public static void testUnsupportedScenarios(String source, BufferAllocator allocator) throws Exception {
     try (DmlQueryTestUtils.Table table = createPartitionedTestTable(source, 3)) {
-      assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s WHERE %s=0", table.fqn, table.columns[1])))
-        .isInstanceOf(UserRemoteException.class)
-        .hasMessageContaining("PARSE ERROR: Failure parsing the query.");
-
-      assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)))
-        .isInstanceOf(UserRemoteException.class)
-        .hasMessageContaining("PARSE ERROR: Failure parsing the query.");
-
       assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s REWRITE DATA USING SORT", table.fqn)))
         .isInstanceOf(UserRemoteException.class)
-        .hasMessageContaining("PARSE ERROR: Failure parsing the query.");
+        .hasMessageStartingWith("PARSE ERROR");
 
       assertThatThrownBy(() -> runSQL(String.format("OPTIMIZE TABLE %s USING SORT", table.fqn)))
         .isInstanceOf(UserRemoteException.class)
-        .hasMessageContaining("PARSE ERROR: Failure parsing the query.");
+        .hasMessageStartingWith("PARSE ERROR");
     }
   }
 
   public static void testEvolvedPartitions(String source, BufferAllocator allocator) throws Exception {
-    try (DmlQueryTestUtils.Table table = createTestTable(source,6)) {
+    try (DmlQueryTestUtils.Table table = createTestTable(source, 6)) {
       assertFileCount(table.fqn, 6L, allocator);
-      new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s", table.fqn).unOrdered()
-        .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT)
-        .baselineValues(6L, 1L).go();
+
+      testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s", table.fqn), 6L, 0L, 1L);
+
       assertFileCount(table.fqn, 1L, allocator);
 
       runSQL(String.format("ALTER TABLE %s ADD partition FIELD %s", table.fqn, table.columns[1]));
-      new TestBuilder(allocator).sqlQuery("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 1).unOrdered()
-        .baselineColumns(REWRITTEN_DATA_FILE_COUNT, NEW_DATA_FILES_COUNT)
-        .baselineValues(1L, 6L).go();
+
+      testOptimizeCommand(allocator, String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=%d)", table.fqn, 1), 1L, 0L, 6L);
 
       assertFileCount(table.fqn, 6L, allocator);
     }
   }
 
+  public static void testOptimizeLargeManifests(String source, BufferAllocator allocator) throws Exception {
+    try (DmlQueryTestUtils.Table table = createStockIcebergTable(source, 0, 2, "large_manifests_table")) {
+      Table icebergTable = loadTable(table);
+      icebergTable.updateProperties().set(MANIFEST_TARGET_SIZE_BYTES, "1000").commit(); // 1KB
+      icebergTable.newFastAppend().appendManifest(writeManifestFile(icebergTable, 10)).commit();
+
+      runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn));
+
+      icebergTable.refresh();
+      Snapshot replacedSnapshot = icebergTable.currentSnapshot();
+
+      assertThat(replacedSnapshot.operation()).isEqualTo("replace");
+      assertThat(Integer.parseInt(replacedSnapshot.summary().get("manifests-created"))).isGreaterThan(1);
+      assertThat(replacedSnapshot.summary().get("manifests-replaced")).isEqualTo("1");
+    }
+  }
+
+  public static void testOptimizeManifestsModesIsolations(String source, BufferAllocator allocator) throws Exception {
+    try (DmlQueryTestUtils.Table table = createTestTable(source, 6)) {
+
+      long snapshot1 = latestSnapshotId(table);
+      Set<String> snapshot1DataFiles = dataFilePaths(table);
+      Set<String> snapshot1Manifests = manifestFilePaths(table);
+
+      runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn));
+
+      long snapshot2 = latestSnapshotId(table);
+      Set<String> snapshot2DataFiles = dataFilePaths(table);
+      Set<String> snapshot2ManifestFiles = manifestFilePaths(table);
+
+      assertThat(snapshot1).isNotEqualTo(snapshot2);
+      assertThat(snapshot1DataFiles).isEqualTo(snapshot2DataFiles);
+      assertThat(snapshot1Manifests).isNotEqualTo(snapshot2ManifestFiles);
+
+      insertCommits(table, 6);
+      long snapshot3 = latestSnapshotId(table);
+      Set<String> snapshot3DataFiles = dataFilePaths(table);
+      runSQL(String.format("OPTIMIZE TABLE %s REWRITE DATA", table.fqn));
+
+      long snapshot4 = latestSnapshotId(table);
+      Set<String> snapshot4DataFiles = dataFilePaths(table);
+
+      assertThat(snapshot3).isNotEqualTo(snapshot4);
+      assertThat(snapshot3DataFiles).isNotEqualTo(snapshot4DataFiles);
+    }
+  }
+
+  public static void testOptimizeManifestsWithOptimalSize(String source, BufferAllocator allocator) throws Exception {
+    try (DmlQueryTestUtils.Table table = createStockIcebergTable(source, 0, 2, "modes_isolation")) {
+      Table icebergTable = loadTable(table);
+      List<ManifestFile> manifestFiles = new ArrayList<>(5);
+      for (int i = 0; i < 5; i++) {
+        ManifestFile manifestFile = writeManifestFile(icebergTable, 10);
+        manifestFiles.add(manifestFile);
+      }
+      AppendFiles append = icebergTable.newFastAppend();
+      manifestFiles.forEach(append::appendManifest);
+      append.commit();
+      icebergTable.refresh();
+
+      long avgManifestSize = (long) manifestFiles.stream().mapToLong(ManifestFile::length).average().getAsDouble();
+      icebergTable.updateProperties().set(MANIFEST_TARGET_SIZE_BYTES, String.valueOf(avgManifestSize)).commit();
+      Snapshot snapshot1 = icebergTable.currentSnapshot();
+
+      runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn));
+      icebergTable.refresh();
+      Snapshot snapshot2 = icebergTable.currentSnapshot();
+
+      assertThat(snapshot1.snapshotId()).isEqualTo(snapshot2.snapshotId()); // NOOP
+
+      assertNoOrphanManifests(icebergTable, snapshot1);
+    }
+  }
+
+  public static void testOptimizeOnEmptyTableNoSnapshots(String source, BufferAllocator allocator) throws Exception {
+    try (DmlQueryTestUtils.Table table
= createStockIcebergTable(source, 0, 2, "empty_table_no_snapshots")) { + Table icebergTable = loadTable(table); + + runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)); + icebergTable.refresh(); + + assertThat(icebergTable.currentSnapshot()).isNull(); + + runSQL(String.format("OPTIMIZE TABLE %s REWRITE DATA (MIN_INPUT_FILES=1)", table.fqn)); + icebergTable.refresh(); + + assertThat(icebergTable.currentSnapshot()).isNull(); + + runSQL(String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=1)", table.fqn)); + icebergTable.refresh(); + + assertThat(icebergTable.currentSnapshot()).isNull(); + } + } + + public static void testOptimizeOnEmptyTableHollowSnapshot(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(source, 2, 0)) { + + runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)); + runSQL(String.format("OPTIMIZE TABLE %s REWRITE DATA (MIN_INPUT_FILES=1)", table.fqn)); + runSQL(String.format("OPTIMIZE TABLE %s (MIN_INPUT_FILES=1)", table.fqn)); + + new TestBuilder(allocator).sqlQuery("SELECT count(*) as CNT FROM TABLE(TABLE_SNAPSHOT('%s')) where operation='replace'", table.fqn).unOrdered() + .baselineColumns("CNT") + .baselineValues(0L).go(); // No replace snapshot + } + } + + public static void testOptimizeNoopOnResidualDataManifests(String source, BufferAllocator allocator) throws Exception { + final int noOfInputManifests = 10; + try (DmlQueryTestUtils.Table table = createStockIcebergTable(source, 0, 2, "residual_data_manifests")) { + Table icebergTable = loadTable(table); + List<ManifestFile> manifestFiles = new ArrayList<>(noOfInputManifests); + for (int i = 0; i < noOfInputManifests; i++) { + ManifestFile manifestFile = writeManifestFile(icebergTable, 1000); + manifestFiles.add(manifestFile); + } + AppendFiles append = icebergTable.newFastAppend(); + manifestFiles.forEach(append::appendManifest); + append.commit(); + icebergTable.refresh(); + + long totalManifestSize = manifestFiles.stream().mapToLong(ManifestFile::length).sum(); + long targetManifestSize = (long) (totalManifestSize * 0.3); + icebergTable.updateProperties().set(MANIFEST_TARGET_SIZE_BYTES, String.valueOf(targetManifestSize)).commit(); + Snapshot initialSnapshot = icebergTable.currentSnapshot(); + + runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)); + icebergTable.refresh(); + Snapshot replacedSnapshot = icebergTable.currentSnapshot(); + + // The manifests get rewritten into multiple output manifests; the last, residual one is expected to fall outside the optimal size range.
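+ // A follow-up REWRITE MANIFESTS on this state should therefore be a no-op instead of rewriting the residual manifest again, as verified below.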
+ assertThat(initialSnapshot.snapshotId()).isNotEqualTo(replacedSnapshot.snapshotId()); + assertThat(Integer.parseInt(replacedSnapshot.summary().get("manifests-created"))).isGreaterThan(1); + assertThat(replacedSnapshot.summary().get("manifests-replaced")).isEqualTo(String.valueOf(noOfInputManifests)); + + // Assert that at least one manifest falls outside the optimal size range + assertThat(replacedSnapshot.allManifests(icebergTable.io()).stream() + .anyMatch(m -> isNotInOptimalSizeRange(m, targetManifestSize))).isTrue(); + + runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)); + icebergTable.refresh(); + Snapshot noopSnapshot = icebergTable.currentSnapshot(); + assertThat(replacedSnapshot.snapshotId()).isEqualTo(noopSnapshot.snapshotId()); + + // Ensure no orphan manifest files are left over in the directory + assertNoOrphanManifests(icebergTable, initialSnapshot, replacedSnapshot); + } + } + + public static void testRewriteManifestsForEvolvedPartitionSpec(String source, BufferAllocator allocator) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(source, 3, 5)) { + runSQL(String.format("ALTER TABLE %s ADD PARTITION FIELD identity(%s)", table.fqn, table.columns[0])); + insertCommits(table, 2); + + runSQL(String.format("ALTER TABLE %s ADD PARTITION FIELD truncate(2, %s)", table.fqn, table.columns[1])); + insertCommits(table, 2); + + runSQL(String.format("ALTER TABLE %s ADD PARTITION FIELD identity(%s)", table.fqn, table.columns[2])); + insertCommits(table, 2); + + long snapshot1 = latestSnapshotId(table); + runSQL(String.format("OPTIMIZE TABLE %s REWRITE MANIFESTS", table.fqn)); + + long replaceSnapshot = latestSnapshotId(table); + + assertThat(snapshot1).isNotEqualTo(replaceSnapshot); + + // Rewritten manifests stay separated by partition spec: exactly one manifest per spec id + new TestBuilder(allocator) + .sqlQuery("SELECT partition_spec_id FROM TABLE(table_manifests('%s'))", table.fqn) + .unOrdered() + .baselineColumns("partition_spec_id") + .baselineRecords(ImmutableList.of( + ImmutableMap.of("`partition_spec_id`", 0), + ImmutableMap.of("`partition_spec_id`", 1), + ImmutableMap.of("`partition_spec_id`", 2), + ImmutableMap.of("`partition_spec_id`", 3) + )).go(); + } + } + + public static void assertNoOrphanManifests(Table icebergTable, Snapshot...
snapshots) throws IOException { + Set<String> allManifestPaths = new HashSet<>(); + for (Snapshot snapshot : snapshots) { + snapshot.allManifests(icebergTable.io()).forEach(m -> allManifestPaths.add(m.path())); + } + + try (Stream<Path> filePathStream = Files.walk(Paths.get(icebergTable.location()))) { + // Path#endsWith matches whole path segments, so test the file name's extension instead + Set<String> allAvroInTableDir = filePathStream.filter(p -> p.getFileName().toString().endsWith(".avro")) + .map(Path::toString).collect(Collectors.toSet()); + + assertThat(allManifestPaths).containsAll(allAvroInTableDir); + } + } + + private static boolean isNotInOptimalSizeRange(ManifestFile manifestFile, long manifestTargetSizeBytes) { + long minManifestFileSize = (long) (manifestTargetSizeBytes * 0.75); + long maxManifestFileSize = (long) (manifestTargetSizeBytes * 1.8); + + return manifestFile.length() < minManifestFileSize || manifestFile.length() > maxManifestFileSize; + } + + private static ManifestFile writeManifestFile(Table icebergTable, int noOfDataFiles) throws IOException { + final OutputFile manifestLocation = icebergTable.io().newOutputFile( + String.format("%s/metadata/%s-mx.avro", icebergTable.location(), UUID.randomUUID())); + ManifestWriter<DataFile> writer = ManifestFiles.write(icebergTable.spec(), manifestLocation); + + for (int i = 0; i < noOfDataFiles; i++) { + writer.add(DataFiles.builder(icebergTable.spec()) + .withPath(String.format("/data/fake-%d.parquet", i)) + .withFormat(FileFormat.PARQUET) + .withFileSizeInBytes(20 * 1024 * 1024L) + .withRecordCount(1_000_000) + .build()); + } + + writer.close(); + return writer.toManifestFile(); + } + private static long getAvgFileSize(String tableFqn, BufferAllocator allocator) throws Exception { List<QueryDataBatch> res = testSqlWithResults(String.format("SELECT avg(file_size_in_bytes)/1048576 as avg_file_size_mb FROM TABLE(table_files('%s'))", tableFqn)); @@ -282,12 +556,18 @@ private static void refreshTable(DmlQueryTestUtils.Table table, BufferAllocator .baselineValues(true, String.format("Metadata for table '%s' refreshed.", table.fqn.replaceAll("\"", ""))).go(); } - private static void assertFileCount(String tableFqn, long expectedFileCount, BufferAllocator allocator) throws Exception { + public static void assertFileCount(String tableFqn, long expectedFileCount, BufferAllocator allocator) throws Exception { new TestBuilder(allocator).sqlQuery("SELECT COUNT(*) AS FILE_COUNT FROM TABLE(table_files('%s'))", tableFqn).unOrdered() .baselineColumns("FILE_COUNT") .baselineValues(expectedFileCount).go(); } + private static void assertManifestCount(String tableFqn, long expectedManifestCount, BufferAllocator allocator) throws Exception { + new TestBuilder(allocator).sqlQuery("SELECT COUNT(*) AS FILE_COUNT FROM TABLE(table_manifests('%s'))", tableFqn).unOrdered() + .baselineColumns("FILE_COUNT") + .baselineValues(expectedManifestCount).go(); + } + private static DmlQueryTestUtils.Table createPartitionedTestTable(String source, int noOfInsertCommitsPerPartition) throws Exception { DmlQueryTestUtils.Table table = createBasicTable(source, 0, 2, 0, ImmutableSet.of(1)); insertCommits(table, noOfInsertCommitsPerPartition); @@ -300,10 +580,79 @@ private static DmlQueryTestUtils.Table createTestTable(String source, int noOfIn return table; } - private static void insertCommits(DmlQueryTestUtils.Table table, int noOfInsertCommits) throws Exception { + private static Map<String, String> fetchSummary(RecordBatchLoader loader, int index) { + ListVector summaryVector = loader.getValueAccessorById(ListVector.class, + loader.getValueVectorId(SchemaPath.getCompoundPath("summary")).getFieldIds()).getValueVector(); + + Map<String, String> result = new HashMap<>(); + List<?> summary = summaryVector.getObject(index); + + for (Object entry : summary) { + Map<?, ?> entryMap = (Map<?, ?>) entry; + result.put(entryMap.get("key").toString(), entryMap.get("value").toString()); + } + + return result; + } + + private static long latestSnapshotId(DmlQueryTestUtils.Table table) throws Exception { + List<QueryDataBatch> results = testSqlWithResults(String.format("SELECT SNAPSHOT_ID FROM TABLE(TABLE_SNAPSHOT('%s')) ORDER BY COMMITTED_AT DESC LIMIT 1", table.fqn)); + RecordBatchLoader loader = new RecordBatchLoader(getSabotContext().getAllocator()); + QueryDataBatch data = results.get(0); + loader.load(data.getHeader().getDef(), data.getData()); + + BigIntVector snapshotIdVector = loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId( + SchemaPath.getCompoundPath("SNAPSHOT_ID")).getFieldIds()).getValueVector(); + + return snapshotIdVector.get(0); + } + + private static Set<String> dataFilePaths(DmlQueryTestUtils.Table table) throws Exception { + List<QueryDataBatch> results = testSqlWithResults(String.format("SELECT FILE_PATH FROM TABLE(TABLE_FILES('%s'))", table.fqn)); + RecordBatchLoader loader = new RecordBatchLoader(getSabotContext().getAllocator()); + QueryDataBatch data = results.get(0); + loader.load(data.getHeader().getDef(), data.getData()); + + VarCharVector filePathVector = loader.getValueAccessorById(VarCharVector.class, loader.getValueVectorId( + SchemaPath.getCompoundPath("FILE_PATH")).getFieldIds()).getValueVector(); + + Set<String> filePaths = new HashSet<>(filePathVector.getValueCount()); + for (int i = 0; i < filePathVector.getValueCount(); i++) { + filePaths.add(filePathVector.getObject(i).toString()); + } + return filePaths; + } + + private static Set<String> manifestFilePaths(DmlQueryTestUtils.Table table) throws Exception { + List<QueryDataBatch> results = testSqlWithResults(String.format("SELECT PATH FROM TABLE(TABLE_MANIFESTS('%s'))", table.fqn)); + RecordBatchLoader loader = new RecordBatchLoader(getSabotContext().getAllocator()); + QueryDataBatch data = results.get(0); + loader.load(data.getHeader().getDef(), data.getData()); + + VarCharVector filePathVector = loader.getValueAccessorById(VarCharVector.class, loader.getValueVectorId( + SchemaPath.getCompoundPath("PATH")).getFieldIds()).getValueVector(); + + Set<String> filePaths = new HashSet<>(filePathVector.getValueCount()); + for (int i = 0; i < filePathVector.getValueCount(); i++) { + filePaths.add(filePathVector.getObject(i).toString()); + } + return filePaths; + } + + public static void insertCommits(DmlQueryTestUtils.Table table, int noOfInsertCommits) throws Exception { + for (int commitId = 0; commitId < noOfInsertCommits; commitId++) { + // Same number of rows per commit. If it's a partitioned table, the rows will get distributed.
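+ // Note: each of the noOfInsertCommits iterations inserts noOfInsertCommits rows, so the table ends up with noOfInsertCommits^2 rows in total.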
insertRows(table, noOfInsertCommits); } } + + public static void testOptimizeCommand(BufferAllocator allocator, String optimizeQuery, long expectedRDFC, long expectedRDelFC, long expectedNDFC) throws Exception { + new TestBuilder(allocator) + .sqlQuery(optimizeQuery) + .unOrdered() + .baselineColumns(REWRITTEN_DATA_FILE_COUNT, REWRITTEN_DELETE_FILE_COUNT, NEW_DATA_FILES_COUNT) + .baselineValues(expectedRDFC,expectedRDelFC,expectedNDFC) + .build() + .run(); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestCreateTableQueryCleanup.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestCreateTableQueryCleanup.java index cbe98be9fe..eddb22ddf8 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestCreateTableQueryCleanup.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestCreateTableQueryCleanup.java @@ -33,8 +33,6 @@ import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.DatasetCatalog; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.physical.base.IcebergWriterOptions; import com.dremio.exec.physical.base.ImmutableIcebergWriterOptions; @@ -45,14 +43,12 @@ import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; import com.dremio.exec.planner.sql.handlers.direct.CreateEmptyTableHandler; -import com.dremio.exec.planner.sql.handlers.query.CreateTableHandler; import com.dremio.exec.planner.sql.parser.DremioSqlColumnDeclaration; import com.dremio.exec.planner.sql.parser.PartitionDistributionStrategy; import com.dremio.exec.planner.sql.parser.SqlCreateEmptyTable; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.exec.store.dfs.IcebergTableProps; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.exec.store.iceberg.IcebergSerDe; import com.dremio.exec.store.iceberg.IcebergUtils; import com.dremio.exec.store.iceberg.SchemaConverter; @@ -72,12 +68,12 @@ public void testTableCleanupInIcebergTableCreationCommitterOnFailure() { when(command.endTransaction()).thenThrow(new UncheckedIOException(new IOException("endTransaction_error"))); IcebergTableCreationCommitter committer = new IcebergTableCreationCommitter("table1", BatchSchema.EMPTY, - ImmutableList.of(), command, null, PartitionSpec.unpartitioned()); + ImmutableList.of(), command, null, PartitionSpec.unpartitioned()); assertThatThrownBy(committer::commit) - .isInstanceOf(RuntimeException.class) - .hasCauseInstanceOf(UncheckedIOException.class) - .hasMessageContaining("endTransaction_error"); + .isInstanceOf(RuntimeException.class) + .hasCauseInstanceOf(UncheckedIOException.class) + .hasMessageContaining("endTransaction_error"); verify(command, times(1)).deleteTable(); } @@ -96,52 +92,36 @@ public void testTableCleanupInCreateEmptyTableHandlerOnFailure() throws Exceptio when(context.getOptions()).thenReturn(manager); when(config.getContext()).thenReturn(context); UserSession userSession = mock(UserSession.class); - CreateEmptyTableHandler handler = new CreateEmptyTableHandler(catalog, config, userSession, false); + CreateEmptyTableHandler handler = new CreateEmptyTableHandler(catalog, config, userSession, false); - List columnDeclarations = SqlHandlerUtil.columnDeclarationsFromSqlNodes(SqlNodeList.EMPTY, sql); + List columnDeclarations = SqlHandlerUtil.columnDeclarationsFromSqlNodes( + SqlNodeList.EMPTY, 
sql); BatchSchema batchSchema = SqlHandlerUtil.batchSchemaFromSqlSchemaSpec(config, columnDeclarations, sql); - PartitionSpec partitionSpec = IcebergUtils.getIcebergPartitionSpecFromTransforms(batchSchema, new ArrayList<>(), null); + PartitionSpec partitionSpec = IcebergUtils.getIcebergPartitionSpecFromTransforms(batchSchema, new ArrayList<>(), + null); ByteString partitionSpecByteString = ByteString.copyFrom(IcebergSerDe.serializePartitionSpec(partitionSpec)); - String schemaAsJson = IcebergSerDe.serializedSchemaAsJson(SchemaConverter.getBuilder().build().toIcebergSchema(batchSchema)); + String schemaAsJson = IcebergSerDe.serializedSchemaAsJson( + SchemaConverter.getBuilder().build().toIcebergSchema(batchSchema)); IcebergTableProps icebergTableProps = new IcebergTableProps(partitionSpecByteString, schemaAsJson); IcebergWriterOptions icebergWriterOptions = new ImmutableIcebergWriterOptions.Builder() - .setIcebergTableProps(icebergTableProps).build(); + .setIcebergTableProps(icebergTableProps).build(); TableFormatWriterOptions tableFormatOptions = new ImmutableTableFormatWriterOptions.Builder() - .setIcebergSpecificOptions(icebergWriterOptions).setOperation(TableFormatOperation.CREATE).build(); + .setIcebergSpecificOptions(icebergWriterOptions).setOperation(TableFormatOperation.CREATE).build(); WriterOptions options = new WriterOptions(0, new ArrayList<>(), - new ArrayList<>(), new ArrayList<>(), PartitionDistributionStrategy.UNSPECIFIED, - null, sqlCreateEmptyTable.isSingleWriter(), Long.MAX_VALUE, tableFormatOptions, null); + new ArrayList<>(), new ArrayList<>(), PartitionDistributionStrategy.UNSPECIFIED, + null, sqlCreateEmptyTable.isSingleWriter(), Long.MAX_VALUE, tableFormatOptions, null); doThrow(new RuntimeException("createEmptyTable_error")).when(catalog).createEmptyTable(key, batchSchema, options); when(catalog.getSource(key.getRoot())).thenReturn(mock(FileSystemPlugin.class)); assertThatThrownBy(() -> handler.callCatalogCreateEmptyTableWithCleanup(key, batchSchema, options)) - .isInstanceOf(UserException.class) - .hasMessageContaining("createEmptyTable_error") // Message that should be checked for: createEmptyTable_error - .hasRootCauseInstanceOf(RuntimeException.class) - .hasRootCauseMessage("createEmptyTable_error"); + .isInstanceOf(UserException.class) + .hasMessageContaining("createEmptyTable_error") // Message that should be checked for: createEmptyTable_error + .hasRootCauseInstanceOf(RuntimeException.class) + .hasRootCauseMessage("createEmptyTable_error"); verify(catalog, times(1)).forgetTable(key); } - - @Test - public void testCTASCleanupInCreateTableHandler() throws IOException { - final String tableFolderToDelete = "dummyTableFolderToDelete"; - final NamespaceKey tableName = new NamespaceKey(ImmutableList.of("dummyTable")); - - DatasetCatalog datasetCatalog = mock(DatasetCatalog.class); - DremioFileIO dremioFileIO = mock(DremioFileIO.class); - - when(dremioFileIO.getPlugin()).thenReturn(mock(FileSystemPlugin.class)); - CreateTableHandler.cleanUpImpl(dremioFileIO, datasetCatalog, tableName, tableFolderToDelete); - verify(dremioFileIO,times(1)).deleteFile(tableFolderToDelete, true, true); - verify(datasetCatalog, times(1)).forgetTable(tableName); - - when(dremioFileIO.getPlugin()).thenReturn(mock(MutablePlugin.class)); - CreateTableHandler.cleanUpImpl(dremioFileIO, datasetCatalog, tableName, tableFolderToDelete); - verify(dremioFileIO,times(1)).deleteFile(tableFolderToDelete, true, false); - verify(datasetCatalog, times(1)).dropTable(tableName, null); - } } diff --git 
a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestIcebergCtasPlan.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestIcebergCtasPlan.java index 65a36a9b60..5f91b499f0 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestIcebergCtasPlan.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestIcebergCtasPlan.java @@ -15,22 +15,14 @@ */ package com.dremio.exec.planner.sql; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doAnswer; - import org.apache.calcite.sql.SqlNode; import org.junit.Assert; import org.junit.Test; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import com.dremio.PlanTestBase; import com.dremio.common.util.TestTools; import com.dremio.exec.ExecTest; import com.dremio.exec.PassthroughQueryObserver; -import com.dremio.exec.catalog.DatasetCatalog; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.physical.PhysicalPlan; import com.dremio.exec.planner.observer.AttemptObserver; @@ -42,12 +34,9 @@ import com.dremio.exec.rpc.user.security.testing.UserServiceTestImpl; import com.dremio.exec.server.SabotContext; import com.dremio.exec.server.options.SessionOptionManagerImpl; -import com.dremio.exec.store.iceberg.DremioFileIO; import com.dremio.options.OptionManager; import com.dremio.options.OptionValue; import com.dremio.sabot.rpc.user.UserSession; -import com.dremio.service.namespace.NamespaceKey; -import com.google.common.collect.ImmutableList; public class TestIcebergCtasPlan extends PlanTestBase { @@ -112,32 +101,4 @@ public void testHashExchangeInIcebergCTAS() throws Exception { "IcebergManifestList.*"}); } } - - @Test - public void testCTASCleaner() { - DatasetCatalog datasetCatalog = Mockito.mock(DatasetCatalog.class); - DremioFileIO dremioFileIO = Mockito.mock(DremioFileIO.class); - - final String tableFolderToDelete = "dummyTableFolderToDelete"; - final NamespaceKey tableName = new NamespaceKey(ImmutableList.of("dummyTable")); - final String[] actualTableFolderDeleted = new String[1]; - - doAnswer(new Answer() { - public Void answer(InvocationOnMock invocation) { - Object[] args = invocation.getArguments(); - Assert.assertEquals(args.length, 3); - - // table location - actualTableFolderDeleted[0] = (String)args[0]; - - // "recursive" flag - Assert.assertTrue((boolean)args[1]); - return null; - } - }).when(dremioFileIO).deleteFile(anyString(), anyBoolean(), anyBoolean()); - - CreateTableHandler.cleanUpImpl(dremioFileIO, datasetCatalog, tableName, tableFolderToDelete); - - Assert.assertEquals(tableFolderToDelete, actualTableFolderDeleted[0]); - } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestInsertIntoTable.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestInsertIntoTable.java index 3fc29fa287..91f7220464 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestInsertIntoTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestInsertIntoTable.java @@ -108,8 +108,7 @@ private void testInsertCaseInsensesitive(String tblName, String schema) throws E .sqlBaselineQuery("SELECT n_nationkey ID, n_regionkey CODE from cp.\"tpch/nation.parquet\"") .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tblName)); } } @@ -181,8 +180,7 @@ private void testInsertCommandInvalidPath(String tblName, String schema) throws 
UserExceptionAssert.assertThatThrownBy(() -> test(insertQuery)) .hasMessageContaining(String.format("Table [%s] does not exist.", tblName)); }); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tblName)); } } @@ -449,8 +447,7 @@ private void testInsertUsingValues(String insert_values_test, String schema) thr .baselineValues(1L) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), insert_values_test)); } } @@ -488,8 +485,7 @@ private void testOutputColumnsForInsertUsingValues(String insert_values_test, St .baselineValues(1L) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), insert_values_test)); } } @@ -525,8 +521,7 @@ private void testInsertUsingValuesWithSchemaMismatch(String insert_values_test, .baselineValues(1L) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), insert_values_test)); } } @@ -546,8 +541,7 @@ private void testIncomptibleStringToInt(String newTable, String schema) throws E String insert = "insert into " + schema + "." + newTable + " select * from (values('abcd'))"; UserExceptionAssert.assertThatThrownBy(() -> test(insert)) .hasMessageContaining("Failed to cast the string abcd to int32_t"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -585,8 +579,7 @@ private void testUpPromotableInsert(String newTable, String schema) throws Excep .baselineColumns("col1", "col2", "col3") .baselineValues(new Long("1"), new BigDecimal("12345.340"), new Double("0.3")) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -629,8 +622,7 @@ private void testListComplexInsert(String newTable, String dfsSchema, String tes .baselineValues(2L) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -668,8 +660,7 @@ private void testListComplexInsertFailure(String newTable, String dfsSchema, Str String expected = isComplexTypeSupport() ? "Table schema(listcol1::list) doesn't match with query schema(listcol2::list)" : "Table schema(listcol1::list) doesn't match with query schema(listcol1::list)"; UserExceptionAssert.assertThatThrownBy(() -> test(insertQuery)) .hasMessageContaining(expected); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -712,8 +703,7 @@ private void testStructComplexInsert(String newTable, String dfsSchema, String t .baselineValues(2L) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -751,8 +741,7 @@ private void testStructComplexInsertFailure(String newTable, String dfsSchema, S String expected = isComplexTypeSupport() ? 
"Table schema(structcol1::struct) doesn't match with query schema(structcol2::struct)" : "schema(structcol1::struct) doesn't match with query schema(structcol1::struct)"; UserExceptionAssert.assertThatThrownBy(() -> test(insertQuery)) .hasMessageContaining(expected); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -790,8 +779,7 @@ private void testComplexInsertIncompatibleFailure(String newTable, String dfsSch String expected = isComplexTypeSupport() ? "Table schema(structcol1::struct) doesn't match with query schema(name::varchar)" : "Table schema(structcol1::struct) doesn't match with query schema(structcol1::varchar)"; UserExceptionAssert.assertThatThrownBy(() -> test(insertQuery)) .hasMessageContaining(expected); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -833,8 +821,7 @@ private void testUpPromotablePartitionInsert(String newTable, String schema, Ice File tableFolder = new File(getDfsTestTmpSchemaLocation(), newTable + "_2"); checkSinglePartitionValue(tableFolder, Long.class, new Long(1), catalogType); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -883,8 +870,7 @@ private void testUpPromotablePartitionWithStarInsert(String newTable, String sch File tableFolder = new File(getDfsTestTmpSchemaLocation(), newTable + "_2"); checkSinglePartitionValue(tableFolder, Long.class, new Long(1), catalogType); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -921,8 +907,7 @@ private void testDecimalInsertMorePrecisionEqualScale(String newTable, String sc .baselineColumns("col2") .baselineValues(new BigDecimal("12345.34")) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -953,8 +938,7 @@ private void testDecimalInsertMorePrecisionUnequalScale(String newTable, String " select * from " + schema + "." + newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromoting)) .hasMessageContaining("Table schema(col2::decimal(20,2)) doesn't match with query schema(zcol1::decimal(10,5))"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -985,8 +969,7 @@ private void testDecimalInsertLessPrecision(String newTable, String schema) thro " select * from " + schema + "." 
+ newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromoting)) .hasMessageContaining("Table schema(col2::decimal(9,2)) doesn't match with query schema(zcol1::decimal(10,2))"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -1028,8 +1011,7 @@ private void testDoubleToDecimalInsertFailure(String newTable, String schema) th " select * from " + schema + "." + newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromotingFailure)) .hasMessageContaining("Table schema(col3::decimal(16,2)) doesn't match with query schema(zcol1::double)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1070,8 +1052,7 @@ private void testFloatToDecimalInsertFailure(String newTable, String schema) thr " select * from " + schema + "." + newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromotingFailure)) .hasMessageContaining("Table schema(col3::decimal(7,1)) doesn't match with query schema(zcol1::float)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1117,8 +1098,7 @@ private void testBigIntToDecimalInsertFailure(String newTable, String schema) th " select * from " + schema + "." + newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromotingFailure)) .hasMessageContaining("Table schema(col3::decimal(20,2)) doesn't match with query schema(zcol1::int64)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1164,8 +1144,7 @@ private void testIntToDecimalInsertFailure(String newTable, String schema) throw " select * from " + schema + "." 
+ newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromotingFailure)) .hasMessageContaining("Table schema(col3::decimal(11,2)) doesn't match with query schema(zcol1::int32)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1220,8 +1199,7 @@ private void testDecimalToDoubleInsertFailure(String newTable, String schema) th .baselineColumns("c") .baselineValues(new Long(2)) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1276,8 +1254,7 @@ private void testDecimalToFloatInsertFailure(String newTable, String schema) thr .baselineColumns("c") .baselineValues(new Long(2)) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_3")); @@ -1309,8 +1286,7 @@ private void testBigIntToIntInsertFailure(String newTable, String testSchema) th " select * from " + testSchema + "." + newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromoting)) .hasMessageContaining("Table schema(col2::int32) doesn't match with query schema(zcol1::int64)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -1341,8 +1317,7 @@ private void testDoubleToFloatInsertFailure(String newTable, String testSchema) " select * from " + testSchema + "." 
+ newTable + "_1"; UserExceptionAssert.assertThatThrownBy(() -> test(insertUppromoting)) .hasMessageContaining("Table schema(col2::float) doesn't match with query schema(zcol1::double)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } @@ -1548,8 +1523,7 @@ private void insertIntoFewCols(String newTable, String schema) throws Exception .baselineValues(1, "name1") .baselineValues(null, "name2") .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1568,8 +1542,7 @@ private void insertIntoFewColsDuplicate(String newTable, String tempSchema) thro Thread.sleep(1001); String insertTable = String.format("insert into %s.%s(id, id) select id, id from %s.%s", tempSchema, newTable, tempSchema, newTable); errorMsgTestHelper(insertTable, "Duplicate column name [id]"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1607,8 +1580,7 @@ private void insertIntoFewColsNonExistingCol(String newTable, String schema) thr Thread.sleep(1001); String insertTable = String.format("insert into %s.%s(id, id1, id2) select id, id, id from %s.%s", schema, newTable, schema, newTable); errorMsgTestHelper(insertTable, "Specified column(s) [id2, id1] not found in schema."); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1627,8 +1599,7 @@ private void insertIntoFewColsSchemaMismatch(String newTable, String schema) thr Thread.sleep(1001); String insertTable = String.format("insert into %s.%s(id) select name from %s.%s", schema, newTable, schema, newTable); errorMsgTestHelper(insertTable, "Table schema(id::int32) doesn't match with query schema(name::varchar)"); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1658,8 +1629,7 @@ private void insertIntoFewColsOrderedFields(String newTable, String schema) thro .baselineValues(1, "name1", "address1", 1) .baselineValues(null, "name2", null, 2) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1685,8 +1655,7 @@ private void insertIntoFewColsUnorderedFields(String newTable, String schema) th .baselineColumns("id", "name") .baselineValues(1, "name1") .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1712,8 +1681,7 @@ private void insertIntoFewColsUppromotableTypes(String newTable, String schema) .baselineColumns("id", "name") .baselineValues(1.0f, "name1") .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable)); } } @@ -1753,8 +1721,7 @@ private void testFewColsUpPromotableInsert(String newTable, String schema) throw .baselineColumns("col1", "col2", "col3") .baselineValues(new Long("1"), new BigDecimal("12345.340"), new Double("0.3")) .go(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_1")); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTable + "_2")); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzer.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzer.java deleted file mode 100644 index bbd3b00970..0000000000 --- 
a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzer.java +++ /dev/null @@ -1,510 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.exec.planner.sql; - -import static org.apache.calcite.sql.validate.SqlMonotonicity.CONSTANT; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Stream; - -import org.apache.calcite.adapter.java.JavaTypeFactory; -import org.apache.calcite.config.CalciteConnectionConfig; -import org.apache.calcite.jdbc.CalciteSchema; -import org.apache.calcite.linq4j.tree.Expression; -import org.apache.calcite.plan.RelOptPlanner; -import org.apache.calcite.plan.RelOptSchema; -import org.apache.calcite.plan.RelOptTable; -import org.apache.calcite.prepare.Prepare; -import org.apache.calcite.rel.RelCollation; -import org.apache.calcite.rel.RelDistribution; -import org.apache.calcite.rel.RelDistributions; -import org.apache.calcite.rel.RelNode; -import org.apache.calcite.rel.RelReferentialConstraint; -import org.apache.calcite.rel.logical.LogicalTableScan; -import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rel.type.RelDataTypeFactory; -import org.apache.calcite.rel.type.RelDataTypeField; -import org.apache.calcite.schema.ColumnStrategy; -import org.apache.calcite.sql.SqlAccessType; -import org.apache.calcite.sql.SqlFunctionCategory; -import org.apache.calcite.sql.SqlIdentifier; -import org.apache.calcite.sql.SqlOperator; -import org.apache.calcite.sql.SqlSyntax; -import org.apache.calcite.sql.advise.SqlAdvisor; -import org.apache.calcite.sql.advise.SqlAdvisorValidator; -import org.apache.calcite.sql.parser.SqlParserUtil; -import org.apache.calcite.sql.parser.StringAndPos; -import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.calcite.sql.validate.SqlModality; -import org.apache.calcite.sql.validate.SqlMoniker; -import org.apache.calcite.sql.validate.SqlMonikerImpl; -import org.apache.calcite.sql.validate.SqlMonikerType; -import org.apache.calcite.sql.validate.SqlMonotonicity; -import org.apache.calcite.sql.validate.SqlNameMatcher; -import org.apache.calcite.sql.validate.SqlNameMatchers; -import org.apache.calcite.sql.validate.SqlValidatorUtil; -import org.apache.calcite.sql.validate.SqlValidatorWithHints; -import org.apache.calcite.sql2rel.InitializerContext; -import org.apache.calcite.util.ImmutableBitSet; -import org.apache.calcite.util.Pair; -import org.apache.calcite.util.Util; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import 
com.dremio.exec.expr.fn.FunctionImplementationRegistry; -import com.dremio.exec.planner.types.JavaTypeFactoryImpl; -import com.dremio.exec.server.SabotContext; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - -/** - * Tests {@link com.dremio.exec.planner.sql.SQLAnalyzer} - */ -public class TestSQLAnalyzer { - - private SQLAnalyzer sqlAnalyzer; - - protected static final String TEST_CATALOG = "TEST_CATALOG"; - protected static final String TEST_SCHEMA = "TEST_SCHEMA"; - protected static final String TEST_TABLE = "TEST_TABLE"; - protected static final List FROM_KEYWORDS = Arrays.asList("(", "LATERAL", "TABLE", "UNNEST"); - - @BeforeEach - public void setup() { - // Create and Mock dependencies - final SabotContext sabotContext = mock(SabotContext.class); - final FunctionImplementationRegistry functionImplementationRegistry = mock(FunctionImplementationRegistry.class); - final JavaTypeFactory typeFactory = JavaTypeFactoryImpl.INSTANCE; - - // Stub necessary methods - when(sabotContext.getFunctionImplementationRegistry()).thenReturn(functionImplementationRegistry); - - // Utilize custom catalog reader implementation to return specific suggestions, - // without requiring server startup - MockCatalogReader mockCatalogReader = new MockCatalogReader(typeFactory, false); - SqlValidatorWithHints validator = new SqlAdvisorValidator(new OperatorTable(sabotContext.getFunctionImplementationRegistry()), - mockCatalogReader, typeFactory, SqlAdvisorValidator.Config.DEFAULT.withSqlConformance(DremioSqlConformance.INSTANCE)); - sqlAnalyzer = new SQLAnalyzer(validator); - } - - /** - * Values to be used for parameterized tests. Contains the following: - * - Query to be used for suggestion with ^ indicating position of the cursor. - * - The expected number of returned suggestions given the MockCatalogReader implementation. - * - Boolean indicating if suggestion values should be checked. - */ - private static Stream data() { - return Stream.of( - // Cursor after 'from' - Arguments.of( - "Select * from ^", - 7, - true), - // Cursor after 'T' - Arguments.of( - "select * from T^", - 4, - true), - // Cursor after 'I' - Arguments.of( - "select * from I^", - 0, - true), - // Cursor before 'dummy a' - Arguments.of( - "select a.colOne, b.colTwo from ^dummy a, TEST_SCHEMA.dummy b", - 7, - true), - // Cursor after 'from' - Arguments.of( - "select a.colOne, b.colTwo from ^", - 7, - true), - // Cursor after 'from' - Arguments.of( - "select a.colOne, b.colTwo from ^, TEST_SCHEMA.dummy b", - 7, - true), - // Cursor after 'from' before 'a' - Arguments.of( - "select a.colOne, b.colTwo from ^a", - 7, - true), - // Cursor before 'TEST_SCHEMA' - Arguments.of( - "select a.colOne, b.colTwo from dummy a, ^TEST_SCHEMA.dummy b", - 7, - true), - // Cursor after 'TEST_SCHEMA.' - Arguments.of( - "select a.colOne, b.colTwo from dummy a, TEST_SCHEMA.^", - 3, - true), - // Cursor after 'group' - Arguments.of( - "select a.colOne, b.colTwo from emp group ^", - 1, - false), - // Cursor before 'dummy a' - Arguments.of( - "select a.colOne, b.colTwo from ^dummy a join TEST_SCHEMA.dummy b " - + "on a.colOne=b.colOne where colTwo=1", - 7, - true), - // Cursor after 'from' before 'a' - Arguments.of( - "select a.colOne, b.colTwo from ^ a join sales.dummy b", - 7, - true), - // Cursor before 'TEST_SCHEMA.dummy b' - Arguments.of( - "select a.colOne, b.colTwo from dummy a join ^TEST_SCHEMA.dummy b " - + "on a.colTwo=b.colTwo where colOne=1", - 7, - true), - // Cursor after 'TEST_SCHEMA.' 
- Arguments.of( - "select a.colOne, b.colTwo from dummy a join TEST_SCHEMA.^", - 3, - true), - // Cursor after 'TEST_SCHEMA.' - Arguments.of( - "select a.colOne, b.colTwo from dummy a join TEST_SCHEMA.^ on", - 3, - true), - // Cursor after 'TEST_SCHEMA.' - Arguments.of( - "select a.colOne, b.colTwo from dummy a join TEST_SCHEMA.^ on a.colTwo=", - 108, - false), - // Cursor after 'TEST_CATALOG.TEST_SCHEMA' - Arguments.of( - "select * from dummy join TEST_CATALOG.TEST_SCHEMA ^", - 32, - false) - ); - } - - /** - * Check that the returned suggestions list contains the expected hints. - * - * @param suggestions The list of query hints - */ - private void assertSuggestions(List suggestions) { - for (SqlMoniker hint : suggestions) { - switch (hint.getType()) { - case CATALOG: - assertEquals(TEST_CATALOG ,hint.getFullyQualifiedNames().get(0)); - break; - case SCHEMA: - assertEquals(TEST_SCHEMA ,hint.getFullyQualifiedNames().get(0)); - break; - case TABLE: - assertEquals(TEST_TABLE ,hint.getFullyQualifiedNames().get(0)); - break; - case KEYWORD: - assertTrue(FROM_KEYWORDS.contains(hint.getFullyQualifiedNames().get(0))); - break; - default: - Assertions.fail(); - } - } - } - - @ParameterizedTest(name = "{index}: {0}") - @MethodSource("data") - public void testSuggestion(String sql, int expectedSuggestionCount, boolean checkSuggestions) { - final StringAndPos stringAndPos = SqlParserUtil.findPos(sql); - List suggestions = sqlAnalyzer.suggest(stringAndPos.sql, stringAndPos.cursor); - assertEquals(expectedSuggestionCount, suggestions.size()); - if (checkSuggestions) { - assertSuggestions(suggestions); - } - } - - @ParameterizedTest(name = "{index}: {0}") - @MethodSource("data") - public void testValidation(String sql, int expectedSuggestionCount, boolean checkSuggestions) { - List validationErrors = sqlAnalyzer.validate("select * from"); - assertEquals(1, validationErrors.size()); - assertEquals(10, validationErrors.get(0).getStartColumnNum()); - assertEquals(13, validationErrors.get(0).getEndColumnNum()); - } - - /** - * Custom catalog reader to replace {@link org.apache.calcite.prepare.CalciteCatalogReader} in the - * instantiation of the SQL validator. - */ - class MockCatalogReader implements Prepare.CatalogReader { - - protected final RelDataTypeFactory typeFactory; - private final boolean caseSensitive; - private ImmutableList> schemaPaths; - - /** - * Creates a MockCatalogReader. 
- */ - public MockCatalogReader(RelDataTypeFactory typeFactory, - boolean caseSensitive) { - this(typeFactory, caseSensitive, ImmutableList.of(ImmutableList.of())); - } - - private MockCatalogReader(RelDataTypeFactory typeFactory, - boolean caseSensitive, ImmutableList> schemaPaths) { - this.typeFactory = typeFactory; - this.caseSensitive = caseSensitive; - this.schemaPaths = schemaPaths; - } - - @Override - public Prepare.PreparingTable getTableForMember(List names) { - return getTable(names); - } - - @Override - public RelDataTypeFactory getTypeFactory() { - return typeFactory; - } - - @Override - public void registerRules(RelOptPlanner planner) throws Exception { - // Do nothing - } - - @Override - public Prepare.CatalogReader withSchemaPath(List schemaPath) { - ImmutableList immutableSchemaPath = ImmutableList.copyOf(schemaPath); - return new MockCatalogReader(typeFactory, caseSensitive, - ImmutableList.> copyOf(Iterables.concat(this.schemaPaths, ImmutableList.of(immutableSchemaPath)))); - } - - @Override - public Prepare.PreparingTable getTable(List names) { - // Create table used for suggestion - if (names.contains("TEST_TABLE")) { - MockTable mockTable = new MockTable("TEST_TABLE", this); - mockTable.addColumn("colOne", typeFactory.createSqlType(SqlTypeName.INTEGER)); - mockTable.addColumn("colTwo", typeFactory.createSqlType(SqlTypeName.INTEGER)); - return mockTable; - } - - return null; - } - - @Override - public RelDataType getNamedType(SqlIdentifier typeName) { - return null; - } - - @Override - public List getAllSchemaObjectNames(List names) { - final List result = new ArrayList<>(); - result.add(new SqlMonikerImpl(Arrays.asList("TEST_CATALOG"), SqlMonikerType.CATALOG)); - result.add(new SqlMonikerImpl(Arrays.asList("TEST_SCHEMA"), SqlMonikerType.SCHEMA)); - result.add(new SqlMonikerImpl(Arrays.asList("TEST_TABLE"), SqlMonikerType.TABLE)); - return result; - } - - @Override - public List> getSchemaPaths() { - return (List>) (Object) schemaPaths; - } - - @Override - public RelDataTypeField field(RelDataType rowType, String alias) { - return SqlValidatorUtil.lookupField(caseSensitive, rowType, alias); - } - - @Override - public boolean matches(String string, String name) { - return Util.matches(caseSensitive, string, name); - } - - @Override - public RelDataType createTypeFromProjection(RelDataType type, List columnNameList) { - return SqlValidatorUtil.createTypeFromProjection(type, columnNameList, - typeFactory, caseSensitive); - } - - @Override - public boolean isCaseSensitive() { - return false; - } - - @Override - public CalciteSchema getRootSchema() { - throw new UnsupportedOperationException(); - } - - @Override - public CalciteConnectionConfig getConfig() { - throw new UnsupportedOperationException(); - } - - @Override - public SqlNameMatcher nameMatcher() { - return SqlNameMatchers.withCaseSensitive(caseSensitive); - } - - @Override - public void lookupOperatorOverloads(SqlIdentifier opName, SqlFunctionCategory category, SqlSyntax syntax, - List operatorList, SqlNameMatcher nameMatcher) { - // Do nothing - } - - @Override - public List getOperatorList() { - return null; - } - - /** - * Mock implementation of - * {@link org.apache.calcite.prepare.Prepare.PreparingTable}. 
- */ - public class MockTable implements Prepare.PreparingTable { - protected final MockCatalogReader catalogReader; - protected final List> columnList = - new ArrayList<>(); - protected RelDataType rowType; - protected final List names; - - public MockTable(String name, MockCatalogReader catalogReader) { - this.names = ImmutableList.of("TEST_CATALOG", "TEST_SCHEMA", name); - this.catalogReader = catalogReader; - } - - @Override - public RelOptSchema getRelOptSchema() { - return catalogReader; - } - - @Override - public RelNode toRel(ToRelContext context) { - return LogicalTableScan.create(context.getCluster(), this, ImmutableList.of()); - } - - @Override - public List getCollationList() { - return null; - } - - @Override - public RelDistribution getDistribution() { - return RelDistributions.ANY; - } - - @Override - public boolean isKey(ImmutableBitSet columns) { - return false; - } - - @Override - public List getKeys() { - return ImmutableList.of(); - } - - @Override - public T unwrap(Class clazz) { - if (clazz.isInstance(this)) { - return clazz.cast(this); - } - return null; - } - - @Override - public RelDataType getRowType() { - return rowType; - } - - @Override - public List getQualifiedName() { - return names; - } - - @Override - public double getRowCount() { - return 0; - } - - @Override - public SqlMonotonicity getMonotonicity(String columnName) { - return CONSTANT; - } - - @Override - public SqlAccessType getAllowedAccess() { - return SqlAccessType.ALL; - } - - @Override - public boolean supportsModality(SqlModality modality) { - return false; - } - - @Override - public boolean isTemporal() { - return false; - } - - @Override - public Expression getExpression(Class clazz) { - throw new UnsupportedOperationException(); - } - - public void addColumn(String name, RelDataType type) { - columnList.add(Pair.of(name, type)); - } - - @Override - public RelOptTable extend(List extendedFields) { - return this; - } - - @Override - public List getReferentialConstraints() { - return ImmutableList.of(); - } - - @Override - public List getColumnStrategies() { - return ImmutableList.of(); - } - - @Override - public boolean columnHasDefaultValue(RelDataType rowType, int ordinal, InitializerContext initializerContext) { - return false; - } - } - - @Override - public C unwrap(Class aClass) { - if (aClass.isInstance(this)) { - return aClass.cast(this); - } - return null; - } - } -} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzerFactory.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzerFactory.java deleted file mode 100644 index 534b8895d1..0000000000 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLAnalyzerFactory.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.exec.planner.sql; - -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import org.apache.calcite.sql.advise.SqlAdvisorValidator; -import org.apache.calcite.sql.validate.SqlValidatorImpl; -import org.apache.calcite.sql.validate.SqlValidatorWithHints; -import org.junit.Test; - -import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.MetadataRequestOptions; -import com.dremio.exec.expr.fn.FunctionImplementationRegistry; -import com.dremio.exec.planner.physical.PlannerSettings; -import com.dremio.exec.server.SabotContext; -import com.dremio.exec.server.options.ProjectOptionManager; -import com.dremio.exec.store.CatalogService; -import com.dremio.options.OptionList; -import com.dremio.options.OptionValidatorListing; -import com.dremio.options.OptionValue; -import com.dremio.sabot.rpc.user.UserSession; -import com.dremio.service.users.SystemUser; - -/** - * Tests {@link TestSQLAnalyzerFactory} - */ -public class TestSQLAnalyzerFactory { - - @Test - public void testCreationOfValidator() { - SabotContext sabotContext = mock(SabotContext.class); - FunctionImplementationRegistry functionImplementationRegistry = mock(FunctionImplementationRegistry.class); - CatalogService catalogService = mock(CatalogService.class); - Catalog catalog = mock(Catalog.class); - ProjectOptionManager mockOptions = mock(ProjectOptionManager.class); - when(mockOptions.getOptionValidatorListing()).thenReturn(mock(OptionValidatorListing.class)); - - // Stub appropriate methods. - when(sabotContext.getFunctionImplementationRegistry()).thenReturn(functionImplementationRegistry); - when(sabotContext.getCatalogService()).thenReturn(catalogService); - when(sabotContext.getCatalogService().getCatalog(any(MetadataRequestOptions.class))).thenReturn(catalog); - - OptionValue value1 = OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, PlannerSettings.ENABLE_DECIMAL_V2_KEY, false); - OptionValue value2 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, UserSession.MAX_METADATA_COUNT.getOptionName(), 0); - OptionList optionList = new OptionList(); - optionList.add(value1); - optionList.add(value2); - - when(mockOptions.getOption(PlannerSettings.ENABLE_DECIMAL_V2_KEY)).thenReturn(value1); - when(mockOptions.getOption(UserSession.MAX_METADATA_COUNT.getOptionName())).thenReturn(value2); - when(mockOptions.getNonDefaultOptions()).thenReturn(optionList); - - // Test that the correct concrete implementation is created. 
- SQLAnalyzer sqlAnalyzer = SQLAnalyzerFactory.createSQLAnalyzer(SystemUser.SYSTEM_USERNAME, sabotContext, null, true, mockOptions); - SqlValidatorWithHints validator = sqlAnalyzer.validator; - assertTrue(validator instanceof SqlAdvisorValidator); - - sqlAnalyzer = SQLAnalyzerFactory.createSQLAnalyzer(SystemUser.SYSTEM_USERNAME, sabotContext, null, false, mockOptions); - validator = sqlAnalyzer.validator; - assertTrue(validator instanceof SqlValidatorImpl); - } -} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLCreateEmptyTable.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLCreateEmptyTable.java index 33cbcfd1e4..1d5973e284 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLCreateEmptyTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSQLCreateEmptyTable.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import java.util.ArrayList; import java.util.List; import org.apache.calcite.sql.SqlKind; @@ -28,6 +29,7 @@ import com.dremio.common.exceptions.UserException; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.parser.SqlCreateEmptyTable; +import com.dremio.exec.proto.UserBitShared; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; @@ -208,6 +210,48 @@ public void testTruncatePartitionTransformWithBadArgsFails() { .isInstanceOf(UserException.class) .hasMessageContaining("Invalid arguments for partition transform"); } + + @Test + public void testParseMalformedQueriesWithTableProperties() throws Exception { + List<String> malformedQueries = new ArrayList<String>() {{ + add("create table s (a BIGINT) TBLPROPERTIES ()"); + add("create table s (a BIGINT) TBLPROPERTIES"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = )"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name')"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name', 'property_name1' )"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name', 'property_name1' = )"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name' = 'property_value', 'property_name1')"); + }}; + + for (String malformedQuery : malformedQueries) { + parseAndVerifyMalFormat(malformedQuery); + } + } + + @Test + public void testParseWellformedQueriesWithTableProperties() throws Exception { + List<String> wellformedQueries = new ArrayList<String>() {{ + add("create table s (a BIGINT) TBLPROPERTIES ('property_name' = 'property_value')"); + add("create table s (a BIGINT) TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = 'property_value1')"); + }}; + + for (String wellformedQuery : wellformedQueries) { + parseAndVerifyWellFormat(wellformedQuery); + } + } + + private void parseAndVerifyWellFormat(String sql) { + SqlNode sqlNode = SqlConverter.parseSingleStatementImpl(sql, parserConfig, false); + Assert.assertTrue(sqlNode instanceof SqlCreateEmptyTable); + Assert.assertTrue(sqlNode.isA(Sets.immutableEnumSet(SqlKind.OTHER_DDL))); + } + + private void parseAndVerifyMalFormat(String sql) { + try { + SqlConverter.parseSingleStatementImpl(sql, parserConfig, false); + Assert.fail("Expected a parse error for: " + sql); + } catch (UserException ue) { + Assert.assertEquals(UserBitShared.DremioPBError.ErrorType.PARSE, ue.getErrorType()); + } + } private List parseAndGetPartitionTransforms(String sql) { SqlNode sqlNode = SqlConverter.parseSingleStatementImpl(sql, parserConfig,
      false);
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSqlAlterTableProperties.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSqlAlterTableProperties.java
new file mode 100644
index 0000000000..2e0face260
--- /dev/null
+++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestSqlAlterTableProperties.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.planner.sql;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.calcite.config.NullCollation;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.pretty.SqlPrettyWriter;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.dremio.BaseTestQuery;
+import com.dremio.common.exceptions.UserException;
+import com.dremio.common.utils.SqlUtils;
+import com.dremio.exec.planner.physical.PlannerSettings;
+import com.dremio.exec.planner.sql.parser.SqlAlterTableProperties;
+import com.dremio.exec.proto.UserBitShared;
+import com.dremio.test.UserExceptionAssert;
+import com.google.common.collect.Sets;
+
+public class TestSqlAlterTableProperties extends BaseTestQuery {
+  private final ParserConfig parserConfig = new ParserConfig(ParserConfig.QUOTING, 100,
+    PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT.getDefault().getBoolVal());
+
+  @Test
+  public void testParseMalformedQueries() throws Exception {
+    List<String> malformedQueries = new ArrayList<String>() {{
+      add("ALTER TABLE t1 SET TBLPROPERTIES ()");
+      add("ALTER TABLE t1 SET TBLPROPERTIES");
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = )");
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' )");
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name', 'property_name1' )");
+      add("ALTER TABLE t1 UNSET TBLPROPERTIES ('property_name' = 'property_value')");
+      add("ALTER TABLE t1 UNSET TBLPROPERTIES ('property_name', 'property_name1' = )");
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name', 'property_name1' = 'property_value' )");
+      add("ALTER TABLE t1 SETUNSET TBLPROPERTIES ('property_name' )");
+    }};
+
+    for (String malformedQuery : malformedQueries) {
+      parseAndVerifyMalFormat(malformedQuery);
+    }
+  }
+
+  @Test
+  public void testParseWellformedQueries() throws Exception {
+    List<String> wellformedQueries = new ArrayList<String>() {{
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' = 'property_value')");
+      add("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = 'property_value1')");
+      add("ALTER TABLE t1 UNSET TBLPROPERTIES ('property_name')");
+      add("ALTER TABLE t1 UNSET TBLPROPERTIES ('property_name', 'property_name1')");
+    }};
+
+    for (String wellformedQuery : wellformedQueries) {
+      parseAndVerifyWellFormat(wellformedQuery);
+    }
+  }
+
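+  // Round-trip check: parse each ALTER TABLE statement, unparse it with the Dremio
+  // dialect, and compare the result against the expected quoted form.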
+  @Test
+  public void testParseAndUnparse() throws Exception {
+    Map<String, String> queryExpectedStrings = new HashMap<String, String>() {{
+      put("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = 'property_value1')",
+        "ALTER TABLE \"t1\" SET TBLPROPERTIES ('property_name' = 'property_value', 'property_name1' = 'property_value1')");
+      put("ALTER TABLE t1 UNSET TBLPROPERTIES ('property_name', 'property_name1')",
+        "ALTER TABLE \"t1\" UNSET TBLPROPERTIES ('property_name', 'property_name1')");
+    }};
+
+    for (Map.Entry<String, String> entry : queryExpectedStrings.entrySet()) {
+      parseAndVerifyUnparse(entry.getKey(), entry.getValue());
+    }
+  }
+
+  @Test
+  public void testExecuteWithoutTablePropertiesSupportKey() throws Exception {
+    UserExceptionAssert.assertThatThrownBy(() -> BaseTestQuery.test("ALTER TABLE t1 SET TBLPROPERTIES ('property_name' = 'property_value')"))
+      .hasMessageContaining("TBLPROPERTIES is not supported in the query");
+  }
+
+  private void parseAndVerifyWellFormat(String sql) {
+    SqlNode sqlNode = SqlConverter.parseSingleStatementImpl(sql, parserConfig, false);
+    Assert.assertTrue(sqlNode instanceof SqlAlterTableProperties);
+    Assert.assertTrue(sqlNode.isA(Sets.immutableEnumSet(SqlKind.ALTER_TABLE)));
+  }
+
+  private void parseAndVerifyUnparse(String sql, String expectedString) {
+    SqlNode sqlNode = SqlConverter.parseSingleStatementImpl(sql, parserConfig, false);
+    Assert.assertTrue(sqlNode instanceof SqlAlterTableProperties);
+
+    // verify unParse
+    SqlDialect dremioDialect =
+      new SqlDialect(SqlDialect.DatabaseProduct.UNKNOWN, "Dremio", Character.toString(SqlUtils.QUOTE), NullCollation.FIRST);
+    SqlPrettyWriter writer = new SqlPrettyWriter(dremioDialect);
+    sqlNode.unparse(writer, 0, 0);
+    String actual = writer.toString();
+    Assert.assertEquals(expectedString.toLowerCase(), actual.toLowerCase());
+  }
+
+  private void parseAndVerifyMalFormat(String sql) {
+    try {
+      SqlConverter.parseSingleStatementImpl(sql, parserConfig, false);
+      // A malformed query that parses cleanly must fail the test, not pass silently.
+      Assert.fail("Expected a parse error for: " + sql);
+    } catch (UserException ue) {
+      assertEquals(UserBitShared.DremioPBError.ErrorType.PARSE, ue.getErrorType());
+    }
+  }
+}
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestTableVersionParsing.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestTableVersionParsing.java
index 087bec85c1..4f7de16f98 100644
--- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestTableVersionParsing.java
+++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestTableVersionParsing.java
@@ -387,7 +387,7 @@ public void testUnparseTableMacroWithSnapshotVersion() throws Exception {
       new TableVersionContext(TableVersionType.SNAPSHOT_ID, "snapshotid1"));
 
     String expectedUnparsedString = "SELECT *\n" +
-      "FROM my.table1 AT SNAPSHOT_ID snapshotid1";
+      "FROM my.table1 AT SNAPSHOT snapshotid1";
 
     SqlNode rootNode = parseAndValidate("SELECT * FROM TABLE(table_files('my.table1')) AT SNAPSHOT 'snapshotid1'",
       ImmutableList.of(expected), true);
@@ -469,7 +469,7 @@ public void testUnparseTableWithSnapshotVersion() throws Exception {
       new TableVersionContext(TableVersionType.SNAPSHOT_ID, "1"));
 
     String expectedUnparsedString = "SELECT *\n" +
-      "FROM \"my\".\"table1\" AT SNAPSHOT_ID 1";
+      "FROM \"my\".\"table1\" AT SNAPSHOT 1";
 
     SqlNode rootNode = parseAndValidate("SELECT * FROM my.table1 AT SNAPSHOT '1'", ImmutableList.of(expected), true);
     rootNode.unparse(writer, 0, 0);
     String sqlString = writer.toString();
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestUDFParsing.java
b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestUDFParsing.java index fd91c58a63..e554bf001a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestUDFParsing.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestUDFParsing.java @@ -165,9 +165,9 @@ private void validateCreate(SqlCreateFunction sqlCreateFunctionResult, String fN Assert.assertEquals("function body doesn't match", exp, sqlCreateFunctionResult.getExpression().toString()); Assert.assertEquals("function arguments doesn't match", fieldList, sqlCreateFunctionResult.getFieldList().toString()); if (scalarReturnType == null) { - Assert.assertNull("function return type doesn't match", sqlCreateFunctionResult.getScalarReturnType()); + Assert.assertNull("function return type doesn't match", sqlCreateFunctionResult.getReturnType().getScalarReturnType()); } else { - Assert.assertEquals("function return type doesn't match", scalarReturnType, sqlCreateFunctionResult.getScalarReturnType().toString()); + Assert.assertEquals("function return type doesn't match", scalarReturnType, sqlCreateFunctionResult.getReturnType().getScalarReturnType().toString()); } Assert.assertEquals("shouldReplace doesn't match", replacePolicy, sqlCreateFunctionResult.shouldReplace()); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestVacuumCatalogValidations.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestVacuumCatalogValidations.java new file mode 100644 index 0000000000..e08f503731 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/TestVacuumCatalogValidations.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql; + +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; + +import org.junit.Test; + +import com.dremio.BaseTestQuery; +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.ExecConstants; + +/** + * Validations on `VACUUM CATALOG` sql command + */ +public class TestVacuumCatalogValidations extends BaseTestQuery { + + @Test + public void testFeatureNotEnabled() { + assertThatThrownBy(() -> runSQL(String.format("VACUUM CATALOG %s", TEMP_SCHEMA_HADOOP))) + .isInstanceOf(UserException.class) + .hasMessageContaining("UNSUPPORTED_OPERATION ERROR: VACUUM CATALOG command is not supported."); + } + + @Test + public void testInvalidPath() throws Exception { + try (AutoCloseable c = enableVacuumCatalog(); + DmlQueryTestUtils.Table table = createBasicTable(TEMP_SCHEMA_HADOOP,2, 1)) { + assertThatThrownBy(() -> runSQL(String.format("VACUUM CATALOG %s.%s", TEMP_SCHEMA_HADOOP, table.fqn))) + .isInstanceOf(UserException.class) + .hasMessageContaining("PARSE ERROR: Catalog name cannot have multiple path components."); + } + } + + @Test + public void testAssertSourceType() throws Exception { + try (AutoCloseable c = enableVacuumCatalog()) { + assertThatThrownBy(() -> runSQL(String.format("VACUUM CATALOG %s", TEMP_SCHEMA_HADOOP))) + .isInstanceOf(UserException.class) + .hasMessageContaining("UNSUPPORTED_OPERATION ERROR: VACUUM CATALOG is supported only on versioned sources."); + } + } + + private static AutoCloseable enableVacuumCatalog() { + setSystemOption(ExecConstants.ENABLE_ICEBERG_VACUUM_CATALOG, "true"); + return () -> + setSystemOption(ExecConstants.ENABLE_ICEBERG_VACUUM_CATALOG, + ExecConstants.ENABLE_ICEBERG_VACUUM_CATALOG.getDefault().getBoolVal().toString()); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/UpdateTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/UpdateTests.java index 0bed4db6c3..59591292fa 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/UpdateTests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/UpdateTests.java @@ -136,6 +136,17 @@ public static void testUpdateById(BufferAllocator allocator, String source) thro } } + public static void testUpdateByIdWithEqualNull(BufferAllocator allocator, String source) throws Exception { + // column = null should return false and no data should be updated + try (Tables tables = createBasicNonPartitionedAndPartitionedTables(source, 2, 10, PARTITION_COLUMN_ONE_INDEX_SET)) { + for (Table table : tables.tables) { + testDmlQuery(allocator, "UPDATE %s SET id = %s WHERE id = %s", + new Object[]{table.fqn, (int) table.originalData[5][0] * 10, null}, table, 0, + ArrayUtils.subarray(table.originalData, 0, table.originalData.length)); + } + } + } + public static void testUpdateTargetTableWithAndWithoutAlias(BufferAllocator allocator, String source) throws Exception { // without target table aliasing try (Tables tables = createBasicNonPartitionedAndPartitionedTables(source, 2, 10, PARTITION_COLUMN_ONE_INDEX_SET)) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/VacuumTests.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/VacuumTests.java index af47022aeb..d5a80243c5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/VacuumTests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/VacuumTests.java @@ -18,12 +18,25 @@ import static 
com.dremio.BaseTestQuery.getDfsTestTmpSchemaLocation; import static com.dremio.BaseTestQuery.getIcebergTable; import static com.dremio.BaseTestQuery.test; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_DATA_FILE_COUNT; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_EQUALITY_DELETE_FILES_COUNT; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_MANIFEST_FILES_COUNT; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_MANIFEST_LISTS_COUNT; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_PARTITION_STATS_FILES_COUNT; +import static com.dremio.exec.planner.VacuumOutputSchema.DELETE_POSITION_DELETE_FILES_COUNT; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.EMPTY_PATHS; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.PARTITION_COLUMN_ONE_INDEX_SET; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addQuotes; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.addRows; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicNonPartitionedAndPartitionedTables; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createEmptyTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createStockIcebergTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.loadTable; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.testMalformedDmlQueries; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.testQueryValidateStatusSummary; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.verifyCountSnapshotQuery; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.verifyData; import static com.dremio.exec.planner.sql.DmlQueryTestUtils.waitUntilAfter; import static com.dremio.exec.planner.sql.handlers.SqlHandlerUtil.getTimestampFromMillis; @@ -32,13 +45,13 @@ import java.util.Set; import org.apache.arrow.memory.BufferAllocator; -import org.apache.commons.lang3.ArrayUtils; import org.apache.iceberg.DataFile; import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; import org.apache.iceberg.io.FileIO; import org.junit.Assert; +import com.dremio.TestBuilder; import com.dremio.exec.proto.UserBitShared.DremioPBError.ErrorType; import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.io.file.Path; @@ -65,6 +78,8 @@ public static void testMalformedVacuumQueries(String source) throws Exception { "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN", "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN =", "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = %s RETAIN_LAST", + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN RETAIN_LAST", + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN %s RETAIN_LAST", "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = %s RETAIN_LAST =", "VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST", "VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST =", @@ -74,6 +89,25 @@ public static void testMalformedVacuumQueries(String source) throws Exception { } } + public static void testSimpleExpireOlderThanRetainLastUsingEqual(BufferAllocator allocator, String source) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { + Thread.sleep(100); + final long timestampMillisToExpire = System.currentTimeMillis(); + // Insert more rows to increase snapshots + DmlQueryTestUtils.Table table2 = addRows(table, 1); + table2 = addRows(table2, 1); + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE 
SNAPSHOTS OLDER_THAN = '%s' RETAIN_LAST = 1", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + table, + new Long[] {0L, 0L, 0L, 0L, 2L, 0L}); + + // Data not changed. + verifyData(allocator, table2, table2.originalData); + } + } + public static void testSimpleExpireOlderThan(BufferAllocator allocator, String source) throws Exception { try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { Thread.sleep(100); @@ -81,12 +115,15 @@ public static void testSimpleExpireOlderThan(BufferAllocator allocator, String s // Insert more rows to increase snapshots DmlQueryTestUtils.Table table2 = addRows(table, 1); table2 = addRows(table2, 1); - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s'", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - ArrayUtils.subarray(table2.originalData, 0, table2.originalData.length)); + new Long[] {0L, 0L, 0L, 0L, 2L, 0L}); + + // Data not changed. + verifyData(allocator, table2, table2.originalData); } } @@ -110,12 +147,15 @@ public static void testExpireOlderThan(BufferAllocator allocator, String source) Assert.assertEquals("Should have four snapshots", 4, Iterables.size(updatedTable.snapshots())); Assert.assertEquals("Should have four history entries", 4, updatedTable.history().size()); - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s'", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - ArrayUtils.subarray(table2.originalData, 0, table2.originalData.length)); + new Long[] {0L, 0L, 0L, 0L, 2L, 0L}); + + // Data not changed. + verifyData(allocator, table2, table2.originalData); Table vacuumedTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); Assert.assertEquals("Expire should keep last two snapshots", 2, Iterables.size(vacuumedTable.snapshots())); @@ -146,12 +186,15 @@ public static void testExpireRetainLast(BufferAllocator allocator, String source Assert.assertEquals("Should have four history entries", 4, updatedTable.history().size()); // No snapshots are dated back to default 5 days ago, and no snapshots are expired, even claim to retain last 2. - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST = %s", new Object[]{table.fqn, "2"}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST %s", + new Object[]{table.fqn, "2"}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - ArrayUtils.subarray(table2.originalData, 0, table2.originalData.length)); + new Long[] {0L, 0L, 0L, 0L, 0L, 0L}); + + // Data not changed. 
+ verifyData(allocator, table2, table2.originalData); Table vacuumedTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); Assert.assertEquals("Expire should keep last four snapshots", 4, Iterables.size(vacuumedTable.snapshots())); @@ -183,13 +226,15 @@ public static void testRetainLastWithExpireOlderThan(BufferAllocator allocator, final long expectedSnapshotId = updatedTable.currentSnapshot().snapshotId(); // Use the latest snapshot's timestamp for OLDER_THAN. But, it still needs to keep two snapshots as RETAIN_LAST is set. - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s' RETAIN_LAST = %s", + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s' RETAIN_LAST %s", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire), "2"}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - ArrayUtils.subarray(table2.originalData, 0, table2.originalData.length)); + new Long[] {0L, 0L, 0L, 0L, 2L, 0L}); + + // Data not changed. + verifyData(allocator, table2, table2.originalData); Table vacuumedTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); Assert.assertEquals("Expire should keep last two snapshots", 2, Iterables.size(vacuumedTable.snapshots())); @@ -228,12 +273,15 @@ public static void testExpireDataFilesCleanup(BufferAllocator allocator, String String.format("Table [%s] rollbacked", table.fqn), null); - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s'", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - null); + new Long[] {3L, 0L, 0L, 3L, 4L, 0L}); + + // Data not changed. + verifyData(allocator, table, table.originalData); Table vacuumedTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); final Set filesAfterVacuum = collectDataFilesFromTable(vacuumedTable); @@ -271,12 +319,15 @@ public static void testExpireOlderThanWithRollback(BufferAllocator allocator, St String.format("Table [%s] rollbacked", table.fqn), null); - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s' RETAIN_LAST = 2", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s' RETAIN_LAST 2", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - null); + new Long[] {2L, 0L, 0L, 2L, 2L, 0L}); + + // Data not changed. 
+ verifyData(allocator, table, table.originalData); Table vacuumedTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); Assert.assertEquals("Expire should keep last 2 snapshot", 2, Iterables.size(vacuumedTable.snapshots())); @@ -288,6 +339,60 @@ public static void testExpireOlderThanWithRollback(BufferAllocator allocator, St } } + public static void testExpireOnTableWithPartitions(BufferAllocator allocator, String source) throws Exception { + try (DmlQueryTestUtils.Tables tables = createBasicNonPartitionedAndPartitionedTables(source, 2, 3, PARTITION_COLUMN_ONE_INDEX_SET)) { + Assert.assertEquals("Should have two tables", 2, tables.tables.length); + // Second table has partitions + DmlQueryTestUtils.Table table = tables.tables[1]; + + String tableName = table.name.startsWith("\"") ? table.name.substring(1, table.name.length() - 1) : table.name; + File tableFolder = new File(getDfsTestTmpSchemaLocation(), tableName); + Table icebergTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); + Assert.assertEquals("Should have two snapshots", 2, Iterables.size(icebergTable.snapshots())); + Assert.assertEquals("Should have two history entries", 2, icebergTable.history().size()); + final Snapshot firstSnapshot = Iterables.getFirst(icebergTable.snapshots(), null); + Assert.assertNotNull("Should get first snapshot", firstSnapshot); + final Snapshot secondSnapshot = Iterables.getLast(icebergTable.snapshots()); + final long rollbackToSnapshotId = secondSnapshot.snapshotId(); + + // Insert more rows to increase snapshots and partition files + addRows(table, 2); + addRows(table, 2); + + final long timestampMillisToExpire = waitUntilAfter(secondSnapshot.timestampMillis()); + + testQueryValidateStatusSummary(allocator, + "ROLLBACK TABLE %s TO SNAPSHOT '%s'", new Object[]{table.fqn, rollbackToSnapshotId}, + table, + true, + String.format("Table [%s] rollbacked", table.fqn), + null); + + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s' RETAIN_LAST 2", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + table, + new Long[] {4L, 0L, 0L, 2L, 2L, 4L}); + } + } + + public static void testExpireOnEmptyTableNoSnapshots(BufferAllocator allocator, String source) throws Exception { + try (DmlQueryTestUtils.Table table = createStockIcebergTable(source, 0, 2, "modes_isolation")) { + Table icebergTable = loadTable(table); + Assert.assertNull(icebergTable.currentSnapshot()); + final String timestampToExpire = getTimestampFromMillis(System.currentTimeMillis()); + + UserExceptionAssert.assertThatThrownBy(() -> + test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", table.fqn, timestampToExpire)) + .hasErrorType(ErrorType.UNSUPPORTED_OPERATION) + .hasMessageContaining("Vacuum table succeeded, and the operation did not change the number of snapshots"); + + icebergTable.refresh(); + Assert.assertNull(icebergTable.currentSnapshot()); + } + } + public static void testRetainZeroSnapshots(String source) throws Exception { try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { String tableName = table.name.startsWith("\"") ? 
table.name.substring(1, table.name.length() - 1) : table.name; @@ -296,7 +401,7 @@ public static void testRetainZeroSnapshots(String source) throws Exception { final long timestampMillisToExpire = waitUntilAfter(icebergTable.currentSnapshot().timestampMillis()); UserExceptionAssert.assertThatThrownBy(() -> - test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s' RETAIN_LAST = 0", table.fqn, getTimestampFromMillis(timestampMillisToExpire))) + test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s' RETAIN_LAST 0", table.fqn, getTimestampFromMillis(timestampMillisToExpire))) .hasErrorType(ErrorType.UNSUPPORTED_OPERATION) .hasMessageContaining("Minimum number of snapshots to retain can be 1"); } @@ -305,7 +410,7 @@ public static void testRetainZeroSnapshots(String source) throws Exception { public static void testInvalidTimestampLiteral(String source) throws Exception { try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { UserExceptionAssert.assertThatThrownBy(() -> - test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '2022-09-01 abc'", table.fqn)) + test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '2022-09-01 abc'", table.fqn)) .hasErrorType(ErrorType.PARSE) .hasMessageContaining("Literal '2022-09-01 abc' cannot be casted to TIMESTAMP"); } @@ -314,7 +419,7 @@ public static void testInvalidTimestampLiteral(String source) throws Exception { public static void testEmptyTimestamp(String source) throws Exception { try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { UserExceptionAssert.assertThatThrownBy(() -> - test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = ''", table.fqn)) + test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN ''", table.fqn)) .hasErrorType(ErrorType.PARSE) .hasMessageContaining("Literal '' cannot be casted to TIMESTAMP"); } @@ -332,12 +437,12 @@ public static void testExpireDatasetRefreshed(BufferAllocator allocator, String addRows(table, 1); verifyCountSnapshotQuery(allocator, table.fqn, 4L); - testQueryValidateStatusSummary(allocator, - "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s'", new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, + validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", + new Object[]{table.fqn, getTimestampFromMillis(timestampMillisToExpire)}, table, - true, - String.format("Table [%s] vacuumed", table.fqn), - null); + new Long[] {0L, 0L, 0L, 0L, 2L, 0L}); // The count table_snapshot query result should be refreshed and only 2 are left. verifyCountSnapshotQuery(allocator, table.fqn, 2L); @@ -351,16 +456,58 @@ public static void testUnparseSqlVacuum(String source) throws Exception { Table icebergTable = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); final long timestampMillisToExpire = waitUntilAfter(icebergTable.currentSnapshot().timestampMillis()); - final String vacuumQuery = String.format("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN = '%s' RETAIN_LAST = 1", + final String vacuumQuery = String.format("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s' RETAIN_LAST 1", table.fqn, getTimestampFromMillis(timestampMillisToExpire)); - final String expected = String.format("VACUUM TABLE %s EXPIRE SNAPSHOTS \"OLDER_THAN\" = '%s' \"RETAIN_LAST\" = 1", + final String expected = String.format("VACUUM TABLE %s EXPIRE SNAPSHOTS \"OLDER_THAN\" '%s' \"RETAIN_LAST\" 1", "\"" + source + "\"." 
+ addQuotes(tableName), getTimestampFromMillis(timestampMillisToExpire)); parseAndValidateSqlNode(vacuumQuery, expected); } } + public static void testExpireOnTableOneSnapshot(String source) throws Exception { + // Table has only one snapshot. Don't need to run expire snapshots query. + try (DmlQueryTestUtils.Table table = createEmptyTable(source,EMPTY_PATHS, "tableName", 1)) { + final long timestampMillisToExpire = System.currentTimeMillis(); + UserExceptionAssert.assertThatThrownBy(() -> + test("VACUUM TABLE %s EXPIRE SNAPSHOTS OLDER_THAN '%s'", table.fqn, getTimestampFromMillis(timestampMillisToExpire))) + .hasErrorType(ErrorType.UNSUPPORTED_OPERATION) + .hasMessageContaining("Vacuum table succeeded, and the operation did not change the number of snapshots"); + } + } + + public static void testRetainMoreSnapshots(String source) throws Exception { + // Table has less snapshot than the retained number. Don't need to run expire snapshots query. + try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { + UserExceptionAssert.assertThatThrownBy(() -> + test("VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST 5", table.fqn)) + .hasErrorType(ErrorType.UNSUPPORTED_OPERATION) + .hasMessageContaining("Vacuum table succeeded, and the operation did not change the number of snapshots"); + } + } + + public static void testRetainAllSnapshots(BufferAllocator allocator, String source) throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(source,2, 1)) { + verifyCountSnapshotQuery(allocator, table.fqn, 2L); + // Insert more rows to increase snapshots + DmlQueryTestUtils.Table table2 = addRows(table, 1); + table2 = addRows(table2, 1); + table2 = addRows(table2, 1); + table2 = addRows(table2, 1); + addRows(table2, 1); + verifyCountSnapshotQuery(allocator, table.fqn, 7L); + + // No snapshots are dated back to default 5 days ago, and no snapshots are expired, even claim to retain last 2. 
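+      // All six output counters should be zero: every snapshot is newer than the default cutoff, so nothing qualifies for deletion.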
+ validateOutputResult( + allocator, + "VACUUM TABLE %s EXPIRE SNAPSHOTS RETAIN_LAST %s", + new Object[]{table.fqn, "2"}, + table, + new Long[]{0L, 0L, 0L, 0L, 0L, 0L}); + } + } + private static Set collectDataFilesFromTable(Table icebergTable) { Set files = Sets.newHashSet(); if (icebergTable == null) { @@ -383,4 +530,15 @@ private static Set collectDataFilesFromSnapshot(Snapshot snapshot, FileI private static Set pathSet(Iterable files) { return Sets.newHashSet(Iterables.transform(files, file -> file.path().toString())); } + + private static void validateOutputResult(BufferAllocator allocator, String query, Object[] args, DmlQueryTestUtils.Table table, Long[] results) throws Exception { + Assert.assertEquals(6, results.length); + new TestBuilder(allocator) + .sqlQuery(query, args) + .unOrdered() + .baselineColumns(DELETE_DATA_FILE_COUNT, DELETE_POSITION_DELETE_FILES_COUNT, DELETE_EQUALITY_DELETE_FILES_COUNT, + DELETE_MANIFEST_FILES_COUNT, DELETE_MANIFEST_LISTS_COUNT, DELETE_PARTITION_STATS_FILES_COUNT) + .baselineValues(results[0], results[1], results[2], results[3], results[4], results[5]) + .go(); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/TestSqlHandlerUtil.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/TestSqlHandlerUtil.java new file mode 100644 index 0000000000..4a6d6c29b7 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/TestSqlHandlerUtil.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +import org.junit.Test; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.store.ischema.InfoSchemaStoragePlugin; + +public class TestSqlHandlerUtil { + + @Test + public void testGetSourceType() throws Exception { + Catalog catalog = mock(Catalog.class); + StoragePlugin source = mock(InfoSchemaStoragePlugin.class); + String sourceName = "info_schema"; + doReturn(source).when(catalog).getSource(sourceName); + + assertThat(SqlHandlerUtil.getSourceType(catalog, sourceName)).isEqualTo("InfoSchemaStoragePlugin"); + } + + @Test + public void testGetUnknownSourceType() throws Exception { + Catalog catalog = mock(Catalog.class); + String sourceName = "unknown"; + doThrow(UserException.class).when(catalog).getSource(sourceName); + + assertThat(SqlHandlerUtil.getSourceType(catalog, sourceName)).isEqualTo("Unknown"); + } + + @Test + public void testGetUnknownSourceTypeForEmpty() throws Exception { + Catalog catalog = mock(Catalog.class); + assertThat(SqlHandlerUtil.getSourceType(catalog, "")).isEqualTo("Unknown"); + } + + @Test + public void testGetUnknownSourceTypeForNull() throws Exception { + assertThat(SqlHandlerUtil.getSourceType(mock(Catalog.class), null)).isEqualTo("Unknown"); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/ITCreateViewHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/ITCreateViewHandler.java index ddb4548e85..013a34ea35 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/ITCreateViewHandler.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/ITCreateViewHandler.java @@ -16,7 +16,6 @@ package com.dremio.exec.planner.sql.handlers.direct; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.util.List; @@ -28,7 +27,6 @@ import org.junit.Test; import com.dremio.BaseTestQuery; -import com.dremio.common.exceptions.UserException; import com.dremio.config.DremioConfig; import com.dremio.exec.store.iceberg.IcebergTestTables; import com.dremio.sabot.rpc.user.QueryDataBatch; @@ -70,14 +68,4 @@ public void testCreateViewForSimpleQuery() throws Exception { String resultString = getResultString(results, "|"); assertThat(resultString).isEqualTo("ok|summary\ntrue|View 'dfs_test.test_vds' created successfully\n"); } - - @Test - public void testCreateViewFailsForTimeTravelQuery() { - assertThatThrownBy(() -> { - String sql = String.format("CREATE VDS %s AS SELECT * FROM %s AT SNAPSHOT '7958422591156276457'", VDS_NAME, - table.getTableName()); - test(sql); - }).isInstanceOf(UserException.class) - .hasMessageContaining("Views cannot be created for time travel queries"); - } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelDropHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelDropHandler.java new file mode 100644 index 0000000000..de8a5af7c0 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelDropHandler.java @@ -0,0 +1,204 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers.direct; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogOptions; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.ops.ReflectionContext; +import com.dremio.exec.planner.sql.SchemaUtilities; +import com.dremio.exec.planner.sql.parser.SqlDropReflection; +import com.dremio.exec.planner.sql.parser.SqlTableVersionSpec; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.exec.store.sys.accel.AccelerationManager; +import com.dremio.options.OptionManager; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.collect.ImmutableMap; + +@RunWith(MockitoJUnitRunner.class) +public class TestAccelDropHandler { + private static final String TABLE_NAME_V1 = "mysource1.myfolder.mytable"; + private static final String TABLE_NAME_V2 = "mysource2.myfolder.mytable"; + private static final String TABLE_NAME_NV = "s3.myfolder.mytable"; + private AccelDropReflectionHandler accelDropReflectionHandler; + @Mock + private Catalog catalog; + @Mock + private QueryContext queryContext; + private OptionManager optionManager; + private UserSession userSession; + private DremioTable dremioTable; + private String layoutId = "12345"; + private SchemaUtilities.TableWithPath tableWithPath ; + + private SqlIdentifier layoutIdentifier; + private FakeVersionedPlugin versionedPlugin; + private StoragePlugin nonVersionedPlugin; + private AccelerationManager accelerationManager; + private ReflectionContext reflectionContext; + + + @Before + public void setup() throws NamespaceException { + queryContext = mock(QueryContext.class, RETURNS_DEEP_STUBS); + + layoutIdentifier = new SqlIdentifier(layoutId, SqlParserPos.ZERO); + dremioTable = mock(DremioTable.class, RETURNS_DEEP_STUBS); + tableWithPath = new SchemaUtilities.TableWithPath(dremioTable); + versionedPlugin = 
mock(FakeVersionedPlugin.class); + nonVersionedPlugin = mock(StoragePlugin.class); + accelerationManager = mock(AccelerationManager.class); + reflectionContext = ReflectionContext.SYSTEM_USER_CONTEXT; + accelDropReflectionHandler = new AccelDropReflectionHandler(catalog, queryContext, reflectionContext); + } + + @Test + public void testDropWithVersionNoOption() throws Exception { + List tablePath1 = Arrays.asList("mysource1", "myfolder","mytable"); + SqlIdentifier tableIdentifier1 = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + NamespaceKey tableNamespaceKey1 = new NamespaceKey(tablePath1); + SqlDropReflection sqlDropReflection = new SqlDropReflection(SqlParserPos.ZERO, tableIdentifier1, layoutIdentifier, SqlTableVersionSpec.NOT_SPECIFIED); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey1); + when(catalog.getSource("mysource1")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(false); + // Act and Assert + assertThatThrownBy(() -> accelDropReflectionHandler.toResult("",sqlDropReflection)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support reflection"); + } + + @Test + public void testDropOnNonVersionedSource() throws Exception { + List tablePath = Arrays.asList("s3", "myfolder","mytable"); + SqlIdentifier tableIdentifier = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + NamespaceKey tableNamespaceKey = new NamespaceKey(tablePath); + SqlDropReflection sqlDropReflection = new SqlDropReflection(SqlParserPos.ZERO, tableIdentifier, layoutIdentifier, SqlTableVersionSpec.NOT_SPECIFIED); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey); + when(catalog.getTable(tableNamespaceKey)).thenReturn(dremioTable); + when(catalog.getSource("s3")).thenReturn(nonVersionedPlugin); + List result = accelDropReflectionHandler.toResult("",sqlDropReflection); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary).contains("Reflection dropped."); + } + + @Test + public void testDropOnNonVersionedSourceWithSessionVersion() throws Exception { + List tablePath = Arrays.asList("s3", "myfolder","mytable"); + SqlIdentifier tableIdentifier = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + NamespaceKey tableNamespaceKey = new NamespaceKey(tablePath); + SqlDropReflection sqlDropReflection = new SqlDropReflection(SqlParserPos.ZERO, tableIdentifier, layoutIdentifier, SqlTableVersionSpec.NOT_SPECIFIED); + final Map sourceVersionMapping = ImmutableMap.of( + "mysource1", VersionContext.ofBranch("branch1"), + "mysource2", VersionContext.ofRef("ref1") + ); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey); + when(catalog.getTable(tableNamespaceKey)).thenReturn(dremioTable); + when(catalog.getSource("s3")).thenReturn(nonVersionedPlugin); + when(queryContext.getSession().getSessionVersionForSource("mysource2")).thenReturn(sourceVersionMapping.get("mysource2")); + List result = accelDropReflectionHandler.toResult("",sqlDropReflection); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary).contains("Reflection dropped."); + } + + @Test + public void testDropWithVersion() throws Exception { + List tablePath = Arrays.asList("mysource2", 
"myfolder","mytable"); + SqlIdentifier tableIdentifier = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + NamespaceKey tableNamespaceKey = new NamespaceKey(tablePath); + + SqlDropReflection sqlDropReflection = new SqlDropReflection(SqlParserPos.ZERO, tableIdentifier, layoutIdentifier, SqlTableVersionSpec.NOT_SPECIFIED); + + final Map sourceVersionMapping = ImmutableMap.of( + "mysource1", VersionContext.ofBranch("branch1"), + "mysource2", VersionContext.ofRef("ref1") + ); + final TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.REFERENCE, "ref1"); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey); + when(catalog.getTableSnapshot(tableNamespaceKey, tableVersionContext)).thenReturn(dremioTable); + when(catalog.getSource("mysource2")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(true); + when(queryContext.getSession().getSessionVersionForSource("mysource2")).thenReturn(sourceVersionMapping.get("mysource2")); + // Act and Assert + List result = accelDropReflectionHandler.toResult("",sqlDropReflection); + VersionContext expectedVersionContext = VersionContext.ofRef("ref1"); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + } + + @Test + public void testDropWithVersionAt() throws Exception { + List tablePath = Arrays.asList("mysource2", "myfolder","mytable"); + SqlIdentifier tableIdentifier = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + SqlLiteral versionSpec = SqlLiteral.createCharString("ref1", SqlParserPos.ZERO); + NamespaceKey tableNamespaceKey = new NamespaceKey(tablePath); + + SqlDropReflection sqlDropReflection = new SqlDropReflection(SqlParserPos.ZERO, tableIdentifier, layoutIdentifier, + new SqlTableVersionSpec(SqlParserPos.ZERO, TableVersionType.REFERENCE, versionSpec)); + + final TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.REFERENCE, "ref1"); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey); + when(catalog.getTableSnapshot(tableNamespaceKey, tableVersionContext)).thenReturn(dremioTable); + when(catalog.getSource("mysource2")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(true); + // Act and Assert + List result = accelDropReflectionHandler.toResult("",sqlDropReflection); + VersionContext expectedVersionContext = VersionContext.ofRef("ref1"); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + } + + + /** + * Fake Versioned Plugin interface for test + */ + private interface FakeVersionedPlugin extends VersionedPlugin, StoragePlugin { + } + +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelToggleHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelToggleHandler.java new file mode 100644 index 0000000000..4d5b27e1a0 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestAccelToggleHandler.java @@ -0,0 +1,149 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers.direct; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogOptions; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedPlugin; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.ops.ReflectionContext; +import com.dremio.exec.planner.sql.parser.SqlAccelToggle; +import com.dremio.exec.planner.sql.parser.SqlTableVersionSpec; +import com.dremio.exec.store.StoragePlugin; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.google.common.collect.ImmutableMap; + +@RunWith(MockitoJUnitRunner.class) +public class TestAccelToggleHandler { + private static final String TABLE_NAME_V1 = "mysource1.myfolder.mytable"; + private AccelToggleHandler accelToggleHandler; + @Mock + private Catalog catalog; + @Mock + private QueryContext queryContext; + private DremioTable dremioTable; + SqlLiteral raw; + SqlLiteral enable; + private FakeVersionedPlugin versionedPlugin; + private ReflectionContext reflectionContext; + private List tablePath1 = Arrays.asList("mysource1", "myfolder","mytable"); + private SqlIdentifier tableIdentifier1 = new SqlIdentifier(TABLE_NAME_V1, SqlParserPos.ZERO); + private NamespaceKey tableNamespaceKey1 = new NamespaceKey(tablePath1); + + + @Before + public void setup() throws NamespaceException { + queryContext = mock(QueryContext.class, RETURNS_DEEP_STUBS); + raw = SqlLiteral.createBoolean(true, SqlParserPos.ZERO); + enable = SqlLiteral.createBoolean(true, SqlParserPos.ZERO); + dremioTable = mock(DremioTable.class, RETURNS_DEEP_STUBS); + versionedPlugin = mock(FakeVersionedPlugin.class); + reflectionContext = ReflectionContext.SYSTEM_USER_CONTEXT; + accelToggleHandler = new AccelToggleHandler(catalog, queryContext, reflectionContext); + + } + + @Test + public void testToggleWithVersionNoOption() throws Exception { + SqlAccelToggle sqlAccelToggle = new SqlAccelToggle(SqlParserPos.ZERO, tableIdentifier1, raw, enable, SqlTableVersionSpec.NOT_SPECIFIED); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath1); + 
when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey1); + when(catalog.getSource("mysource1")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(false); + // Act and Assert + assertThatThrownBy(() -> accelToggleHandler.toResult("",sqlAccelToggle)) + .isInstanceOf(UserException.class) + .hasMessageContaining("does not support reflection"); + } + + @Test + public void testToggleWithVersion() throws Exception { + SqlLiteral versionSpec = SqlLiteral.createCharString("ref1", SqlParserPos.ZERO); + SqlAccelToggle sqlAccelToggle = new SqlAccelToggle(SqlParserPos.ZERO, tableIdentifier1, raw, enable, + new SqlTableVersionSpec(SqlParserPos.ZERO, TableVersionType.REFERENCE, versionSpec)); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath1); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey1); + final TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.REFERENCE, "ref1"); + when(catalog.getTableSnapshot(tableNamespaceKey1, tableVersionContext)).thenReturn(dremioTable); + when(catalog.getSource("mysource1")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(true); + + // Act and Assert + List result = accelToggleHandler.toResult("",sqlAccelToggle); + VersionContext expectedVersionContext = VersionContext.ofRef("ref1"); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary).contains("Acceleration enabled"); + } + + @Test + public void testToggleWithVersionAt() throws Exception { + SqlAccelToggle sqlAccelToggle = new SqlAccelToggle(SqlParserPos.ZERO, tableIdentifier1, raw, enable, SqlTableVersionSpec.NOT_SPECIFIED); + + final Map sourceVersionMapping = ImmutableMap.of( + "mysource1", VersionContext.ofBranch("branch1"), + "mysource2", VersionContext.ofRef("ref1") + ); + when(dremioTable.getPath().getPathComponents()).thenReturn(tablePath1); + when(catalog.resolveSingle(any(NamespaceKey.class))).thenReturn(tableNamespaceKey1); + final TableVersionContext tableVersionContext = new TableVersionContext(TableVersionType.REFERENCE, "ref1"); + CatalogEntityKey catalogEntityKey= CatalogEntityKey.newBuilder().keyComponents(tablePath1).tableVersionContext(tableVersionContext).build(); + when(catalog.getTableSnapshot(tableNamespaceKey1, tableVersionContext)).thenReturn(dremioTable); + when(catalog.getSource("mysource1")).thenReturn(versionedPlugin); + when(queryContext.getOptions().getOption(CatalogOptions.REFLECTION_ARCTIC_ENABLED)).thenReturn(true); + when(queryContext.getSession().getSessionVersionForSource("mysource1")).thenReturn(sourceVersionMapping.get("mysource2")); + // Act and Assert + List result = accelToggleHandler.toResult("",sqlAccelToggle); + VersionContext expectedVersionContext = VersionContext.ofRef("ref1"); + assertThat(result).isNotEmpty(); + assertThat(result.get(0).ok).isTrue(); + assertThat(result.get(0).summary).contains("Acceleration enabled"); + } + + /** + * Fake Versioned Plugin interface for test + */ + private interface FakeVersionedPlugin extends VersionedPlugin, StoragePlugin { + } + +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestCreateViewHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestCreateViewHandler.java index 6e72c582ae..02d9d94d72 100644 --- 
a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestCreateViewHandler.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestCreateViewHandler.java @@ -23,6 +23,7 @@ import static org.mockito.Mockito.anySet; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -69,6 +70,7 @@ import com.dremio.exec.planner.sql.SqlConverter; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.planner.sql.parser.SqlCreateView; +import com.dremio.exec.planner.sql.parser.SqlGrant; import com.dremio.exec.proto.UserBitShared.UserCredentials; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.server.MaterializationDescriptorProvider; @@ -155,7 +157,7 @@ public class TestCreateViewHandler extends DremioTest { private ViewOptions replaceViewOptions = new ViewOptions.ViewOptionsBuilder() .version(DEFAULT_RESOLVED_VERSION_CONTEXT) .batchSchema(batchSchema) - .viewUpdate(true) + .actionType(ViewOptions.ActionType.UPDATE_VIEW) .build(); private CreateViewHandler createViewHandler; private SqlConverter parser; @@ -262,6 +264,12 @@ public void replaceVersionedViewNameClash() throws Exception { } private void setupResources() throws SqlParseException { + setupCreateViewHandler(); + // versioned view test only + doReturn(true).when(createViewHandler).isVersioned(DEFAULT_NAMESPACE_KEY); + } + + private void setupCreateViewHandler() { when(catalog.resolveSingle(default_input.getPath())).thenReturn(DEFAULT_NAMESPACE_KEY); when(context.getCatalog()).thenReturn(catalog); when(optionManager.getOption(VERSIONED_VIEW_ENABLED)).thenReturn(true); @@ -271,8 +279,6 @@ private void setupResources() throws SqlParseException { when(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT, true)).thenReturn(queryString); when(sqlNode.getKind()).thenReturn(SqlKind.SELECT); createViewHandler = spy(new CreateViewHandler(config)); - // versioned view test only - doReturn(true).when(createViewHandler).isVersioned(DEFAULT_NAMESPACE_KEY); } @Test @@ -353,6 +359,18 @@ public void testGetViewSqlBracketed() throws UserException { runTestGetViewSql("CREATE VIEW foo AS\nSELECT [\"*\"] FROM bar", "SELECT \"\"\"*\"\"\" FROM bar"); } + @Test + public void createViewWithoutALTERPrivilege() throws Exception { + setupCreateViewHandler(); + doThrow(UserException.validationError().message("permission denied").buildSilently()) + .when(catalog) + .validatePrivilege(new NamespaceKey(DEFAULT_SOURCE_NAME), SqlGrant.Privilege.ALTER); + + assertThatThrownBy(() -> createViewHandler.toResult("", default_input)) + .isInstanceOf(UserException.class) + .hasMessage("permission denied"); + } + private void runTestGetViewSql(String sql, String expected) throws UserException { SqlNode sqlNode = parser.parse(sql); String result = createViewHandler.getViewSql((SqlCreateView) sqlNode, sql); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestDescribeFunctionHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestDescribeFunctionHandler.java index 750e626a59..c49a913a56 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestDescribeFunctionHandler.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestDescribeFunctionHandler.java @@ -50,7 +50,7 @@ public 
class TestDescribeFunctionHandler { private UserDefinedFunction udf1 = new UserDefinedFunction("test1" , "SELECT 1", CompleteType.VARCHAR, - new ArrayList<>(), new ArrayList<>(), new Timestamp(System.currentTimeMillis()), new Timestamp(System.currentTimeMillis())); + new ArrayList<>(), new ArrayList<>(), null, new Timestamp(System.currentTimeMillis()), new Timestamp(System.currentTimeMillis())); @Before public void setup() throws IOException { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestSqlNodeUtil.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestSqlNodeUtil.java new file mode 100644 index 0000000000..ae45bb26a7 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/direct/TestSqlNodeUtil.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.planner.sql.handlers.direct; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import org.apache.calcite.sql.SqlDelete; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlOrderBy; +import org.apache.calcite.sql.SqlSelect; +import org.apache.calcite.sql.SqlWith; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.junit.Test; + +public class TestSqlNodeUtil { + + @Test + public void testQueryKindSelect() { + SqlSelect sqlSelect = mock(SqlSelect.class); + doReturn(SqlKind.SELECT).when(sqlSelect).getKind(); + assertEquals(SqlKind.SELECT.lowerName, SqlNodeUtil.getQueryKind(sqlSelect)); + } + + @Test + public void testQueryKindSelectWithOrderBy() { + SqlSelect sqlSelect = mock(SqlSelect.class); + doReturn(SqlKind.SELECT).when(sqlSelect).getKind(); + SqlOrderBy sqlOrderBy = new SqlOrderBy(new SqlParserPos(0, 0), sqlSelect, null, null, null); + assertEquals(SqlKind.SELECT.lowerName, SqlNodeUtil.getQueryKind(sqlOrderBy)); + } + + @Test + public void testQueryKindSelectWithWith() { + SqlSelect sqlSelect = mock(SqlSelect.class); + doReturn(SqlKind.SELECT).when(sqlSelect).getKind(); + SqlWith sqlWith = new SqlWith(new SqlParserPos(0, 0), null, sqlSelect); + assertEquals(SqlKind.SELECT.lowerName, SqlNodeUtil.getQueryKind(sqlWith)); + } + + @Test + public void testQueryKindDelete() { + SqlDelete sqlDelete = mock(SqlDelete.class); + doReturn(SqlKind.DELETE).when(sqlDelete).getKind(); + assertEquals(SqlKind.DELETE.lowerName, SqlNodeUtil.getQueryKind(sqlDelete)); + } + + @Test + public void testQueryKindNull() { + assertEquals("unknown", SqlNodeUtil.getQueryKind(null)); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimize.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimize.java index ac5dcea247..346f6be446 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimize.java +++ 
b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimize.java @@ -15,7 +15,6 @@ */ package com.dremio.exec.planner.sql.handlers.query; -import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_OPTIMIZE; import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; @@ -36,7 +35,6 @@ import com.dremio.BaseTestQuery; import com.dremio.common.exceptions.UserException; -import com.dremio.exec.ExecConstants; import com.dremio.exec.ExecTest; import com.dremio.exec.PassthroughQueryObserver; import com.dremio.exec.calcite.logical.TableOptimizeCrel; @@ -57,8 +55,6 @@ import com.dremio.exec.store.iceberg.IcebergTestTables; import com.dremio.exec.util.ColumnUtils; import com.dremio.options.OptionManager; -import com.dremio.options.OptionValue; -import com.dremio.options.TypeValidators; import com.dremio.sabot.rpc.user.UserSession; import com.dremio.service.namespace.NamespaceKey; @@ -68,6 +64,7 @@ public class TestOptimize extends BaseTestQuery { private static IcebergTestTables.Table table; + private static IcebergTestTables.Table tableWithDeletes; private static SqlConverter converter; private static SqlHandlerConfig config; @@ -107,26 +104,20 @@ public static void setUp() throws Exception { config = new SqlHandlerConfig(queryContext, converter, observer, null); - setBooleanOption(config, ExecConstants.ENABLE_ICEBERG_OPTIMIZE, true); } - private static void setBooleanOption(SqlHandlerConfig configuration, TypeValidators.BooleanValidator option, boolean flag) { - configuration.getContext().getOptions().setOption(OptionValue.createBoolean( - OptionValue.OptionType.SYSTEM, - option.getOptionName(), - flag)); - } - - @Before public void init() throws Exception { table = IcebergTestTables.V2_ORDERS.get(); + tableWithDeletes = IcebergTestTables.V2_MULTI_ROWGROUP_ORDERS_WITH_DELETES.get(); table.enableIcebergSystemOptions(); + tableWithDeletes.enableIcebergSystemOptions(); } @After public void tearDown() throws Exception { table.close(); + tableWithDeletes.close(); } //=========================================================================== @@ -137,7 +128,7 @@ public void testLogicalRelNodeConversion() throws Exception { String sql = format("OPTIMIZE TABLE %s", table.getTableName()); final SqlNode node = converter.parse(sql); final ConvertedRelNode convertedRelNode = PrelTransformer.validateAndConvert(config, node); - assertThat(convertedRelNode.getValidatedRowType().getFieldCount()).isEqualTo(2); + assertThat(convertedRelNode.getValidatedRowType().getFieldCount()).isEqualTo(3); // find TableOptimizeRel assertThat(convertedRelNode.getConvertedNode() instanceof TableOptimizeCrel).as("TableOptimizeCrel node is expected").isTrue(); @@ -158,11 +149,6 @@ public void testValidations() throws Exception { NamespaceKey path = SqlNodeUtil.unwrap(node, SqlOptimize.class).getPath(); OptimizeHandler optimizeHandler = new OptimizeHandler(); - //Disable dremio.iceberg.optimize.enabled - when(mockQueryContext.getOptions().getOption(ENABLE_ICEBERG_OPTIMIZE)).thenReturn(Boolean.FALSE); - assertThatThrownBy(() -> optimizeHandler.checkValidations(mockCatalog, mockConfig, path, node)) - .isInstanceOf(UserException.class).hasMessageContaining("OPTIMIZE TABLE command is not supported"); - //Disable SELECT Privilege Mockito.doThrow(UserException.permissionError() .message(String.format("User [%s] not authorized to %s [%s]", SYSTEM_USERNAME, SqlGrant.Privilege.SELECT, path)) @@ -191,8 +177,8 @@ 
public void testOptimizePlan() throws Exception { //validate IcebergManifestListOperator Count assertThat(StringUtils.countMatches(textPlan, "IcebergManifestList")).as("Two IcebergManifestList operator is expected").isEqualTo(2); - //validate TableFunctionDeletedDataFileMetadata Count - assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[DELETED_DATA_FILES_METADATA])")).as("Only one DELETED_DATA_FILES_METADATA Table Function operator is expected").isEqualTo(1); + //validate TableFunctionDeletedFileMetadata Count + assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[DELETED_FILES_METADATA])")).as("Only one DELETED_FILES_METADATA Table Function operator is expected").isEqualTo(1); //validate TableFunctionSplitGenManifestScan Count assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[SPLIT_GEN_MANIFEST_SCAN]")).as("Only one SPLIT_GEN_MANIFEST_SCAN Table Function operator is expected").isEqualTo(1); @@ -201,7 +187,7 @@ public void testOptimizePlan() throws Exception { assertThat(StringUtils.countMatches(textPlan, "RecordType(BIGINT D_R_E_M_I_O_D_A_T_A_F_I_L_E_R_O_W_C_O_U_N_T, VARCHAR(65536) D_R_E_M_I_O_D_A_T_A_F_I_L_E_F_I_L_E_P_A_T_H, VARBINARY(65536) icebergMetadata)")).as("Only one such Project is expected").isEqualTo(1); //validate count aggregation on OperationType - assertThat(textPlan).contains("Project(rewritten_data_files_count=[CASE(=($9, 1), $1, CAST(0:BIGINT):BIGINT)], new_data_files_count=[CASE(=($9, 0), $1, CAST(0:BIGINT):BIGINT)])"); + assertThat(textPlan).contains("Project(rewritten_data_files_count=[CASE(=($9, 1), $1, CAST(0:BIGINT):BIGINT)], rewritten_delete_files_count=[CASE(=($9, 3), $1, CAST(0:BIGINT):BIGINT)], new_data_files_count=[CASE(=($9, 0), $1, CAST(0:BIGINT):BIGINT)])"); //validate OptimizeTableOperators testMatchingPatterns(textPlan, new String[] { @@ -232,6 +218,117 @@ } + + @Test + public void testV1OptimizePlan() throws Exception { + IcebergTestTables.Table v1table = IcebergTestTables.NATION.get(); + test(String.format("CREATE TABLE %s.%s as select * from ", TEMP_SCHEMA_HADOOP, "v1table") + v1table.getTableName()); + final String sql = "OPTIMIZE TABLE " + TEMP_SCHEMA_HADOOP + ".v1table"; + OptimizeHandler optimizeHandler = new OptimizeHandler(); + SqlNode sqlNode = converter.parse(sql); + optimizeHandler.getPlan(config, sql, sqlNode); + String textPlan = optimizeHandler.getTextPlan(); + + //validate IcebergManifestListOperator Count + assertThat(StringUtils.countMatches(textPlan, "IcebergManifestList")).as("Two IcebergManifestList operators are expected").isEqualTo(2); + + //validate TableFunctionDeletedFileMetadata Count + assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[DELETED_FILES_METADATA])")).as("Only one DELETED_FILES_METADATA Table Function operator is expected").isEqualTo(1); + + //validate TableFunctionSplitGenManifestScan Count + assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[SPLIT_GEN_MANIFEST_SCAN]")).as("Only one SPLIT_GEN_MANIFEST_SCAN Table Function operator is expected").isEqualTo(1); + + //validate ProjectWithIcebergMetadata + assertThat(StringUtils.countMatches(textPlan, "RecordType(BIGINT D_R_E_M_I_O_D_A_T_A_F_I_L_E_R_O_W_C_O_U_N_T, VARCHAR(65536) D_R_E_M_I_O_D_A_T_A_F_I_L_E_F_I_L_E_P_A_T_H, VARBINARY(65536) icebergMetadata)")).as("Only one such Project is expected").isEqualTo(1); + + //validate count aggregation on OperationType +
assertThat(textPlan).contains("Project(rewritten_data_files_count=[CASE(=($9, 1), $1, CAST(0:BIGINT):BIGINT)], rewritten_delete_files_count=[CASE(=($9, 3), $1, CAST(0:BIGINT):BIGINT)], new_data_files_count=[CASE(=($9, 0), $1, CAST(0:BIGINT):BIGINT)])"); + + //validate OptimizeTableOperators + testMatchingPatterns(textPlan, new String[] { + // We should have all these operators + "WriterCommitter", + "UnionAll", + "Writer", + "TableFunction", + "Project", + "IcebergManifestList", + "IcebergManifestScan","StreamAgg"}); + + //validate OptimizeTableOperatorsOrder + testMatchingPatterns(textPlan, new String[] { + "(?s)" + + "WriterCommitter.*" + + "UnionAll.*" + + "Writer.*" + + "TableFunction.*" + + "TableFunction.*" + + "IcebergManifestList.*" + + "Project.*" + + "TableFunction.*" + + "Project.*" + ColumnUtils.ROW_COUNT_COLUMN_NAME + ".*" + ColumnUtils.FILE_PATH_COLUMN_NAME + ".*" + + "IcebergManifestScan.*" + + "IcebergManifestList.*"}); + + + } + + @Test + public void testOptimizePlanWithDeletes() throws Exception { + final String sql = "OPTIMIZE TABLE " + tableWithDeletes.getTableName(); + OptimizeHandler optimizeHandler = new OptimizeHandler(); + SqlNode sqlNode = converter.parse(sql); + optimizeHandler.getPlan(config, sql, sqlNode); + String textPlan = optimizeHandler.getTextPlan(); + + //validate IcebergManifestListOperator Count + assertThat(StringUtils.countMatches(textPlan, "IcebergManifestList")).as("Six IcebergManifestList operators are expected").isEqualTo(6); + + //validate TableFunctionDeletedFileMetadata Count + assertThat(StringUtils.countMatches(textPlan, "Table Function Type=[DELETED_FILES_METADATA])")).as("Two DELETED_FILES_METADATA Table Function operators are expected").isEqualTo(2); + + //validate ProjectWithIcebergMetadata + assertThat(StringUtils.countMatches(textPlan, "RecordType(BIGINT D_R_E_M_I_O_D_A_T_A_F_I_L_E_R_O_W_C_O_U_N_T, VARCHAR(65536) D_R_E_M_I_O_D_A_T_A_F_I_L_E_F_I_L_E_P_A_T_H, VARBINARY(65536) icebergMetadata)")).as("Two such Projects are expected").isEqualTo(2); + + //validate count aggregation on OperationType + assertThat(textPlan).contains("Project(rewritten_data_files_count=[CASE(=($9, 1), $1, CAST(0:BIGINT):BIGINT)], rewritten_delete_files_count=[CASE(=($9, 3), $1, CAST(0:BIGINT):BIGINT)], new_data_files_count=[CASE(=($9, 0), $1, CAST(0:BIGINT):BIGINT)])"); + + //validate OptimizeTableOperators + testMatchingPatterns(textPlan, new String[] { + // We should have all these operators + "WriterCommitter", + "UnionAll", + "Writer", + "TableFunction", + "Project", + "IcebergManifestList", + "IcebergManifestScan","StreamAgg"}); + + //validate OptimizeTableOperatorsOrder + testMatchingPatterns(textPlan, new String[] { + "(?s)" + + "WriterCommitter.*" + + "UnionAll.*" + + "Writer.*" + + "TableFunction.*" + + "TableFunction.*" + + "HashJoin.*" + + "IcebergManifestList.*" + + "Project.*" + + "UnionAll.*" + + "TableFunction.*" + + "Project.*" + ColumnUtils.ROW_COUNT_COLUMN_NAME + ".*" + ColumnUtils.FILE_PATH_COLUMN_NAME + ".*" + + "Filter.*" + + "HashJoin.*" + + "IcebergManifestList.*" + + "HashAgg.*" + + "TableFunction.*" + + "IcebergManifestList.*" + + "TableFunction.*" + + "Project.*" + ColumnUtils.ROW_COUNT_COLUMN_NAME + ".*" + ColumnUtils.FILE_PATH_COLUMN_NAME + ".*" + + "IcebergManifestScan.*" + + "IcebergManifestList.*"}); + } + private void testMatchingPatterns(String plan, String[] expectedPatterns) { // Check and make sure all expected patterns are in the plan if (expectedPatterns != null) { diff --git 
a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeHandler.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeHandler.java index 530a75c735..34d4b95a9c 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeHandler.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeHandler.java @@ -15,152 +15,99 @@ */ package com.dremio.exec.planner.sql.handlers.query; +import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.when; - -import java.util.Arrays; -import java.util.List; import org.apache.calcite.avatica.util.Quoting; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; -import org.apache.calcite.util.Pair; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; -import org.mockito.Answers; -import org.mockito.MockedStatic; -import org.mockito.Mockito; +import org.junit.BeforeClass; +import org.junit.Test; import com.dremio.BaseTestQuery; import com.dremio.common.exceptions.UserException; import com.dremio.exec.ExecConstants; -import com.dremio.exec.catalog.Catalog; -import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.ExecTest; +import com.dremio.exec.PassthroughQueryObserver; import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.observer.AttemptObserver; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.ParserConfig; +import com.dremio.exec.planner.sql.SqlConverter; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.planner.sql.parser.SqlOptimize; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.proto.UserProtos; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.server.options.SessionOptionManagerImpl; import com.dremio.exec.store.iceberg.IcebergTestTables; -import com.dremio.exec.store.iceberg.IcebergUtils; -import com.dremio.options.OptionManager; +import com.dremio.options.OptionValue; +import com.dremio.sabot.rpc.user.UserSession; import com.dremio.service.namespace.NamespaceKey; public class TestOptimizeHandler extends BaseTestQuery { - private static SqlHandlerConfig mockConfig; - private static OptionManager mockOptionManager; - private static Catalog mockCatalog; - private final Class exceptionType = IllegalArgumentException.class; - - @BeforeAll - public static void setup() { - mockConfig = Mockito.mock(SqlHandlerConfig.class); - QueryContext mockQueryContext = Mockito.mock(QueryContext.class); - mockCatalog = Mockito.mock(Catalog.class); - mockOptionManager = Mockito.mock(OptionManager.class); - - when(mockConfig.getContext()).thenReturn(mockQueryContext); - when(mockQueryContext.getOptions()).thenReturn(mockOptionManager); - when(mockQueryContext.getCatalog()).thenReturn(mockCatalog); - when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB)).thenReturn(256L); - when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(0.75); - 
when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(1.8); - when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES)).thenReturn(5L); - when(mockOptionManager.getOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE)).thenReturn(true); - when(mockOptionManager.getOption(ExecConstants.ENABLE_ICEBERG)).thenReturn(true); - } - - @ParameterizedTest - @ValueSource(strings = { - "OPTIMIZE TABLE a.b.c (target_file_size_mb=5)", - "OPTIMIZE TABLE a.b.c (target_file_size_mb=5, min_file_size_mb=1)", - "OPTIMIZE TABLE a.b.c (target_file_size_mb=5, max_file_size_mb=6)", - "OPTIMIZE TABLE a.b.c (min_file_size_mb=1, target_file_size_mb=5, max_file_size_mb=6)", - "OPTIMIZE TABLE a.b.c (min_file_size_mb=200, max_file_size_mb=300)", - "OPTIMIZE TABLE a.b.c (min_file_size_mb=0)" - }) - - void testValidOptions(String query) { - assertDoesNotThrow(() -> getValidOptimizeOptions(query)); - } - - @ParameterizedTest - @MethodSource("invalidOptionQueries") - void testInvalidOptions(Pair test) { - assertThatThrownBy(() -> getValidOptimizeOptions(test.getKey())).isInstanceOf(exceptionType).hasMessage(test.getValue()); + private static SqlConverter converter; + private static SqlHandlerConfig config; + + @BeforeClass + public static void setup() throws Exception { + SabotContext context = getSabotContext(); + + UserSession session = UserSession.Builder.newBuilder() + .withSessionOptionManager( + new SessionOptionManagerImpl(getSabotContext().getOptionValidatorListing()), + getSabotContext().getOptionManager()) + .withUserProperties(UserProtos.UserProperties.getDefaultInstance()) + .withCredentials(UserBitShared.UserCredentials.newBuilder().setUserName(SYSTEM_USERNAME).build()) + .setSupportComplexTypes(true) + .build(); + + final QueryContext queryContext = new QueryContext(session, context, UserBitShared.QueryId.getDefaultInstance()); + queryContext.setGroupResourceInformation(context.getClusterResourceInformation()); + final AttemptObserver observer = new PassthroughQueryObserver(ExecTest.mockUserClientConnection(null)); + + converter = new SqlConverter( + queryContext.getPlannerSettings(), + queryContext.getOperatorTable(), + queryContext, + queryContext.getMaterializationProvider(), + queryContext.getFunctionRegistry(), + queryContext.getSession(), + observer, + queryContext.getCatalog(), + queryContext.getSubstitutionProviderFactory(), + queryContext.getConfig(), + queryContext.getScanResult(), + queryContext.getRelMetadataQuerySupplier()); + + config = new SqlHandlerConfig(queryContext, converter, observer, null); } @Test - void testOptimizeDisabled() throws Exception { + public void testNonexistentTable() throws Exception { SqlOptimize sqlOptimize = parseToSqlOptimizeNode("OPTIMIZE TABLE a.b.c"); OptimizeHandler optimizeHandler = (OptimizeHandler) sqlOptimize.toPlanHandler(); - when(mockConfig.getContext().getOptions().getOption(ExecConstants.ENABLE_ICEBERG_OPTIMIZE)).thenReturn(false); - when(mockCatalog.resolveToDefault(any())).thenReturn(null); - when(mockCatalog.getTableNoResolve(sqlOptimize.getPath())).thenReturn(Mockito.mock(DremioTable.class)); - assertThatThrownBy(() -> optimizeHandler.getPlan(mockConfig, "OPTIMIZE TABLE a.b.c", sqlOptimize)) - .isInstanceOf(UserException.class) - .hasMessage("OPTIMIZE TABLE command is not supported."); - } - @Test - void testNonexistentTable() throws Exception { - SqlOptimize sqlOptimize = parseToSqlOptimizeNode("OPTIMIZE TABLE a.b.c"); - OptimizeHandler optimizeHandler = (OptimizeHandler) 
sqlOptimize.toPlanHandler(); - when(mockCatalog.getTableNoResolve(any())).thenReturn(null); - assertThatThrownBy(() -> optimizeHandler.getPlan(mockConfig, "OPTIMIZE TABLE a.b.c", sqlOptimize)) + assertThatThrownBy(() -> optimizeHandler.getPlan(config, "OPTIMIZE TABLE a.b.c", sqlOptimize)) .isInstanceOf(UserException.class) .hasMessage("Table [a.b.c] does not exist."); } @Test - void testV2Table() throws Exception { - IcebergTestTables.Table table = IcebergTestTables.V2_MULTI_ROWGROUP_ORDERS_WITH_DELETES.get(); + public void testV2TableWithDeletes() throws Exception { + IcebergTestTables.Table table = IcebergTestTables.PRODUCTS_WITH_EQ_DELETES.get(); + config.getContext().getOptions().setOption(OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, + ExecConstants.ENABLE_ICEBERG_MERGE_ON_READ_SCAN_WITH_EQUALITY_DELETE.getOptionName(), true)); String query = String.format("OPTIMIZE TABLE %s", table.getTableName()); SqlOptimize sqlOptimize = parseToSqlOptimizeNode(query); NamespaceKey path = sqlOptimize.getPath(); OptimizeHandler optimizeHandler = (OptimizeHandler) sqlOptimize.toPlanHandler(); - try (MockedStatic mockedStatic = Mockito.mockStatic(IcebergUtils.class)) { - mockedStatic.when(() -> IcebergUtils.checkTableExistenceAndMutability(eq(mockCatalog), eq(mockConfig), eq(path), any(), eq(false))).thenReturn(null); - DremioTable mockTable = Mockito.mock(DremioTable.class, Answers.RETURNS_DEEP_STUBS); - when(mockCatalog.getTableNoResolve(path)).thenReturn(mockTable); - when(mockTable.getPath()).thenReturn(path); - when(mockTable.getDatasetConfig().getPhysicalDataset().getIcebergMetadata().getDeleteManifestStats().getRecordCount()).thenReturn(1L); - - assertThatThrownBy(() -> optimizeHandler.getPlan(mockConfig, query, sqlOptimize)) - .isInstanceOf(UserException.class) - .hasMessage("OPTIMIZE TABLE command does not support tables with delete files."); - } - } - - static List> invalidOptionQueries() { - return Arrays.asList( - Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, min_file_size_mb=3)", "Value of TARGET_FILE_SIZE_MB [2] cannot be less than MIN_FILE_SIZE_MB [3]."), - Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=270, min_file_size_mb=269)", "Value of TARGET_FILE_SIZE_MB [256] cannot be less than MIN_FILE_SIZE_MB [269]."), - Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, max_file_size_mb=1)", "Value of TARGET_FILE_SIZE_MB [2] cannot be greater than MAX_FILE_SIZE_MB [1]."), - Pair.of("OPTIMIZE TABLE a.b.c (min_file_size_mb=2, max_file_size_mb=26)", "Value of TARGET_FILE_SIZE_MB [256] cannot be greater than MAX_FILE_SIZE_MB [26]."), - Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=2, min_file_size_mb=5)", "Value of MIN_FILE_SIZE_MB [5] cannot be greater than MAX_FILE_SIZE_MB [2]."), - Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, min_file_size_mb=5)", "Value of MIN_FILE_SIZE_MB [5] cannot be greater than MAX_FILE_SIZE_MB [3]."), - Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=0)", "MAX_FILE_SIZE_MB [0] should be a positive integer value."), - Pair.of("OPTIMIZE TABLE a.b.c (min_input_files=0)", "Value of MIN_INPUT_FILES [0] cannot be less than 1."), - Pair.of("OPTIMIZE TABLE a.b.c (min_input_files=-2)", "Value of MIN_INPUT_FILES [-2] cannot be less than 1."), - Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=-1200)", "MAX_FILE_SIZE_MB [-1200] should be a positive integer value."), - Pair.of("OPTIMIZE TABLE a.b.c (min_file_size_mb=-1050)", "MIN_FILE_SIZE_MB [-1050] should be a non-negative integer value."), - Pair.of("OPTIMIZE TABLE a.b.c 
(target_file_size_mb=-256)", "TARGET_FILE_SIZE_MB [-256] should be a positive integer value.") - ); - } - - private static OptimizeOptions getValidOptimizeOptions(String toParse) throws Exception { - SqlOptimize sqlOptimize = parseToSqlOptimizeNode(toParse); - //return Optimize Options if all the inputs are valid else throw error. - return new OptimizeOptions(mockConfig.getContext().getOptions(), sqlOptimize, true); + assertThatThrownBy(() -> optimizeHandler.checkValidations(config.getContext().getCatalog(), config, path, sqlOptimize)) + .isInstanceOf(UserException.class) + .hasMessage("OPTIMIZE TABLE command does not support tables with equality delete files."); + table.close(); } private static SqlOptimize parseToSqlOptimizeNode(String toParse) throws SqlParseException { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeOptions.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeOptions.java new file mode 100644 index 0000000000..2cd3d4d5a3 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/query/TestOptimizeOptions.java @@ -0,0 +1,163 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.planner.sql.handlers.query; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; + +import org.apache.calcite.avatica.util.Quoting; +import org.apache.calcite.sql.parser.SqlParseException; +import org.apache.calcite.sql.parser.SqlParser; +import org.apache.calcite.util.Pair; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mockito; + +import com.dremio.exec.ExecConstants; +import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.planner.sql.ParserConfig; +import com.dremio.exec.planner.sql.parser.SqlOptimize; +import com.dremio.exec.planner.sql.parser.TestSqlOptimize; +import com.dremio.options.OptionManager; + +/** + * Tests for {@link OptimizeOptions} + */ +public class TestOptimizeOptions { + private static OptionManager mockOptionManager; + private final Class<? extends Exception> exceptionType = IllegalArgumentException.class; + + @Test + public void testCreateInstanceFromNodeAllOptions() throws SqlParseException { + SqlOptimize sqlNode = (SqlOptimize) TestSqlOptimize.parse( + "OPTIMIZE TABLE a.b.c REWRITE DATA (TARGET_FILE_SIZE_MB=257, MIN_INPUT_FILES=10, MAX_FILE_SIZE_MB=300, MIN_FILE_SIZE_MB=100)"); + + OptimizeOptions optimizeOptions = OptimizeOptions.createInstance(sqlNode); + + assertThat(optimizeOptions.isOptimizeManifestFiles()).isFalse(); + assertThat(optimizeOptions.isOptimizeDataFiles()).isTrue(); + assertThat(optimizeOptions.isOptimizeManifestsOnly()).isFalse(); + assertThat(optimizeOptions.isSingleDataWriter()).isFalse(); + + assertThat(optimizeOptions.getTargetFileSizeBytes()).isEqualTo(257 * 1024 * 1024); + assertThat(optimizeOptions.getMinInputFiles()).isEqualTo(10); + assertThat(optimizeOptions.getMinFileSizeBytes()).isEqualTo(100 * 1024 * 1024); + assertThat(optimizeOptions.getMaxFileSizeBytes()).isEqualTo(300 * 1024 * 1024); + } + + @Test + public void testCreateInstanceFromNodeAllDefaults() throws SqlParseException { + SqlOptimize sqlNode = (SqlOptimize) TestSqlOptimize.parse("OPTIMIZE TABLE a.b.c"); + + OptimizeOptions optimizeOptions = OptimizeOptions.createInstance(sqlNode); + + assertThat(optimizeOptions.isOptimizeManifestFiles()).isTrue(); + assertThat(optimizeOptions.isOptimizeDataFiles()).isTrue(); + assertThat(optimizeOptions.isOptimizeManifestsOnly()).isFalse(); + assertThat(optimizeOptions.isSingleDataWriter()).isFalse(); + + assertThat(optimizeOptions.getTargetFileSizeBytes()).isEqualTo(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB.getDefault().getNumVal() * 1024 * 1024); + assertThat(optimizeOptions.getMinInputFiles()).isEqualTo(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES.getDefault().getNumVal()); + + long expectedMin = ((long) (ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB.getDefault().getNumVal() * ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal())) * 1024 * 1024; + assertThat(optimizeOptions.getMinFileSizeBytes()).isEqualTo(expectedMin); + + long expectedMax = ((long) (ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB.getDefault().getNumVal() * ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO.getDefault().getFloatVal())) * 1024 * 1024; +
assertThat(optimizeOptions.getMaxFileSizeBytes()).isEqualTo(expectedMax); + } + + @Test + public void testCreateInstanceUsingSupportOptions() throws SqlParseException { + SqlOptimize sqlNode = (SqlOptimize) TestSqlOptimize.parse("OPTIMIZE TABLE a.b.c"); + + OptionManager optionManager = mock(OptionManager.class); + when(optionManager.getOption(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB)).thenReturn(1000L); + when(optionManager.getOption(ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(2D); + when(optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(0.2D); + when(optionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES)).thenReturn(10L); + + OptimizeOptions optimizeOptions = OptimizeOptions.createInstance(optionManager, sqlNode, true); + + assertThat(optimizeOptions.getMinInputFiles()).isEqualTo(10L); + assertThat(optimizeOptions.getMinFileSizeBytes()).isEqualTo(200 * 1024 * 1024); + assertThat(optimizeOptions.getMaxFileSizeBytes()).isEqualTo(2000 * 1024 * 1024); + assertThat(optimizeOptions.getTargetFileSizeBytes()).isEqualTo(1000 * 1024 * 1024); + } + + @ParameterizedTest + @ValueSource(strings = { + "OPTIMIZE TABLE a.b.c (target_file_size_mb=5)", + "OPTIMIZE TABLE a.b.c (target_file_size_mb=5, min_file_size_mb=1)", + "OPTIMIZE TABLE a.b.c (target_file_size_mb=5, max_file_size_mb=6)", + "OPTIMIZE TABLE a.b.c (min_file_size_mb=1, target_file_size_mb=5, max_file_size_mb=6)", + "OPTIMIZE TABLE a.b.c (min_file_size_mb=200, max_file_size_mb=300)", + "OPTIMIZE TABLE a.b.c (min_file_size_mb=0)" + }) + + void testValidOptions(String query) { + assertDoesNotThrow(() -> getValidOptimizeOptions(query)); + } + + @ParameterizedTest + @MethodSource("invalidOptionQueries") + void testInvalidOptions(Pair<String, String> test) { + assertThatThrownBy(() -> getValidOptimizeOptions(test.getKey())).isInstanceOf(exceptionType).hasMessage(test.getValue()); + } + + static List<Pair<String, String>> invalidOptionQueries() { + return Arrays.asList( + Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, min_file_size_mb=3)", "Value of TARGET_FILE_SIZE_MB [2] cannot be less than MIN_FILE_SIZE_MB [3]."), + Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=270, min_file_size_mb=269)", "Value of TARGET_FILE_SIZE_MB [256] cannot be less than MIN_FILE_SIZE_MB [269]."), + Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, max_file_size_mb=1)", "Value of TARGET_FILE_SIZE_MB [2] cannot be greater than MAX_FILE_SIZE_MB [1]."), + Pair.of("OPTIMIZE TABLE a.b.c (min_file_size_mb=2, max_file_size_mb=26)", "Value of TARGET_FILE_SIZE_MB [256] cannot be greater than MAX_FILE_SIZE_MB [26]."), + Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=2, min_file_size_mb=5)", "Value of MIN_FILE_SIZE_MB [5] cannot be greater than MAX_FILE_SIZE_MB [2]."), + Pair.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2, min_file_size_mb=5)", "Value of MIN_FILE_SIZE_MB [5] cannot be greater than MAX_FILE_SIZE_MB [3]."), + Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=0)", "MAX_FILE_SIZE_MB [0] should be a positive integer value."), + Pair.of("OPTIMIZE TABLE a.b.c (min_input_files=0)", "Value of MIN_INPUT_FILES [0] cannot be less than 1."), + Pair.of("OPTIMIZE TABLE a.b.c (min_input_files=-2)", "Value of MIN_INPUT_FILES [-2] cannot be less than 1."), + Pair.of("OPTIMIZE TABLE a.b.c (max_file_size_mb=-1200)", "MAX_FILE_SIZE_MB [-1200] should be a positive integer value."), + Pair.of("OPTIMIZE TABLE a.b.c (min_file_size_mb=-1050)", "MIN_FILE_SIZE_MB [-1050] should be a non-negative integer value."), + Pair.of("OPTIMIZE
TABLE a.b.c (target_file_size_mb=-256)", "TARGET_FILE_SIZE_MB [-256] should be a positive integer value.") + ); + } + + private static OptimizeOptions getValidOptimizeOptions(String toParse) throws Exception { + SqlOptimize sqlOptimize = parseToSqlOptimizeNode(toParse); + //return Optimize Options if all the inputs are valid else throw error. + mockOptionManager = Mockito.mock(OptionManager.class); + when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_TARGET_FILE_SIZE_MB)).thenReturn(256L); + when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(0.75); + when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MAXIMUM_FILE_SIZE_DEFAULT_RATIO)).thenReturn(1.8); + when(mockOptionManager.getOption(ExecConstants.OPTIMIZE_MINIMUM_INPUT_FILES)).thenReturn(5L); + when(mockOptionManager.getOption(ExecConstants.ENABLE_ICEBERG)).thenReturn(true); + + return OptimizeOptions.createInstance(mockOptionManager, sqlOptimize, true); + } + + private static SqlOptimize parseToSqlOptimizeNode(String toParse) throws SqlParseException { + ParserConfig config = new ParserConfig(Quoting.DOUBLE_QUOTE, 255, PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT.getDefault().getBoolVal()); + SqlParser parser = SqlParser.create(toParse, config); + return (SqlOptimize) parser.parseStmt(); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/refresh/TestFileSystemFullRefreshPlanBuilder.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/refresh/TestFileSystemFullRefreshPlanBuilder.java index 761c0d6873..169c91d50c 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/refresh/TestFileSystemFullRefreshPlanBuilder.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/handlers/refresh/TestFileSystemFullRefreshPlanBuilder.java @@ -16,7 +16,6 @@ package com.dremio.exec.planner.sql.handlers.refresh; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import java.nio.file.Paths; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestOptimizeParse.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestOptimizeParse.java index e935221e1c..20d07faa3a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestOptimizeParse.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestOptimizeParse.java @@ -15,18 +15,17 @@ */ package com.dremio.exec.planner.sql.parser; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; -import java.util.Arrays; -import java.util.Collection; +import java.util.stream.Stream; import org.apache.calcite.avatica.util.Quoting; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.ParserConfig; @@ -34,45 +33,36 @@ /** * Tests for optimize command */ -@RunWith(Parameterized.class) public class TestOptimizeParse { - @Parameterized.Parameters - public static Collection data() { - return 
Arrays.asList(new Object[][]{ - {"OPTIMIZE TABLE a.b.c", true}, - {"OPTIMIZE TABLE a.b.c USING BIN_PACK", true}, - {"OPTIMIZE TABLE a.b.c (target_file_size_mb=2)", true}, - {"OPTIMIZE TABLE a.b.c USING BIN_PACK (target_file_size_mb=2)", true}, - {"OPTIMIZE TABLE a.b.c REWRITE DATA USING BIN_PACK", true}, - {"OPTIMIZE a.b.c", false}, // No table keyword - {"OPTIMIZE TABLE WHERE id=5", false}, // No table name - {"OPTIMIZE TABLE a.b.c USING SORT", false}, // SORT not supported - {"OPTIMIZE TABLE a.b.c WHERE id=5", false}, // WHERE conditions not supported - {"OPTIMIZE TABLE a.b.c (unknown_file_size=2)", false}, // Invalid option - {"OPTIMIZE TABLE a.b.c (target_file_size_bytes=2)", false}, // Old options should not work - {"OPTIMIZE TABLE a.b.c (target_file_size_mb=0.2)", false}, // Options must be numeric - {"OPTIMIZE TABLE a.b.c (target_file_size_mb=COUNT(col_name))", false}, // Options must be literal - {"OPTIMIZE TABLE a.b.c (target_file_size_mb=2) WHERE id=5", false}, // Where clause must be before options - {"OPTIMIZE TABLE a.b.c REWRITE MANIFESTS", false}, // REWRITE MANIFESTS not supported - {"OPTIMIZE TABLE a.b.c REWRITE MANIFESTS WHERE id=5", false}, // Where clause not allowed when rewriting manifests - {"OPTIMIZE TABLE a.b.c REWRITE MANIFESTS (target_file_size_mb=2)", false}, // Options not allowed when rewriting manifests - {"OPTIMIZE TABLE a.b.c REWRITE DATA target_file_size_mb=2", false}, // Options must be enclosed in parentheses - {"OPTIMIZE TABLE a.b.c REWRITE MANIFESTS WHERE id=5 (target_file_size_mb=2)", false}, // Where clause and options not allowed when rewriting manifests - {"OPTIMIZE TABLE a.b.c REWRITE DATA USING SORT (target_file_size_mb=2, min_input_files=5 WHERE id=5)", false} // Where clause must be before options - }); + public static Stream data() { + return Stream.of( + Arguments.of("OPTIMIZE TABLE a.b.c", true), + Arguments.of("OPTIMIZE TABLE a.b.c USING BIN_PACK", true), + Arguments.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2)", true), + Arguments.of("OPTIMIZE TABLE a.b.c USING BIN_PACK (target_file_size_mb=2)", true), + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE DATA USING BIN_PACK", true), + Arguments.of("OPTIMIZE a.b.c", false), // No table keyword + Arguments.of("OPTIMIZE TABLE WHERE id=5", false), // No table name + Arguments.of("OPTIMIZE TABLE a.b.c USING SORT", false), // SORT not supported + Arguments.of("OPTIMIZE TABLE a.b.c WHERE id=5", false), + Arguments.of("OPTIMIZE TABLE a.b.c FOR PARTITIONS (id=5)", true), + Arguments.of("OPTIMIZE TABLE a.b.c (unknown_file_size=2)", false), // Invalid option + Arguments.of("OPTIMIZE TABLE a.b.c (target_file_size_bytes=2)", false), // Old options should not work + Arguments.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=0.2)", false), // Options must be numeric + Arguments.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=COUNT(col_name))", false), // Options must be literal + Arguments.of("OPTIMIZE TABLE a.b.c (target_file_size_mb=2) WHERE id=5", false), // Where clause must be before options + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE MANIFESTS WHERE id=5", false), // Where clause not allowed when rewriting manifests + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE MANIFESTS (target_file_size_mb=2)", false), // Options not allowed when rewriting manifests + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE DATA target_file_size_mb=2", false), // Options must be enclosed in parentheses + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE MANIFESTS WHERE id=5 (target_file_size_mb=2)", false), // Where clause and options not 
allowed when rewriting manifests + Arguments.of("OPTIMIZE TABLE a.b.c REWRITE DATA USING SORT (target_file_size_mb=2, min_input_files=5 WHERE id=5)", false) // Where clause must be before options + ); } - private String query; - private boolean shouldSucceed; - - public TestOptimizeParse(String query, boolean shouldSucceed) { - this.query = query; - this.shouldSucceed = shouldSucceed; - } - - @Test - public void testOptimizeTableParseVariants() throws SqlParseException { + @ParameterizedTest + @MethodSource("data") + public void testOptimizeTableParseVariants(String query, boolean shouldSucceed) throws SqlParseException { if (!shouldSucceed) { assertThrows(SqlParseException.class, () -> parse(query)); } else { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestSqlOptimize.java b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestSqlOptimize.java index 5c503f3f58..3ac97ecc05 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestSqlOptimize.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/planner/sql/parser/TestSqlOptimize.java @@ -15,6 +15,7 @@ */ package com.dremio.exec.planner.sql.parser; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -24,9 +25,13 @@ import org.apache.calcite.avatica.util.Quoting; import org.apache.calcite.config.NullCollation; import org.apache.calcite.sql.SqlDialect; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; +import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.pretty.SqlPrettyWriter; import org.junit.Test; @@ -39,6 +44,56 @@ public class TestSqlOptimize { new SqlDialect(SqlDialect.DatabaseProduct.UNKNOWN, "Dremio", Character.toString(SqlUtils.QUOTE), NullCollation.FIRST); private SqlPrettyWriter writer = new SqlPrettyWriter(DREMIO_DIALECT); + @Test + public void testOperandSetter() throws SqlParseException { + SqlNode parsed = parse("OPTIMIZE TABLE a.b.c"); + assertTrue(parsed instanceof SqlOptimize); + SqlOptimize optimizeParsed = (SqlOptimize) parsed; + + optimizeParsed.setOperand(0, optimizeParsed.getTable().setName(0,"d")); + optimizeParsed.setOperand(3, SqlLiteral.createSymbol(CompactionType.SORT, SqlParserPos.ZERO)); + optimizeParsed.setOperand(4, new SqlIdentifier("e", SqlParserPos.ZERO)); + optimizeParsed.setOperand(5, SqlNodeList.of(new SqlIdentifier("f", SqlParserPos.ZERO))); + optimizeParsed.setOperand(6, SqlNodeList.of(new SqlIdentifier("g", SqlParserPos.ZERO))); + + optimizeParsed.unparse(writer, 0, 0); + String actualString = writer.toString(); + String expectedString = "OPTIMIZE TABLE \"d\".\"b\".\"c\" USING SORT FOR PARTITIONS \"e\" (\"f\" = \"g\")"; + assertEquals(expectedString, actualString); + } + + @Test + public void testOperandSetterRewriteManifest() throws SqlParseException { + SqlNode parsed = parse("OPTIMIZE TABLE a.b.c"); + assertTrue(parsed instanceof SqlOptimize); + SqlOptimize optimizeParsed = (SqlOptimize) parsed; + + optimizeParsed.setOperand(0, optimizeParsed.getTable().setName(0,"d")); + optimizeParsed.setOperand(1, SqlLiteral.createBoolean(true, SqlParserPos.ZERO)); + optimizeParsed.setOperand(2, SqlLiteral.createBoolean(false, SqlParserPos.ZERO)); + + 
optimizeParsed.unparse(writer, 0, 0); + String actualString = writer.toString(); + String expectedString = "OPTIMIZE TABLE \"d\".\"b\".\"c\" REWRITE MANIFESTS"; + assertEquals(expectedString, actualString); + } + + @Test + public void testOperandSetterRewriteDataFiles() throws SqlParseException { + SqlNode parsed = parse("OPTIMIZE TABLE a.b.c"); + assertTrue(parsed instanceof SqlOptimize); + SqlOptimize optimizeParsed = (SqlOptimize) parsed; + + optimizeParsed.setOperand(0, optimizeParsed.getTable().setName(0,"d")); + optimizeParsed.setOperand(1, SqlLiteral.createBoolean(false, SqlParserPos.ZERO)); + optimizeParsed.setOperand(2, SqlLiteral.createBoolean(true, SqlParserPos.ZERO)); + + optimizeParsed.unparse(writer, 0, 0); + String actualString = writer.toString(); + String expectedString = "OPTIMIZE TABLE \"d\".\"b\".\"c\" REWRITE DATA USING BIN_PACK"; + assertEquals(expectedString, actualString); + } + @Test public void testBasic() throws SqlParseException { SqlNode parsed = parse("OPTIMIZE TABLE a.b.c"); @@ -47,11 +102,11 @@ public void testBasic() throws SqlParseException { parsed.unparse(writer, 0, 0); String actualString = writer.toString(); - String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" REWRITE DATA USING BIN_PACK"; + String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" USING BIN_PACK"; assertEquals(actualString, expectedUnparsedString); assertEquals("Table name does not match.", "a.b.c", optimizeParsed.getTable().toString()); - assertFalse("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); + assertTrue("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); assertEquals("Compaction type does not match,", CompactionType.BIN_PACK, optimizeParsed.getCompactionType()); } @@ -63,11 +118,11 @@ public void testBasicWithBinPack() throws SqlParseException { parsed.unparse(writer, 0, 0); String actualString = writer.toString(); - String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" REWRITE DATA USING BIN_PACK"; + String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" USING BIN_PACK"; assertEquals(actualString, expectedUnparsedString); assertEquals("Table name does not match.", "a.b.c", optimizeParsed.getTable().toString()); - assertFalse("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); + assertTrue("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); assertEquals("Compaction type does not match,", CompactionType.BIN_PACK, optimizeParsed.getCompactionType()); } @@ -79,11 +134,11 @@ public void testBasicWithOptions() throws SqlParseException { parsed.unparse(writer, 0, 0); String actualString = writer.toString(); - String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" REWRITE DATA USING BIN_PACK (\"target_file_size_mb\" = 2)"; + String expectedUnparsedString = "OPTIMIZE TABLE \"a\".\"b\".\"c\" USING BIN_PACK (\"target_file_size_mb\" = 2)"; assertEquals(actualString, expectedUnparsedString); assertEquals("Table name does not match.", "a.b.c", optimizeParsed.getTable().toString()); - assertFalse("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); + assertTrue("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); assertEquals("Compaction type does not match,", CompactionType.BIN_PACK, optimizeParsed.getCompactionType()); assertEquals("Options do not match.", "target_file_size_mb", 
optimizeParsed.getOptionNames().get(0).toString()); assertEquals("Options do not match.", "2", optimizeParsed.getOptionValues().get(0).toString()); @@ -102,6 +157,7 @@ public void testRewriteDataWithBinPack() throws SqlParseException { assertEquals("Table name does not match.", "a.b.c", optimizeParsed.getTable().toString()); assertFalse("RewriteManifests is incorrect.", optimizeParsed.getRewriteManifests().booleanValue()); + assertTrue("RewriteDataFiles is incorrect.", optimizeParsed.getRewriteDataFiles().booleanValue()); assertEquals("CompactionType does not match.", CompactionType.BIN_PACK, optimizeParsed.getCompactionType()); } @@ -148,7 +204,16 @@ public void testRewriteDataWithBinPackMultipleOptions() throws SqlParseException assertEquals("Unset options should be empty", Optional.empty(), optimizeParsed.getMinFileSize()); } - private SqlNode parse(String toParse) throws SqlParseException { + @Test + public void testDataOptionsWithRewriteManifests() { + assertThatThrownBy(() -> parse("OPTIMIZE TABLE a.b.c REWRITE MANIFESTS USING BIN_PACK")) + .isInstanceOf(SqlParseException.class); + + assertThatThrownBy(() -> parse("OPTIMIZE TABLE a.b.c REWRITE MANIFESTS (\"target_file_size_mb\" = 2, \"min_input_files\" = 5)")) + .isInstanceOf(SqlParseException.class); + } + + public static SqlNode parse(String toParse) throws SqlParseException { ParserConfig config = new ParserConfig(Quoting.DOUBLE_QUOTE, 255, PlannerSettings.FULL_NESTED_SCHEMA_SUPPORT.getDefault().getBoolVal()); SqlParser parser = SqlParser.create(toParse, config); return parser.parseStmt(); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/server/SabotNode.java b/sabot/kernel/src/test/java/com/dremio/exec/server/SabotNode.java index d4304282d2..d5377ce56d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/server/SabotNode.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/server/SabotNode.java @@ -849,7 +849,8 @@ ForemenWorkManager getForemenWorkManager( Provider maestroService, Provider jobTelemetryClient, Provider forwarderProvider, - Provider ruleBasedEngineSelectorProvider + Provider ruleBasedEngineSelectorProvider, + Provider requestContextProvider ) { final BufferAllocator jobResultsAllocator = bootstrap.getAllocator().newChildAllocator("JobResultsGrpcServer", 0, Long.MAX_VALUE); return new ForemenWorkManager( @@ -861,7 +862,8 @@ ForemenWorkManager getForemenWorkManager( forwarderProvider, TracerFacade.INSTANCE, ruleBasedEngineSelectorProvider, - jobResultsAllocator + jobResultsAllocator, + requestContextProvider ); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/server/TestTpcdsSf1Leaks.java b/sabot/kernel/src/test/java/com/dremio/exec/server/TestTpcdsSf1Leaks.java index d7d0125ec9..00c29949a0 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/server/TestTpcdsSf1Leaks.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/server/TestTpcdsSf1Leaks.java @@ -17,7 +17,6 @@ import static com.dremio.exec.ExecConstants.SLICE_TARGET; import static com.dremio.exec.ExecConstants.SLICE_TARGET_DEFAULT; -import static org.junit.Assert.fail; import org.junit.BeforeClass; import org.junit.Ignore; @@ -67,12 +66,7 @@ public void test() throws Exception { final String query = getFile("tpcds-sf1/q73.sql"); for (int i = 0; i < 20; i++) { System.out.printf("%nRun #%d%n", i+1); - - try { - runSQL(query); - } catch (final Exception e) { - fail("query failed: " + e.getMessage()); - } + runSQL(query); } }finally { setSessionOption(SLICE_TARGET, Long.toString(SLICE_TARGET_DEFAULT)); diff --git 
a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableAddColumns.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableAddColumns.java index ebb61b0b33..0bae1dd49d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableAddColumns.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableAddColumns.java @@ -21,9 +21,10 @@ import org.junit.Test; import com.dremio.BaseTestQuery; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.test.UserExceptionAssert; public class TestAlterTableAddColumns extends BaseTestQuery { - @Test public void badSql() { String[] queries = { @@ -31,8 +32,11 @@ public void badSql() { "ALTER TABLE ADD COLUMNS(col1 varchar)", "ALTER TABLE tbl ADD COLUMNS()" }; + for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query"); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableChangeColumn.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableChangeColumn.java index b7bdef1f90..1ace223445 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableChangeColumn.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableChangeColumn.java @@ -21,6 +21,8 @@ import org.junit.Test; import com.dremio.BaseTestQuery; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.test.UserExceptionAssert; public class TestAlterTableChangeColumn extends BaseTestQuery { @@ -32,7 +34,9 @@ public void badSql() { "ALTER TABLE %s.%s CHANGE COLUMN version commit_message varchar", "ALTER TABLE CHANGE col1 col2 varchar"}; for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableDropColumn.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableDropColumn.java index fc1caf433a..1352284960 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableDropColumn.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableDropColumn.java @@ -21,6 +21,8 @@ import org.junit.Test; import com.dremio.BaseTestQuery; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.test.UserExceptionAssert; public class TestAlterTableDropColumn extends BaseTestQuery { @@ -30,7 +32,9 @@ public void badSql() { "ALTER TABLE tbl DROP COLUMN", "ALTER TABLE DROP COLUMN col1"}; for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableSetOption.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableSetOption.java index 4b8afcf421..17ce08fa1b 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableSetOption.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableSetOption.java @@ -21,7 +21,9 @@ import com.dremio.BaseTestQuery; import com.dremio.config.DremioConfig; +import com.dremio.exec.proto.UserBitShared; import com.dremio.test.TemporarySystemProperties; +import com.dremio.test.UserExceptionAssert; public class TestAlterTableSetOption extends BaseTestQuery { @@ -44,7 +46,9 @@ public void badSql() { "ALTER SESSION tbl 
SET hive.parquet.enforce_varchar_width = ON", }; for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query"); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableToggleSchemaLearning.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableToggleSchemaLearning.java index 4cac15f940..45efdb0d8a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableToggleSchemaLearning.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestAlterTableToggleSchemaLearning.java @@ -27,7 +27,9 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.ParserConfig; +import com.dremio.exec.proto.UserBitShared; import com.dremio.test.TemporarySystemProperties; +import com.dremio.test.UserExceptionAssert; public class TestAlterTableToggleSchemaLearning extends BaseTestQuery { @@ -51,7 +53,9 @@ public void badSql() { "ALTER TABLE ENABLE SCHEMA LEARNING", "ALTER TABLE tbl ENABLE SCHEMALEARNING"}; for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCTAS.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCTAS.java index fad13890a0..0a301f7743 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCTAS.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCTAS.java @@ -67,7 +67,6 @@ import com.dremio.exec.store.iceberg.IcebergFormatMatcher; import com.dremio.exec.store.iceberg.SchemaConverter; import com.dremio.exec.store.iceberg.hadoop.IcebergHadoopModel; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.store.parquet.SingletonParquetFooterCache; import com.dremio.io.file.FileSystem; @@ -681,7 +680,7 @@ public boolean accept(File dir, String name) { FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); + IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(fileSystemPlugin); when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); Table table = icebergHadoopModel.getIcebergTable(icebergHadoopModel.getTableIdentifier(tableFolder.toString())); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.name()).build(); @@ -897,8 +896,7 @@ public void testCreateTableCommandInvalidPath() throws Exception { final String createTableQuery = String.format("CREATE TABLE %s(id int, code int)", tblName); UserExceptionAssert.assertThatThrownBy(() -> test(createTableQuery)) .hasMessageContaining(String.format("Invalid path. 
Given path, [%s] is not valid.", tblName)); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tblName)); } } @@ -934,13 +932,12 @@ public void testIncorrectCatalog() throws Exception { " AS SELECT n_nationkey, n_regionkey from cp.\"tpch/nation.parquet\" limit 1", TEMP_SCHEMA_HADOOP, newTblName); test(ctasQuery); - //Try with wrong (nessie) catalog + // Try with wrong catalog (TEMP_SCHEMA is configured to use Nessie catalog) File tableFolder = new File(getDfsTestTmpSchemaLocation(), newTblName); - IcebergModel icebergModel = getIcebergModel(tableFolder, IcebergCatalogType.NESSIE); + IcebergModel icebergModel = getIcebergModel(TEMP_SCHEMA); UserExceptionAssert.assertThatThrownBy(() -> icebergModel.getIcebergTable(icebergModel.getTableIdentifier(tableFolder.getPath()))) .hasMessageContaining("Failed to load the Iceberg table."); } - } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCreateTable.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCreateTable.java index 94a6ce351e..4d3a7630d6 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCreateTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestCreateTable.java @@ -236,8 +236,7 @@ public void testDroppingOfMapTypeColumn() throws Exception{ .baselineValues(2) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), table1)); } } @@ -306,8 +305,7 @@ public void testReadingFromRootPointer() throws Exception{ .baselineValues(1, 2) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), table1)); } } @@ -350,8 +348,7 @@ public void createTableIfNotExists() throws Exception{ .baselineValues(true,String.format("Table [%s.%s] already exists.", TEMP_SCHEMA, newTblName)) .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName)); } } @@ -390,8 +387,7 @@ public void ctasIfNotExists() throws Exception{ .expectsEmptyResultSet() .build() .run(); - } - finally { + } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName1)); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName2)); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName3)); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestPromotionOfFilesWithoutExtension.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestPromotionOfFilesWithoutExtension.java new file mode 100644 index 0000000000..c64bfec4aa --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestPromotionOfFilesWithoutExtension.java @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.exec.sql; + + +import org.junit.Test; + +import com.dremio.BaseTestQuery; +import com.dremio.common.util.FileUtils; +import com.dremio.test.UserExceptionAssert; + +/* + * Queries .csv files/folders whose filenames lack the ".csv" extension, verifying the expected error message. + */ +public class TestPromotionOfFilesWithoutExtension extends BaseTestQuery { + + + + /* + * Tests a .csv file without a .csv extension. + * Expected to fail and propagate an error message. + */ + @Test + public void testPromoteFileWithoutExtension() throws Exception { + final String path = "/store/text/testWithoutExtension"; + String root = FileUtils.getResourceAsFile(path).toURI().toString(); + query(root, String.format("SELECT * FROM dfs.\"%s\"", root)); + } + + + /* + * Tests a folder of .csv files without a .csv extension. + * Expected to fail and propagate an error message. + */ + @Test + public void testPromoteFolderOfFilesWithoutExtension() throws Exception { + final String path = "/store/text/FolderWithoutExtension"; + String root = FileUtils.getResourceAsFile(path).toURI().toString(); + query(root, String.format("SELECT * FROM dfs.\"%s\"", root)); + } + + + /* + * Tests a folder of .csv files without a .csv extension, using the ALTER PDS command. + * Expected to fail and propagate an error message. + */ + @Test + public void testAlterPdsFileWithoutExtension() throws Exception { + final String path = "/store/text/FolderWithoutExtension"; + String root = FileUtils.getResourceAsFile(path).toURI().toString(); + query(root, String.format("ALTER PDS dfs.\"%s\" REFRESH METADATA AUTO PROMOTION", root)); + } + + + private void query(String path, String statement) throws Exception { + String msg = "The file format for 'dfs.\"%s\"' could not be identified. In order for automatic format detection to succeed, " + + "files must include a file extension.
Alternatively, manual promotion can be used to explicitly specify the format."; + String error = String.format(msg, path); + UserExceptionAssert.assertThatThrownBy(() -> { + runSQL(statement); + }).isInstanceOf(Exception.class).hasMessageContaining(error); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestStoreQueryResults.java b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestStoreQueryResults.java index a18a7e8c23..7f658fb356 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/sql/TestStoreQueryResults.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/sql/TestStoreQueryResults.java @@ -20,6 +20,7 @@ import java.io.File; import java.util.Arrays; import java.util.Collections; +import java.util.Map; import java.util.concurrent.CountDownLatch; import org.apache.calcite.plan.RelOptPlanner; @@ -102,7 +103,8 @@ public void execDataArrived(RpcOutcomeListener outcomeListener, QueryWritab } @Override - public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) { + public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, + long millisTaken, Map<String, Long> timeBreakdownPerRule) { if (phase == PlannerPhase.PHYSICAL) { if (checkPlanWriterDistribution) { // Visit the tree and check that all the WriterCommitter is a singleton and its input is also singleton diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/TestDirListingRecordReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/TestDirListingRecordReader.java index 33f9fc4711..7032472efd 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/TestDirListingRecordReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/TestDirListingRecordReader.java @@ -17,7 +17,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -572,9 +571,6 @@ public void testDirListReaderWithDifferentOperatingAndRootPath() throws Exceptio assertEquals(extractPartitionData(outputPartInfo.getObject(2)), "PartitionData{dir0=bar, dir1=subBar1}"); assertEquals(mtime.get(2), 33); assertEquals(size.get(2), 1010); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -750,8 +746,7 @@ public void testDirListingParserPartitionPathsError1() throws IOException, Execu try { int noRecordsRead = reader.next(); - } - catch (Exception e) { + } catch (Exception e) { assertTrue(e instanceof UserException); assertEquals(e.getMessage(), "Failed to list files of directory /randompath/"); assertEquals(e.getCause().getMessage(), "All the directories should have = in the partition structure.
Path /randompath/id/data=value/file2.parquet"); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/TestMaterializedDatasetTableProvider.java b/sabot/kernel/src/test/java/com/dremio/exec/store/TestMaterializedDatasetTableProvider.java index c203a475af..838538c7ed 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/TestMaterializedDatasetTableProvider.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/TestMaterializedDatasetTableProvider.java @@ -66,7 +66,7 @@ public void testTimeTravelFlagOnScan() throws Exception { assertThat(rel).isInstanceOf(ScanCrel.class); ScanCrel scan = (ScanCrel) rel; - assertThat(scan.isSubstitutable()).isFalse(); + assertThat(scan.isSubstitutable()).isTrue(); provider = getProviderWithOptions(DatasetRetrievalOptions.DEFAULT); table = provider.get(); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/TestMetadataRefresh.java b/sabot/kernel/src/test/java/com/dremio/exec/store/TestMetadataRefresh.java index d55caee22c..4ef548e955 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/TestMetadataRefresh.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/TestMetadataRefresh.java @@ -28,8 +28,10 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.store.dfs.InternalFileConf; import com.dremio.service.namespace.source.proto.SourceConfig; +import com.dremio.test.UserExceptionAssert; public class TestMetadataRefresh extends BaseTestQuery { @@ -45,7 +47,9 @@ public void badSql() { "ALTER TABLE tbl REFRESH METADATA FOR FILES ()", "ALTER TABLE tbl REFRESH METADATA FOR FILES LAZY UPDATE"}; for (String q : queries) { - errorMsgTestHelper(q, "Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(q)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } } @@ -215,7 +219,7 @@ public void testRefreshWithoutAutoPromote() throws Exception { fail("Source should be unavailable."); } catch (Exception e) { assertTrue(e.getMessage() - .contains(String.format(" '%s' not found", name))); + .contains(String.format(" '%s.blue.metadata_refresh' not found", name))); // AUTO PROMOTION, data source should be promoted and Table metadata should be refreshed diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/TestOutputMutator.java b/sabot/kernel/src/test/java/com/dremio/exec/store/TestOutputMutator.java index d16487eaec..2435fe8069 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/TestOutputMutator.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/TestOutputMutator.java @@ -52,6 +52,7 @@ public TestOutputMutator(BufferAllocator allocator) { this.bufferManager = new BufferManagerImpl(allocator); } + @Override public void removeField(Field field) throws SchemaChangeException { ValueVector vector = fieldVectorMap.remove(field.getName().toLowerCase()); if (vector == null) { @@ -71,6 +72,7 @@ public void finalizeContainer(int recordCount){ container.setRecordCount(recordCount); } + @Override public Iterator<VectorWrapper<?>> iterator() { return container.iterator(); } @@ -123,14 +125,17 @@ public CallBack getCallBack() { return null; } + @Override public VectorContainer getContainer() { return container; } + @Override public boolean getAndResetSchemaChanged() { return false; } + @Override public boolean getSchemaChanged() { return false; } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/TestSchemaAggTableFunction.java
b/sabot/kernel/src/test/java/com/dremio/exec/store/TestSchemaAggTableFunction.java index 329446625c..8566d0842f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/TestSchemaAggTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/TestSchemaAggTableFunction.java @@ -22,7 +22,6 @@ import static com.dremio.common.expression.CompleteType.INT; import static com.dremio.common.expression.CompleteType.VARCHAR; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import java.util.Arrays; @@ -94,10 +93,6 @@ public void TestSchemaAggTableFunctionOutputSchema() throws Exception { closer.addAll(output); assertEquals(new BatchSchema(fieldList), output.getSchema()); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test @@ -173,13 +168,8 @@ public void TestSchemaAggTableFunctionOutputValues() throws Exception { assertEquals(outputFloatVector.getValueCount(), 4); assertEquals(outputSchemaVector.getValueCount(), 1); - compareVectors((VectorContainer) output, fieldList.get(0), IntVector.class, Arrays.asList(12, 13, 14, 15)); compareVectors((VectorContainer) output, fieldList.get(1), Float4Vector.class, Arrays.asList(2.0f, 3.0f, 4.0f, 5.0f)); - - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -235,10 +225,6 @@ public void TestSchemaAggUpCasting() throws Exception { assertEquals(BatchSchema.deserialize(outVarBinaryVector.get(0)).toJSONString(), schema2.toJSONString()); tableFunction.closeRow(); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test @@ -295,10 +281,6 @@ public void TestSchemaAggMultipleBatches() throws Exception { assertEquals(BatchSchema.deserialize(outVarBinaryVector.get(0)).toJSONString(), finalSchema.toJSONString()); tableFunction.closeRow(); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test @@ -341,10 +323,6 @@ public void TestSchemaAggValueVectorHasLessRecords() throws Exception { assertEquals(BatchSchema.deserialize(outVarBinaryVector.get(0)).toJSONString(), finalSchema.toJSONString()); tableFunction.closeRow(); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } private TableFunctionConfig getConfig() { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeFormatDatasetAccessor.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeFormatDatasetAccessor.java new file mode 100644 index 0000000000..12bcba4ac3 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeFormatDatasetAccessor.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dremio.exec.store.deltalake; + +import static org.junit.Assert.assertFalse; +import static org.mockito.Mockito.mock; + +import java.io.File; +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.junit.Test; + +import com.dremio.connector.metadata.BytesOutput; +import com.dremio.connector.metadata.DatasetMetadata; +import com.dremio.exec.hadoop.HadoopFileSystem; +import com.dremio.exec.store.dfs.FileSelection; +import com.dremio.exec.store.dfs.FileSystemPlugin; +import com.dremio.io.file.FileSystem; +import com.dremio.io.file.Path; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.dataset.proto.DatasetType; + +public class TestDeltaLakeFormatDatasetAccessor { + + @Test + public void testMetadataStaleCheckNoSignature() throws IOException { + FileSystem fs = HadoopFileSystem.getLocal(new Configuration()); + FileSelection selection = FileSelection.create(fs, Path.of(new File("dummy").getAbsolutePath())); + BytesOutput signature = BytesOutput.NONE; + DatasetType dt = DatasetType.PHYSICAL_DATASET_SOURCE_FILE; + FileSystemPlugin fileSystemPlugin = mock(FileSystemPlugin.class); + DeltaLakeFormatPlugin deltaLakeFormatPlugin = mock(DeltaLakeFormatPlugin.class); + NamespaceKey key = new NamespaceKey("dummy"); + DeltaLakeFormatDatasetAccessor deltaLakeFormatDatasetAccessor = new DeltaLakeFormatDatasetAccessor(dt, fs, fileSystemPlugin, selection, key, deltaLakeFormatPlugin); + + // when there is no read signature, metadataValid should return false + assertFalse(deltaLakeFormatDatasetAccessor.metadataValid(signature, deltaLakeFormatDatasetAccessor, mock(DatasetMetadata.class), fs)); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeTable.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeTable.java index 005655509d..cb63543efe 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLakeTable.java @@ -172,7 +172,7 @@ public void testEndingWithMultiPartCheckpointDatasetReadLatest() throws IOExcept assertEquals(snap.getVersionId(), 11); assertEquals(snap.getSchema(), "{\"type\":\"struct\",\"fields\":[{\"name\":\"intcol\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}, {\"name\":\"longcol\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}}, {\"name\":\"stringcol\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}}]}"); assertEquals(snap.getNetFilesAdded(), 5); - assertEquals(snap.getNetBytesAdded(), 4739); + assertEquals(snap.getNetBytesAdded(), 4737); } private String getPath(DatasetSplit datasetSplit) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLastCheckPointReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLastCheckPointReader.java index 38dda24836..570a7b1cbd 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLastCheckPointReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLastCheckPointReader.java @@ -16,6 +16,7 @@ package com.dremio.exec.store.deltalake; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; @@ -44,4 +45,17 @@ public void testVersionRead() throws IOException { assertEquals(lastCheckPoint.get(), (Object)10L); } + + @Test + public void testVersionReadForEmptyCheckPoint() { + try { + 
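// Reading the empty _last_checkpoint file is expected to fail while parsing; the catch below asserts the wrapped error and its cause. +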
File f = FileUtils.getResourceAsFile("/deltalake/empty_last_checkpoint"); + FileSystem fs = HadoopFileSystem.getLocal(new Configuration()); + Path path = Path.of(f.getAbsolutePath()); + Optional<Long> lastCheckPoint = DeltaLastCheckPointReader.getLastCheckPoint(fs, path).getKey(); + } catch (Exception e) { + assertTrue(e.getMessage().contains("Failed to read _last_checkpoint file")); + assertTrue(e.getCause() instanceof IllegalArgumentException); + } + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLogCheckpointParquetReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLogCheckpointParquetReader.java index f793491aab..a9235c11f9 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLogCheckpointParquetReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaLogCheckpointParquetReader.java @@ -163,7 +163,7 @@ public void testMultiPartCheckpointParquet() throws IOException { assertTrue(snapshot.containsCheckpoint()); assertEquals(4, snapshot.getNetFilesAdded()); assertEquals(4, snapshot.getNetOutputRows()); - assertEquals(3792, snapshot.getNetBytesAdded()); + assertEquals(3790, snapshot.getNetBytesAdded()); List<DatasetSplit> splits = snapshot.getSplits(); assertEquals(2, splits.size()); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaMetadataFetchJob.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaMetadataFetchJob.java index 5e98243175..06d48b6f52 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaMetadataFetchJob.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaMetadataFetchJob.java @@ -101,8 +101,7 @@ public void testFileNotFound() throws IOException { try { DeltaLogSnapshot snapshot = job.get(); - } - catch (CompletionException e) { + } catch (CompletionException e) { assertTrue(e.getCause() instanceof DeltaMetadataFetchJob.InvalidFileException); } } @@ -122,8 +121,7 @@ public void testCommitWrittenAfterReadStart() throws IOException { try { DeltaLogSnapshot snapshot = job.get(); - } - catch (CompletionException e) { + } catch (CompletionException e) { assertTrue(e.getCause() instanceof DeltaMetadataFetchJob.InvalidFileException); } } @@ -140,8 +138,7 @@ public void testBadConfig() throws IOException { try { DeltaLogSnapshot snapshot = job.get(); - } - catch (CompletionException e) { + } catch (CompletionException e) { assertTrue(e.getCause() instanceof DeltaMetadataFetchJob.InvalidFileException); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaScan.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaScan.java index cd61d2ffeb..f064bc33e5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaScan.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaScan.java @@ -74,6 +74,7 @@ public void initFs() throws Exception { copyFromJar("deltalake/newPlanDataset", java.nio.file.Paths.get((testRootPath + "/newDataset"))); copyFromJar("deltalake/paritionenedNewPlan", java.nio.file.Paths.get((testRootPath + "/paritionenedNewPlan"))); copyFromJar("deltalake/commitInfoAtOnlyJson", java.nio.file.Paths.get((testRootPath + "/commitInfoAtOnlyJson"))); + copyFromJar("deltalake/deltaMixCharsName", java.nio.file.Paths.get((testRootPath + "/deltaMixCharsName"))); } @After @@ -458,4 +459,20 @@ public void testWithCommitInfoAtEndOnlyJson() throws Exception { .unOrdered().go(); } }
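+ // The deltaMixCharsName dataset (copied in initFs above) uses '+' and '/' in a column name and '+', '=', '?' and '%' in its values; such characters are percent-encoded in Delta log file paths, so this presumably exercises the reader's URL decoding of those paths.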
+ + @Test + public void testDeltaFileWithPlusSign () throws Exception { + try (AutoCloseable c = enableDeltaLake()) { + final String sql = "SELECT \"c1+c2/c3\" as col1 FROM dfs.tmp.deltalake.deltaMixCharsName"; + testBuilder() + .sqlQuery(sql) + .unOrdered() + .baselineColumns("col1") + .baselineValues("a b+c") + .baselineValues("a=b") + .baselineValues("a?b%c") + .unOrdered().go(); + } + } + } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaSnapshotListProcessor.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaSnapshotListProcessor.java index 46c3f26d35..de0aa6bc97 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaSnapshotListProcessor.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestDeltaSnapshotListProcessor.java @@ -37,8 +37,7 @@ public void testMissingVersionInBetween() { try { processor.findValidSnapshots(list); - } - catch (Exception e) { + } catch (Exception e) { assertTrue(e instanceof UserException); assertEquals(e.getMessage(), "Missing version file 20"); throw e; @@ -87,8 +86,7 @@ public void missingVersionZero() { DeltaSnapshotListProcessor processor = new DeltaSnapshotListProcessor(); try { processor.findValidSnapshots(list); - } - catch (Exception e) { + } catch (Exception e) { assertTrue(e instanceof IllegalStateException); assertEquals(e.getMessage(), "Commit Json for version 0 not found while reading metadata"); throw e; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestUtils.java b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestUtils.java index 88d9d33a36..72dc227731 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestUtils.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/deltalake/TestUtils.java @@ -47,8 +47,7 @@ public static boolean groupPresent(Group x, String s) { try { x.getGroup(s, 0); return true; - } - catch (Exception e) { + } catch (Exception e) { return false; } }; @@ -84,14 +83,12 @@ public static DeltaLogSnapshot makeShiftCheckpointReader(FileSystem fs, Path fil if(groupPresent(g, "metaData")) { Group r = g.getGroup("metaData", 0); schemaString = r.getString("schemaString", 0); - } - else if(groupPresent(g, "add")) { + } else if (groupPresent(g, "add")) { Group r = g.getGroup("add", 0); addedFiles.add(Path.of(r.getString("path", 0))); numFilesAdded ++; numOutputBytes += r.getLong("size", 0); - } - else if(groupPresent(g, "commitInfo")) { + } else if (groupPresent(g, "commitInfo")) { Group r = g.getGroup("commitInfo", 0).getGroup("operationMetrics", 0); numFilesAdded += r.getInteger("numFiles", 0); numOutputBytes += r.getInteger("numOutputBytes", 0); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestEasySplitGenTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestEasySplitGenTableFunction.java index 88174a60c0..6feadc6275 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestEasySplitGenTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestEasySplitGenTableFunction.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.dfs; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -68,7 +67,7 @@ private interface RowHandler { } @Test - public void testNoPartitionSplits() { + public void testNoPartitionSplits() throws Exception { try (VarCharVector pathVector = new 
VarCharVector(MetadataRefreshExecConstants.DirList.OUTPUT_SCHEMA.FILE_PATH, allocator); BigIntVector sizeVector = new BigIntVector(MetadataRefreshExecConstants.DirList.OUTPUT_SCHEMA.FILE_SIZE, allocator); BigIntVector mtimeVector = new BigIntVector(MetadataRefreshExecConstants.DirList.OUTPUT_SCHEMA.MODIFICATION_TIME, allocator); @@ -107,9 +106,6 @@ public void testNoPartitionSplits() { assertEquals(2, outgoingSplits.getValueCount()); assertSplit(extractSplit(outgoingSplits, 1), file2, 0L,file2Length); tableFunction.close(); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestPrefetchingIterator.java b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestPrefetchingIterator.java index 46142e65c0..bebfc0876f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestPrefetchingIterator.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestPrefetchingIterator.java @@ -24,7 +24,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; @@ -196,9 +195,6 @@ private void testIteratorWithFilterAddedInBetween(boolean fromRowGroupSplits) th assertFalse(it.hasNext()); assertEquals(5L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -238,9 +234,6 @@ private void testIteratorWithFilterAddedInBetweenPrefetch(boolean fromRowGroupSp assertFalse(it.hasNext()); assertEquals(5L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } it.close(); @@ -258,7 +251,7 @@ public void testIteratorWithFilterAddedInBetweenPrefetchAll() throws Exception { } @Test - public void testIteratorWithFilterNothingSkipped() { + public void testIteratorWithFilterNothingSkipped() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getMatchingNameValuePairs()); @@ -283,9 +276,6 @@ public void testIteratorWithFilterNothingSkipped() { inputStreamProvider = insertedCreator.getInputStreamProvider(); } assertEquals(0L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -307,9 +297,6 @@ public void testIteratorWithFilterAllSkipped(boolean fromRowGroupSplits) throws assertEquals(0, recordReader.next()); assertFalse(it.hasNext()); assertEquals(10L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -337,9 +324,6 @@ private void testIteratorWithFilterSomeSkipped(boolean fromRowGroupSplits) throw it.next(); } assertEquals(7L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -368,9 +352,6 @@ private void testIteratorWithFilterSomeSkippedPrefetch(boolean fromRowGroupSplit it.next(); } assertEquals(7L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - 
e.printStackTrace(); - fail(e.getMessage()); } it.close(); @@ -409,9 +390,6 @@ private void testMultipleFilters(boolean fromRowGroupSplits) throws Exception { it.next(); } assertEquals(8L , ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -422,7 +400,7 @@ public void testMultipleFilters() throws Exception { } @Test - public void testIteratorEmpty() { + public void testIteratorEmpty() throws Exception { CompositeReaderConfig readerConfig = mock(CompositeReaderConfig.class); when(readerConfig.getPartitionNVPairs(any(BufferAllocator.class), any(SplitAndPartitionInfo.class))) .thenReturn(getMatchingNameValuePairs()); @@ -437,9 +415,6 @@ public void testIteratorEmpty() { it.addRuntimeFilter(filter); assertFalse(it.hasNext()); assertEquals(0L, ctx.getStats().getLongStat(ScanOperator.Metric.NUM_PARTITIONS_PRUNED)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @@ -644,7 +619,7 @@ private ParquetSplitReaderCreatorIterator createSplitReaderCreator(OperatorConte when(config.getColumns()).thenReturn(Collections.singletonList(SchemaPath.getSimplePath("*"))); when(config.getFormatSettings()).thenReturn(FileConfig.getDefaultInstance()); when(optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR)).thenReturn("dir"); - when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean())).thenReturn(inputStreamProvider); + when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean(), any(), any())).thenReturn(inputStreamProvider); BlockMetaData blockMetaData = mock(BlockMetaData.class); when(footer.getBlocks()).thenReturn(Collections.singletonList(blockMetaData)); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestSplitGenTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestSplitGenTableFunction.java index b73bd5141b..178e87698c 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestSplitGenTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/dfs/TestSplitGenTableFunction.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.dfs; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -66,7 +65,7 @@ private interface RowHandler { } @Test - public void testNoPartitionSplits() { + public void testNoPartitionSplits() throws Exception { try (VarCharVector pathVector = new VarCharVector(DeltaConstants.SCHEMA_ADD_PATH, allocator); BigIntVector sizeVector = new BigIntVector(DeltaConstants.SCHEMA_ADD_SIZE, allocator); BigIntVector mtimeVector = new BigIntVector(DeltaConstants.SCHEMA_ADD_MODIFICATION_TIME, allocator); @@ -112,14 +111,11 @@ public void testNoPartitionSplits() { assertSplit(extractSplit(outgoingSplits, 1), "/test/file2.parquet", 0L,2054L, 2054L, currentTime); assertSplit(extractSplit(outgoingSplits, 2), "/test/file3.parquet", 0L,211L, 211L, currentTime); tableFunction.close(); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testSplitsLargerThanMaxRecords() { + public void testSplitsLargerThanMaxRecords() throws Exception { final long blockSize = getOpCtx().getOptions().getOption(ExecConstants.PARQUET_SPLIT_SIZE).getNumVal(); 
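// Split generation caps each outgoing split at the PARQUET_SPLIT_SIZE block size, so a file larger than blockSize fans out into multiple block-sized splits; with the 5-row batch size set below, the assertions later expect 11 outgoing splits in total.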
final int batchSize = 5; try (VarCharVector pathVector = new VarCharVector(DeltaConstants.SCHEMA_ADD_PATH, allocator); @@ -170,9 +166,6 @@ public void testSplitsLargerThanMaxRecords() { assertEquals(11, outgoingSplits.getValueCount()); assertSplit(extractSplit(outgoingSplits, 10), "/test/file2.parquet", 0L, 2054L, 2054L, currentTime); tableFunction.close(); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/arrow/TestArrowFlatBufRecordReaderWriter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/arrow/TestArrowFlatBufRecordReaderWriter.java index f44ce05dca..fa230949ab 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/arrow/TestArrowFlatBufRecordReaderWriter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/arrow/TestArrowFlatBufRecordReaderWriter.java @@ -47,6 +47,7 @@ public class TestArrowFlatBufRecordReaderWriter extends ExecTest { @Test public void testReadWrite() throws Exception { + @SuppressWarnings("checkstyle:LocalFinalVariableName") final int RECORD_COUNT = 4; final List vectorList = Lists.newArrayList(); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReader.java index 7029b2bbc1..6e8d15ab5b 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReader.java @@ -16,10 +16,12 @@ package com.dremio.exec.store.easy.text.compliant; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.FileWriter; +import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.junit.AfterClass; @@ -178,7 +180,7 @@ private static void startTest() throws Exception { } try (FileWriter fwriter = new FileWriter(tblPathLarge)) { - int boundary = 1024*65; + int boundary = 32001; int j = 0; while (j++ < 3) { for (int i = 0; i < boundary; i++) { @@ -234,6 +236,8 @@ public void testColumnExceedsSize() throws Exception { } UserRemoteException urex = (UserRemoteException) ex; assertEquals(UserBitShared.DremioPBError.ErrorType.UNSUPPORTED_OPERATION, urex.getErrorType()); + boolean errorMsgMatched = Pattern.compile("(.*)Field with index(.*)exceeds the size limit(.*)", Pattern.DOTALL).matcher(ex.getMessage()).matches(); + assertTrue(errorMsgMatched); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderExceptions.java b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderExceptions.java index 0a38127909..4474b648fd 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderExceptions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderExceptions.java @@ -52,21 +52,6 @@ public TestTextReaderExceptions(TextFileConfig fileFormat, String[][] expected, public static Collection<Object[]> data() { return Arrays.asList(new Object[][] { - { - /* Multi char value is not supported for any parameter except line Delimiter - But test is added only for Field Delimiter */ - new TextFileConfig().setFieldDelimiter(",$").setLineDelimiter("\n"), - new String[][] { - {"c1","c2","c3"}, - {"r1c1","r1c2","r1c3"}, - {"r2c1","r2c2","r2c3"} - }, -
"multi_char_field_delimiter.txt", - Exception.class, - "Expected single character but was String", - Exception.class, - "Expected single character but was String" - }, { /* Quoted Field example: value, "inside" ,val2 @@ -114,61 +99,6 @@ public static Collection data() { AssertionError.class, numOfRecordsDiffer }, - { - /* Extended ASCII char value is not supported for any parameter, - But test is added only for Field Delimiter */ - new TextFileConfig().setLineDelimiter("\n").setFieldDelimiter("¦"), // broken or broken bar - new String[][] { - {"c1","c2","c3"}, - {"r1c1","r1c2","r1c3"}, - {"r2c1","r2c2","r2c3"} - }, - "broken_pipe.txt", - Exception.class, - "Expected a character between 0 and 127", - Exception.class, - "Expected a character between 0 and 127" - }, - { - // TODO: To fix this, Identify unescaped Quote Correctly, ref: TextReader.java:247 - new TextFileConfig().setEscape("'").setLineDelimiter("\n"), - new String[][]{ - {"c1","c2","c3"}, - {"r1c1\"","r1c2","r1c3\""}, - {"r2c1","r2c2\"","r2c3"} - }, - "unescaped_quote.csv", - Exception.class, - expectedRecordNotFound, - Exception.class, - expectedRecordNotFound - }, - { - new TextFileConfig().setEscape("\\").setLineDelimiter("\n"), - new String[][] { - {"c1","c2","c3"}, - {"\"r1c1","r1c2","\"r1c3"}, - {"r2c1","\"r2c2","r2c3"} - }, - "custom_quote_escape.csv", - Exception.class, - expectedRecordNotFound, - Exception.class, - expectedRecordNotFound - }, - { - new TextFileConfig().setLineDelimiter("$"), - new String[][] { - {"c1","c2","c3"}, - {"r1c1\n","r1c2","r1c3\n"}, - {"r2c1","r2c2\n","r2c3"} - }, - "custom_ld_inside_quoted.csv", - AssertionError.class, - numOfRecordsDiffer, - Exception.class, - expectedRecordNotFound - }, { // Trim Header false new TextFileConfig().setLineDelimiter("\n").setTrimHeader(false), @@ -182,20 +112,6 @@ public static Collection data() { "Unexpected column", Exception.class, "VALIDATION ERROR: Column 'c1' not found in any table" - }, - { - // TODO: To fix this behaviour, match actual Line Delimiter before matching normalized Line Delimiter, ref: TextReader.java:245 - new TextFileConfig().setLineDelimiter("$"), - new String[][] { - {"c1","c2","c3"}, - {"r1c1","r1c2","r1c3"}, - {"r2c1","r2c2","r2c3"} - }, - "custom_line_delimiter.csv", - AssertionError.class, - numOfRecordsDiffer, - Exception.class, - "VALIDATION ERROR" } } ); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderResults.java b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderResults.java index 4e95dc6736..ed5bea9de2 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderResults.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/easy/text/compliant/TestTextReaderResults.java @@ -64,14 +64,78 @@ public static Collection data() { "custom_comment.csv" }, { - new TextFileConfig().setLineDelimiter("\n").setEscape("'"), // To distinct between quote and escape as they are same by default + new TextFileConfig().setFieldDelimiter(",$").setLineDelimiter("\n"), + new String[][]{ + {"c1", "c2", "c3"}, + {"r1c1", "r1c2", "r1c3"}, + {"r2c1", "r2c2", "r2c3"} + }, + "multi_char_field_delimiter.txt" + }, + { + new TextFileConfig().setLineDelimiter("\n").setFieldDelimiter("¦"), + new String[][]{ + {"c1", "c2", "c3"}, + {"r1c1", "r1c2", "r1c3"}, + {"r2c1", "r2c2", "r2c3"} + }, + "broken_pipe.txt" + }, + { + new TextFileConfig().setEscape("'").setLineDelimiter("\n"), + new String[][]{ + {"c1", "c2", "c3"}, + {"r1\"c1", "r1c2", 
"r1\"c3"}, + {"r2c1", "r2c2\"", "r2c3"} + }, + "unescaped_quote.csv" + }, + { + new TextFileConfig().setLineDelimiter("$"), + new String[][]{ + {"c1", "c2", "c3"}, + {"r1c1", "r1c2", "r1c3"}, + {"r2c1", "r2c2", "r2c3"} + }, + "custom_line_delimiter.csv" + }, + { + new TextFileConfig().setLineDelimiter("\n"), + new String[][]{ + {"c1", "c2", "c3"}, + {"\"r1\"c1\"", "r1c2", "\"r1\"c3\""}, + {"r2c1", "\"r2c2\"", "r2c3"} + }, + "quote_escape.csv" + }, + { + new TextFileConfig().setEscape("\\").setLineDelimiter("\n"), + new String[][]{ + {"c1", "c2", "c3"}, + {"\"r1\"\\c1\"", "r1c2", "\"r1c3\""}, + {"r2c1", "\"r2c2\"", "r2c3"} + }, + "custom_quote_escape.csv" + }, + { + // Failure to load + new TextFileConfig().setEscape("\\").setLineDelimiter("\n"), + new String[][]{ + {"c1", "c2", "c3"}, + {"r1c1", "This is value field value with an \"embedded\" quoted word using backslash-quote", "r1c3"}, + {"r2c1", "This is value field value with an \"embedded\" quoted word using double-double-quote", "r2c3"} + }, + "double_double_quote.csv" + }, + { + new TextFileConfig().setLineDelimiter("$"), new String[][] { {"c1","c2","c3"}, - {"\"r1c1","r1c2","\"r1c3"}, - {"r2c1","\"r2c2","r2c3"} + {"r1c1$","r1c2","r1c3$"}, + {"r2c1","r2c2$","r2c3"} }, - "quote_escape.csv" - } + "custom_ld_inside_quoted.csv" + }, } ); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/excel/ExcelTestHelper.java b/sabot/kernel/src/test/java/com/dremio/exec/store/excel/ExcelTestHelper.java index ff6206dd81..fd9a49cc77 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/excel/ExcelTestHelper.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/excel/ExcelTestHelper.java @@ -279,8 +279,7 @@ void testProjectPushdown2(final TestBuilder testBuilder, String sheetName, boole .baselineValues(3.0d, "Three and Three") .baselineValues(4.0d, "Four and Four, Five and Five") .baselineValues(5.0d, "Four and Four, Five and Five"); - } - else { + } else { testBuilder .baselineValues(1.0d, "One") .baselineValues(2.0d, "Two") @@ -336,8 +335,7 @@ void testProjectPushdown3(final TestBuilder testBuilder, String sheetName, boole .baselineValues("Three and Three", "Three and Three") .baselineValues("Four and Four, Five and Five", "Four and Four, Five and Five") .baselineValues("Four and Four, Five and Five", "Four and Four, Five and Five"); - } - else { + } else { testBuilder .baselineValues("One", "One") .baselineValues("Two", "Two") diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/BaseIcebergViewTest.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/BaseIcebergViewTest.java index bc7b7d828d..f23d8cd8a5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/BaseIcebergViewTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/BaseIcebergViewTest.java @@ -21,23 +21,15 @@ import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.nessie.NessieExtCatalog; -import org.glassfish.jersey.test.TestProperties; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.io.TempDir; import org.projectnessie.client.api.NessieApiV1; -import org.projectnessie.client.http.HttpClientBuilder; -import org.projectnessie.jaxrs.ext.NessieJaxRsExtension; -import org.projectnessie.jaxrs.ext.NessieUri; -import org.projectnessie.server.store.TableCommitMetaStoreWorker; -import 
org.projectnessie.versioned.persist.adapter.DatabaseAdapter; -import org.projectnessie.versioned.persist.inmem.InmemoryDatabaseAdapterFactory; -import org.projectnessie.versioned.persist.inmem.InmemoryTestConnectionProviderSource; -import org.projectnessie.versioned.persist.tests.extension.DatabaseAdapterExtension; -import org.projectnessie.versioned.persist.tests.extension.NessieDbAdapter; -import org.projectnessie.versioned.persist.tests.extension.NessieDbAdapterName; -import org.projectnessie.versioned.persist.tests.extension.NessieExternalDatabase; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.Namespace; +import org.projectnessie.tools.compatibility.api.NessieAPI; +import org.projectnessie.tools.compatibility.api.NessieBaseUri; +import org.projectnessie.tools.compatibility.internal.OlderNessieServersExtension; import com.dremio.BaseTestQuery; import com.dremio.exec.catalog.ResolvedVersionContext; @@ -48,50 +40,35 @@ import com.dremio.plugins.NessieClientImpl; import com.google.common.collect.ImmutableMap; -@ExtendWith(DatabaseAdapterExtension.class) -@NessieDbAdapterName(InmemoryDatabaseAdapterFactory.NAME) -@NessieExternalDatabase(InmemoryTestConnectionProviderSource.class) +@ExtendWith(OlderNessieServersExtension.class) public class BaseIcebergViewTest { - @NessieDbAdapter(storeWorker = TableCommitMetaStoreWorker.class) - static DatabaseAdapter databaseAdapter; - - @RegisterExtension - private static NessieJaxRsExtension server = buildNessieJaxRsExtension(); - - private static NessieJaxRsExtension buildNessieJaxRsExtension() { - // Prevents CDIExtension to load beans through JNDI causing - // an exception as Weld hasn't initialized a JNDI context - System.setProperty("com.sun.jersey.server.impl.cdi.lookupExtensionInBeanManager", "true"); - // Use a dynamically allocated port, not a static default (80/443) or statically - // configured port. 
- System.setProperty(TestProperties.CONTAINER_PORT, "0"); - - return new NessieJaxRsExtension(() -> databaseAdapter); - } @TempDir protected static Path temp; protected static String warehouseLocation; + @NessieBaseUri + protected static URI nessieBaseUri; + @NessieAPI protected static NessieApiV1 nessieApi; protected static NessieClient nessieClient; protected static Configuration fileSystemConfig; protected static FileSystemPlugin fsPlugin; + protected static final String userName = "test-user"; protected static IcebergNessieVersionedViews icebergNessieVersionedViews; protected static NessieExtCatalog nessieExtCatalog; @BeforeAll - public static void setup(@NessieUri URI x) throws Exception { + public static void setup() throws Exception { warehouseLocation = temp.toUri().toString(); - nessieApi = HttpClientBuilder.builder().withUri(x.toString()).build(NessieApiV1.class); nessieClient = new NessieClientImpl(nessieApi); fileSystemConfig = new Configuration(); fsPlugin = BaseTestQuery.getMockedFileSystemPlugin(); icebergNessieVersionedViews = new IcebergNessieVersionedViews( - warehouseLocation, nessieClient, fileSystemConfig, fsPlugin); + warehouseLocation, nessieClient, fileSystemConfig, fsPlugin, userName); - initCatalog(x, "main"); + initCatalog(nessieBaseUri.resolve("v1"), "main"); } private static void initCatalog(URI x, String ref) { @@ -117,4 +94,30 @@ protected ResolvedVersionContext getVersion(String branchName) { protected void createBranch(String branchName, VersionContext versionContext) { nessieClient.createBranch(branchName, versionContext); } + + protected void createNamespacesIfMissing(String branchName, ContentKey key) { + createNamespacesIfMissing(branchName, key.getNamespace()); + } + + protected void createNamespacesIfMissing(String branchName, Namespace ns) { + if (ns.isEmpty()) { + return; + } + + if (ns.getElements().size() > 1) { + createNamespacesIfMissing(branchName, ns.getParent()); + } + + try { + // Modern Nessie servers require namespaces to exist before tables can be created in them. + // The getContent() here is not very strict in that it does not check the type of content + // if it exists, but table creation will fail later if it uses a non-namespace object as + // a namespace. 
+ if (nessieApi.getContent().refName(branchName).key(ns.toContentKey()).get().isEmpty()) { + nessieApi.createNamespace().refName(branchName).namespace(ns).create(); + } + } catch (Exception e) { + throw new IllegalStateException(e); + } + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/IcebergMetadataTestTable.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/IcebergMetadataTestTable.java index 6981be1dea..c84e7ef34e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/IcebergMetadataTestTable.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/IcebergMetadataTestTable.java @@ -15,10 +15,9 @@ */ package com.dremio.exec.store.iceberg; -import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_METADATA_FUNCTIONS; - import java.io.File; import java.util.List; +import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.tuple.Pair; @@ -26,8 +25,8 @@ import org.apache.iceberg.ManifestFile; import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.After; +import org.junit.Before; import com.dremio.BaseTestQuery; import com.dremio.common.expression.SchemaPath; @@ -45,21 +44,21 @@ public abstract class IcebergMetadataTestTable extends BaseTestQuery { protected static Long FIRST_SNAPSHOT; - @BeforeClass - public static void initIcebergTable() throws Exception { - setSystemOption(ENABLE_ICEBERG_METADATA_FUNCTIONS, "true"); + @Before + public void before() throws Exception { //Create a non-partitioned iceberg table, String createCommandSql = String.format("create table %s.%s(c1 int, c2 varchar, c3 double)", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); - test(createCommandSql); + runSQL(createCommandSql); Thread.sleep(1001); File tableFolder = new File(getDfsTestTmpSchemaLocation(), METADATA_TEST_TABLE_NAME); metadata_test_iceberg_table = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); FIRST_SNAPSHOT = metadata_test_iceberg_table.currentSnapshot().snapshotId(); } - @AfterClass - public static void afterClass() throws Exception { - setSystemOption(ENABLE_ICEBERG_METADATA_FUNCTIONS, "false"); + @After + public void after() throws Exception { + //Drop table + runSQL(String.format("DROP TABLE %s.%s", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME)); FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), METADATA_TEST_TABLE_NAME)); } @@ -83,12 +82,30 @@ private static void loadTable() { metadata_test_iceberg_table = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); } + protected void addPartition(String partitionSpec) throws Exception { + String insertCommandSql = String.format("ALTER TABLE %s.%s ADD PARTITION FIELD %s", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME, partitionSpec); + test(insertCommandSql); + Thread.sleep(1001); + } + protected void insertOneRecord() throws Exception { String insertCommandSql = String.format("insert into %s.%s VALUES(1,'a', 2.0)", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); test(insertCommandSql); Thread.sleep(1001); } + protected void insertTwoRecords() throws Exception { + String insertCommandSql = String.format("insert into %s.%s VALUES(1,'a', 2.0),(2,'b', 3.0)", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + test(insertCommandSql); + Thread.sleep(1001); + } + + protected void insertTwoLongRecords() throws Exception { + String insertCommandSql = String.format("insert into %s.%s VALUES(1,'abcdfg', 2.0),(2,'bcdfff', 3.0)", TEMP_SCHEMA_HADOOP, 
METADATA_TEST_TABLE_NAME); + test(insertCommandSql); + Thread.sleep(1001); + } + protected void expectedSchema(List> expectedSchema, String query, Object... args) throws Exception { testBuilder() .sqlQuery(query, args) @@ -107,4 +124,12 @@ protected void queryAndMatchResults(String query, String[] expectedColumns, Obje .run(); } + protected void queryAndMatchResults(String query, List<Map<String, Object>> expectedRecords) throws Exception { + testBuilder() + .sqlQuery(query) + .unOrdered() + .baselineRecords(expectedRecords) + .build() + .run(); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitFailure.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitAndCleaningFailure.java similarity index 68% rename from sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitFailure.java rename to sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitAndCleaningFailure.java index aa96c62b97..00c7451936 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitFailure.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestCommitAndCleaningFailure.java @@ -15,6 +15,8 @@ */ package com.dremio.exec.store.iceberg; +import static com.dremio.exec.work.foreman.AttemptManager.INJECTOR_CLEANING_FAILURE; +import static com.dremio.exec.work.foreman.AttemptManager.INJECTOR_COMMIT_FAILURE; import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.io.File; @@ -27,17 +29,17 @@ import com.dremio.exec.testing.Controls; import com.dremio.exec.testing.ControlsInjectionUtil; import com.dremio.exec.work.foreman.AttemptManager; +import com.dremio.exec.work.foreman.ForemanException; // Inject a failure during commit, and verify that it bails out (no timeout/hang).
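// A sketch of the injection flow exercised by this class, assembled from the calls visible in this diff (not additional API surface): build a controls string with Controls.newBuilder().addException(AttemptManager.class, INJECTOR_COMMIT_FAILURE, ForemanException.class).build(), arm it on the client via ControlsInjectionUtil.setControls(client, controls), then run the CTAS and assert that the failure message contains the injection-site name.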
-public class TestCommitFailure extends BaseTestQuery { +public class TestCommitAndCleaningFailure extends BaseTestQuery { - @Test - public void commit() throws Exception { + private void testWithInjectFailure(String injectedFailure, final Class<? extends Throwable> exceptionClass) throws Exception { for (String testSchema: SCHEMAS_FOR_TEST) { - final String tableName = "commit_failure"; + final String tableName = injectedFailure; final String controls = Controls.newBuilder() - .addException(AttemptManager.class, "commit-failure", UnsupportedOperationException.class) + .addException(AttemptManager.class, injectedFailure, exceptionClass) .build(); try (AutoCloseable c = enableIcebergTables()) { @@ -50,7 +52,7 @@ public void commit() throws Exception { ControlsInjectionUtil.setControls(client, controls); assertThatThrownBy(() -> test(ctasQuery)) - .hasMessageContaining("commit-failure"); + .hasMessageContaining(injectedFailure); } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName)); @@ -58,4 +60,14 @@ public void commit() throws Exception { } } } + + @Test + public void testCommitFailure() throws Exception { + testWithInjectFailure(INJECTOR_COMMIT_FAILURE, ForemanException.class); + } + + @Test + public void testCleaningFailure() throws Exception { + testWithInjectFailure(INJECTOR_CLEANING_FAILURE, UnsupportedOperationException.class); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedDataFilesMetadataTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedFilesMetadataTableFunction.java similarity index 69% rename from sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedDataFilesMetadataTableFunction.java rename to sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedFilesMetadataTableFunction.java index 283ab2ac37..9e878bc8f3 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedDataFilesMetadataTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDeletedFilesMetadataTableFunction.java @@ -29,8 +29,8 @@ import org.junit.Test; import com.dremio.common.expression.SchemaPath; +import com.dremio.exec.physical.config.DeletedFilesMetadataTableFunctionContext; import com.dremio.exec.physical.config.TableFunctionConfig; -import com.dremio.exec.physical.config.TableFunctionContext; import com.dremio.exec.physical.config.TableFunctionPOP; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.OperationType; @@ -40,7 +40,7 @@ import com.dremio.sabot.op.tablefunction.TableFunctionOperator; import com.google.common.collect.ImmutableList; -public class TestDeletedDataFilesMetadataTableFunction extends BaseTestTableFunction { +public class TestDeletedFilesMetadataTableFunction extends BaseTestTableFunction { // using a subset of fields here as any other fields are exposed as null columns, Fragment is included as // a test of the null column handling @@ -51,9 +51,10 @@ public class TestDeletedDataFilesMetadataTableFunc PROPS, null, new TableFunctionConfig( - TableFunctionConfig.FunctionType.DELETED_DATA_FILES_METADATA, + TableFunctionConfig.FunctionType.DELETED_FILES_METADATA, true, - new TableFunctionContext(null, + new DeletedFilesMetadataTableFunctionContext(OperationType.DELETE_DATAFILE, + null, TEST_SCHEMA, null, null, @@ -71,6 +72,31 @@ public class TestDeletedDataFilesMetadataTableFunc true, null))); + private static final TableFunctionPOP
DELETE_TABLE_FUNCTION_POP = new TableFunctionPOP( + PROPS, + null, + new TableFunctionConfig( + TableFunctionConfig.FunctionType.DELETED_FILES_METADATA, + true, + new DeletedFilesMetadataTableFunctionContext(OperationType.DELETE_DELETEFILE, + null, + TEST_SCHEMA, + null, + null, + null, + null, + null, + TEST_SCHEMA.getFields().stream() + .map(f -> SchemaPath.getSimplePath(f.getName())) + .collect(ImmutableList.toImmutableList()), + null, + null, + null, + false, + false, + true, + null))); + @Test public void testResults() throws Exception { @@ -90,7 +116,16 @@ public void testResults() throws Exception { tr(NULL_VARCHAR, 3L, "path4", OperationType.DELETE_DATAFILE.value), tr(NULL_VARCHAR, 9L, "path5", OperationType.DELETE_DATAFILE.value)); + Table deleteOutput = t( + th(FRAGMENT_COLUMN, RECORDS_COLUMN, PATH_COLUMN, OPERATION_TYPE_COLUMN), + tr(NULL_VARCHAR, 20L, "path1", OperationType.DELETE_DELETEFILE.value), + tr(NULL_VARCHAR, 5L, "path2", OperationType.DELETE_DELETEFILE.value), + tr(NULL_VARCHAR, 10L, "path3", OperationType.DELETE_DELETEFILE.value), + tr(NULL_VARCHAR, 3L, "path4", OperationType.DELETE_DELETEFILE.value), + tr(NULL_VARCHAR, 9L, "path5", OperationType.DELETE_DELETEFILE.value)); + validateSingle(TABLE_FUNCTION_POP, TableFunctionOperator.class, input, output, 3); + validateSingle(DELETE_TABLE_FUNCTION_POP, TableFunctionOperator.class, input, deleteOutput, 3); } @Test @@ -105,5 +140,6 @@ public void testOutputBufferNotReused() throws Exception { tr("path5", 9L)); validateOutputBufferNotReused(TABLE_FUNCTION_POP, input, 3); + validateOutputBufferNotReused(DELETE_TABLE_FUNCTION_POP, input, 3); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDremioFileIO.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDremioFileIO.java index 53ca779dee..353eaac208 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDremioFileIO.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestDremioFileIO.java @@ -30,8 +30,7 @@ import org.junit.Before; import org.junit.Test; -import com.dremio.BaseTestQuery; -import com.dremio.exec.store.dfs.FileSystemPlugin; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.io.file.FileAttributes; import com.dremio.io.file.FileSystem; import com.dremio.io.file.Path; @@ -43,11 +42,9 @@ public class TestDremioFileIO { @Before public void beforeTest() throws Exception { - FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); fs = mock(FileSystem.class); - Configuration conf = new Configuration(); - conf.set("fs.default.name", "local"); - io = new DremioFileIO(fs, conf, fileSystemPlugin); + io = new DremioFileIO(fs, null, null, null, null, + new HadoopFileSystemConfigurationAdapter(new Configuration())); } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergColumnCounts.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergColumnCounts.java index 4328538e22..ff72889bd1 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergColumnCounts.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergColumnCounts.java @@ -18,117 +18,97 @@ import static org.junit.Assert.assertEquals; import java.io.File; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.junit.BeforeClass; import org.junit.Test; import 
com.dremio.BaseTestQuery; import com.dremio.connector.metadata.DatasetSplit; import com.dremio.connector.metadata.PartitionChunk; import com.dremio.datastore.LegacyProtobufSerializer; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.sabot.exec.store.parquet.proto.ParquetProtobuf.ColumnValueCount; import com.dremio.sabot.exec.store.parquet.proto.ParquetProtobuf.ParquetDatasetSplitXAttr; import com.dremio.service.namespace.MetadataProtoUtils; import com.google.common.collect.ImmutableList; public class TestIcebergColumnCounts extends BaseTestQuery { - private static Map SCHEMAS_TO_CATALOG_TYPE = new HashMap<>(); - - static { - SCHEMAS_TO_CATALOG_TYPE.put(TEMP_SCHEMA_HADOOP, IcebergCatalogType.HADOOP); - } - - @BeforeClass - public static void initFs() throws Exception { - Configuration conf = new Configuration(); - conf.set("fs.default.name", "local"); - } @Test public void tableWithNulls() throws Exception { - for (Map.Entry schemaCatalogPair: SCHEMAS_TO_CATALOG_TYPE.entrySet()) { - final String tableName = "column_counts"; - try (AutoCloseable ac = enableIcebergTables()) { - final String ctasQuery = - String.format( - "CREATE TABLE %s.%s " - + " AS SELECT * from cp.\"parquet/null_test_data.json\"", - schemaCatalogPair.getKey(), - tableName); - - test(ctasQuery); - - File tableFolder = new File(getDfsTestTmpSchemaLocation(), tableName); - IcebergTableWrapper tableWrapper = new IcebergTableWrapper(getSabotContext(), localFs, - getIcebergModel(tableFolder, schemaCatalogPair.getValue()), tableFolder.toPath().toString()); - - List chunks = ImmutableList.copyOf( - tableWrapper.getTableInfo().getPartitionChunkListing().iterator()); - assertEquals(1, chunks.size()); - - DatasetSplit split = chunks.get(0).getSplits().iterator().next(); - ParquetDatasetSplitXAttr xattr = LegacyProtobufSerializer - .parseFrom(ParquetDatasetSplitXAttr.PARSER, MetadataProtoUtils.toProtobuf(split.getExtraInfo())); - assertEquals(2, xattr.getColumnValueCountsCount()); - - // both the columns have null values. - assertEquals(8, tableWrapper.getTableInfo().getRecordCount()); - assertEquals(6, xattr.getColumnValueCounts(0).getCount()); - assertEquals(4, xattr.getColumnValueCounts(1).getCount()); - } finally { - FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName)); - } + final String tableName = "column_counts"; + try (AutoCloseable ac = enableIcebergTables()) { + final String ctasQuery = + String.format( + "CREATE TABLE %s.%s " + + " AS SELECT * from cp.\"parquet/null_test_data.json\"", + TEMP_SCHEMA_HADOOP, + tableName); + + test(ctasQuery); + + File tableFolder = new File(getDfsTestTmpSchemaLocation(), tableName); + IcebergTableWrapper tableWrapper = new IcebergTableWrapper(getSabotContext(), localFs, + getIcebergModel(TEMP_SCHEMA_HADOOP), tableFolder.toPath().toString()); + + List chunks = ImmutableList.copyOf( + tableWrapper.getTableInfo().getPartitionChunkListing().iterator()); + assertEquals(1, chunks.size()); + + DatasetSplit split = chunks.get(0).getSplits().iterator().next(); + ParquetDatasetSplitXAttr xattr = LegacyProtobufSerializer + .parseFrom(ParquetDatasetSplitXAttr.PARSER, MetadataProtoUtils.toProtobuf(split.getExtraInfo())); + assertEquals(2, xattr.getColumnValueCountsCount()); + + // both the columns have null values. 
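+ // (A reading of the three assertions that follow: the table holds 8 rows, and the two
+ // columns carry 6 and 4 non-null values respectively, which is presumably what
+ // ColumnValueCount tracks given that the source data deliberately contains nulls.
+ // A minimal sketch for dumping every per-column count, assuming ColumnValueCount also
+ // exposes a getColumn() accessor alongside the getCount() used here:
+ //   for (ColumnValueCount cvc : xattr.getColumnValueCountsList()) {
+ //     System.out.println(cvc.getColumn() + " -> " + cvc.getCount());
+ //   }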
+ assertEquals(8, tableWrapper.getTableInfo().getRecordCount()); + assertEquals(6, xattr.getColumnValueCounts(0).getCount()); + assertEquals(4, xattr.getColumnValueCounts(1).getCount()); + } finally { + FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName)); } } @Test public void testComplex() throws Exception { - for (Map.Entry schemaCatalogPair: SCHEMAS_TO_CATALOG_TYPE.entrySet()) { - final String tableName = "complex_column_counts"; - - try (AutoCloseable ac = enableIcebergTables()) { - final String ctasQuery = - String.format( - "CREATE TABLE %s.%s " - + " AS SELECT * from cp.\"complex_student.json\"", - schemaCatalogPair.getKey(), - tableName); - - test(ctasQuery); - - testBuilder() - .sqlQuery(String.format("select count(*) c from %s.%s", schemaCatalogPair.getKey(), tableName)) - .unOrdered() - .baselineColumns("c") - .baselineValues(10L) - .build() - .run(); - - File tableFolder = new File(getDfsTestTmpSchemaLocation(), tableName); - IcebergTableWrapper tableWrapper = new IcebergTableWrapper(getSabotContext(), localFs, - getIcebergModel(tableFolder, schemaCatalogPair.getValue()), tableFolder.toPath().toString()); - - List chunks = - ImmutableList.copyOf(tableWrapper.getTableInfo().getPartitionChunkListing().iterator()); - assertEquals(1, chunks.size()); - - DatasetSplit split = chunks.get(0).getSplits().iterator().next(); - ParquetDatasetSplitXAttr xattr = LegacyProtobufSerializer - .parseFrom(ParquetDatasetSplitXAttr.PARSER, MetadataProtoUtils.toProtobuf(split.getExtraInfo())); - - assertEquals(10, tableWrapper.getTableInfo().getRecordCount()); - for (ColumnValueCount entry : xattr.getColumnValueCountsList()) { - assertEquals(10, entry.getCount()); - } - } finally { - FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName)); + final String tableName = "complex_column_counts"; + + try (AutoCloseable ac = enableIcebergTables()) { + final String ctasQuery = + String.format( + "CREATE TABLE %s.%s " + + " AS SELECT * from cp.\"complex_student.json\"", + TEMP_SCHEMA_HADOOP, + tableName); + + test(ctasQuery); + + testBuilder() + .sqlQuery(String.format("select count(*) c from %s.%s", TEMP_SCHEMA_HADOOP, tableName)) + .unOrdered() + .baselineColumns("c") + .baselineValues(10L) + .build() + .run(); + + File tableFolder = new File(getDfsTestTmpSchemaLocation(), tableName); + IcebergTableWrapper tableWrapper = new IcebergTableWrapper(getSabotContext(), localFs, + getIcebergModel(TEMP_SCHEMA_HADOOP), tableFolder.toPath().toString()); + + List chunks = + ImmutableList.copyOf(tableWrapper.getTableInfo().getPartitionChunkListing().iterator()); + assertEquals(1, chunks.size()); + + DatasetSplit split = chunks.get(0).getSplits().iterator().next(); + ParquetDatasetSplitXAttr xattr = LegacyProtobufSerializer + .parseFrom(ParquetDatasetSplitXAttr.PARSER, MetadataProtoUtils.toProtobuf(split.getExtraInfo())); + + assertEquals(10, tableWrapper.getTableInfo().getRecordCount()); + for (ColumnValueCount entry : xattr.getColumnValueCountsList()) { + assertEquals(10, entry.getCount()); } + } finally { + FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName)); } } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergComplexColumnCommands.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergComplexColumnCommands.java index 298217915c..a9506c6f08 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergComplexColumnCommands.java +++ 
b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergComplexColumnCommands.java @@ -26,6 +26,8 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.ExecConstants; +import com.dremio.exec.proto.UserBitShared; +import com.dremio.test.UserExceptionAssert; @SuppressWarnings("UseCorrectAssertInTests") public class TestIcebergComplexColumnCommands extends BaseTestQuery { @@ -297,7 +299,9 @@ public void testCreateElementNamedColumn() throws Exception { String.format("create table %s.%s (element ROW(x ARRAY(DECIMAL(40,8))))", TEMP_SCHEMA, newTblName); //element is a keyword and cannot be used as column name. Without this restriction we cannot parse LIST datatype. - errorMsgTestHelper(query, "PARSE ERROR: Failure parsing the query."); + UserExceptionAssert + .assertThatThrownBy(() -> test(query)) + .hasErrorType(UserBitShared.DremioPBError.ErrorType.PARSE); } finally { FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), newTblName)); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergFileType.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergFileType.java new file mode 100644 index 0000000000..a079f333a4 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergFileType.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static org.junit.Assert.assertEquals; + +import org.apache.iceberg.FileContent; +import org.junit.Test; + +public class TestIcebergFileType { + + @Test + public void testFileContentMappings() { + // Currently, Iceberg's FileContent enum declares three types: DATA, POSITION_DELETES and EQUALITY_DELETES. + // We need to check that every file content type Iceberg declares, we have a mapping entry as well. 
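+ // (A sketch of a stricter, enumeration-driven variant, assuming valueByName() yields a
+ // non-null mapping for every known name; it would start failing automatically if Iceberg
+ // ever adds a fourth FileContent value. assertNotNull would need a static import:
+ //   for (FileContent content : FileContent.values()) {
+ //     assertNotNull(IcebergFileType.valueByName(content.name()));
+ //   }
+ // )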
+ + assertEquals(IcebergFileType.DATA, IcebergFileType.valueByName(FileContent.DATA.name())); + assertEquals(IcebergFileType.EQUALITY_DELETES, IcebergFileType.valueByName(FileContent.EQUALITY_DELETES.name())); + assertEquals(IcebergFileType.POSITION_DELETES, IcebergFileType.valueByName(FileContent.POSITION_DELETES.name())); + } + + @Test + public void testFileTypes() { + assertEquals(IcebergFileType.DATA, IcebergFileType.valueByName("DATA")); + assertEquals(IcebergFileType.EQUALITY_DELETES, IcebergFileType.valueByName("EQUALITY_DELETES")); + assertEquals(IcebergFileType.POSITION_DELETES, IcebergFileType.valueByName("POSITION_DELETES")); + assertEquals(IcebergFileType.MANIFEST, IcebergFileType.valueByName("MANIFEST")); + assertEquals(IcebergFileType.MANIFEST_LIST, IcebergFileType.valueByName("MANIFEST_LIST")); + assertEquals(IcebergFileType.PARTITION_STATS, IcebergFileType.valueByName("PARTITION_STATS")); + assertEquals(IcebergFileType.OTHER, IcebergFileType.valueByName("OTHER")); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergHadoopCommand.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergHadoopCommand.java index 4b4de71617..f601ad4b3d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergHadoopCommand.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergHadoopCommand.java @@ -20,7 +20,6 @@ import java.io.File; -import org.apache.hadoop.conf.Configuration; import org.junit.ClassRule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -43,7 +42,7 @@ public void testCreateOperation() { String tableName = "icebergtable"; BatchSchema schema = BatchSchema.newBuilder().addField(CompleteType.INT.toField("int")).build(); FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); + IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(fileSystemPlugin); when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); IcebergOpCommitter createTableCommitter = icebergHadoopModel.getCreateTableCommitter( tableName, icebergHadoopModel.getTableIdentifier(Path.of(tempDir.getRoot().getPath()).resolve(tableName).toString()), diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifestListRecordReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifestListRecordReader.java index 4605817f9e..ee37083d9e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifestListRecordReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifestListRecordReader.java @@ -27,7 +27,6 @@ import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.complex.StructVector; import org.apache.hadoop.conf.Configuration; -import org.apache.iceberg.ManifestContent; import org.apache.iceberg.expressions.Expressions; import org.junit.AfterClass; import org.junit.Before; @@ -39,6 +38,7 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.hadoop.HadoopFileSystem; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.exec.store.SplitIdentity; import com.dremio.exec.store.SystemSchemas; import com.dremio.exec.store.TestOutputMutator; @@ -75,28 +75,32 @@ public static void closeTables() throws Exception { public void beforeTest() throws Exception 
{ context = testContext.getNewOperatorContext(getTestAllocator(), null, DEFAULT_BATCH_SIZE, null); testCloseables.add(context); - fs = HadoopFileSystem.get(Path.of("/"), new Configuration(), context.getStats()); + Configuration conf = new Configuration(); + fs = HadoopFileSystem.get(Path.of("/"), conf, context.getStats()); when(plugin.createFSWithAsyncOptions(anyString(), anyString(), any())).thenReturn(fs); - when(plugin.getFsConfCopy()).thenReturn(new Configuration()); + when(plugin.getFsConfCopy()).thenReturn(conf); + when(plugin.createIcebergFileIO(any(), any(), any(), any(), any())) + .thenReturn(new DremioFileIO(fs, null, null, null, null, + new HadoopFileSystemConfigurationAdapter(conf))); } @Test public void testReadDataManifests() throws Exception { - readAndValidate(table.getLocation() + METADATA_JSON, SNAPSHOT_ID, ManifestContent.DATA, + readAndValidate(table.getLocation() + METADATA_JSON, SNAPSHOT_ID, ManifestContentType.DATA, ImmutableList.of(table.getLocation() + "/metadata/8a83125a-a077-4f1e-974b-fcbaf370b085-m0.avro")); } @Test public void testReadDeleteManifests() throws Exception { - readAndValidate(table.getLocation() + METADATA_JSON, SNAPSHOT_ID, ManifestContent.DELETES, + readAndValidate(table.getLocation() + METADATA_JSON, SNAPSHOT_ID, ManifestContentType.DELETES, ImmutableList.of( table.getLocation() + "/metadata/07fe993a-9195-4cbc-bf9a-6b81816b9758-m0.avro", table.getLocation() + "/metadata/d1e51173-03f4-4b54-865a-c6c3185a92a5-m0.avro", table.getLocation() + "/metadata/d45e915a-acf8-4914-9907-0772d5356e4a-m0.avro")); } - private void readAndValidate(String jsonPath, long snapshotId, ManifestContent manifestContent, List expectedManifestFiles) + private void readAndValidate(String jsonPath, long snapshotId, ManifestContentType manifestContent, List expectedManifestFiles) throws Exception { List actual = new ArrayList<>(); try (AutoCloseable closeable = with(ExecConstants.ENABLE_ICEBERG_MERGE_ON_READ_SCAN, true)) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifests.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifests.java index f081e8a00e..6ceb361181 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifests.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergManifests.java @@ -141,7 +141,7 @@ public int getManifestFileCount(PartitionSpec partitionSpec, int partitionValueS tableFolder.mkdir(); FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); + IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(fileSystemPlugin); when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(tableName).build(); IcebergOpCommitter committer = icebergHadoopModel.getCreateTableCommitter(tableName, @@ -170,8 +170,7 @@ public int getManifestFileCount(PartitionSpec partitionSpec, int partitionValueS } table = getIcebergTable(tableFolder, IcebergCatalogType.HADOOP); return table.currentSnapshot().allManifests(table.io()).size(); - } - finally { + } finally { tableFolder.delete(); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergNessieVersionedViews.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergNessieVersionedViews.java index 6f6eb1773e..1b0be7d6df 100644 --- 
a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergNessieVersionedViews.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergNessieVersionedViews.java @@ -35,6 +35,8 @@ import org.apache.iceberg.view.ViewDefinition; import org.apache.iceberg.view.ViewUtils; import org.junit.jupiter.api.Test; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.Namespace; import org.projectnessie.model.TableReference; import com.dremio.common.exceptions.UserException; @@ -80,6 +82,7 @@ public class TestIcebergNessieVersionedViews extends BaseIcebergViewTest { @Test public void testCreateView() { createBranch(CREATE_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(CREATE_BRANCH, ContentKey.of(createViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -106,6 +109,7 @@ public void testCreateView() { @Test public void testReplaceView() { createBranch(REPLACE_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(REPLACE_BRANCH, ContentKey.of(replaceViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -144,6 +148,7 @@ public void testReplaceView() { @Test public void testDropView() { createBranch(DROP_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(DROP_BRANCH, ContentKey.of(dropViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -176,6 +181,7 @@ public void testDropView() { @Test public void testComplexDropView() { createBranch(COMPLEX_DROP_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(COMPLEX_DROP_BRANCH, ContentKey.of(complexDropViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -226,6 +232,7 @@ public void testComplexDropView() { @Test public void testDropViewOnDifferentBranch() { createBranch(OLD_DROP_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(OLD_DROP_BRANCH, ContentKey.of(newDropViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -266,7 +273,7 @@ public void testDropViewOnDifferentBranch() { @Test public void testGlobalMetadata() { createBranch(GLOBAL_METADATA_BRANCH, VersionContext.NOT_SPECIFIED); - + createNamespacesIfMissing(GLOBAL_METADATA_BRANCH, Namespace.of(VIEW_IDENTIFIER.namespace().levels())); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); final TableIdentifier viewIdentifier = @@ -314,6 +321,7 @@ public void testGlobalMetadata() { @Test public void testNonGlobalMetadata() { createBranch(NON_GLOBAL_METADATA_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(NON_GLOBAL_METADATA_BRANCH, ContentKey.of(nonGlobalMetadataViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); @@ -347,6 +355,7 @@ public void testNonGlobalMetadata() { @Test public void testViewDialect() { createBranch(DIALECT_BRANCH, VersionContext.NOT_SPECIFIED); + createNamespacesIfMissing(DIALECT_BRANCH, ContentKey.of(createViewKey)); final ViewDefinition viewDefinition = ViewDefinition.of(SQL, SCHEMA, CATALOG_NAME, Collections.emptyList()); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOpCommitter.java 
b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOpCommitter.java index 42ddab0cf8..f2011fc0b0 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOpCommitter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOpCommitter.java @@ -63,6 +63,7 @@ import org.apache.iceberg.io.InputFile; import org.apache.iceberg.io.OutputFile; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -75,9 +76,7 @@ import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; import com.dremio.exec.planner.cost.ScanCostFactor; import com.dremio.exec.record.BatchSchema; -import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.exec.store.iceberg.manifestwriter.IcebergCommitOpHelper; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergDmlOperationCommitter; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.store.iceberg.model.IcebergOpCommitter; @@ -114,7 +113,7 @@ public class TestIcebergOpCommitter extends BaseTestQuery implements SupportsTyp private final String folder = Files.createTempDir().getAbsolutePath(); private final DatasetCatalogGrpcClient client = new DatasetCatalogGrpcClient(getSabotContext().getDatasetCatalogBlockingStub().get()); - private IcebergModel icebergHadoopModel = getIcebergModel(null, IcebergCatalogType.NESSIE); + private IcebergModel icebergModel; private OperatorStats operatorStats; private OperatorContext operatorContext; @@ -122,7 +121,8 @@ public class TestIcebergOpCommitter extends BaseTestQuery implements SupportsTyp Field.nullablePrimitive("id", new ArrowType.Int(64, true)), Field.nullablePrimitive("data", new ArrowType.Utf8())); - public TestIcebergOpCommitter() { + @Before + public void beforeTest() { this.operatorStats = mock(OperatorStats.class); doNothing().when(operatorStats).addLongStat(any(), anyLong()); this.operatorContext = mock(OperatorContext.class); @@ -133,6 +133,7 @@ public TestIcebergOpCommitter() { OptionManager optionManager = mock(OptionManager.class); when(optionManager.getOption(ExecConstants.ENABLE_MAP_DATA_TYPE)).thenReturn(true); when(operatorContext.getOptions()).thenReturn(optionManager); + icebergModel = getIcebergModel(TEMP_SCHEMA); } public String initialiseTableWithLargeSchema(BatchSchema schema, String tableName) throws IOException { @@ -140,14 +141,12 @@ public String initialiseTableWithLargeSchema(BatchSchema schema, String tableNam final File tableFolder = new File(folder, tableName); tableFolder.mkdirs(); - FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); DatasetConfig config = getDatasetConfig(datasetPath); - IcebergOpCommitter fullRefreshCommitter = icebergHadoopModel.getFullMetadataRefreshCommitter(tableName, datasetPath, + IcebergOpCommitter fullRefreshCommitter = icebergModel.getFullMetadataRefreshCommitter(tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), config, operatorStats, null); fullRefreshCommitter.commit(); @@ -160,17 +159,17 @@ public String initialiseTableWithLargeSchema(BatchSchema schema, String tableNam String tag = getTag(datasetPath); 
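 // (The tag read back here reflects the full-refresh commit just performed; setting it on
 // the DatasetConfig below presumably lets the incremental committer that follows validate
 // against that exact dataset version rather than a stale one.)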
config.setTag(tag); - Table table = getIcebergTable(new File(folder, tableName), IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, new File(folder, tableName)); TableOperations tableOperations = ((BaseTable) table).operations(); String metadataFileLocation = tableOperations.current().metadataFileLocation(); IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); config.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter incrementalRefreshCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, + IcebergOpCommitter incrementalRefreshCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, config); @@ -196,11 +195,11 @@ public void testAddOnlyMetadataRefreshCommitter() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableName, tableFolder.toPath().toString(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); DataFile dataFile6 = getDatafile("books/add1.parquet"); @@ -208,7 +207,7 @@ public void testAddOnlyMetadataRefreshCommitter() throws IOException { ManifestFile m1 = writeManifest(tableFolder, "manifestFile2", dataFile6, dataFile7); insertTableCommitter.consumeManifestFile(m1); insertTableCommitter.commit(); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); Assert.assertEquals(2, manifestFileList.size()); for (ManifestFile manifestFile : manifestFileList) { @@ -236,11 +235,11 @@ public void testDeleteThenAddMetadataRefreshCommitter() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter metaDataRefreshCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter metaDataRefreshCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); @@ -259,7 +258,7 @@ public void testDeleteThenAddMetadataRefreshCommitter() throws IOException { // After this operation the manifestList is expected to have two manifest files: // one is manifestFile2 and the other is newly created due to deleted data files.
This newly created manifest is due to the rewriting // of the manifestFile1 file; it is expected to report 2 existing files and 3 deleted files. - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); for (ManifestFile manifestFile : manifestFileList) { if (manifestFile.path().contains("manifestFile2")) { @@ -300,11 +299,11 @@ public void testAcrossBatchMetadataRefreshCommitter() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter metaDataRefreshCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter metaDataRefreshCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); @@ -325,7 +324,7 @@ public void testAcrossBatchMetadataRefreshCommitter() throws IOException { // After this operation the manifestList is expected to have two manifest files: // one is manifestFile2 and the other is newly created due to deleted data files. This newly created manifest is due to the rewriting // of the manifestFile1 file; it is expected to report 2 existing files and 3 deleted files. - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); for (ManifestFile manifestFile : manifestFileList) { if (manifestFile.path().contains("manifestFile2")) { @@ -351,9 +350,9 @@ public void testDmlOperation() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter deleteCommitter = icebergHadoopModel.getDmlCommitter( + IcebergOpCommitter deleteCommitter = icebergModel.getDmlCommitter( operatorContext.getStats(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), datasetConfig); // Add a new manifest list, and delete several previous datafiles @@ -377,7 +376,7 @@ public void testDmlOperation() throws IOException { // After this operation, the manifestList is expected to have two manifest files. // One is 'manifestFileDelete' and the other is newly created due to deleted data files. This newly created manifest // is due to the rewriting of the 'manifestFile1' file. It is expected to report 1 existing file and 4 deleted files.
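 // (Iceberg never edits a committed manifest in place: deleting data files referenced by
 // manifestFile1 forces a rewritten copy, and the existing/deleted counts described above
 // are what ManifestFile reports via existingFilesCount() and deletedFilesCount().)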
- Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); Assert.assertEquals(2, manifestFileList.size()); for (ManifestFile manifestFile : manifestFileList) { @@ -405,13 +404,13 @@ public void testNumberOfSnapshot() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - Table oldTable = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table oldTable = getIcebergTable(icebergModel, tableFolder); Assert.assertEquals(3, Iterables.size(oldTable.snapshots())); - IcebergOpCommitter metaDataRefreshCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter metaDataRefreshCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); @@ -425,7 +424,7 @@ public void testNumberOfSnapshot() throws IOException { metaDataRefreshCommitter.consumeDeleteDataFile(dataFile5); metaDataRefreshCommitter.consumeManifestFile(m1); metaDataRefreshCommitter.commit(); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); Assert.assertEquals(6, Iterables.size(table.snapshots())); table.refresh(); TableOperations tableOperations = ((BaseTable) table).operations(); @@ -433,18 +432,18 @@ public void testNumberOfSnapshot() throws IOException { icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - metaDataRefreshCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + metaDataRefreshCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); DataFile dataFile2 = getDatafile("books/add2.parquet"); ManifestFile m2 = writeManifest(tableFolder, "manifestFile3", dataFile2); metaDataRefreshCommitter.consumeManifestFile(m2); metaDataRefreshCommitter.commit(); - table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + table = getIcebergTable(icebergModel, tableFolder); Assert.assertEquals(8, Iterables.size(table.snapshots())); } finally { FileUtils.deleteDirectory(tableFolder); @@ -483,10 +482,10 @@ public void testMetadataRefreshSchemaUpdateAndUpPromotion() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - 
icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), schema1, + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema1, Collections.emptyList(),true, datasetConfig); BatchSchema schema2 = new BatchSchema(Arrays.asList( @@ -516,7 +515,7 @@ public void testMetadataRefreshSchemaUpdateAndUpPromotion() throws IOException { insertTableCommitter.updateSchema(consolidatedSchema); insertTableCommitter.commit(); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); Schema sc = table.schema(); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(table.name()).build(); Assert.assertTrue(consolidatedSchema.equalsTypesWithoutPositions(schemaConverter.fromIceberg(sc))); @@ -536,10 +535,10 @@ public void testMetadataRefreshSchemaUpdate() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(),true, datasetConfig); @@ -566,7 +565,7 @@ public void testMetadataRefreshSchemaUpdate() throws IOException { insertTableCommitter.consumeDeleteDataFile(dataFile5); insertTableCommitter.commit(); - Table newTable = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table newTable = getIcebergTable(icebergModel, tableFolder); Schema sc = newTable.schema(); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(newTable.name()).build(); Assert.assertTrue(consolidatedSchema.equalsTypesWithoutPositions(schemaConverter.fromIceberg(sc))); @@ -587,10 +586,10 @@ public void testMetadataRefreshSchemaDropColumns() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), false, datasetConfig); @@ -603,7 +602,7 @@ public void testMetadataRefreshSchemaDropColumns() throws IOException { insertTableCommitter.updateSchema(newSchema); insertTableCommitter.commit(); - Table newTable = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table newTable = getIcebergTable(icebergModel, tableFolder); Schema sc = newTable.schema(); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(newTable.name()).build(); Assert.assertTrue(newSchema.equalsTypesWithoutPositions(schemaConverter.fromIceberg(sc))); @@ -624,10 +623,10 @@ public void testMetadataRefreshDelete() throws IOException { 
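 // Exercises the snapshot history produced when the incremental refresh committer consumes
 // delete data files; the assertion further down expects 6 snapshots in total.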
icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); @@ -646,7 +645,7 @@ public void testMetadataRefreshDelete() throws IOException { insertTableCommitter.consumeDeleteDataFile(dataFile2Delete); insertTableCommitter.commit(); - Table newTable = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table newTable = getIcebergTable(icebergModel, tableFolder); Schema sc = newTable.schema(); Assert.assertEquals(6, Iterables.size(newTable.snapshots())); @@ -668,18 +667,18 @@ public void testConcurrentIncrementalMetadataRefresh() throws Exception { datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); // Two concurrent Iceberg committers - IcebergOpCommitter insertTableCommitter1 = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter1 = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableName, tableFolder.toPath().toString(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); - IcebergOpCommitter insertTableCommitter2 = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter2 = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableName, tableFolder.toPath().toString(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); @@ -698,7 +697,7 @@ public void testConcurrentIncrementalMetadataRefresh() throws Exception { .hasErrorType(CONCURRENT_MODIFICATION) .hasMessageContaining("Concurrent DML operation has updated the table, please retry."); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); Assert.assertEquals(2, manifestFileList.size()); } finally { @@ -758,10 +757,10 @@ public void testIncrementalRefreshDroppedAndAddedColumns() throws Exception { datasetConfig.getPhysicalDataset().getInternalSchemaSettings().setDroppedColumns(droppedColumns.toByteString()); datasetConfig.getPhysicalDataset().getInternalSchemaSettings().setModifiedColumns(updatedColumns.toByteString()); - IcebergOpCommitter insertTableCommitter = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter insertTableCommitter = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema,
Collections.emptyList(), false, datasetConfig); @@ -778,7 +777,7 @@ public void testIncrementalRefreshDroppedAndAddedColumns() throws Exception { insertTableCommitter.updateSchema(newSchema); insertTableCommitter.commit(); - Table newTable = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table newTable = getIcebergTable(icebergModel, tableFolder); Schema sc = newTable.schema(); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(newTable.name()).build(); Assert.assertTrue(expectedSchema.equalsTypesWithoutPositions(schemaConverter.fromIceberg(sc))); @@ -793,7 +792,7 @@ ManifestFile writeManifest(File tableFolder, String fileName, DataFile... files) ManifestFile writeManifest(File tableFolder, String fileName, Long snapshotId, DataFile... files) throws IOException { File manifestFile = new File(folder, fileName + ".avro"); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); OutputFile outputFile = table.io().newOutputFile(manifestFile.getCanonicalPath()); ManifestWriter writer = ManifestFiles.write(1, table.spec(), outputFile, snapshotId); @@ -866,20 +865,20 @@ public void testConcurrentIncrementalRefresh() throws IOException { IcebergMetadata icebergMetadata = new IcebergMetadata(); icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter commiter1 = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter commiter1 = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); Assert.assertTrue(commiter1 instanceof IncrementalMetadataRefreshCommitter); - IcebergOpCommitter commiter2 = icebergHadoopModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, + IcebergOpCommitter commiter2 = icebergModel.getIncrementalMetadataRefreshCommitter(operatorContext, tableName, datasetPath, tableFolder.toPath().toString(), tableName, - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), schema, Collections.emptyList(), true, datasetConfig); Assert.assertTrue(commiter2 instanceof IncrementalMetadataRefreshCommitter); @@ -921,7 +920,7 @@ public void testConcurrentIncrementalRefresh() throws IOException { // After this operation the manifestList is expected to have two manifest files: // one is manifestFile2 and the other is newly created due to deleted data files. This newly created manifest is due to the rewriting // of the manifestFile1 file; it is expected to report 2 existing files and 3 deleted files.
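 // (Same rewrite bookkeeping as in testDeleteThenAddMetadataRefreshCommitter above, here
 // exercised while two incremental refresh committers race on the same table.)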
- Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); for (ManifestFile manifestFile : manifestFileList) { if (manifestFile.path().contains("manifestFile2")) { @@ -963,15 +962,15 @@ public void testConcurrentTwoDmlOperations() throws IOException { icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - IcebergOpCommitter committer1 = icebergHadoopModel.getDmlCommitter( + IcebergOpCommitter committer1 = icebergModel.getDmlCommitter( operatorContext.getStats(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), datasetConfig); Assert.assertTrue(committer1 instanceof IcebergDmlOperationCommitter); - IcebergOpCommitter committer2 = icebergHadoopModel.getDmlCommitter( + IcebergOpCommitter committer2 = icebergModel.getDmlCommitter( operatorContext.getStats(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), datasetConfig); Assert.assertTrue(committer2 instanceof IcebergDmlOperationCommitter); @@ -1009,7 +1008,7 @@ public void testConcurrentTwoDmlOperations() throws IOException { // After this operation, the manifestList is expected to have two manifest files. // One is 'manifestFileDelete' and the other is newly created due to deleted data files. This newly created manifest // is due to the rewriting of the 'manifestFile1' file. It is expected to report 3 existing files and 2 deleted files. - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); List manifestFileList = table.currentSnapshot().allManifests(table.io()); Assert.assertEquals(2, manifestFileList.size()); for (ManifestFile manifestFile : manifestFileList) { @@ -1037,12 +1036,12 @@ public void testDmlCommittedSnapshotNumber() throws IOException { icebergMetadata.setMetadataFileLocation(metadataFileLocation); datasetConfig.getPhysicalDataset().setIcebergMetadata(icebergMetadata); - Table tableBefore = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table tableBefore = getIcebergTable(icebergModel, tableFolder); final int countBeforeDmlCommit = Iterables.size(tableBefore.snapshots()); - IcebergOpCommitter committer = icebergHadoopModel.getDmlCommitter( + IcebergOpCommitter committer = icebergModel.getDmlCommitter( operatorContext.getStats(), - icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), + icebergModel.getTableIdentifier(tableFolder.toPath().toString()), datasetConfig); Assert.assertTrue(committer instanceof IcebergDmlOperationCommitter); IcebergDmlOperationCommitter dmlCommitter = (IcebergDmlOperationCommitter) committer; @@ -1095,13 +1094,14 @@ public void testDeleteManifestFiles() throws IOException { DataFile dataFile2 = getDatafile(dataFile2Name); ManifestFile m = writeManifest(tableFolder, "manifestFileDml", dataFile1, dataFile2); - Table table = getIcebergTable(tableFolder, IcebergCatalogType.NESSIE); + Table table = getIcebergTable(icebergModel, tableFolder); InputFile inputFile = table.io().newInputFile(m.path(), m.length()); DremioFileIO dremioFileIO = Mockito.mock(DremioFileIO.class); Set actualDeletedFiles = new HashSet<>(); when(dremioFileIO.newInputFile(m.path(), m.length())).thenReturn(inputFile);
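 // The anonymous Answer below captures every path handed to the mocked deletion call into
 // actualDeletedFiles. A lambda-style sketch of the same capture, assuming the stub is
 // attached to DremioFileIO's single-argument delete method (Answer is a functional
 // interface, so the two forms are equivalent):
 //   doAnswer(invocation -> {
 //     actualDeletedFiles.add(invocation.getArgument(0));
 //     return null;
 //   }).when(dremioFileIO).deleteFile(anyString());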
doAnswer(new Answer() { + @Override public Void answer(InvocationOnMock invocation) { Object[] args = invocation.getArguments(); Assert.assertEquals("one file path arg is expected", args.length, 1); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOptimizeSingleFileTracker.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOptimizeSingleFileTracker.java index 0006e80c41..3b0ed26cb4 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOptimizeSingleFileTracker.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergOptimizeSingleFileTracker.java @@ -23,7 +23,9 @@ import org.apache.commons.lang3.NotImplementedException; import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.FileFormat; +import org.apache.iceberg.FileMetadata; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.io.InputFile; @@ -171,6 +173,30 @@ public void testMultipleDataFiles() { assertThat(removed).containsExactly(add3.path().toString()); } + @Test + public void testSingleDataFileRewrittenDeleteFile() { + IcebergOptimizeSingleFileTracker tracker = new IcebergOptimizeSingleFileTracker(); + + DataFile add1 = testDataFile(SPEC_PARTITIONED, 1); + DataFile del1 = testDataFile(SPEC_PARTITIONED, 1); + DeleteFile del2 = testDeleteFile(SPEC_PARTITIONED, 1); + + tracker.consumeAddDataFile(add1); + tracker.consumeDeletedDataFile(del1); + tracker.consumeDeletedDeleteFile(del2); + + Set addDataFiles = Sets.newHashSet(add1); + Set delDataFiles = Sets.newHashSet(del1); + Set delDeleteFiles = Sets.newHashSet(del2); + + Set removed = tracker.removeSingleFileChanges(addDataFiles, delDataFiles); + + assertThat(addDataFiles).contains(add1); + assertThat(delDataFiles).contains(del1); + assertThat(delDeleteFiles).contains(del2); + assertThat(removed).isEmpty(); + } + public DataFile testDataFile(PartitionSpec spec, int partitionVal) { DataFiles.Builder builder = DataFiles.builder(spec); if (spec.isPartitioned()) { @@ -184,6 +210,20 @@ public DataFile testDataFile(PartitionSpec spec, int partitionVal) { .build(); } + public DeleteFile testDeleteFile(PartitionSpec spec, int partitionVal) { + FileMetadata.Builder builder = FileMetadata.deleteFileBuilder(spec); + if (spec.isPartitioned()) { + builder.withPartitionPath("n_nationkey=" + partitionVal); + } + + return builder + .ofPositionDeletes() + .withInputFile(new MockInputFile()) + .withRecordCount(4) + .withFormat(FileFormat.PARQUET) + .build(); + } + class MockInputFile implements InputFile { private final String location; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionData.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionData.java index 8346a50a86..b471dc7abd 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionData.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionData.java @@ -20,7 +20,6 @@ import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import java.io.File; import java.io.IOException; @@ -53,9 +52,8 @@ import org.junit.rules.TemporaryFolder; import com.dremio.BaseTestQuery; -import com.dremio.exec.store.dfs.FileSystemPlugin; -import 
com.dremio.exec.store.iceberg.hadoop.IcebergHadoopModel; import com.dremio.exec.store.iceberg.model.IcebergCatalogType; +import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.store.iceberg.model.IcebergOpCommitter; import com.dremio.sabot.exec.context.OperatorStats; import com.google.common.collect.Lists; @@ -231,9 +229,7 @@ private void verifyPartitionValue(PartitionSpec partitionSpec, IcebergPartitionD .withPartition(partitionData) .build(); - FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); - when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); + IcebergModel icebergHadoopModel = getIcebergModel(TEMP_SCHEMA_HADOOP); SchemaConverter schemaConverter = SchemaConverter.getBuilder().setTableName(tableName).build(); IcebergOpCommitter committer = icebergHadoopModel.getCreateTableCommitter(tableName, icebergHadoopModel.getTableIdentifier(tableFolder.toPath().toString()), @@ -255,8 +251,7 @@ private void verifyPartitionValue(PartitionSpec partitionSpec, IcebergPartitionD } } - } - finally { + } finally { tableFolder.delete(); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitions.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitions.java index 23822cbd98..f22a685e1b 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitions.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitions.java @@ -55,7 +55,6 @@ import com.dremio.exec.hadoop.HadoopFileSystem; import com.dremio.exec.physical.base.WriterOptions; import com.dremio.exec.proto.UserBitShared; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.google.common.collect.ImmutableList; @@ -137,7 +136,7 @@ public void testHasNonIdentityPartitionColumns() { @Test public void testPartitions() throws Exception { File root = tempDir.newFolder(); - IcebergModel icebergModel = getIcebergModel(root, IcebergCatalogType.HADOOP); + IcebergModel icebergModel = getIcebergModel(TEMP_SCHEMA_HADOOP); HadoopTables tables = new HadoopTables(conf); Table table = tables.create(schema, spec, root.getAbsolutePath()); @@ -188,7 +187,7 @@ public void testPartitions() throws Exception { @Test public void testNonIdentityPartitions() throws Exception { File root = tempDir.newFolder(); - IcebergModel icebergModel = getIcebergModel(root, IcebergCatalogType.HADOOP); + IcebergModel icebergModel = getIcebergModel(TEMP_SCHEMA_HADOOP); HadoopTables tables = new HadoopTables(conf); PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionsFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionsFunction.java new file mode 100644 index 0000000000..cdff1b0042 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergPartitionsFunction.java @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.tuple.Pair; +import org.junit.Test; + +import com.dremio.common.expression.SchemaPath; +import com.dremio.common.types.TypeProtos; +import com.dremio.common.types.Types; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +/** + * Test class for iceberg table partitions functions select * from table(table_partitions('table')) + */ +public class TestIcebergPartitionsFunction extends IcebergMetadataTestTable { + + @Test + public void testTablePartitionsSchemaOnPartitionedTable() throws Exception { + List> expectedSchema = Lists.newArrayList(); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("partition"), Types.required(TypeProtos.MinorType.VARCHAR))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("record_count"), Types.required(TypeProtos.MinorType.BIGINT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("file_count"), Types.required(TypeProtos.MinorType.INT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("spec_id"), Types.required(TypeProtos.MinorType.INT))); + addPartition("c1"); + expectedSchema(expectedSchema, String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME)); + } + + @Test + public void testInvalidTablePartitionsSchemaOnPartitionedTable() throws Exception { + List> expectedSchema = Lists.newArrayList(); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("partition"), Types.required(TypeProtos.MinorType.LIST))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("record_count"), Types.required(TypeProtos.MinorType.BIGINT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("file_count"), Types.required(TypeProtos.MinorType.INT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("spec_id"), Types.required(TypeProtos.MinorType.INT))); + addPartition("c1"); + assertThatThrownBy(() -> expectedSchema(expectedSchema, String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME))) + .hasMessageContaining("Schema path or type mismatch") + .isInstanceOf(Exception.class); + } + + @Test + public void testTablePartitionsOnUnpartitionedTable() { + List> expectedSchema = Lists.newArrayList(); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("partition"), Types.required(TypeProtos.MinorType.VARCHAR))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("record_count"), Types.required(TypeProtos.MinorType.BIGINT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("file_count"), Types.required(TypeProtos.MinorType.INT))); + expectedSchema.add(Pair.of(SchemaPath.getSimplePath("spec_id"), Types.required(TypeProtos.MinorType.INT))); + assertThatThrownBy(() -> expectedSchema(expectedSchema, String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, 
METADATA_TEST_TABLE_NAME))) + .hasMessageContaining("VALIDATION ERROR:") + .hasMessageContaining("Table %s.%s is not partitioned.", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME) + .isInstanceOf(Exception.class); + } + + @Test + public void testTablePartitionsBeforeAddingData() throws Exception { + insertOneRecord(); + addPartition("c1"); + String[] expectedColumns = {"partition", "record_count", "file_count", "spec_id"}; + Object[] values = {"{}", 1L, 1, 0}; + queryAndMatchResults(String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME), expectedColumns, values); + } + + @Test + public void testTablePartitionsAfterAddingData() throws Exception { + insertOneRecord(); + addPartition("c1"); + insertTwoRecords(); + final ImmutableList.Builder> recordBuilder = ImmutableList.builder(); + recordBuilder.add(ImmutableMap.of("`partition`", "{}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 0)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c1=1}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c1=2}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + final List> baselineRecord = recordBuilder.build(); + queryAndMatchResults(String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME), baselineRecord); + } + + @Test + public void testTablePartitionsStringPartitionField() throws Exception { + insertOneRecord(); + addPartition("c2"); + insertTwoRecords(); + final ImmutableList.Builder> recordBuilder = ImmutableList.builder(); + recordBuilder.add(ImmutableMap.of("`partition`", "{}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 0)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c2=a}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c2=b}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + final List> baselineRecord = recordBuilder.build(); + queryAndMatchResults(String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME), baselineRecord); + } + + @Test + public void testTablePartitionsTwoPartitionField() throws Exception { + insertOneRecord(); + addPartition("c1"); + addPartition("c2"); + insertTwoRecords(); + final ImmutableList.Builder> recordBuilder = ImmutableList.builder(); + recordBuilder.add(ImmutableMap.of("`partition`", "{}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 0)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c1=1, c2=a}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 2)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c1=2, c2=b}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 2)); + final List> baselineRecord = recordBuilder.build(); + queryAndMatchResults(String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME), baselineRecord); + } + + @Test + public void testTablePartitionsPartitionTransformField() throws Exception { + insertOneRecord(); + addPartition("truncate(2, c2)"); + insertTwoLongRecords(); + final ImmutableList.Builder> recordBuilder = ImmutableList.builder(); + recordBuilder.add(ImmutableMap.of("`partition`", "{}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 0)); + recordBuilder.add(ImmutableMap.of("`partition`", "{c2_trunc_2=ab}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + 
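[Editorial aside on testTablePartitionsPartitionTransformField: addPartition("truncate(2, c2)") relies on Iceberg's truncate transform, which buckets rows by the first two characters of c2; that is why the two long records land in {c2_trunc_2=ab} and {c2_trunc_2=bc}. A minimal sketch of the equivalent spec built with the core Iceberg API; Iceberg's default field name for a truncate transform may differ, so the target name is passed explicitly here as an assumption chosen to match the partition strings the test expects:

import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;

public class TruncateSpecSketch {
  public static void main(String[] args) {
    Schema schema = new Schema(
        Types.NestedField.optional(1, "c1", Types.IntegerType.get()),
        Types.NestedField.optional(2, "c2", Types.StringType.get()),
        Types.NestedField.optional(3, "c3", Types.DoubleType.get()));

    // truncate(2, c2): rows are partitioned by the first two characters of c2,
    // so values starting with "ab" all land in {c2_trunc_2=ab}.
    PartitionSpec spec = PartitionSpec.builderFor(schema)
        .truncate("c2", 2, "c2_trunc_2") // explicit target name, matching the test's expected partition strings
        .build();
    System.out.println(spec);
  }
}
]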
recordBuilder.add(ImmutableMap.of("`partition`", "{c2_trunc_2=bc}", "`record_count`", 1L, "`file_count`", 1, "`spec_id`", 1)); + final List> baselineRecord = recordBuilder.build(); + queryAndMatchResults(String.format("SELECT * FROM table(table_partitions('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME), baselineRecord); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergScan.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergScan.java index 69803288c2..ae154fdbe4 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergScan.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergScan.java @@ -40,7 +40,6 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.hadoop.HadoopFileSystem; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.exec.util.ColumnUtils; @@ -163,7 +162,7 @@ public void testPartitionMismatchSpecSchema() throws Exception { copyFromJar("iceberg/partitionednation", testRootPath); File tableRoot = new File(testRootPath); - IcebergModel icebergModel = getIcebergModel(tableRoot, IcebergCatalogType.HADOOP); + IcebergModel icebergModel = getIcebergModel("dfs_hadoop"); Table table = icebergModel.getIcebergTable(icebergModel.getTableIdentifier(tableRoot.getPath())); // n_regionkey was renamed to regionkey diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSchemaEvolution.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSchemaEvolution.java index 3e4e27f317..f2c0f899f7 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSchemaEvolution.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSchemaEvolution.java @@ -27,7 +27,6 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.planner.sql.ParserConfig; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; @@ -53,7 +52,7 @@ private void addColumn(String table, String newColumn, String type) throws Excep test(alterTableCmd); } - private void testColumnRenameComplex(String complex_column_rename_test, String testSchema, IcebergCatalogType catalogType) throws Exception { + private void testColumnRenameComplex(String complex_column_rename_test, String testSchema) throws Exception { try { String createCommandSql = "create table " + testSchema + "." 
+ complex_column_rename_test + " as select * from cp.\"/parquet/very_complex.parquet\""; @@ -90,7 +89,7 @@ private void testColumnRenameComplex(String complex_column_rename_test, String t Thread.sleep(1001); File rootFolder = new File(getDfsTestTmpSchemaLocation(), complex_column_rename_test); - IcebergModel icebergModel = getIcebergModel(rootFolder, catalogType); + IcebergModel icebergModel = getIcebergModel(testSchema); icebergModel.renameColumn( icebergModel.getTableIdentifier( rootFolder.getPath()), @@ -132,7 +131,7 @@ private void testColumnRenameComplex(String complex_column_rename_test, String t @Test public void testColumnRenameComplex() throws Exception { try (AutoCloseable c = enableIcebergTables()) { - testColumnRenameComplex("complex_column_rename_test", TEMP_SCHEMA_HADOOP, IcebergCatalogType.HADOOP); + testColumnRenameComplex("complex_column_rename_test", TEMP_SCHEMA_HADOOP); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSerDe.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSerDe.java index 60c46e2b0d..2be436342a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSerDe.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSerDe.java @@ -31,7 +31,9 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.FileFormat; +import org.apache.iceberg.FileMetadata; import org.apache.iceberg.Files; import org.apache.iceberg.GenericManifestFile; import org.apache.iceberg.ManifestContent; @@ -122,6 +124,56 @@ public void testDataFileSerDe() throws Exception{ Assert.assertEquals((String)(d2.partition().get(1, String.class)), "def"); } + @Test + public void testDeleteFileSerDe() throws Exception { + File deleteFile = new File(folder.getRoot(), "a.parquet"); + deleteFile.createNewFile(); + + PartitionSpec partitionSpec = PartitionSpec + .builderFor(schema) + .identity("i") + .identity("data") + .build(); + + IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); + icebergPartitionData.set(0, Integer.valueOf(10)); + icebergPartitionData.set(1, "def"); + + DeleteFile df1 = FileMetadata.deleteFileBuilder(partitionSpec) + .ofPositionDeletes() + .withInputFile(Files.localInput(deleteFile)) + .withRecordCount(50) + .withFormat(FileFormat.PARQUET) + .withPartition(icebergPartitionData) + .build(); + long df1RecordCount = df1.recordCount(); + + DeleteFile df2 = FileMetadata.deleteFileBuilder(partitionSpec) + .ofEqualityDeletes() + .withInputFile(Files.localInput(deleteFile)) + .withRecordCount(20) + .withFormat(FileFormat.PARQUET) + .withPartition(icebergPartitionData) + .build(); + long df2RecordCount = df2.recordCount(); + + byte[] positionalDeleteFileBytes = IcebergSerDe.serializeDeleteFile(df1); + DeleteFile deserializePositionalDeleteFile = IcebergSerDe.deserializeDeleteFile(positionalDeleteFileBytes); + long positionalDeleteFileRecordCount = deserializePositionalDeleteFile.recordCount(); + Assert.assertEquals(df1RecordCount, positionalDeleteFileRecordCount); + Assert.assertEquals((Integer)(deserializePositionalDeleteFile.partition().get(0, Integer.class)), Integer.valueOf(10)); + Assert.assertEquals((String)(deserializePositionalDeleteFile.partition().get(1, String.class)), "def"); + Assert.assertEquals(deserializePositionalDeleteFile.content().toString(), "POSITION_DELETES"); + + byte[] 
equalityDeleteFileBytes = IcebergSerDe.serializeDeleteFile(df2); + DeleteFile deserializeEqualityDeleteFile = IcebergSerDe.deserializeDeleteFile(equalityDeleteFileBytes); + long equalityDeleteFileRecordCount = deserializeEqualityDeleteFile.recordCount(); + Assert.assertEquals(df2RecordCount, equalityDeleteFileRecordCount); + Assert.assertEquals((Integer)(deserializeEqualityDeleteFile.partition().get(0, Integer.class)), Integer.valueOf(10)); + Assert.assertEquals((String)(deserializeEqualityDeleteFile.partition().get(1, String.class)), "def"); + Assert.assertEquals(deserializeEqualityDeleteFile.content().toString(), "EQUALITY_DELETES"); + } + @Test public void testManifestFileSerDe() { ManifestFile manifestFile = getManifestFile(); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSplitGenTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSplitGenTableFunction.java index 623c337bee..3b09c48f85 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSplitGenTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergSplitGenTableFunction.java @@ -109,17 +109,12 @@ public void testSplitGenWithExtraColsOnlyInInput() throws Exception { Table input = t( th(SystemSchemas.DATAFILE_PATH, SystemSchemas.FILE_SIZE, SystemSchemas.PARTITION_INFO, SystemSchemas.COL_IDS, - "input_only_col1", "input_only_col2"), - tr("path1", 1000L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_1), COL_IDS_1, - "ignore", "ignore"), - tr("path2", 100L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_1), COL_IDS_1, - "ignore", "ignore"), - tr("path3", 500L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_3), COL_IDS_3, - "ignore", "ignore"), - tr("path4", 200L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_2), COL_IDS_2, - "ignore", "ignore"), - tr("path5", 350L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_3), COL_IDS_3, - "ignore", "ignore")); + "input_only_col1", "input_only_col2"), + tr("path1", 1000L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_1), COL_IDS_1, "ignore", "ignore"), + tr("path2", 100L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_1), COL_IDS_1, "ignore", "ignore"), + tr("path3", 500L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_3), COL_IDS_3, "ignore", "ignore"), + tr("path4", 200L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_2), COL_IDS_2, "ignore", "ignore"), + tr("path5", 350L * 1024 * 1024, IcebergSerDe.serializeToByteArray(PARTITION_3), COL_IDS_3, "ignore", "ignore")); Table output = t( th( @@ -183,9 +178,8 @@ public void testOutputBufferNotReused() throws Exception { validateOutputBufferNotReused(getPop(), input, 3); } - private Fixtures.DataRow inputRow(String path, long fileSizeInMb, NormalizedPartitionInfo partitionInfo, - byte[] colIds) throws Exception { + byte[] colIds) throws Exception { return tr(path, fileSizeInMb * 1024 * 1024, IcebergSerDe.serializeToByteArray(partitionInfo), colIds); } @@ -194,17 +188,15 @@ private Fixtures.DataRow outputRow(String path, long offsetInMb, long sizeInMb, return tr( tuple(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, fileSizeInMb * 1024 * 1024), createSplitInformation(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, - fileSizeInMb * 1024 * 1024, 0, partitionInfo), - colIds); + fileSizeInMb * 1024 * 1024, 0, partitionInfo), colIds); } private Fixtures.DataRow outputRowWithVersion(String path, long version, 
long offsetInMb, long sizeInMb, - long fileSizeInMb, NormalizedPartitionInfo partitionInfo, byte[] colIds) throws Exception { + long fileSizeInMb, NormalizedPartitionInfo partitionInfo, byte[] colIds) throws Exception { return tr( - tuple(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, fileSizeInMb * 1024 * 1024), - createSplitInformation(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, - fileSizeInMb * 1024 * 1024, version, partitionInfo), - colIds); + tuple(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, fileSizeInMb * 1024 * 1024), + createSplitInformation(path, offsetInMb * 1024 * 1024, sizeInMb * 1024 * 1024, + fileSizeInMb * 1024 * 1024, version, partitionInfo), colIds); } private TableFunctionPOP getPop() { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableDrop.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableDrop.java index 90f4facf72..112d544559 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableDrop.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableDrop.java @@ -44,7 +44,6 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.hadoop.HadoopFileSystem; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.google.common.io.Resources; @@ -92,7 +91,7 @@ public void testDropTable() throws Exception { try (AutoCloseable c = enableIcebergTables()) { Path rootPath = Paths.get(getDfsTestTmpSchemaLocation(), "iceberg", "nation"); File tableRoot = rootPath.toFile(); - IcebergModel icebergModel = getIcebergModel(tableRoot, IcebergCatalogType.HADOOP); + IcebergModel icebergModel = getIcebergModel(TEMP_SCHEMA_HADOOP); Files.createDirectories(rootPath); String root = rootPath.toString(); @@ -130,7 +129,7 @@ public void testDropTable() throws Exception { errorMsgTestHelper( "select count(*) c from " + tableName, - "Object 'nation' not found within 'dfs_test_hadoop.dfs_test_hadoop.iceberg'"); + "Object 'nation' not found within 'dfs_test_hadoop.iceberg'"); } } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableFilesFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableFilesFunction.java index 6561ab36e2..7e645045ec 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableFilesFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTableFilesFunction.java @@ -15,17 +15,36 @@ */ package com.dremio.exec.store.iceberg; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createBasicTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.insertRows; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; +import java.util.Scanner; +import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.iceberg.FileContent; import org.junit.Test; +import com.dremio.TestBuilder; import com.dremio.common.expression.SchemaPath; import com.dremio.common.types.TypeProtos; import 
com.dremio.common.types.Types; +import com.dremio.exec.planner.sql.DmlQueryTestUtils; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; /** @@ -33,6 +52,30 @@ */ public class TestIcebergTableFilesFunction extends IcebergMetadataTestTable { + @Test + public void testTableFilesColumnValuesWithLongValue() throws Exception { + insertOneRecord(); + + String query = String.format("SELECT content FROM table(table_files('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + String[] expectedColumns = {"content"}; + Object[] expectedValues = {FileContent.DATA.name()}; + queryAndMatchResults(query, expectedColumns, expectedValues); + + //Match count of data files. -> ONE + query = String.format("SELECT count(*) as file_count FROM table(table_files('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + expectedColumns = new String[]{"file_count"}; + expectedValues = new Object[]{1L}; + queryAndMatchResults(query, expectedColumns, expectedValues); + + insertOneLongRecord(); + + //Match count of data files. -> TWO + query = String.format("SELECT count(*) as file_count FROM table(table_files('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + expectedColumns = new String[]{"file_count"}; + expectedValues = new Object[]{2L}; + queryAndMatchResults(query, expectedColumns, expectedValues); + } + @Test public void testTableFilesColumnValues() throws Exception { insertOneRecord(); @@ -71,8 +114,55 @@ public void testTableFilesColumnValues() throws Exception { queryAndMatchResults(query, expectedColumns, expectedValues); } - private void insertTwoRecords() throws Exception { - String insertCommandSql = String.format("insert into %s.%s VALUES(1,'a', 2.0),(2,'b', 3.0)", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + private void insertOneLongRecord() throws Exception { + String longString = " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, 
lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as 
email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " with email as (select lower(email_address) as email_address, lower(username) as username, source_pk1, source_type\n" + + " order by e.email_address"; + String insertCommandSql = String.format("insert into %s.%s VALUES(1,'%s', 2.0)", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME, longString); test(insertCommandSql); Thread.sleep(1001); } @@ -96,7 +186,7 @@ public void testTableFilesSchema() throws Exception { expectedSchema.add(Pair.of(SchemaPath.getSimplePath("equality_ids"), Types.required(TypeProtos.MinorType.LIST))); expectedSchema.add(Pair.of(SchemaPath.getSimplePath("sort_order_id"), Types.required(TypeProtos.MinorType.INT))); expectedSchema.add(Pair.of(SchemaPath.getSimplePath("spec_id"), Types.required(TypeProtos.MinorType.INT))); - expectedSchema(expectedSchema,"SELECT * FROM table(table_files('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); + expectedSchema(expectedSchema, "SELECT * FROM table(table_files('\"%s\".\"%s\"')) limit 1", TEMP_SCHEMA_HADOOP, METADATA_TEST_TABLE_NAME); } @Test @@ -118,8 +208,8 @@ public void testInvalidColumnTypeTableFilesSchema() { expectedSchema.add(Pair.of(SchemaPath.getSimplePath("equality_ids"), Types.required(TypeProtos.MinorType.LIST))); expectedSchema.add(Pair.of(SchemaPath.getSimplePath("sort_order_id"), Types.required(TypeProtos.MinorType.BIGINT))); expectedSchema.add(Pair.of(SchemaPath.getSimplePath("spec_id"), Types.required(TypeProtos.MinorType.INT))); - assertThatThrownBy(() -> expectedSchema(expectedSchema,"SELECT * FROM table(table_files('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP - , METADATA_TEST_TABLE_NAME)) + assertThatThrownBy(() -> expectedSchema(expectedSchema, "SELECT * FROM table(table_files('\"%s\".\"%s\"'))", TEMP_SCHEMA_HADOOP + , METADATA_TEST_TABLE_NAME)) .hasMessageContaining("Schema path or type mismatch for") .isInstanceOf(Exception.class); } @@ -131,4 +221,66 @@ public void incorrectTableName() { .hasMessageContaining("not found"); } + @Test + public void testPartitionData() throws Exception { + try (DmlQueryTestUtils.Table table = createBasicTable(TEMP_SCHEMA_HADOOP, 0, 2, 0, ImmutableSet.of(1))) { + insertRows(table, 1); + new TestBuilder(allocator) + .sqlQuery("SELECT \"partition\" FROM TABLE(table_files('%s'))", table.fqn) + .unOrdered() + .baselineColumns("partition") + .baselineValues("{column_0=0_0}") + .build() + .run(); + } + } + + @Test + public void testV2TableWithDeleteFiles() throws Exception { + String testRootPath = "/tmp/iceberg"; + safeCopy("iceberg/table_with_delete", testRootPath); + final String tableName = "dfs_hadoop.tmp.iceberg"; + runSQL(String.format("alter table %s refresh metadata", tableName)); + + new TestBuilder(allocator) + .sqlQuery("select content, file_path, \"file_format\", \"partition\", record_count, file_size_in_bytes, spec_id from table(table_files('%s'))", tableName) + .unOrdered() + .baselineColumns("content", "file_path", "file_format", "partition", "record_count", "file_size_in_bytes", "spec_id") + .baselineRecords(v2TableFiles()) + .build() + .run(); + } + + private List> v2TableFiles() throws FileNotFoundException { + List> tableFileRecords = new ArrayList<>(); + Scanner scanner = new Scanner(new 
File("/tmp/iceberg/v2_table_files_out.csv")); + List headers = Arrays.stream(scanner.nextLine().split(",")).map(s -> "`" + s + "`").collect(Collectors.toList()); + + while (scanner.hasNextLine()) { + Map record = new LinkedHashMap<>(); + String[] values = scanner.nextLine().split(","); + record.put(headers.get(0), values[0]); + record.put(headers.get(1), values[1]); + record.put(headers.get(2), values[2]); + record.put(headers.get(3), values[3]); + record.put(headers.get(4), Long.parseLong(values[4])); + record.put(headers.get(5), Long.parseLong(values[5])); + record.put(headers.get(6), Integer.parseInt(values[6])); + tableFileRecords.add(record); + } + return tableFileRecords; + } + + private static void safeCopy(String src, String testRoot) throws IOException, URISyntaxException { + Configuration conf = new Configuration(); + conf.set("fs.default.name", "local"); + FileSystem fs = FileSystem.get(conf); + Path path = new Path(testRoot); + if (fs.exists(path)) { + fs.delete(path, true); + } + fs.mkdirs(path); + + copyFromJar(src, Paths.get(testRoot)); + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTimeTravelQuery.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTimeTravelQuery.java index 7b144b2b06..c83133c0b5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTimeTravelQuery.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestIcebergTimeTravelQuery.java @@ -16,6 +16,8 @@ package com.dremio.exec.store.iceberg; import static com.dremio.exec.ExecConstants.ENABLE_ICEBERG_TIME_TRAVEL; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.math.BigDecimal; import java.util.Arrays; @@ -30,8 +32,17 @@ import com.dremio.ArrowDsUtil; import com.dremio.TestBuilder; +import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.proto.UserBitShared; +import com.dremio.service.namespace.NamespaceKey; import com.dremio.test.UserExceptionAssert; +import com.fasterxml.jackson.core.JsonProcessingException; /** * Test class for Iceberg time travel query @@ -45,9 +56,12 @@ public class TestIcebergTimeTravelQuery extends BaseIcebergTable { private static long secondTimestampMs; private static TimestampString secondTimestamp; private static long secondSnapshotId; + private static Catalog catalog; @BeforeClass public static void initTable() throws Exception { + final CatalogServiceImpl catalogService = (CatalogServiceImpl) getSabotContext().getCatalogService(); + catalog = catalogService.getSystemUserCatalog(); createIcebergTable(); setSystemOption(ENABLE_ICEBERG_TIME_TRAVEL, "true"); final BaseTable table = new BaseTable(ops, tableName); @@ -200,4 +214,46 @@ private void expectSecondSnapshot(String query, Object... 
args) throws Exception .run(); } + @Test + public void testVersionedDatasetIdWithSnapshot() throws JsonProcessingException { + String query = String.format("SELECT * FROM dfs_hadoop.\"%s\" AT SNAPSHOT '%d'", tableFolder.toPath(), firstSnapshotId); + List keyPath = Arrays.asList("dfs_hadoop", tableFolder.toString()); + DremioTable snapshotTable = catalog.getTableSnapshotForQuery( new NamespaceKey(keyPath), new TableVersionContext(TableVersionType.SNAPSHOT_ID, String.valueOf(firstSnapshotId))); + VersionedDatasetId versionedDatasetId = VersionedDatasetId.fromString(snapshotTable.getDatasetConfig().getId().getId()); + DremioTable tableFromDatasetId = catalog.getTable(versionedDatasetId.asString()); + assertThat(snapshotTable.getDatasetConfig().equals(tableFromDatasetId.getDatasetConfig())).isTrue(); + } + + @Test + public void testVersionedDatasetIdWithTimestamp() throws JsonProcessingException { + String query = String.format("SELECT * FROM dfs_hadoop.\"%s\" AT TIMESTAMP '%s'", tableFolder.toPath(), secondTimestamp); + List keyPath = Arrays.asList("dfs_hadoop", tableFolder.toString()); + DremioTable timestampTable = catalog.getTableSnapshotForQuery( new NamespaceKey(keyPath), new TableVersionContext(TableVersionType.TIMESTAMP, secondTimestampMs)); + VersionedDatasetId versionedDatasetId = VersionedDatasetId.fromString(timestampTable.getDatasetConfig().getId().getId()); + DremioTable tableFromDatasetId = catalog.getTable(versionedDatasetId.asString()); + assertThat(timestampTable.getDatasetConfig().equals(tableFromDatasetId.getDatasetConfig())).isTrue(); + } + + @Test + public void testGetTableForNonexistentTable() { + String nonExistingTable = "NonExistentTable" +TimestampString.fromMillisSinceEpoch(System.currentTimeMillis()); + String query = String.format("SELECT * FROM dfs_hadoop.\"%s\" AT SNAPSHOT '%d'", nonExistingTable, firstSnapshotId); + List keyPath = Arrays.asList("dfs_hadoop", nonExistingTable); + + String expectedErrorMsg = String.format("Table 'dfs_hadoop.\"%s\"' not found", nonExistingTable); + assertThatThrownBy(()->catalog.getTableSnapshotForQuery( new NamespaceKey(keyPath), new TableVersionContext(TableVersionType.SNAPSHOT_ID, String.valueOf(firstSnapshotId)))) + .isInstanceOf(UserException.class) + .hasMessageContaining(expectedErrorMsg); + + TableVersionContext timeTravelVersion = new TableVersionContext(TableVersionType.SNAPSHOT_ID, String.format("%d",firstSnapshotId)); + VersionedDatasetId versionedDatasetId = VersionedDatasetId.newBuilder() + .setTableKey(keyPath) + .setContentId(null) + .setTableVersionContext(timeTravelVersion) + .build(); + + assertThat(catalog.getTable(versionedDatasetId.asString())).isNull(); + + } + } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestManifestScanTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestManifestScanTableFunction.java index d2cb31b3a7..d2ba3a7e61 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestManifestScanTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestManifestScanTableFunction.java @@ -65,6 +65,7 @@ import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.catalog.StoragePluginId; import com.dremio.exec.hadoop.HadoopFileSystem; +import com.dremio.exec.hadoop.HadoopFileSystemConfigurationAdapter; import com.dremio.exec.physical.config.ImmutableManifestScanFilters; import com.dremio.exec.physical.config.ManifestScanFilters; import com.dremio.exec.physical.config.ManifestScanTableFunctionContext; 
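[Editorial aside on the time-travel tests above: they resolve tables AT SNAPSHOT / AT TIMESTAMP through Dremio's catalog (getTableSnapshotForQuery plus TableVersionContext), but the underlying snapshot resolution is plain Iceberg. A minimal sketch of both lookups with the core API, assuming a hypothetical Hadoop-table path:

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.util.SnapshotUtil;

public class TimeTravelSketch {
  public static void main(String[] args) {
    Table table = new HadoopTables(new Configuration()).load("/tmp/iceberg/my_table"); // hypothetical path

    // AT TIMESTAMP: resolve the snapshot that was current at a given time.
    long snapshotId = SnapshotUtil.snapshotIdAsOfTime(table, System.currentTimeMillis());

    // AT SNAPSHOT: look a snapshot up directly by id.
    Snapshot snapshot = table.snapshot(snapshotId);
    System.out.println(snapshot.snapshotId() + " committed at " + snapshot.timestampMillis());
  }
}

VersionedDatasetId then pins the resolved snapshot id (or timestamp) into the dataset id string, which is what the round-trip assertions in testVersionedDatasetIdWithSnapshot and testVersionedDatasetIdWithTimestamp check.]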
@@ -121,6 +122,7 @@ public class TestManifestScanTableFunction extends BaseTestTableFunction { 3, Conversions.toByteBuffer(Types.StringType.get(), "t")); private static final byte[] COL_IDS = getIcebergDatasetXAttr(SCHEMA); + private static final Configuration CONF = new Configuration(); private static ManifestFile manifestFile1; private static ManifestFile manifestFile2; @@ -141,7 +143,7 @@ public class TestManifestScanTableFunction extends BaseTestTableFunction { @BeforeClass public static void initStatics() throws Exception { - fs = HadoopFileSystem.get(Path.of("/"), new Configuration()); + fs = HadoopFileSystem.get(Path.of("/"), CONF); PartitionKey partitionKey1 = new PartitionKey(PARTITION_SPEC_1, SCHEMA); partitionKey1.set(0, 10); @@ -264,7 +266,10 @@ public void prepareMocks() throws Exception { when(fec.getStoragePlugin(pluginId)).thenReturn(plugin); SupportsIcebergRootPointer sirp = (SupportsIcebergRootPointer) plugin; when(sirp.createFSWithAsyncOptions(anyString(), anyString(), any())).thenReturn(fs); - when(sirp.getFsConfCopy()).thenReturn(new Configuration()); + when(sirp.getFsConfCopy()).thenReturn(CONF); + when(sirp.createIcebergFileIO(any(), any(), any(), any(), any())) + .thenReturn(new DremioFileIO(fs, null, null, null, null, + new HadoopFileSystemConfigurationAdapter(CONF))); } @Test @@ -274,11 +279,11 @@ public void testPathGeneratingProcessor() throws Exception { BatchSchema outputSchema = SystemSchemas.ICEBERG_MANIFEST_SCAN_SCHEMA; RecordSet output = rs(outputSchema, r("datafile1.parquet", 100L, 1L, 1, serializedKey1, - createDatePartitionInfo("date", 10, 0), COL_IDS), + createDatePartitionInfo("date", 10, 0), COL_IDS, FileContent.DATA.name()), r("datafile2.parquet", 200L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS), + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name()), r("datafile3.parquet", 300L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS)); + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name())); validateSingle(getPop(outputSchema, ManifestContent.DATA), TableFunctionOperator.class, input, output, 2); } @@ -293,11 +298,11 @@ public void testPathGeneratingProcessorWithMinMaxColValues() throws Exception { Field.nullable("id_max", new ArrowType.Int(32, true)))); RecordSet output = rs(outputSchema, r("datafile1.parquet", 100L, 1L, 1, serializedKey1, - createDatePartitionInfo("date", 10, 0), COL_IDS, 0, 9), + createDatePartitionInfo("date", 10, 0), COL_IDS, FileContent.DATA.name(), 0, 9), r("datafile2.parquet", 200L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 0, 9), + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 0, 9), r("datafile3.parquet", 300L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 0, 9)); + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 0, 9)); validateSingle(getPop(outputSchema, ManifestContent.DATA), TableFunctionOperator.class, input, output, 2); } @@ -311,11 +316,11 @@ public void testPathGeneratingProcessorWithPartColValues() throws Exception { Field.nullable("date_val", new ArrowType.Int(32, true)))); RecordSet output = rs(outputSchema, r("datafile1.parquet", 100L, 1L, 1, serializedKey1, - createDatePartitionInfo("date", 10, 0), COL_IDS, 10), + createDatePartitionInfo("date", 10, 0), COL_IDS, FileContent.DATA.name(), 10), r("datafile2.parquet", 200L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 20), + 
createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 20), r("datafile3.parquet", 300L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 20)); + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 20)); validateSingle(getPop(outputSchema, ManifestContent.DATA), TableFunctionOperator.class, input, output, 2); } @@ -324,9 +329,12 @@ public void testPathGeneratingProcessorWithPartColValues() throws Exception { public void testPathGeneratingProcessorWithDeleteManifests() throws Exception { RecordSet input = deletesInputRecordSet(); RecordSet output = rs(SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA, - r(st("deletefile1.parquet", FileContent.POSITION_DELETES.id(), 10L, null), 1L, 1, serializedKey1), - r(st("deletefile2.parquet", FileContent.POSITION_DELETES.id(), 20L, null), 2L, 1, serializedKey2), - r(st("deletefile3.parquet", FileContent.POSITION_DELETES.id(), 30L, null), 3L, 1, serializedKey2)); + r(st("deletefile1.parquet", FileContent.POSITION_DELETES.id(), 10L, null), "deletefile1.parquet", + 100L, 1L, 1, serializedKey1, createEmptyPartitionInfo(0, 1), COL_IDS, FileContent.POSITION_DELETES.name()), + r(st("deletefile2.parquet", FileContent.POSITION_DELETES.id(), 20L, null),"deletefile2.parquet", + 200L, 2L, 1, serializedKey2, createEmptyPartitionInfo(0, 2), COL_IDS, FileContent.POSITION_DELETES.name()), + r(st("deletefile3.parquet", FileContent.POSITION_DELETES.id(), 30L, null), "deletefile3.parquet", + 300L, 3L, 1, serializedKey2, createEmptyPartitionInfo(0, 3), COL_IDS, FileContent.POSITION_DELETES.name())); validateSingle( getPop(SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA, ManifestContent.DELETES, DELETE_PARTITION_SPEC_MAP), @@ -339,10 +347,15 @@ public void testPathGeneratingProcessorWithEqDeletes() throws Exception { inputRow(deleteManifestFile3, COL_IDS), inputRow(deleteManifestFile4, COL_IDS)); RecordSet output = rs(SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA, - r(st("deletefile3.parquet", FileContent.POSITION_DELETES.id(), 30L, null), 3L, 1, serializedKey2), - r(st("deletefile4.parquet", FileContent.EQUALITY_DELETES.id(), 20L, li(1, 3)), 4L, 1, serializedKey2), - r(st("deletefile5.parquet", FileContent.EQUALITY_DELETES.id(), 20L, li(1)), 3L, 0, - serializedUnpartitionedKey)); + r(st("deletefile3.parquet", FileContent.POSITION_DELETES.id(), 30L, null), + "deletefile3.parquet", 300L, 3L, 1, serializedKey2, createEmptyPartitionInfo(0, 3), COL_IDS, + FileContent.POSITION_DELETES.name()), + r(st("deletefile4.parquet", FileContent.EQUALITY_DELETES.id(), 20L, li(1, 3)), + "deletefile4.parquet", 200L, 4L, 1, serializedKey2, createEmptyPartitionInfo(0, 4), COL_IDS, + FileContent.EQUALITY_DELETES.name()), + r(st("deletefile5.parquet", FileContent.EQUALITY_DELETES.id(), 20L, li(1)), + "deletefile5.parquet", 200L, 3L, 0, serializedUnpartitionedKey, createEmptyPartitionInfo(0, 3), + COL_IDS, FileContent.EQUALITY_DELETES.name())); validateSingle( getPop(SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA, ManifestContent.DELETES, DELETE_PARTITION_SPEC_MAP), @@ -351,13 +364,15 @@ public void testPathGeneratingProcessorWithEqDeletes() throws Exception { @Test public void testWithLongDeleteFilePath() throws Exception { + String deleteFilePath = "deletefile-" + Strings.repeat("0", 2000) + "1.parquet"; + FileContent fileContent = FileContent.POSITION_DELETES; RecordSet input = rs(SystemSchemas.SPLIT_GEN_AND_COL_IDS_SCAN_SCHEMA, inputRow(deleteManifestWithLongPaths, COL_IDS)); BatchSchema outputSchema = 
SystemSchemas.ICEBERG_DELETE_MANIFEST_SCAN_SCHEMA; RecordSet output = rs(outputSchema, - r(st("deletefile-" + Strings.repeat("0", 2000) + "1.parquet", FileContent.POSITION_DELETES.id(), 10L, null), - 1L, 1, serializedKey1)); + r(st(deleteFilePath, fileContent.id(), 10L, null), deleteFilePath, 100L, 1L, 1, + serializedKey1, createDatePartitionInfo("date", 10, 0), COL_IDS, fileContent.name())); validateSingle(getPop(outputSchema, ManifestContent.DELETES), TableFunctionOperator.class, input, output, 2); } @@ -422,9 +437,9 @@ public void testFilterOnFileSizeExcludeSome() throws Exception { // Data file 2 skipped from output because it's size is 200 bytes RecordSet output = rs(outputSchema, r("datafile1.parquet", 100L, 1L, 1, serializedKey1, - createDatePartitionInfo("date", 10, 0), COL_IDS, 10), + createDatePartitionInfo("date", 10, 0), COL_IDS, FileContent.DATA.name(), 10), r("datafile3.parquet", 300L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 20)); + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 20)); LongRange excludeDataFile2Range = new LongRange(200L, 250L); ManifestScanFilters manifestScanFilters = new ImmutableManifestScanFilters.Builder() .setSkipDataFileSizeRange(excludeDataFile2Range).setMinPartitionSpecId(1).build(); @@ -442,11 +457,11 @@ public void testFilterWithPartitionSpecEvolutionChecks() throws Exception { RecordSet output = rs(outputSchema, r("datafile1.parquet", 100L, 1L, 1, serializedKey1, - createDatePartitionInfo("date", 10, 0), COL_IDS, 10), + createDatePartitionInfo("date", 10, 0), COL_IDS, FileContent.DATA.name(), 10), r("datafile2.parquet", 200L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 20), + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 20), r("datafile3.parquet", 300L, 1L, 1, serializedKey2, - createDatePartitionInfo("date", 20, 0), COL_IDS, 20)); + createDatePartitionInfo("date", 20, 0), COL_IDS, FileContent.DATA.name(), 20)); LongRange excludeAllRange = new LongRange(0L, Long.MAX_VALUE); int evolvedPartitionSpecId = PARTITION_SPEC_1.specId() + 1; @@ -488,10 +503,29 @@ private byte[] createDatePartitionInfo(String colName, int partitionVal, int ver partitionValueBuilder.setColumn(IncrementalUpdateUtils.UPDATE_COLUMN); partitionValueBuilder.setLongValue(version); partitionInfoBuilder.addValues(partitionValueBuilder.build()); + partitionValueBuilder.setColumn(SystemSchemas.IMPLICIT_SEQUENCE_NUMBER); + partitionValueBuilder.setLongValue(1); + partitionInfoBuilder.addValues(partitionValueBuilder.build()); return IcebergSerDe.serializeToByteArray(partitionInfoBuilder.build()); } + private byte[] createEmptyPartitionInfo(int version, long sequence) + throws Exception { + PartitionProtobuf.NormalizedPartitionInfo.Builder partitionInfoBuilder = PartitionProtobuf.NormalizedPartitionInfo.newBuilder().setId(String.valueOf(1)); + + PartitionProtobuf.PartitionValue.Builder partitionValueBuilder = PartitionProtobuf.PartitionValue.newBuilder(); + partitionValueBuilder.setColumn(IncrementalUpdateUtils.UPDATE_COLUMN); + partitionValueBuilder.setLongValue(version); + partitionInfoBuilder.addValues(partitionValueBuilder.build()); + partitionValueBuilder.setColumn(SystemSchemas.IMPLICIT_SEQUENCE_NUMBER); + partitionValueBuilder.setLongValue(sequence); + partitionInfoBuilder.addValues(partitionValueBuilder.build()); + + return IcebergSerDe.serializeToByteArray(partitionInfoBuilder.build()); + } + + private TableFunctionPOP getPop(BatchSchema outputSchema, 
ManifestContent manifestContent) throws Exception { return getPop(outputSchema, manifestContent, PARTITION_SPEC_MAP); } @@ -530,7 +564,8 @@ private TableFunctionPOP getPop(BatchSchema outputSchema, ManifestContent manife true, null, manifestContent, - manifestScanFilters))); + manifestScanFilters, + false))); } private static ManifestFile createDataManifest(String path, PartitionSpec spec, List files) diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestOptimizeManifestsTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestOptimizeManifestsTableFunction.java new file mode 100644 index 0000000000..8eac52a5dd --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestOptimizeManifestsTableFunction.java @@ -0,0 +1,194 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.iceberg; + +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.createStockIcebergTable; +import static com.dremio.exec.planner.sql.DmlQueryTestUtils.loadTable; +import static com.dremio.exec.store.iceberg.OptimizeManifestsTableFunction.NO_CLUSTERING_RULE; +import static org.apache.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES_DEFAULT; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.util.Collections; + +import org.apache.iceberg.RewriteManifests; +import org.apache.iceberg.Snapshot; +import org.apache.iceberg.Table; +import org.apache.iceberg.exceptions.CommitFailedException; +import org.junit.Test; + +import com.dremio.BaseTestQuery; +import com.dremio.common.exceptions.ExecutionSetupException; +import com.dremio.common.exceptions.UserException; +import com.dremio.context.UserContext; +import com.dremio.exec.catalog.StoragePluginId; +import com.dremio.exec.physical.base.OpProps; +import com.dremio.exec.physical.config.TableFunctionConfig; +import com.dremio.exec.planner.sql.DmlQueryTestUtils; +import com.dremio.exec.planner.sql.OptimizeTests; +import com.dremio.exec.store.dfs.IcebergTableProps; +import com.dremio.exec.store.iceberg.model.IcebergModel; +import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; +import com.dremio.io.file.FileSystem; +import com.dremio.sabot.exec.context.MetricDef; +import com.dremio.sabot.exec.context.OperatorContext; +import com.dremio.sabot.exec.context.OperatorStats; +import com.dremio.sabot.exec.fragment.FragmentExecutionContext; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; + +/** + * Tests for {@link 
OptimizeManifestsTableFunction} + */ +public class TestOptimizeManifestsTableFunction extends BaseTestQuery { + private static final String SOURCE = TEMP_SCHEMA_HADOOP; + + @Test + public void testCleanOrphans() throws Exception { + try (DmlQueryTestUtils.Table table = createStockIcebergTable(SOURCE, 0, 2, "clean_orphans")) { + OptimizeTests.insertCommits(table, 5); + + Table icebergTable = loadTable(table); + Snapshot originalSnapshot = icebergTable.currentSnapshot(); + Snapshot inProgressSnapshot = icebergTable.rewriteManifests().clusterBy(NO_CLUSTERING_RULE).apply(); + + OptimizeManifestsTableFunction.cleanOrphans(icebergTable.io(), inProgressSnapshot); + + OptimizeTests.assertNoOrphanManifests(icebergTable, originalSnapshot); + } + } + + @Test + public void testHasNoManifestRewritten() { + Snapshot snapshot = mock(Snapshot.class); + + when(snapshot.summary()).thenReturn(Collections.EMPTY_MAP); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "0")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-replaced", "0")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "0", "manifests-replaced", "0")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + // Inconsistent cases, when manifests replaced XOR manifests created + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "5")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-replaced", "5")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + } + + @Test + public void testHasNoManifestValid() { + Snapshot snapshot = mock(Snapshot.class); + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "10", "manifests-replaced", "1", "total-data-files", "10")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isFalse(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "10", "manifests-replaced", "2", "total-data-files", "10", "total-delete-files", "1")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isFalse(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "2", "manifests-replaced", "2", "total-data-files", "10")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isFalse(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "3", "manifests-replaced", "3", "total-data-files", "10", "total-delete-files", "1")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isFalse(); + } + + @Test + public void testHasNoManifestRewrittenResidualFiles() { + Snapshot snapshot = mock(Snapshot.class); + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "1", "manifests-replaced", "1", "total-data-files", "10")); + assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + + when(snapshot.summary()).thenReturn(ImmutableMap.of("manifests-created", "2", "manifests-replaced", "2", "total-data-files", "10", "total-delete-files", "10")); + 
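[Editorial aside on these OPTIMIZE MANIFESTS tests: testCleanOrphans stages a rewrite with rewriteManifests().clusterBy(NO_CLUSTERING_RULE).apply() rather than committing it, and the hasNoManifestChanges assertions key off the snapshot summary. A minimal sketch of the staged-versus-committed distinction with the core Iceberg API; the table path is hypothetical and the clusterBy lambda is a stand-in for the tests' NO_CLUSTERING_RULE:

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;

public class RewriteManifestsSketch {
  public static void main(String[] args) {
    Table table = new HadoopTables(new Configuration()).load("/tmp/iceberg/my_table"); // hypothetical path

    // apply() stages the rewrite without committing, which is what lets
    // testCleanOrphans inspect (and orphan-clean) an in-progress snapshot.
    Snapshot staged = table.rewriteManifests()
        .clusterBy(file -> "") // single cluster; stand-in for NO_CLUSTERING_RULE
        .apply();
    System.out.println(staged.summary()); // includes manifests-created / manifests-replaced

    // commit() would finalize the new manifest list instead:
    // table.rewriteManifests().clusterBy(file -> "").commit();
  }
}

manifests-created and manifests-replaced are the snapshot-summary keys that the hasNoManifestChanges assertions above and below inspect.]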
assertThat(OptimizeManifestsTableFunction.hasNoManifestChanges(snapshot)).isTrue(); + } + + @Test + public void testOptimizeManifestFailureOnCommit() throws Exception { + try (DmlQueryTestUtils.Table table = createStockIcebergTable(SOURCE, 0, 2, "clean_orphans")) { + OptimizeTests.insertCommits(table, 5); + + Table icebergTable = spy(loadTable(table)); + Snapshot snapshot = icebergTable.currentSnapshot(); + RewriteManifests rewriteManifestSpy = spy(icebergTable.rewriteManifests()); + doThrow(new CommitFailedException("Forced failure of the commit")).when(rewriteManifestSpy).commit(); + when(icebergTable.rewriteManifests()).thenReturn(rewriteManifestSpy); + + OptimizeManifestsTableFunction optimizeManifestsTableFunction = initializeWithMocks(icebergTable); + + assertThatThrownBy(() -> optimizeManifestsTableFunction.noMoreToConsume()) + .isInstanceOf(UserException.class) + .hasMessage("Error while rewriting table manifests."); + + icebergTable.refresh(); + assertThat(icebergTable.currentSnapshot().snapshotId()).isEqualTo(snapshot.snapshotId()); + + // Ensure no orphans + Snapshot[] allSnapshots = Iterables.toArray(icebergTable.snapshots(), Snapshot.class); + OptimizeTests.assertNoOrphanManifests(icebergTable, allSnapshots); + } + } + + private OptimizeManifestsTableFunction initializeWithMocks(Table icebergTable) throws ExecutionSetupException { + FragmentExecutionContext fec = mock(FragmentExecutionContext.class); + OperatorContext context = mock(OperatorContext.class); + OpProps opProps = mock(OpProps.class); + TableFunctionConfig tableFunctionConfig = mock(TableFunctionConfig.class); + + OptimizeManifestsTableFunctionContext ctx = mock(OptimizeManifestsTableFunctionContext.class); + when(tableFunctionConfig.getFunctionContext()).thenReturn(ctx); + + StoragePluginId pluginId = mock(StoragePluginId.class); + when(ctx.getPluginId()).thenReturn(pluginId); + + SupportsIcebergMutablePlugin icebergMutablePlugin = mock(SupportsIcebergMutablePlugin.class); + when(fec.getStoragePlugin(any(StoragePluginId.class))).thenReturn(icebergMutablePlugin); + + IcebergTableProps tableProps = mock(IcebergTableProps.class); + when(ctx.getIcebergTableProps()).thenReturn(tableProps); + + when(opProps.getUserName()).thenReturn(UserContext.SYSTEM_USER_CONTEXT.getUserId()); + + IcebergModel icebergModel = mock(IcebergModel.class); + when(icebergMutablePlugin.getIcebergModel(any(IcebergTableProps.class), anyString(), any(OperatorContext.class), any(FileSystem.class))) + .thenReturn(icebergModel); + doNothing().when(icebergModel).refreshVersionContext(); + IcebergTableIdentifier icebergTableIdentifier = mock(IcebergTableIdentifier.class); + when(icebergModel.getTableIdentifier(anyString())).thenReturn(icebergTableIdentifier); + when(icebergModel.getIcebergTable(any(IcebergTableIdentifier.class))).thenReturn(icebergTable); + + OperatorStats operatorStats = mock(OperatorStats.class); + doNothing().when(operatorStats).addLongStat(any(MetricDef.class), anyLong()); + when(context.getStats()).thenReturn(operatorStats); + when(icebergModel.propertyAsLong(any(IcebergTableIdentifier.class), anyString(), anyLong())) + .thenReturn(MANIFEST_TARGET_SIZE_BYTES_DEFAULT); + + return new OptimizeManifestsTableFunction(fec, context, opProps, tableFunctionConfig); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestPathGeneratingDatafileProcessor.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestPathGeneratingDatafileProcessor.java index 1b1a99b9ea..1391398a6b 100644 --- 
a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestPathGeneratingDatafileProcessor.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestPathGeneratingDatafileProcessor.java @@ -73,7 +73,7 @@ public void initialisePathGenDatafileProcessor() throws Exception { datafileProcessor = new DataFileContentReader(operatorContext, tableFunctionContext); datafileProcessor.setup(null, outgoing); setupPartitionData(); - datafileProcessor.initialise(partitionSpec); + datafileProcessor.initialise(partitionSpec, 0); } @After diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestRefresh.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestRefresh.java index 646c6706dd..5fa87cc29a 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestRefresh.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestRefresh.java @@ -44,7 +44,6 @@ import com.dremio.BaseTestQuery; import com.dremio.exec.hadoop.HadoopFileSystem; -import com.dremio.exec.store.iceberg.model.IcebergCatalogType; import com.dremio.exec.store.iceberg.model.IcebergModel; import com.google.common.io.Resources; @@ -92,7 +91,7 @@ public void testRefresh() throws Exception { try (AutoCloseable c = enableIcebergTables()) { Path rootPath = Paths.get(getDfsTestTmpSchemaLocation(), "iceberg", "metadata_refresh"); File tableRoot = rootPath.toFile(); - IcebergModel icebergModel = getIcebergModel(tableRoot, IcebergCatalogType.HADOOP); + IcebergModel icebergModel = getIcebergModel(TEMP_SCHEMA_HADOOP); Files.createDirectories(rootPath); String root = rootPath.toString(); String tableName = "dfs_test_hadoop.iceberg.metadata_refresh"; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestSchemaConverter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestSchemaConverter.java index a5e25f3632..33f830aa93 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestSchemaConverter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/TestSchemaConverter.java @@ -243,7 +243,7 @@ public void mixed() throws Exception { Configuration conf = new Configuration(); FileSystemPlugin fileSystemPlugin = BaseTestQuery.getMockedFileSystemPlugin(); - IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(new Configuration(), fileSystemPlugin); + IcebergHadoopModel icebergHadoopModel = new IcebergHadoopModel(fileSystemPlugin); when(fileSystemPlugin.getIcebergModel()).thenReturn(icebergHadoopModel); IcebergOpCommitter createTableCommitter = icebergHadoopModel.getCreateTableCommitter("testTableName", diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestIcebergCommitOpHelper.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestIcebergCommitOpHelper.java index a39a507d3d..995e73c55f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestIcebergCommitOpHelper.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestIcebergCommitOpHelper.java @@ -41,7 +41,9 @@ import org.apache.arrow.vector.complex.impl.UnionListWriter; import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; +import org.apache.iceberg.DeleteFile; import org.apache.iceberg.FileFormat; +import org.apache.iceberg.FileMetadata; import org.apache.iceberg.PartitionField; import org.apache.iceberg.PartitionKey; import org.apache.iceberg.PartitionSpec; @@ -110,6 +112,7 @@ public class TestIcebergCommitOpHelper extends 
BaseTestOperator { private static FileSystemPlugin metadataPlugin; private static StoragePlugin sourceTablePlugin; + private static FileSystem metadataFileSystem; private static FileSystem sourceTableFileSystem; private ArrowBuf workBuf; @@ -120,6 +123,7 @@ public void beforeTest() throws Exception { testCloseables.add(workBuf); metadataPlugin = mock(FileSystemPlugin.class, RETURNS_DEEP_STUBS); + metadataFileSystem = mock(FileSystem.class); sourceTablePlugin = mock(StoragePlugin.class, withSettings().defaultAnswer(RETURNS_DEEP_STUBS).extraInterfaces(SupportsInternalIcebergTable.class)); sourceTableFileSystem = mock(FileSystem.class); @@ -150,6 +154,12 @@ public void testIncrementalRefreshPartitionPathExistenceChecks() throws Exceptio SPEC, ImmutableList.of(PARTITION_2A)); addInputRow(input, createDataFile("no_partition_path_match", SPEC, PARTITION_2B), OperationType.DELETE_DATAFILE, SPEC, ImmutableList.of(PARTITION_2B)); + addInputRow(input, createDeleteFile("posDelete1", SPEC, PARTITION_1C), OperationType.DELETE_DELETEFILE, + SPEC, ImmutableList.of(PARTITION_1C)); + addInputRow(input, createDeleteFile("subdir/posDelete2", SPEC, PARTITION_2A), OperationType.DELETE_DELETEFILE, + SPEC, ImmutableList.of(PARTITION_2A)); + addInputRow(input, createDeleteFile("no_partition_path_match_pos_delete", SPEC, PARTITION_2B), + OperationType.DELETE_DELETEFILE, SPEC, ImmutableList.of(PARTITION_2B)); helper.setup(input); helper.consumeData(input.getRecordCount()); @@ -202,6 +212,10 @@ public void testIncrementalRefreshUnpartitionedExistenceChecks() throws Exceptio UNPARTITIONED_SPEC, ImmutableList.of()); addInputRow(input, createDataFile("delete2", UNPARTITIONED_SPEC), OperationType.DELETE_DATAFILE, UNPARTITIONED_SPEC, ImmutableList.of()); + addInputRow(input, createDeleteFile("posDelete1", UNPARTITIONED_SPEC), OperationType.DELETE_DELETEFILE, + UNPARTITIONED_SPEC, ImmutableList.of()); + addInputRow(input, createDeleteFile("posDelete2", UNPARTITIONED_SPEC), OperationType.DELETE_DELETEFILE, + UNPARTITIONED_SPEC, ImmutableList.of()); helper.setup(input); helper.consumeData(input.getRecordCount()); @@ -221,7 +235,7 @@ private IcebergCommitOpHelper createCommitOpHelper(IcebergCommandType type, List final OperatorContextImpl context = testContext.getNewOperatorContext(allocator, pop, BATCH_SIZE, null); testCloseables.add(context); - return new IcebergCommitOpHelper(context, pop); + return new IcebergCommitOpHelper(context, pop, metadataFileSystem); } private VectorContainer createInputContainer() { @@ -337,6 +351,27 @@ private static DataFile createDataFile(String name, PartitionSpec spec) { .build(); } + private static DeleteFile createDeleteFile(String name, PartitionSpec spec, PartitionKey partitionKey) { + return FileMetadata.deleteFileBuilder(spec) + .ofPositionDeletes() + .withPath(partitionPath(partitionKey) + "/" + name) + .withFormat(FileFormat.PARQUET) + .withFileSizeInBytes(1) + .withRecordCount(1) + .withPartition(partitionKey) + .build(); + } + + private static DeleteFile createDeleteFile(String name, PartitionSpec spec) { + return FileMetadata.deleteFileBuilder(spec) + .ofPositionDeletes() + .withPath(SOURCE_TABLE_ROOT + "/" + name) + .withFormat(FileFormat.PARQUET) + .withFileSizeInBytes(1) + .withRecordCount(1) + .build(); + } + /** * A mock ReadSignatureProvider implementation which simply will call the partitionExists predicate for each * partition directory. 
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestManifestRecordWriter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestManifestRecordWriter.java index 6de4a09c22..5a1a53bd24 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestManifestRecordWriter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/manifestwriter/TestManifestRecordWriter.java @@ -23,6 +23,7 @@ import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -58,14 +59,20 @@ import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; import org.apache.iceberg.ManifestFile; +import org.apache.iceberg.ManifestFiles; +import org.apache.iceberg.ManifestWriter; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.SortOrder; import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; +import org.apache.iceberg.io.FileIO; +import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.types.Types; import org.junit.Test; import org.mockito.ArgumentCaptor; +import org.mockito.MockedStatic; +import org.mockito.Mockito; import com.dremio.BaseTestQuery; import com.dremio.exec.catalog.CatalogOptions; @@ -882,6 +889,7 @@ private IcebergManifestWriterPOP getManifestWriter(String metadataLocation, bool when(fileSystemPlugin.getFsConfCopy()).thenReturn(configuration); final FileSystem fs = HadoopFileSystem.getLocal(new Configuration()); when(fileSystemPlugin.getSystemUserFS()).thenReturn(fs); + when(fileSystemPlugin.createFS(any(), any(), any())).thenReturn(fs); when(manifestWriterPOP.getLocation()).thenReturn(metadataLocation + "/queryID"); when(manifestWriterPOP.getPlugin()).thenReturn(fileSystemPlugin); WriterOptions writerOptions = mock(WriterOptions.class); @@ -900,4 +908,24 @@ private IcebergManifestWriterPOP getManifestWriter(String metadataLocation, bool when(writerOptions.getExtendedProperty()).thenReturn(null); return manifestWriterPOP; } + + @Test + public void testLazyManifestWriter() throws Exception { + FileIO mockIo = mock(FileIO.class); + OutputFile mockOutputFile = mock(OutputFile.class); + ManifestWriter mockManifestWriter = mock(ManifestWriter.class); + + when(mockIo.newOutputFile(anyString())) + .thenReturn(mockOutputFile); + + try (MockedStatic manifestFiles = Mockito.mockStatic(ManifestFiles.class)) { + manifestFiles.when(() -> ManifestFiles.write(null, mockOutputFile)).thenReturn(mockManifestWriter); + + LazyManifestWriter lazyManifestWriter = new LazyManifestWriter(mockIo, "hello", null); + assertTrue(!lazyManifestWriter.isInitialized()); + + lazyManifestWriter.getInstance(); + assertTrue(lazyManifestWriter.isInitialized()); + } + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestFullMetadataRefreshCommitter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestFullMetadataRefreshCommitter.java index a3c3a52176..3d877ed798 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestFullMetadataRefreshCommitter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestFullMetadataRefreshCommitter.java @@ -25,6 +25,7 @@ import org.junit.Before; import org.junit.Test; 
import org.junit.runner.RunWith; +import org.mockito.Answers; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; @@ -50,7 +51,7 @@ public class TestFullMetadataRefreshCommitter { private Table table; @Mock private Snapshot snapshot; - @Mock + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private IcebergCommand icebergCommand; private FullMetadataRefreshCommitter fullMetadataRefreshCommitter; @@ -87,7 +88,7 @@ public void init() { Mockito.when(icebergCommand.endTransaction()).thenReturn(table); Mockito.when(table.currentSnapshot()).thenReturn(snapshot); Mockito.when(snapshot.summary()).thenReturn(new HashMap<>()); - Mockito.when(icebergCommand.getRootPointer()).thenReturn("/"); + Mockito.when(icebergCommand.getRootPointer()).thenReturn("/test/metadata.json"); Mockito.when(icebergCommand.getIcebergSchema()).thenReturn(new Schema(1, new ArrayList<>())); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergBaseCommand.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergBaseCommand.java index ce9a6262bf..b195e07f49 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergBaseCommand.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergBaseCommand.java @@ -26,14 +26,12 @@ import org.mockito.Mockito; import com.dremio.BaseTestQuery; -import com.dremio.exec.catalog.MutablePlugin; public class TestIcebergBaseCommand extends BaseTestQuery { @Rule public TemporaryFolder folder = new TemporaryFolder(); private final TableOperations tableOperations = Mockito.mock(TableOperations.class); - private final MutablePlugin mutablePlugin = Mockito.mock(MutablePlugin.class); @Test public void testMissingManifestOnLoadTable() { @@ -51,8 +49,7 @@ public void testMissingRootPointer() { private class MockCommand extends IcebergBaseCommand { public MockCommand(File tableFolder) { - super(new Configuration(), tableFolder.getAbsolutePath(), null, TestIcebergBaseCommand.this.tableOperations, - TestIcebergBaseCommand.this.mutablePlugin); + super(new Configuration(), tableFolder.getAbsolutePath(), null, TestIcebergBaseCommand.this.tableOperations); } } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergOptimizeOperationCommitter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergOptimizeOperationCommitter.java index 12dfe1e669..7db66fe36f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergOptimizeOperationCommitter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/model/TestIcebergOptimizeOperationCommitter.java @@ -34,6 +34,8 @@ import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; +import org.apache.iceberg.DeleteFile; +import org.apache.iceberg.FileMetadata; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Snapshot; @@ -72,39 +74,39 @@ public void testNoChange() { Table table = mock(Table.class); when(table.currentSnapshot()).thenReturn(currentSnapshot); when(command.loadTable()).thenReturn(table); - when(command.rewriteDataFiles(anySet(), anySet())).thenReturn(rewriteSnapshot); + when(command.rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong())).thenReturn(rewriteSnapshot); WriterCommitterOutputHandler outputHandler = mock(WriterCommitterOutputHandler.class); doNothing().when(outputHandler).write(any(WriterCommitterRecord.class)); 
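// Capture every record written to the output handler so the operation-type/record-count tuples can be asserted below.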
ArgumentCaptor<WriterCommitterRecord> writerRecordCaptor = ArgumentCaptor.forClass(WriterCommitterRecord.class); // No added, no deleted - IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, getTableProps(), mock(FileSystem.class)); + IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, null, getTableProps(), mock(FileSystem.class)); Snapshot commitSnapshot = opCommitter.commit(outputHandler); assertThat(commitSnapshot).isEqualTo(currentSnapshot).isNotEqualTo(rewriteSnapshot); - verify(command, never()).rewriteDataFiles(anySet(), anySet()); + verify(command, never()).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); // file deleted but no replace write or vice-versa opCommitter.consumeAddDataFile(getDatafile("/table/data/added1.parquet")); commitSnapshot = opCommitter.commit(outputHandler); assertThat(commitSnapshot).isEqualTo(currentSnapshot).isNotEqualTo(rewriteSnapshot); - verify(command, never()).rewriteDataFiles(anySet(), anySet()); + verify(command, never()).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); opCommitter.consumeDeleteDataFile(getDatafile("/table/data/deleted1.parquet")); commitSnapshot = opCommitter.commit(outputHandler); assertThat(commitSnapshot).isEqualTo(currentSnapshot).isNotEqualTo(rewriteSnapshot); - verify(command, never()).rewriteDataFiles(anySet(), anySet()); + verify(command, never()).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); - verify(outputHandler, times(6)).write(writerRecordCaptor.capture()); + verify(outputHandler, times(9)).write(writerRecordCaptor.capture()); assertThat(writerRecordCaptor.getAllValues()) .extracting(WriterCommitterRecord::operationType, WriterCommitterRecord::records) - .hasSize(6) + .hasSize(9) .containsExactly( - Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L), - Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L), - Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L)); + Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.DELETE_DELETEFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L), + Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.DELETE_DELETEFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L), + Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.DELETE_DELETEFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L)); } @Test @@ -115,29 +117,34 @@ public void testRewrite() { Table table = mock(Table.class); when(table.currentSnapshot()).thenReturn(currentSnapshot); when(command.loadTable()).thenReturn(table); - when(command.rewriteDataFiles(anySet(), anySet())).thenReturn(rewriteSnapshot); + when(command.rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong())).thenReturn(rewriteSnapshot); WriterCommitterOutputHandler outputHandler = mock(WriterCommitterOutputHandler.class); doNothing().when(outputHandler).write(any(WriterCommitterRecord.class)); ArgumentCaptor<WriterCommitterRecord> writerRecordCaptor = ArgumentCaptor.forClass(WriterCommitterRecord.class); - IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), 2L, getTableProps(), mock(FileSystem.class)); +
IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), 2L, currentSnapshot.snapshotId(), getTableProps(), mock(FileSystem.class)); opCommitter.consumeAddDataFile(getDatafile("/a1.parquet")); opCommitter.consumeAddDataFile(getDatafile("/a2.parquet")); opCommitter.consumeDeleteDataFile(getDatafile("/d1.parquet")); opCommitter.consumeDeleteDataFile(getDatafile("/d2.parquet")); + opCommitter.consumeDeleteDeleteFile(getDeletefile("/pd1.parquet")); + opCommitter.consumeDeleteDeleteFile(getDeletefile("/pd2.parquet")); assertThat(opCommitter.getAddedDataFiles()).extracting("path").contains("/a1.parquet", "/a2.parquet"); assertThat(opCommitter.getRemovedDataFiles()).extracting("path").contains("/d1.parquet", "/d2.parquet"); + assertThat(opCommitter.getRemovedDeleteFiles()).extracting("path").contains("/pd1.parquet", "/pd2.parquet"); Snapshot commitSnapshot = opCommitter.commit(outputHandler); assertThat(commitSnapshot).isEqualTo(rewriteSnapshot).isNotEqualTo(currentSnapshot); - verify(command, times(1)).rewriteDataFiles(anySet(), anySet()); - verify(outputHandler, times(2)).write(writerRecordCaptor.capture()); + verify(command, times(1)).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); + verify(outputHandler, times(3)).write(writerRecordCaptor.capture()); assertThat(writerRecordCaptor.getAllValues()) .extracting(WriterCommitterRecord::operationType, WriterCommitterRecord::records) - .hasSize(2) - .containsExactly(Tuple.tuple(OperationType.DELETE_DATAFILE.value, 2L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 2L)); + .hasSize(3) + .containsExactly(Tuple.tuple(OperationType.DELETE_DATAFILE.value, 2L), + Tuple.tuple(OperationType.DELETE_DELETEFILE.value, 2L), + Tuple.tuple(OperationType.ADD_DATAFILE.value, 2L)); } @Test @@ -148,33 +155,39 @@ public void testRewriteMinInputNotPassed() throws IOException { Table table = mock(Table.class); when(table.currentSnapshot()).thenReturn(currentSnapshot); when(command.loadTable()).thenReturn(table); - when(command.rewriteDataFiles(anySet(), anySet())).thenReturn(rewriteSnapshot); + when(command.rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong())).thenReturn(rewriteSnapshot); FileSystem fs = mock(FileSystem.class); WriterCommitterOutputHandler outputHandler = mock(WriterCommitterOutputHandler.class); doNothing().when(outputHandler).write(any(WriterCommitterRecord.class)); ArgumentCaptor<WriterCommitterRecord> writerRecordCaptor = ArgumentCaptor.forClass(WriterCommitterRecord.class); - IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), 3L, getTableProps(), fs); + // minInputFiles is set to 5 as the evaluation criterion is based on the total of removed data files and removed delete files.
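+ // With two removed data files and two removed delete files the rewrite input totals 4 files, which stays below
+ // the minInputFiles threshold of 5, so the commit below is expected to keep the current snapshot and never call rewriteFiles().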
+ IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), 5L, currentSnapshot.snapshotId(), getTableProps(), fs); opCommitter.consumeAddDataFile(getDatafile("/a1.parquet")); opCommitter.consumeAddDataFile(getDatafile("/a2.parquet")); opCommitter.consumeDeleteDataFile(getDatafile("/d1.parquet")); opCommitter.consumeDeleteDataFile(getDatafile("/d2.parquet")); + opCommitter.consumeDeleteDeleteFile(getDeletefile("/pd1.parquet")); + opCommitter.consumeDeleteDeleteFile(getDeletefile("/pd2.parquet")); assertThat(opCommitter.getAddedDataFiles()).extracting("path").contains("/a1.parquet", "/a2.parquet"); assertThat(opCommitter.getRemovedDataFiles()).extracting("path").contains("/d1.parquet", "/d2.parquet"); + assertThat(opCommitter.getRemovedDeleteFiles()).extracting("path").contains("/pd1.parquet", "/pd2.parquet"); Snapshot commitSnapshot = opCommitter.commit(outputHandler); assertThat(commitSnapshot).isEqualTo(currentSnapshot).isNotEqualTo(rewriteSnapshot); - verify(command, never()).rewriteDataFiles(anySet(), anySet()); + verify(command, never()).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); // Two data files and the empty directory due to NOOP verify(fs, times(3)).delete(any(Path.class), anyBoolean()); - verify(command, never()).rewriteDataFiles(anySet(), anySet()); - verify(outputHandler, times(2)).write(writerRecordCaptor.capture()); + verify(command, never()).rewriteFiles(anySet(), anySet(), anySet(), anySet(), anyLong()); + verify(outputHandler, times(3)).write(writerRecordCaptor.capture()); assertThat(writerRecordCaptor.getAllValues()) .extracting(WriterCommitterRecord::operationType, WriterCommitterRecord::records) - .hasSize(2) - .containsExactly(Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L)); + .hasSize(3) + .containsExactly(Tuple.tuple(OperationType.DELETE_DATAFILE.value, 0L), + Tuple.tuple(OperationType.DELETE_DELETEFILE.value, 0L), + Tuple.tuple(OperationType.ADD_DATAFILE.value, 0L)); } @@ -184,7 +197,7 @@ public void testGetRootPointer() { String rootPointerLocation = "/table/metadata/v2.metadata.json"; when(command.getRootPointer()).thenReturn(rootPointerLocation); - IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, getTableProps(), mock(FileSystem.class)); + IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, null, getTableProps(), mock(FileSystem.class)); assertThat(opCommitter.getRootPointer()).isEqualTo(rootPointerLocation); assertThat(opCommitter.isIcebergTableUpdated()).isTrue(); @@ -193,7 +206,7 @@ public void testGetRootPointer() { @Test public void testUnsupportedOperations() { IcebergCommand command = mock(IcebergCommand.class); - IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, getTableProps(), mock(FileSystem.class)); + IcebergOptimizeOperationCommitter opCommitter = new IcebergOptimizeOperationCommitter(command, getOperatorStats(), getDatasetConfig(), null, null, getTableProps(), mock(FileSystem.class)); assertThatThrownBy(() -> opCommitter.consumeManifestFile(mock(ManifestFile.class))) .isInstanceOf(UnsupportedOperationException.class) @@ -240,4 +253,14 @@ private DataFile getDatafile(String path) { .build(); return dataFile; } + + private 
DeleteFile getDeletefile(String path) { + DeleteFile deleteFile = FileMetadata.deleteFileBuilder(PartitionSpec.unpartitioned()) + .ofPositionDeletes() + .withPath(path) + .withFileSizeInBytes(40) + .withRecordCount(9) + .build(); + return deleteFile; + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/nessie/TestIcebergNessieCommand.java b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/nessie/TestIcebergNessieCommand.java index 9dd1d23711..ddd06630ee 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/nessie/TestIcebergNessieCommand.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/iceberg/nessie/TestIcebergNessieCommand.java @@ -16,34 +16,39 @@ package com.dremio.exec.store.iceberg.nessie; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.Test; import org.mockito.Mockito; -import com.dremio.exec.catalog.MutablePlugin; import com.dremio.exec.store.iceberg.model.IcebergTableIdentifier; +import com.dremio.io.file.FileSystem; class TestIcebergNessieCommand { private static final IcebergTableIdentifier ID = new IcebergNessieTableIdentifier("ns", "ptr"); + private final Configuration conf = new Configuration(); + @Test - void testDeleteTableFailure() { - MutablePlugin plugin = Mockito.mock(MutablePlugin.class); + void testDeleteTableFailure() throws Exception { + FileSystem fs = Mockito.mock(FileSystem.class); IcebergNessieTableOperations ops = Mockito.mock(IcebergNessieTableOperations.class); - IcebergNessieCommand command = new IcebergNessieCommand(ID, null, null, ops, plugin); - Mockito.when(plugin.getFsConfCopy()).thenThrow(new RuntimeException("test-exception-1")); + IcebergNessieCommand command = new IcebergNessieCommand(ID, conf, fs, ops); + Mockito.when(fs.delete(any(), anyBoolean())).thenThrow(new RuntimeException("test-exception-1")); assertThatThrownBy(command::deleteTable).isInstanceOf(RuntimeException.class).hasMessage("test-exception-1"); Mockito.verify(ops, Mockito.times(1)).deleteKey(); } @Test - void testDoubleFailure() { - MutablePlugin plugin = Mockito.mock(MutablePlugin.class); + void testDoubleFailure() throws Exception { + FileSystem fs = Mockito.mock(FileSystem.class); IcebergNessieTableOperations ops = Mockito.mock(IcebergNessieTableOperations.class); - IcebergNessieCommand command = new IcebergNessieCommand(ID, null, null, ops, plugin); + IcebergNessieCommand command = new IcebergNessieCommand(ID, conf, fs, ops); RuntimeException innerException = new RuntimeException("test-exception-1"); Mockito.doThrow(innerException).when(ops).deleteKey(); - Mockito.when(plugin.getFsConfCopy()).thenThrow(new RuntimeException("test-exception-2")); + Mockito.when(fs.delete(any(), anyBoolean())).thenThrow(new RuntimeException("test-exception-2")); assertThatThrownBy(command::deleteTable) .isInstanceOf(RuntimeException.class) .hasMessage("test-exception-2") diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/json/InternalSchemaTestBase.java b/sabot/kernel/src/test/java/com/dremio/exec/store/json/InternalSchemaTestBase.java index 4fec711266..00e9704291 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/json/InternalSchemaTestBase.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/json/InternalSchemaTestBase.java @@ -166,7 +166,9 @@ void assertCoercionFailure(String dirName, String fileType, String 
tableType) { .withMessageContaining("to the column's data type") .withMessageContaining(tableType) .withMessageContaining("in table") - .withMessageContaining(dirName); + .withMessageContaining(dirName) + .withMessageContaining("and file") + .withMessageContaining(".json"); } void assertCastFailure(String dirName) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/RefreshDatasetTestUtils.java b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/RefreshDatasetTestUtils.java index 795cfddb7d..ddb58d0822 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/RefreshDatasetTestUtils.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/RefreshDatasetTestUtils.java @@ -25,7 +25,6 @@ import java.util.Set; import java.util.stream.Collectors; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -40,11 +39,7 @@ public class RefreshDatasetTestUtils { - public static FileSystem setupLocalFS() throws IOException { - Configuration conf = new Configuration(); - conf.set("fs.default.name", "local"); - return FileSystem.get(conf); - } + public static void fsDelete(FileSystem fs, Path path) throws IOException { FileStatus[] statuses = fs.listStatus(path); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefresh.java b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefresh.java index 1f0dbadd27..e3ef312c82 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefresh.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefresh.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.metadatarefresh; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.verifyIcebergMetadata; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertTrue; @@ -34,6 +33,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; @@ -56,7 +56,16 @@ import com.dremio.BaseTestQuery; import com.dremio.common.exceptions.UserException; import com.dremio.common.exceptions.UserRemoteException; +import com.dremio.connector.metadata.DatasetHandle; +import com.dremio.connector.metadata.EntityPath; +import com.dremio.exec.catalog.CatalogServiceImpl; +import com.dremio.exec.catalog.MetadataObjectsUtils; +import com.dremio.exec.store.DatasetRetrievalOptions; +import com.dremio.exec.store.dfs.FileSystemPlugin; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; import com.google.common.base.Stopwatch; +import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; @@ -1132,4 +1141,71 @@ public void testRefreshAddFileInsideEmptyFolder() throws Exception { .go(); } } + + /** + * Verify that UnlimitedSplitsFileDatasetHandle will be used for Parquet Datasets + * (Regression test for DX-60716) + * when: + * parquet dataset (file/folder) + 
* unlimitedSplits on + * (auto-promote is off && dataset was previously promoted + * *or* auto-promote is on ) + */ + @Test + public void testForUnlimitedSplitsFileDatasetHandle() throws Exception { + + try (AutoCloseable c1 = enableUnlimitedSplitsSupportFlags()) { + // Test to ensure that UnlimitedSplitsFileDatasetHandle gets returned from getDatasetHandle() for parquet datasets + { + File dir = new File(testRootPath + "datasetHandleParquetTest"); + if (!dir.exists()) { + dir.mkdir(); + } + + File dataFile = new File(Resources.getResource("metadatarefresh/int.parquet").getFile()); + FileUtils.copyFileToDirectory(dataFile, dir, false); + + final CatalogServiceImpl pluginRegistry = (CatalogServiceImpl) getSabotContext().getCatalogService(); + final FileSystemPlugin msp = pluginRegistry.getSource("dfs"); + + // set auto-promote true to trigger getDatasetHandle() to return UnlimitedSplitsFileDatasetHandle if generated + DatasetRetrievalOptions options = DatasetRetrievalOptions.DEFAULT.toBuilder().setAutoPromote(true).build(); + NamespaceKey namespaceKey = new NamespaceKey(Arrays.asList("dfs", "tmp", "metadatarefresh", "datasetHandleParquetTest")); + final EntityPath entityPath = MetadataObjectsUtils.toEntityPath(namespaceKey); + Optional<DatasetHandle> handle = msp.getDatasetHandle(entityPath, options.asGetDatasetOptions(null)); + + if (!handle.isPresent() || !(handle.get() instanceof UnlimitedSplitsFileDatasetHandle)) { + Assert.fail("Expected UnlimitedSplitsFileDatasetHandle to be created for parquet dataset"); + } + } + + // Test to ensure that UnlimitedSplitsFileDatasetHandle does not get returned from getDatasetHandle() for non-parquet datasets + { + File dir = new File(testRootPath + "datasetHandleCSVTest"); + if (!dir.exists()) { + dir.mkdir(); + } + + File dataFile = new File(Resources.getResource("metadatarefresh/bogus.csv").getFile()); + FileUtils.copyFileToDirectory(dataFile, dir, false); + + final CatalogServiceImpl pluginRegistry = (CatalogServiceImpl) getSabotContext().getCatalogService(); + final FileSystemPlugin msp = pluginRegistry.getSource("dfs"); + + // set auto-promote true to trigger getDatasetHandle() to return UnlimitedSplitsFileDatasetHandle if generated + DatasetRetrievalOptions options = DatasetRetrievalOptions.DEFAULT.toBuilder().setAutoPromote(true).build(); + NamespaceKey namespaceKey = new NamespaceKey(Arrays.asList("dfs", "tmp", "metadatarefresh", "datasetHandleCSVTest")); + final EntityPath entityPath = MetadataObjectsUtils.toEntityPath(namespaceKey); + Optional<DatasetHandle> handle = msp.getDatasetHandle(entityPath, options.asGetDatasetOptions(null)); + + if (handle.isPresent() && (handle.get() instanceof UnlimitedSplitsFileDatasetHandle)) { + // matching an UnlimitedSplitsFileDatasetHandle for a CSV should be a failure + Assert.fail("Should not create UnlimitedSplitsFileDatasetHandle for non-parquet dataset"); + } + } + + } catch (NamespaceException ex) { + throw Throwables.propagate(ex); + } + } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefreshFailures.java b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefreshFailures.java index 56eac0ad12..db5dd2a1b6 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefreshFailures.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/TestNewMetadataRefreshFailures.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.metadatarefresh; import static
com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.verifyIcebergMetadata; import static com.dremio.exec.store.metadatarefresh.TestNewMetadataRefresh.toMap; import static org.junit.Assert.assertEquals; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/footerread/TestFooterReadTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/footerread/TestFooterReadTableFunction.java index db9f972f5a..53b8e06e0b 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/footerread/TestFooterReadTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/metadatarefresh/footerread/TestFooterReadTableFunction.java @@ -17,7 +17,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -125,7 +124,7 @@ public void close() throws Exception { } @Test - public void testFooterReadTableFunctionForFSTables() throws URISyntaxException, ExecutionSetupException { + public void testFooterReadTableFunctionForFSTables() throws Exception { FooterReadTableFunction tableFunction = new FooterReadTableFunction(getFragmentExecutionContext(), getOpCtx(), null, getConfig(null, FileType.PARQUET)); tableFunction.setFs(fs); AtomicInteger counter = new AtomicInteger(0); @@ -235,14 +234,11 @@ public void testFooterReadTableFunctionForFSTables() throws URISyntaxException, } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Unable to read footer for file")); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @Test - public void testFooterReadTableFunctionForHiveTables() throws URISyntaxException, ExecutionSetupException { + public void testFooterReadTableFunctionForHiveTables() throws Exception { incomingRow.accept(getFullPath("int96.parquet", FileType.PARQUET), 431L, currentTime, 0, true); // For hive case, we are setting the table schema to later check that outputSchemaVector returns the same schema // and doesn't learn the schema from the input file.
@@ -251,7 +247,6 @@ public void testFooterReadTableFunctionForHiveTables() throws URISyntaxException FooterReadTableFunction tableFunction = new FooterReadTableFunction(getFragmentExecutionContext(), getOpCtx(), null, tableFunctionConfig); tableFunction.setFs(fs); - try { incoming.setAllCount(1); incoming.buildSchema(); outgoing = tableFunction.setup(incoming); @@ -269,20 +264,14 @@ public void testFooterReadTableFunctionForHiveTables() throws URISyntaxException assertEquals(1, tableFunction.processRow(0, 5)); verifyOutput(outputDatafileVector.get(0), outputOperationType.get(0), outputSchemaVector.get(0), tableSchema, new IcebergPartitionData(PartitionSpec.unpartitioned().partitionType()), OperationType.ADD_DATAFILE); - } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test - public void testFooterReadTableFunctionForUnions() throws URISyntaxException, ExecutionSetupException { + public void testFooterReadTableFunctionForUnions() throws Exception { incomingRow.accept(getFullPath("union_bigint_varchar_col2.parquet", FileType.PARQUET), 1507L, currentTime, 0, true); FooterReadTableFunction tableFunction = new FooterReadTableFunction(getFragmentExecutionContext(), getOpCtx(), null, getConfig(null, FileType.PARQUET)); tableFunction.setFs(fs); - try { incoming.setAllCount(2); incoming.buildSchema(); outgoing = tableFunction.setup(incoming); @@ -302,13 +291,10 @@ public void testFooterReadTableFunctionForUnions() throws URISyntaxException, Ex BatchSchema.of(Field.nullable("col1", new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), Field.nullable("col2", new ArrowType.Utf8())), new IcebergPartitionData(PartitionSpec.unpartitioned().partitionType()), OperationType.ADD_DATAFILE); tableFunction.closeRow(); - } catch (Exception e) { - fail(e.getMessage()); - } } @Test - public void testFooterReadTableFunctionForListOfNull() throws URISyntaxException, ExecutionSetupException { + public void testFooterReadTableFunctionForListOfNull() throws Exception { incomingRow.accept(getFullPath("list_of_null_in_footer.parquet", FileType.PARQUET), 8416L, currentTime, 0, true); /*File schema: schema(id:: varchar, count:: int32, creationTime:: timestamp, @@ -322,7 +308,6 @@ public void testFooterReadTableFunctionForListOfNull() throws URISyntaxException FooterReadTableFunction tableFunction = new FooterReadTableFunction(getFragmentExecutionContext(), getOpCtx(), null, getConfig(null, FileType.PARQUET)); tableFunction.setFs(fs); - try { incoming.setAllCount(20); incoming.buildSchema(); outgoing = tableFunction.setup(incoming); @@ -360,9 +345,6 @@ public void testFooterReadTableFunctionForListOfNull() throws URISyntaxException Field.nullable("type", new ArrowType.Utf8())), new IcebergPartitionData(PartitionSpec.unpartitioned().partitionType()), OperationType.ADD_DATAFILE); tableFunction.closeRow(); - } catch (Exception e) { - fail(e.getMessage()); - } } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/BaseTestUnifiedParquetReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/BaseTestUnifiedParquetReader.java index fc480edcd3..5b2bb6e7cd 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/BaseTestUnifiedParquetReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/BaseTestUnifiedParquetReader.java @@ -235,7 +235,8 @@ private InputStreamProvider createInputStreamProvider( null, fileAttributes.lastModifiedTime().toMillis(), false, - true); + true, ParquetFilters.NONE, + 
getParquetReaderFactory().newFilterCreator(context, ParquetReaderFactory.ManagedSchemaType.ICEBERG, null, context.getAllocator())); testCloseables.add(inputStreamProvider); return inputStreamProvider; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestAllRowGroupsParquetReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestAllRowGroupsParquetReader.java index 05b05dc27d..571cdd76de 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestAllRowGroupsParquetReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestAllRowGroupsParquetReader.java @@ -240,7 +240,7 @@ private void verifyRowGroupPrefetched(InputStreamProviderFactory spy, int rowGro eq(null), anyLong(), eq(false), - eq(true)); + eq(true), eq(ParquetFilters.NONE), eq(ParquetFilterCreator.DEFAULT)); } private void verifyRowGroupNotPrefetched(InputStreamProviderFactory spy, int rowGroupIndex) throws Exception { @@ -258,7 +258,7 @@ private void verifyRowGroupNotPrefetched(InputStreamProviderFactory spy, int row eq(null), anyLong(), eq(false), - eq(true)); + eq(true), eq(ParquetFilters.NONE), eq(ParquetFilterCreator.DEFAULT)); } private void verifyDatasetKey(InputStreamProviderFactory spy, List dataset) throws Exception { @@ -276,7 +276,7 @@ private void verifyDatasetKey(InputStreamProviderFactory spy, List datas eq(dataset), anyLong(), eq(false), - eq(true)); + eq(true), eq(ParquetFilters.NONE), eq(ParquetFilterCreator.DEFAULT)); } private void validateResults(AllRowGroupsParquetReader reader, int expectedStartRow, int expectedEndRow) diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestBoostBufferManager.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestBoostBufferManager.java index 8f020e61a9..8aeed30161 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestBoostBufferManager.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestBoostBufferManager.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.parquet; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; @@ -235,10 +234,6 @@ public void testBoostBufferManagerWithOneBatch() throws Exception { assertEquals("partition_extended_boostBuffer5", getPartitionExtendedProp((VarBinaryVector) mutator.getVector(BoostBufferManager.SPLITS_VECTOR),5)); assertEquals("TestBytes", new String(((VarBinaryVector)mutator.getVector(BoostBufferManager.COL_IDS)).get(0))); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test @@ -307,10 +302,6 @@ public void testBoostBufferManagerWithMultipleBatches() throws Exception { assertEquals(batchesRead, 6); assertEquals("TestBytes", new String(((VarBinaryVector)mutator.getVector(BoostBufferManager.COL_IDS)).get(0))); } - catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } private List buildSplit(int num) { diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergHistoryFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergHistoryFunction.java index f7a3bd2b04..f69eba9e2d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergHistoryFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergHistoryFunction.java @@ -16,7 +16,6 @@ package 
com.dremio.exec.store.parquet; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import java.io.File; import java.util.Map; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergSnapshotFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergSnapshotFunction.java index a5a1eaa301..19d49f3b1d 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergSnapshotFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergSnapshotFunction.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.parquet; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import java.io.File; import java.time.Instant; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergTableFilesFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergTableFilesFunction.java index 096520ab03..875418ce07 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergTableFilesFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestInternalIcebergTableFilesFunction.java @@ -16,7 +16,6 @@ package com.dremio.exec.store.parquet; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.fsDelete; -import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.setupLocalFS; import static com.dremio.exec.store.metadatarefresh.RefreshDatasetTestUtils.verifyIcebergMetadata; import java.io.IOException; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetGroupScan.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetGroupScan.java index 8a4dd6c162..ef6feb46d0 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetGroupScan.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetGroupScan.java @@ -88,7 +88,8 @@ public void testWildCardEmptyNoCache() throws Exception { runSQL("SELECT COUNT(*) AS \"count\" FROM dfs_test.\"4376_3/604*\""); fail("Query should've failed!"); } catch (UserRemoteException uex) { - final String expectedMsg = "Object '4376_3/604*' not found within 'dfs_test'"; + final String expectedMsg = "The file format for 'dfs_test.\"4376_3/604*\"' could not be identified. In order for automatic format detection to succeed, " + + "files must include a file extension. Alternatively, manual promotion can be used to explicitly specify the format."; assertTrue(String.format("Error message should contain \"%s\" but was instead \"%s\"", expectedMsg, uex.getMessage()), uex.getMessage().contains(expectedMsg)); } @@ -116,7 +117,8 @@ public void testSelectEmptyNoCache() throws Exception { runSQL("SELECT COUNT(*) AS \"count\" FROM dfs_test.\"4376_5/6041\""); fail("Query should've failed!"); } catch (UserRemoteException uex) { - final String expectedMsg = "VALIDATION ERROR: Object 'dfs_test' not found"; + final String expectedMsg = "The file format for 'dfs_test.\"4376_5/6041\"' could not be identified. In order for automatic format detection to succeed, " + + "files must include a file extension. 
Alternatively, manual promotion can be used to explicitly specify the format."; assertTrue(String.format("Error message should contain \"%s\" but was instead \"%s\"", expectedMsg, uex.getMessage()), uex.getMessage().contains(expectedMsg)); } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetRecordWriter.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetRecordWriter.java index ab54c52b7c..521e85913c 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetRecordWriter.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetRecordWriter.java @@ -95,6 +95,7 @@ public void testFileSize() throws Exception { final FileSystem newFs = targetPath.getFileSystem(hadoopConf); assertTrue(newFs.mkdirs(targetPath)); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final BufferAllocator ALLOCATOR = allocatorRule.newAllocator("test-parquet-writer", 0, Long.MAX_VALUE); ParquetRecordWriter writer = mockParquetRecordWriter(hadoopConf, targetPath, 234234, ALLOCATOR, null); @@ -149,6 +150,7 @@ public void testBlockSizeWithTarget() throws Exception { final FileSystem newFs = targetPath.getFileSystem(hadoopConf); assertTrue(newFs.mkdirs(targetPath)); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final BufferAllocator ALLOCATOR = allocatorRule.newAllocator("test-parquet-writer", 0, Long.MAX_VALUE); ParquetRecordWriter writer = mockParquetRecordWriter(hadoopConf, targetPath, 234236, ALLOCATOR, 100 * 1024 * 1024L); @@ -168,6 +170,7 @@ public void testBlockSizeWithNullTarget() throws Exception { final FileSystem newFs = targetPath.getFileSystem(hadoopConf); assertTrue(newFs.mkdirs(targetPath)); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final BufferAllocator ALLOCATOR = allocatorRule.newAllocator("test-parquet-writer", 0, Long.MAX_VALUE); ParquetRecordWriter writer = mockParquetRecordWriter(hadoopConf, targetPath, 234236, ALLOCATOR, null); @@ -187,6 +190,7 @@ public void testBlockSizeWithInvalidTarget() throws Exception { final FileSystem newFs = targetPath.getFileSystem(hadoopConf); assertTrue(newFs.mkdirs(targetPath)); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final BufferAllocator ALLOCATOR = allocatorRule.newAllocator("test-parquet-writer", 0, Long.MAX_VALUE); ParquetRecordWriter writer = mockParquetRecordWriter(hadoopConf, targetPath, 234236, ALLOCATOR, 0L); @@ -206,6 +210,7 @@ public void testOutOfMemory() throws Exception { final FileSystem newFs = targetPath.getFileSystem(hadoopConf); assertTrue(newFs.mkdirs(targetPath)); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final BufferAllocator ALLOCATOR = allocatorRule.newAllocator("test-parquet-writer", 0, 128); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetScanTableFunction.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetScanTableFunction.java index f01bd12f0b..cced9563c5 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetScanTableFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetScanTableFunction.java @@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; @@ -123,8 +122,7 @@ public void 
testDataFileSplitWithMultipleRowGroupsAndTrimming() throws Exception } @Test - public void testWorkOnOOBRuntimeFilter() { - try { + public void testWorkOnOOBRuntimeFilter() throws Exception { // Send 6 messages. 1/2 are independent filters, 3 is dup of 1 from a different minor frag and should be dropped, 4 comes from // a different sender but filter structure is similar to 2/3, 5 comes from same sender as 4 but has one extra column. // 6th comes from same sender as 4 but with one less non-partition column. @@ -245,10 +243,6 @@ public void testWorkOnOOBRuntimeFilter() { assertEquals(Lists.newArrayList("npCol3"), f5NonPartitionCols); AutoCloseables.close(scanOp.getRuntimeFilters()); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetSplitReaderCreatorIterator.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetSplitReaderCreatorIterator.java index 70860e2a32..27e1473fee 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetSplitReaderCreatorIterator.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet/TestParquetSplitReaderCreatorIterator.java @@ -352,7 +352,7 @@ private ParquetSplitReaderCreatorIterator createSplitReaderCreator(boolean prefe when(config.getColumns()).thenReturn(Collections.singletonList(SchemaPath.getSimplePath("*"))); when(config.getFormatSettings()).thenReturn(FileConfig.getDefaultInstance()); when(optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR)).thenReturn("dir"); - when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean())).thenReturn(inputStreamProvider); + when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean(), any(), any())).thenReturn(inputStreamProvider); BlockMetaData blockMetaData = mock(BlockMetaData.class); when(footer.getBlocks()).thenReturn(Collections.singletonList(blockMetaData)); @@ -436,7 +436,7 @@ private ParquetSplitReaderCreatorIterator createSplitReaderCreatorIteratorForTab InputStreamProvider inputStreamProvider = mock(InputStreamProvider.class); MutableParquetMetadata footer = mock(MutableParquetMetadata.class); - when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean())).thenReturn(inputStreamProvider); + when(inputStreamProviderFactory.create(any(),any(),any(),anyLong(),anyLong(),any(),any(),any(),any(),anyBoolean(),any(),anyLong(),anyBoolean(),anyBoolean(), any(), any())).thenReturn(inputStreamProvider); BlockMetaData blockMetaData = mock(BlockMetaData.class); when(footer.getBlocks()).thenReturn(Collections.singletonList(blockMetaData)); ColumnChunkMetaData chunkMetaData = mock(ColumnChunkMetaData.class); diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReader.java index 44e8b22fb0..b9c9ca270e 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReader.java @@ -29,10 +29,8 @@ import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.Map; -import java.util.stream.Stream; import 
org.apache.arrow.vector.types.pojo.Schema; -import org.apache.commons.io.FileUtils; import org.apache.parquet.format.converter.ParquetMetadataConverter; import org.apache.parquet.hadoop.metadata.ParquetMetadata; import org.junit.AfterClass; @@ -169,7 +167,7 @@ public void testArrowSchemaOldInFooter() throws Exception { @Test public void testFilterOnNonExistentColumn() throws Exception { - final String parquetFiles = setupParquetFiles("testFilterOnNonExistentColumn", "nonexistingcols", "bothcols.parquet"); + final String parquetFiles = TestParquetUtil.setupParquetFiles("testFilterOnNonExistentColumn", "nonexistingcols", "bothcols.parquet", WORKING_PATH); try { testBuilder() .sqlQuery("SELECT * FROM dfs.\"" + parquetFiles + "\" where col1='bothvalscol1'") @@ -185,13 +183,13 @@ public void testFilterOnNonExistentColumn() throws Exception { .baselineValues("singlevalcol2") .go(); } finally { - delete(Paths.get(parquetFiles)); + TestParquetUtil.delete(Paths.get(parquetFiles)); } } @Test public void testAggregationFilterOnNonExistentColumn() throws Exception { - final String parquetFiles = setupParquetFiles("testAggregationFilterOnNonExistentColumn", "nonexistingcols", "bothcols.parquet"); + final String parquetFiles = TestParquetUtil.setupParquetFiles("testAggregationFilterOnNonExistentColumn", "nonexistingcols", "bothcols.parquet", WORKING_PATH); try { testBuilder() .sqlQuery("SELECT count(*) as cnt FROM dfs.\"" + parquetFiles + "\" where col1 = 'doesnotexist'") @@ -214,7 +212,7 @@ public void testAggregationFilterOnNonExistentColumn() throws Exception { .baselineValues(1L) .go(); } finally { - delete(Paths.get(parquetFiles)); + TestParquetUtil.delete(Paths.get(parquetFiles)); } } @@ -228,7 +226,7 @@ public void testChainedVectorizedRowiseReaderCase() throws Exception { * This case expects no records to be returned in the query when there's no match. Also, there shouldn't be an error. */ - final String parquetFiles = setupParquetFiles("testChainedVectorizedRowiseReaderNoResultCase", "chained_vectorised_rowwise_case", "yes_filter_col.parquet"); + final String parquetFiles = TestParquetUtil.setupParquetFiles("testChainedVectorizedRowiseReaderNoResultCase", "chained_vectorised_rowwise_case", "yes_filter_col.parquet", WORKING_PATH); try { // No match case testBuilder() @@ -255,7 +253,7 @@ public void testChainedVectorizedRowiseReaderCase() throws Exception { .baselineValues("F1Val1", "F2Val2") .go(); } finally { - delete(Paths.get(parquetFiles)); + TestParquetUtil.delete(Paths.get(parquetFiles)); } } @@ -272,45 +270,4 @@ public void testZeroRowsParquetPromotion() throws Exception{ .go(); } } - - private String setupParquetFiles(String testName, String folderName, String primaryParquet) throws Exception { - /* - * Copy primary parquet in a temporary folder and promote the same. This way, primary parquet's schema will be - * taken as the dremio dataset's schema. Then copy remaining files and refresh the dataset. 
- */ - final String parquetRefFolder = WORKING_PATH + "/src/test/resources/parquet/" + folderName; - String parquetFiles = Files.createTempDirectory(testName).toString(); - try { - Files.copy(Paths.get(parquetRefFolder, primaryParquet), Paths.get(parquetFiles, primaryParquet), StandardCopyOption.REPLACE_EXISTING); - runSQL("SELECT * FROM dfs.\"" + parquetFiles + "\""); // to detect schema and auto promote - - // Copy remaining files - try (Stream<java.nio.file.Path> stream = Files.walk(Paths.get(parquetRefFolder))) { - stream - .filter(Files::isRegularFile) - .filter(p -> !p.getFileName().toString().equals(primaryParquet)) - .forEach(p -> { - try { - Files.copy(p, Paths.get(parquetFiles, p.getFileName().toString()), - StandardCopyOption.REPLACE_EXISTING); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - runSQL("alter table dfs.\"" + parquetFiles + "\" refresh metadata force update"); // so it detects second parquet - setEnableReAttempts(true); - runSQL("select * from dfs.\"" + parquetFiles + "\""); // need to run select * from pds to get correct schema update. Check DX-25496 for details. - return parquetFiles; - } catch (Exception e) { - delete(Paths.get(parquetFiles)); - throw e; - } finally { - setEnableReAttempts(false); - } - } - - private static void delete(java.nio.file.Path dir) throws Exception { - FileUtils.deleteDirectory(dir.toFile()); - } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReaderMicroseconds.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReaderMicroseconds.java new file mode 100644 index 0000000000..b0ea05fba1 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetReaderMicroseconds.java @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.exec.store.parquet2; + +import static com.dremio.exec.ExecConstants.PARQUET_READER_VECTORIZE; + +import java.nio.file.Paths; +import java.util.List; +import java.util.Map; + +import org.joda.time.format.DateTimeFormatter; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.dremio.BaseTestQuery; +import com.dremio.common.util.JodaDateUtility; +import com.dremio.common.util.TestTools; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +/** + * Test reading TIMESTAMP and TIME MICROSECOND fields in parquet + * using the Rowwise reader. This test uses a file with mixed datatypes.
+ */
+public class TestParquetReaderMicroseconds extends BaseTestQuery {
+  private static final String WORKING_PATH = TestTools.getWorkingPath();
+
+  @BeforeClass
+  public static void setUp() {
+    setSystemOption(PARQUET_READER_VECTORIZE.getOptionName(), "false");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    resetSystemOption(PARQUET_READER_VECTORIZE.getOptionName());
+  }
+
+  @Test
+  public void testRowwiseMicroseconds() throws Exception {
+    /*
+     * Test microsecond time and timestamp support for the Rowwise reader.
+     * The Rowwise reader is used for files using RLE dictionary encoding.
+     */
+    final String parquetFiles = TestParquetUtil.setupParquetFiles("testRowiseMicroseconds", "rowise_microseconds", "rowise_micros.parquet", WORKING_PATH);
+    try {
+      final DateTimeFormatter jodaMillisFormatter = JodaDateUtility.formatTimeStampMilli;
+      {
+        final String colName = "colDTMicro";
+        final ImmutableList.Builder<Map<String, Object>> recordBuilder = ImmutableList.builder();
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("2023-01-23 12:13:14.567")));
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("2023-01-23 12:13:14.667")));
+        final List<Map<String, Object>> baseLineRecords = recordBuilder.build();
+
+        testBuilder()
+          .sqlQuery("SELECT " + colName + " FROM dfs.\"" + parquetFiles + "\"")
+          .unOrdered()
+          .baselineColumns(colName)
+          .baselineRecords(baseLineRecords)
+          .build()
+          .run();
+      }
+      {
+        final String colName = "colTMicro32";
+        final ImmutableList.Builder<Map<String, Object>> recordBuilder = ImmutableList.builder();
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("1970-01-01 12:13:14.123")));
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("1970-01-01 12:13:14.843")));
+        final List<Map<String, Object>> baseLineRecords = recordBuilder.build();
+
+        testBuilder()
+          .sqlQuery("SELECT " + colName + " FROM dfs.\"" + parquetFiles + "\"")
+          .unOrdered()
+          .baselineColumns(colName)
+          .baselineRecords(baseLineRecords)
+          .build()
+          .run();
+      }
+      {
+        final String colName = "colTMicro";
+        final ImmutableList.Builder<Map<String, Object>> recordBuilder = ImmutableList.builder();
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("1970-01-01 12:13:14.123")));
+        recordBuilder.add(ImmutableMap.of("`" + colName + "`", jodaMillisFormatter.parseLocalDateTime("1970-01-01 12:13:14.843")));
+        final List<Map<String, Object>> baseLineRecords = recordBuilder.build();
+
+        testBuilder()
+          .sqlQuery("SELECT " + colName + " FROM dfs.\"" + parquetFiles + "\"")
+          .unOrdered()
+          .baselineColumns(colName)
+          .baselineRecords(baseLineRecords)
+          .build()
+          .run();
+      }
+    } finally {
+      TestParquetUtil.delete(Paths.get(parquetFiles));
+    }
+  }
+}
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetUtil.java b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetUtil.java
new file mode 100644
index 0000000000..8ebd537bca
--- /dev/null
+++ b/sabot/kernel/src/test/java/com/dremio/exec/store/parquet2/TestParquetUtil.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.exec.store.parquet2;
+
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.stream.Stream;
+
+import org.apache.commons.io.FileUtils;
+
+import com.dremio.BaseTestQuery;
+
+public class TestParquetUtil {
+
+  public static String setupParquetFiles(String testName, String folderName, String primaryParquet, String workingPath) throws Exception {
+    /*
+     * Copy the primary parquet file into a temporary folder and promote it, so that the primary
+     * parquet's schema is taken as the Dremio dataset's schema. Then copy the remaining files
+     * and refresh the dataset.
+     */
+    final String parquetRefFolder = workingPath + "/src/test/resources/parquet/" + folderName;
+    String parquetFiles = Files.createTempDirectory(testName).toString();
+    try {
+      Files.copy(Paths.get(parquetRefFolder, primaryParquet), Paths.get(parquetFiles, primaryParquet), StandardCopyOption.REPLACE_EXISTING);
+      BaseTestQuery.runSQL("SELECT * FROM dfs.\"" + parquetFiles + "\""); // to detect schema and auto promote
+
+      // Copy remaining files
+      try (Stream<java.nio.file.Path> stream = Files.walk(Paths.get(parquetRefFolder))) {
+        stream
+          .filter(Files::isRegularFile)
+          .filter(p -> !p.getFileName().toString().equals(primaryParquet))
+          .forEach(p -> {
+            try {
+              Files.copy(p, Paths.get(parquetFiles, p.getFileName().toString()),
+                StandardCopyOption.REPLACE_EXISTING);
+            } catch (Exception e) {
+              throw new RuntimeException(e);
+            }
+          });
+      }
+      BaseTestQuery.runSQL("alter table dfs.\"" + parquetFiles + "\" refresh metadata force update"); // so it detects the second parquet file
+      BaseTestQuery.setEnableReAttempts(true);
+      BaseTestQuery.runSQL("select * from dfs.\"" + parquetFiles + "\""); // need to run select * on the pds to get the correct schema update. Check DX-25496 for details.
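+      // Re-attempts are enabled for the SELECT above because it may be retried internally
+      // while the refreshed schema is being applied (see DX-25496).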
+ return parquetFiles; + } catch (Exception e) { + delete(Paths.get(parquetFiles)); + throw e; + } finally { + BaseTestQuery.setEnableReAttempts(false); + } + } + public static void delete(java.nio.file.Path dir) throws Exception { + FileUtils.deleteDirectory(dir.toFile()); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/store/TestAssignment.java b/sabot/kernel/src/test/java/com/dremio/exec/store/store/TestAssignment.java index cc8728af3b..a90389e1ed 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/store/TestAssignment.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/store/TestAssignment.java @@ -66,7 +66,8 @@ public void manyFiles() throws Exception { ListMultimap mappings = AssignmentCreator.getMappings(incomingEndpoints, chunks); System.out.println(mappings.keySet().size()); for (int i = 0; i < width; i++) { - Assert.assertTrue("no mapping for entry " + i, mappings.get(i) != null && mappings.get(i).size() > 0); + Assert.assertTrue("no mapping for entry " + i, mappings.containsKey(i)); + Assert.assertFalse("mapping for entry " + i + " was empty", mappings.get(i).isEmpty()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/sys/accel/TestAccelParser.java b/sabot/kernel/src/test/java/com/dremio/exec/store/sys/accel/TestAccelParser.java index 1360db9a3d..b7439a27d2 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/sys/accel/TestAccelParser.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/sys/accel/TestAccelParser.java @@ -70,4 +70,39 @@ public void toggleAggOn() throws SqlParseException { public void toggleAggOff() throws SqlParseException { parse("ALTER TABLE a.b.c DISABLE AGGREGATE ACCELERATION"); } + + @Test + public void createRawWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT BRANCH main CREATE RAW REFLECTION reflection USING DISPLAY(x,y) DISTRIBUTE BY (r,z) PARTITION BY (s,l) LOCALSORT BY (n,x)"); + } + + @Test + public void createAggReflectionWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT TAG footag CREATE AGGREGATE REFLECTION reflection USING DIMENSIONS (x by day,y) MEASURES (b (COUNT, SUM),c (COUNT, MIN, MAX)) DISTRIBUTE BY (r,z) PARTITION BY (s,l) LOCALSORT BY (n,x)"); + } + + @Test + public void dropReflectionWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT REFERENCE foo DROP REFLECTION \"123\""); + } + + @Test + public void toggleRawOnWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT TAG foo ENABLE RAW ACCELERATION"); + } + + @Test + public void toggleRawOffWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT BRANCH dev DISABLE RAW ACCELERATION"); + } + + @Test + public void toggleAggOnWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT REFERENCE fooref ENABLE AGGREGATE ACCELERATION"); + } + @Test + public void toggleAggOffWithSpecifier() throws SqlParseException { + parse("ALTER TABLE a.b.c AT BRANCH main DISABLE AGGREGATE ACCELERATION"); + } + } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerdeTest.java b/sabot/kernel/src/test/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerdeTest.java index 6f1be79aea..4f83960d59 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerdeTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/sys/udf/UserDefinedFunctionSerdeTest.java @@ -35,27 +35,34 @@ public class UserDefinedFunctionSerdeTest { @Test public void testToProto() 
{ - new GoldenFileTestBuilder<>(UserDefinedFunctionSerde::toProto) - .add("No args", - new UserDefinedFunction( - "foo", - "SELECT 1", - CompleteType.INT, - ImmutableList.of(), ImmutableList.of("dir", "space"))) - .add("1 arg", - new UserDefinedFunction( - "foo", - "SELECT 1", - CompleteType.INT, - ImmutableList.of( - new UserDefinedFunction.FunctionArg("bar", CompleteType.VARCHAR) - ), null)) - .runTests(); + GoldenFileTestBuilder.create(UserDefinedFunctionSerde::toProto) + .add("No args", + new UserDefinedFunction( + "foo", + "SELECT 1", + CompleteType.INT, + ImmutableList.of(), + ImmutableList.of("dir", "space"), + null, + null, + null)) + .add("1 arg", + new UserDefinedFunction( + "foo", + "SELECT 1", + CompleteType.INT, + ImmutableList.of( + new UserDefinedFunction.FunctionArg("bar", CompleteType.VARCHAR, null)), + null, + null, + null, + null)) + .runTests(); } @Test public void testFromProto () { - new GoldenFileTestBuilder<>(UserDefinedFunctionSerde::fromProto) + GoldenFileTestBuilder.create(UserDefinedFunctionSerde::fromProto) .add("No Args", new FunctionConfig() .setName("foo") @@ -88,7 +95,7 @@ public void testFromProto () { public void testRoundTrip () { ProtostuffSerializer protostuffSerializer = new ProtostuffSerializer<>(FunctionConfig.getSchema()); - new GoldenFileTestBuilder<>((UserDefinedFunction fc) -> { + GoldenFileTestBuilder.create((UserDefinedFunction fc) -> { FunctionConfig proto1 = UserDefinedFunctionSerde.toProto(fc); byte[] bytes = protostuffSerializer.convert(proto1); FunctionConfig proto2 = protostuffSerializer.revert(bytes); @@ -99,15 +106,15 @@ public void testRoundTrip () { "foo", "SELECT 1", CompleteType.INT, - ImmutableList.of(), ImmutableList.of("dir", "space"))) + ImmutableList.of(), ImmutableList.of("dir", "space"), null, null, null)) .add("1 arg", new UserDefinedFunction( "foo", "SELECT 1", CompleteType.INT, ImmutableList.of( - new UserDefinedFunction.FunctionArg("bar", CompleteType.VARCHAR) - ), null)) + new UserDefinedFunction.FunctionArg("bar", CompleteType.VARCHAR, null) + ), null, null, null, null)) .runTests(); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/store/text/TestNewTextReader.java b/sabot/kernel/src/test/java/com/dremio/exec/store/text/TestNewTextReader.java index 27581328dc..00420e3b92 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/store/text/TestNewTextReader.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/store/text/TestNewTextReader.java @@ -35,7 +35,6 @@ import org.apache.commons.io.ByteOrderMark; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.FileSplit; -import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; @@ -63,8 +62,8 @@ public class TestNewTextReader extends BaseTestQuery { - @ClassRule - public static final TemporaryFolder tempDir = new TemporaryFolder(); + @Rule + public final TemporaryFolder tempDir = new TemporaryFolder(); @Rule public final AllocatorRule allocatorRule = AllocatorRule.defaultAllocator(); @@ -117,6 +116,7 @@ public void ensureFailureOnNewLineDelimiterWithinQuotes() { @Test public void ensureColumnNameDisplayedinError() throws Exception { + @SuppressWarnings("checkstyle:LocalFinalVariableName") final String COL_NAME = "col1"; try { @@ -217,21 +217,17 @@ public void testValidateColumnNamesFillEmptyDuplicate() throws Exception { public void testCrLfSeparatedWithQuote() throws Exception { final String root = FileUtils.getResourceAsFile("/store/text/WithQuotedCrLf.tbl").toURI().toString(); final String query = 
String.format("select columns[0] as c0, columns[1] as c1, columns[2] as c2 \n" + - "from dfs_test.\"%s\" ", root); + "from table(dfs_test.\"%s\" (type => 'text', fieldDelimiter => '|', lineDelimiter => '\r\n'))", root); - try { - testBuilder() - .sqlQuery(query) - .unOrdered() - .baselineColumns("c0", "c1", "c2") - .baselineValues("a\n1", "a", "a") - .baselineValues("a", "a\n2", "a") - .baselineValues("a", "a", "a\n3") - .build() - .run(); - } catch (Exception e) { - assertTrue(e.getMessage().contains("did not find expected record in result set")); - } + testBuilder() + .sqlQuery(query) + .unOrdered() + .baselineColumns("c0", "c1", "c2") + .baselineValues("a\r\n1", "a", "a") + .baselineValues("a", "a\r\n2", "a") + .baselineValues("a", "a", "a\r\n3") + .build() + .run(); } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/exec/util/BatchSchemaFieldTest.java b/sabot/kernel/src/test/java/com/dremio/exec/util/BatchSchemaFieldTest.java index 1822a71c44..4ed74a7a7b 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/util/BatchSchemaFieldTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/util/BatchSchemaFieldTest.java @@ -330,7 +330,7 @@ public void testFindField() { String fieldName = fieldPrefix + c; Optional field = searchableBatchSchema.findFieldIgnoreCase(fieldName); Assert.assertTrue(field.isPresent()); - Assert.assertNotNull(field.get()); + Assert.assertEquals(fieldName, field.get().getName()); } String fieldName = fieldPrefix + "non_existent"; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/util/TestGlobalDictionaryBuilder.java b/sabot/kernel/src/test/java/com/dremio/exec/util/TestGlobalDictionaryBuilder.java index f2a3810531..9f7c0d08ac 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/util/TestGlobalDictionaryBuilder.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/util/TestGlobalDictionaryBuilder.java @@ -188,43 +188,47 @@ public void testExtractColumnName() throws Exception { public void testGlobalDictionary() throws Exception { try (final BufferAllocator bufferAllocator = allocatorRule.newAllocator("test-global-dictionary-builder", 0, Long.MAX_VALUE)) { final CompressionCodecFactory codec = CodecFactory.createDirectCodecFactory(conf, new ParquetDirectByteBufferAllocator(bufferAllocator), 0); - Map globalDictionaries = GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath, bufferAllocator).getColumnsToDictionaryFiles(); - assertEquals(1, globalDictionaries.size()); - ColumnDescriptor column = globalDictionaries.entrySet().iterator().next().getKey(); - assertTrue(Arrays.equals(new String[] {"phoneNumbers", "phone", "kind"}, column.getPath())); - long dictionaryVersion = GlobalDictionaryBuilder.getDictionaryVersion(fs, tableDirPath); - assertEquals(0, dictionaryVersion); - Path dictionaryRootPath = GlobalDictionaryBuilder.getDictionaryVersionedRootPath(fs, tableDirPath, dictionaryVersion); - try (final VectorContainer dict = GlobalDictionaryBuilder.readDictionary(fs, dictionaryRootPath, column, bufferAllocator)) { - assertEquals(4, dict.getRecordCount()); - final VarBinaryVector dictValues = dict.getValueAccessorById(VarBinaryVector.class, 0).getValueVector(); - assertEquals("cell", new String(dictValues.get(0), UTF_8)); - assertEquals("landline", new String(dictValues.get(1), UTF_8)); - assertEquals("mobile", new String(dictValues.get(2), UTF_8)); - assertEquals("work", new String(dictValues.get(3), UTF_8)); + try { + Map globalDictionaries = GlobalDictionaryBuilder.createGlobalDictionaries(codec, fs, tableDirPath, 
bufferAllocator).getColumnsToDictionaryFiles(); + assertEquals(1, globalDictionaries.size()); + ColumnDescriptor column = globalDictionaries.entrySet().iterator().next().getKey(); + assertTrue(Arrays.equals(new String[]{"phoneNumbers", "phone", "kind"}, column.getPath())); + long dictionaryVersion = GlobalDictionaryBuilder.getDictionaryVersion(fs, tableDirPath); + assertEquals(0, dictionaryVersion); + Path dictionaryRootPath = GlobalDictionaryBuilder.getDictionaryVersionedRootPath(fs, tableDirPath, dictionaryVersion); + try (final VectorContainer dict = GlobalDictionaryBuilder.readDictionary(fs, dictionaryRootPath, column, bufferAllocator)) { + assertEquals(4, dict.getRecordCount()); + final VarBinaryVector dictValues = dict.getValueAccessorById(VarBinaryVector.class, 0).getValueVector(); + assertEquals("cell", new String(dictValues.get(0), UTF_8)); + assertEquals("landline", new String(dictValues.get(1), UTF_8)); + assertEquals("mobile", new String(dictValues.get(2), UTF_8)); + assertEquals("work", new String(dictValues.get(3), UTF_8)); + } + assertEquals(1, GlobalDictionaryBuilder.listDictionaryFiles(fs, dictionaryRootPath).size()); + + // update global dictionary + globalDictionaries = GlobalDictionaryBuilder.updateGlobalDictionaries(codec, fs, tableDirPath, partitionDirPath, bufferAllocator).getColumnsToDictionaryFiles(); + assertEquals(1, globalDictionaries.size()); + column = globalDictionaries.entrySet().iterator().next().getKey(); + assertTrue(Arrays.equals(new String[]{"phoneNumbers", "phone", "kind"}, column.getPath())); + dictionaryVersion = GlobalDictionaryBuilder.getDictionaryVersion(fs, tableDirPath); + dictionaryRootPath = GlobalDictionaryBuilder.getDictionaryVersionedRootPath(fs, tableDirPath, dictionaryVersion); + assertEquals(1, dictionaryVersion); + try (final VectorContainer dict = GlobalDictionaryBuilder.readDictionary(fs, dictionaryRootPath, column, bufferAllocator)) { + assertEquals(5, dict.getRecordCount()); + final VarBinaryVector dictValues = dict.getValueAccessorById(VarBinaryVector.class, 0).getValueVector(); + assertEquals("cell", new String(dictValues.get(0), UTF_8)); + assertEquals("home", new String(dictValues.get(1), UTF_8)); + assertEquals("landline", new String(dictValues.get(2), UTF_8)); + assertEquals("mobile", new String(dictValues.get(3), UTF_8)); + assertEquals("work", new String(dictValues.get(4), UTF_8)); + } + + assertEquals(1, GlobalDictionaryBuilder.listDictionaryFiles(fs, dictionaryRootPath).size()); + assertEquals(0, GlobalDictionaryBuilder.listDictionaryFiles(fs, partitionDirPath).size()); + } finally { + codec.release(); } - assertEquals(1, GlobalDictionaryBuilder.listDictionaryFiles(fs, dictionaryRootPath).size()); - - // update global dictionary - globalDictionaries = GlobalDictionaryBuilder.updateGlobalDictionaries(codec, fs, tableDirPath, partitionDirPath, bufferAllocator).getColumnsToDictionaryFiles(); - assertEquals(1, globalDictionaries.size()); - column = globalDictionaries.entrySet().iterator().next().getKey(); - assertTrue(Arrays.equals(new String[] {"phoneNumbers", "phone", "kind"}, column.getPath())); - dictionaryVersion = GlobalDictionaryBuilder.getDictionaryVersion(fs, tableDirPath); - dictionaryRootPath = GlobalDictionaryBuilder.getDictionaryVersionedRootPath(fs, tableDirPath, dictionaryVersion); - assertEquals(1, dictionaryVersion); - try (final VectorContainer dict = GlobalDictionaryBuilder.readDictionary(fs, dictionaryRootPath, column, bufferAllocator)) { - assertEquals(5, dict.getRecordCount()); - final VarBinaryVector 
dictValues = dict.getValueAccessorById(VarBinaryVector.class, 0).getValueVector(); - assertEquals("cell", new String(dictValues.get(0), UTF_8)); - assertEquals("home", new String(dictValues.get(1), UTF_8)); - assertEquals("landline", new String(dictValues.get(2), UTF_8)); - assertEquals("mobile", new String(dictValues.get(3), UTF_8)); - assertEquals("work", new String(dictValues.get(4), UTF_8)); - } - - assertEquals(1, GlobalDictionaryBuilder.listDictionaryFiles(fs, dictionaryRootPath).size()); - assertEquals(0, GlobalDictionaryBuilder.listDictionaryFiles(fs, partitionDirPath).size()); } } @@ -232,24 +236,28 @@ public void testGlobalDictionary() throws Exception { public void testLocalDictionaries() throws IOException { try (final BufferAllocator bufferAllocator = allocatorRule.newAllocator("test-global-dictionary-builder", 0, Long.MAX_VALUE)) { final CompressionCodecFactory codecFactory = CodecFactory.createDirectCodecFactory(conf, new ParquetDirectByteBufferAllocator(bufferAllocator), 0); - Pair, Set> dictionaries1 = - LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook1.parquet"), codecFactory); - Pair, Set> dictionaries2 = - LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook2.parquet"), codecFactory); - Pair, Set> dictionaries3 = - LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook3.parquet"), codecFactory); - Pair, Set> dictionaries4 = - LocalDictionariesReader.readDictionaries(fs, partitionDirPath.resolve("phonebook4.parquet"), codecFactory); - - assertEquals(2, dictionaries1.getKey().size()); // name and kind have dictionaries - assertEquals(1, dictionaries2.getKey().size()); - assertEquals(1, dictionaries3.getKey().size()); - assertEquals(1, dictionaries4.getKey().size()); - - assertEquals(0, dictionaries1.getValue().size()); - assertEquals(1, dictionaries2.getValue().size()); // skip name - assertEquals(1, dictionaries3.getValue().size()); // skip name - assertEquals(1, dictionaries4.getValue().size()); // skip name + try { + Pair, Set> dictionaries1 = + LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook1.parquet"), codecFactory); + Pair, Set> dictionaries2 = + LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook2.parquet"), codecFactory); + Pair, Set> dictionaries3 = + LocalDictionariesReader.readDictionaries(fs, tableDirPath.resolve("phonebook3.parquet"), codecFactory); + Pair, Set> dictionaries4 = + LocalDictionariesReader.readDictionaries(fs, partitionDirPath.resolve("phonebook4.parquet"), codecFactory); + + assertEquals(2, dictionaries1.getKey().size()); // name and kind have dictionaries + assertEquals(1, dictionaries2.getKey().size()); + assertEquals(1, dictionaries3.getKey().size()); + assertEquals(1, dictionaries4.getKey().size()); + + assertEquals(0, dictionaries1.getValue().size()); + assertEquals(1, dictionaries2.getValue().size()); // skip name + assertEquals(1, dictionaries3.getValue().size()); // skip name + assertEquals(1, dictionaries4.getValue().size()); // skip name + } finally { + codecFactory.release(); + } } } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/util/ValueListFilterTest.java b/sabot/kernel/src/test/java/com/dremio/exec/util/ValueListFilterTest.java index 19a945d60e..a2260c5f8f 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/util/ValueListFilterTest.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/util/ValueListFilterTest.java @@ -19,7 +19,6 @@ import static org.junit.Assert.assertEquals; import 
static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Proxy; @@ -281,7 +280,7 @@ private Set randomStrings(final int size) { } @Test - public void testCustomDecimal() { + public void testCustomDecimal() throws Exception { final byte precision = 38; final byte scale = 3; final byte blockSize = 16; @@ -303,8 +302,6 @@ public void testCustomDecimal() { assertEquals(Types.MinorType.DECIMAL, filter2.getFieldType()); assertEquals(precision, filter2.getPrecision()); assertEquals(scale, filter2.getScale()); - } catch (Exception e) { - fail(e.getMessage()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/exec/util/rhash/TestRendezvousHash.java b/sabot/kernel/src/test/java/com/dremio/exec/util/rhash/TestRendezvousHash.java index e239e0bf6f..08ff4f0193 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/util/rhash/TestRendezvousHash.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/util/rhash/TestRendezvousHash.java @@ -37,6 +37,7 @@ public class TestRendezvousHash { private static final Random rand = new Random(); private static final HashFunction hfunc = Hashing.murmur3_128(); private static final Funnel strFunnel = new Funnel(){ + @Override public void funnel(String from, PrimitiveSink into) { into.putBytes(from.getBytes()); }}; diff --git a/sabot/kernel/src/test/java/com/dremio/exec/vector/complex/writer/TestExtendedTypes.java b/sabot/kernel/src/test/java/com/dremio/exec/vector/complex/writer/TestExtendedTypes.java index c6f7e82bd8..dc321490e8 100644 --- a/sabot/kernel/src/test/java/com/dremio/exec/vector/complex/writer/TestExtendedTypes.java +++ b/sabot/kernel/src/test/java/com/dremio/exec/vector/complex/writer/TestExtendedTypes.java @@ -30,6 +30,7 @@ import com.dremio.BaseTestQuery; import com.dremio.common.util.TestTools; import com.dremio.exec.ExecConstants; +import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.sabot.rpc.user.QueryDataBatch; public class TestExtendedTypes extends BaseTestQuery { @@ -80,15 +81,17 @@ public void testMongoExtendedTypes() throws Exception { testNoResult(String.format("ALTER SESSION SET \"%s\" = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName())); testNoResult(String.format("ALTER SESSION SET \"%s\" = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName())); - int actualRecordCount = testSql(String.format("select * from dfs.\"%s\"", originalFile)); - assertEquals( + try (AutoCloseable ignore = withSystemOption(PlannerSettings.ENFORCE_VALID_JSON_DATE_FORMAT_ENABLED, false)) { + int actualRecordCount = testSql(String.format("select * from dfs.\"%s\"", originalFile)); + assertEquals( String.format( - "Received unexpected number of rows in output: expected=%d, received=%s", - 1, actualRecordCount), 1, actualRecordCount); - List resultList = testSqlWithResults(String.format("select * from dfs.\"%s\"", originalFile)); - String actual = getResultString(resultList, ","); - String expected = "dremio_timestamp_millies,bin,bin1\n2015-07-07T03:59:43.488,dremio,dremio\n"; - Assert.assertEquals(expected, actual); + "Received unexpected number of rows in output: expected=%d, received=%s", + 1, actualRecordCount), 1, actualRecordCount); + List resultList = testSqlWithResults(String.format("select * from dfs.\"%s\"", originalFile)); + String actual = getResultString(resultList, ","); + String expected = "dremio_timestamp_millies,bin,bin1\n2015-07-07T03:59:43.488,dremio,dremio\n"; + 
Assert.assertEquals(expected, actual);
+      }
     } finally {
       testNoResult(String.format("ALTER SESSION SET \"%s\" = '%s'",
         ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName(),
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/work/batch/TestSpoolingBuffer.java b/sabot/kernel/src/test/java/com/dremio/exec/work/batch/TestSpoolingBuffer.java
index e52f920a7b..a89433f9b0 100644
--- a/sabot/kernel/src/test/java/com/dremio/exec/work/batch/TestSpoolingBuffer.java
+++ b/sabot/kernel/src/test/java/com/dremio/exec/work/batch/TestSpoolingBuffer.java
@@ -176,8 +176,7 @@ public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
         // have been able to finish spooling and sendOk().
         Thread.sleep(100);
       }
-    }
-    catch (Exception e)
+    } catch (Exception e)
     {
       Map<Thread, StackTraceElement[]> threads = Thread.getAllStackTraces();
       System.out.println("Number of threads: " + threads.size());
@@ -258,8 +257,7 @@ public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
         // Throw an exception for the case when this test has not timed out, but awaitTermination has timed out
         throw new Exception();
       }
-    }
-    catch(Exception e) {
+    } catch (Exception e) {
       // Exception is caught for:
       // 1. test timed out but awaitTermination has not timed out
       // 2. test not timed out, but awaitTermination has timed out
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/work/foreman/TestResourceSchedulerInProfile.java b/sabot/kernel/src/test/java/com/dremio/exec/work/foreman/TestResourceSchedulerInProfile.java
index 9cf8310f64..70fd5fcdef 100644
--- a/sabot/kernel/src/test/java/com/dremio/exec/work/foreman/TestResourceSchedulerInProfile.java
+++ b/sabot/kernel/src/test/java/com/dremio/exec/work/foreman/TestResourceSchedulerInProfile.java
@@ -107,6 +107,10 @@ public ResourceSchedulingResult allocate(ResourceSchedulingContext queryContext,
       Futures.immediateFuture(new ResourceSet.ResourceSetNoOp()));
   }

+  @Override
+  public void cancel(ResourceSchedulingContext queryContext) {
+  }
+
   @Override
   public GroupResourceInformation getGroupResourceInformation(OptionManager optionManager,
     ResourceSchedulingProperties resourceSchedulingProperties) {
diff --git a/sabot/kernel/src/test/java/com/dremio/exec/work/protector/TestForemenWorkManager.java b/sabot/kernel/src/test/java/com/dremio/exec/work/protector/TestForemenWorkManager.java
index 85c4b2b90a..eb1493ce84 100644
--- a/sabot/kernel/src/test/java/com/dremio/exec/work/protector/TestForemenWorkManager.java
+++ b/sabot/kernel/src/test/java/com/dremio/exec/work/protector/TestForemenWorkManager.java
@@ -48,7 +48,7 @@ public void testSubmitWork() {
     doReturn(false).when(config).getBoolean(any());
     Provider<CommandPool> commandPoolProvider = ()-> CommandPoolFactory.INSTANCE.newPool(config, null);
     ForemenWorkManager foremenWorkManager = new ForemenWorkManager(null, null, commandPoolProvider,
-      null, null, null, null, null, null);
+      null, null, null, null, null, null, null);
     foremenWorkManager = spy(foremenWorkManager);

     UserException userException = UserException.resourceError().message(UserException.QUERY_REJECTED_MSG).buildSilently();
@@ -56,7 +56,9 @@ public void testSubmitWork() {
     final UserResult[] userResult = new UserResult[1];
     UserResponseHandler userResponseHandler = new UserResponseHandler() {
+      @Override
       public void sendData(RpcOutcomeListener<Ack> outcomeListener, QueryWritableBatch result) {}
+      @Override
       public void completed(UserResult result) { userResult[0] = result; }
     };
diff --git a/sabot/kernel/src/test/java/com/dremio/plugins/TestNessieClientImpl.java b/sabot/kernel/src/test/java/com/dremio/plugins/TestNessieClientImpl.java
index feba80cf63..9ce76cabb6 100644
--- a/sabot/kernel/src/test/java/com/dremio/plugins/TestNessieClientImpl.java
+++ b/sabot/kernel/src/test/java/com/dremio/plugins/TestNessieClientImpl.java
@@ -21,27 +21,33 @@
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

+import java.net.ConnectException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;

+import org.apache.commons.collections4.ListUtils;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.junit.MockitoJUnit;
 import org.mockito.junit.MockitoRule;
 import org.mockito.quality.Strictness;
 import org.projectnessie.client.api.DeleteBranchBuilder;
+import org.projectnessie.client.api.GetAllReferencesBuilder;
 import org.projectnessie.client.api.GetContentBuilder;
 import org.projectnessie.client.api.NessieApiV1;
+import org.projectnessie.client.http.HttpClientException;
 import org.projectnessie.client.rest.NessieNotAuthorizedException;
 import org.projectnessie.error.ErrorCode;
 import org.projectnessie.error.ImmutableNessieError;
@@ -53,10 +59,14 @@
 import org.projectnessie.model.Content;
 import org.projectnessie.model.ContentKey;
 import org.projectnessie.model.IcebergTable;
+import org.projectnessie.model.Reference;
+import org.projectnessie.model.ReferencesResponse;
 import org.projectnessie.model.Tag;

 import com.dremio.exec.catalog.ResolvedVersionContext;
 import com.dremio.exec.catalog.VersionContext;
+import com.dremio.exec.store.ConnectionRefusedException;
+import com.dremio.exec.store.ReferenceInfo;

 public class TestNessieClientImpl {
@@ -69,6 +79,15 @@ public class TestNessieClientImpl {
   private static final Map<ContentKey, Content> CONTENT_MAP = new HashMap<ContentKey, Content>() {{
     put(CONTENT_KEY, CONTENT);
   }};
+  private static final List<Reference> BRANCHES = Arrays.asList(
+    Branch.of("main", "a0f4f33a14fa610c75ff8cd89b6a54f5df61fcb7"),
+    Branch.of("dev", "07b92b065b57ec8d69c5249daa33c329259f7284")
+  );
+  private static final List<Reference> TAGS = Arrays.asList(
+    Tag.of("version1", "63941c19fcae9207e5cdf567e53e5e417a46b75a"),
+    Tag.of("version2", "1231251c19fcae9207e5cdfdaa33c329259f7284")
+  );
+  private static final List<Reference> REFERENCES = ListUtils.union(BRANCHES, TAGS);

   @Rule
   public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
@@ -76,7 +95,6 @@ public class TestNessieClientImpl {

   @Mock
   private NessieApiV1 nessieApiV1;
-  @InjectMocks
   private NessieClientImpl nessieClient;

   private GetContentBuilder builder;
@@ -84,6 +102,8 @@ public class TestNessieClientImpl {
   @Before
   public void setUp() {
     builder = mock(GetContentBuilder.class, RETURNS_SELF);
+    NessieClientImpl nessieClientToMock = new NessieClientImpl(nessieApiV1);
+    nessieClient = spy(nessieClientToMock);
   }

   @Test
@@ -177,6 +197,17 @@ public void testResolveVersionContextThrowsNotAuthorized() throws NessieNotFoundException {
       .hasMessageNotContaining("NessieNotAuthorizedException");
   }

+  @Test
+  public void testGetDefaultBranchThrowsConnectionRefused() throws NessieNotFoundException {
+    ConnectException connectException = new ConnectException("Connection Refused");
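+    // The Nessie HTTP client reports a refused connection as an HttpClientException wrapping
+    // a ConnectException; the client is expected to translate it into ConnectionRefusedException.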
+    when(nessieApiV1.getDefaultBranch()).thenThrow(new HttpClientException(
+      "Failed to execute GET request against 'http://localhost:19120/api/v2/trees/-'.", connectException));
+    assertThatThrownBy(() -> nessieClient.getDefaultBranch())
+      .isInstanceOf(ConnectionRefusedException.class)
+      .hasMessageContaining("Connection refused while connecting to the Nessie Server");
+  }
+
   private Map<ContentKey, Content> generateRandomMap(){
     Map<ContentKey, Content> map = new HashMap<>();
     ContentKey key = ContentKey.of(Arrays.asList(generateRandomString(), generateRandomString()));
@@ -198,4 +229,44 @@ private String generateRandomString(){
     }
     return buffer.toString();
   }
+
+  @Test
+  public void testListBranches() {
+    setUpReferences(BRANCHES);
+    List<ReferenceInfo> expectedBranches = BRANCHES.stream().map(ref -> new ReferenceInfo(NessieClientImpl.BRANCH_REFERENCE, ref.getName(), ref.getHash())).collect(Collectors.toList());
+    Stream<ReferenceInfo> actualBranches = nessieClient.listBranches();
+    assertThat(actualBranches)
+      .isNotNull()
+      .containsExactlyElementsOf(expectedBranches);
+  }
+
+  @Test
+  public void testListTags() {
+    setUpReferences(TAGS);
+    List<ReferenceInfo> expectedTags = TAGS.stream().map(ref -> new ReferenceInfo(NessieClientImpl.TAG_REFERENCE, ref.getName(), ref.getHash())).collect(Collectors.toList());
+    Stream<ReferenceInfo> actualTags = nessieClient.listTags();
+    assertThat(actualTags)
+      .isNotNull()
+      .containsExactlyElementsOf(expectedTags);
+  }
+
+  @Test
+  public void testListReferences() {
+    setUpReferences(REFERENCES);
+    List<ReferenceInfo> expectedReferences = REFERENCES.stream().map(ref -> new ReferenceInfo(
+      ref instanceof Branch ? NessieClientImpl.BRANCH_REFERENCE : NessieClientImpl.TAG_REFERENCE, ref.getName(), ref.getHash())).collect(Collectors.toList());
+    Stream<ReferenceInfo> actualReferences = nessieClient.listReferences();
+    assertThat(actualReferences)
+      .isNotNull()
+      .containsExactlyElementsOf(expectedReferences);
+  }
+
+  @SuppressWarnings("unchecked")
+  private void setUpReferences(List<Reference> references) {
+    GetAllReferencesBuilder getAllReferencesBuilder = mock(GetAllReferencesBuilder.class);
+    ReferencesResponse referencesResponse = mock(ReferencesResponse.class);
+    when(getAllReferencesBuilder.get()).thenReturn(referencesResponse);
+    when(referencesResponse.getReferences()).thenReturn((List) references);
+    when(nessieApiV1.getAllReferences()).thenReturn(getAllReferencesBuilder);
+  }
 }
diff --git a/sabot/kernel/src/test/java/com/dremio/plugins/TestUsernameAwareNessieClientImpl.java b/sabot/kernel/src/test/java/com/dremio/plugins/TestUsernameAwareNessieClientImpl.java
new file mode 100644
index 0000000000..9ead9143cd
--- /dev/null
+++ b/sabot/kernel/src/test/java/com/dremio/plugins/TestUsernameAwareNessieClientImpl.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.dremio.plugins;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnit;
+import org.mockito.junit.MockitoRule;
+import org.mockito.quality.Strictness;
+
+import com.dremio.context.RequestContext;
+import com.dremio.context.UserContext;
+import com.dremio.context.UsernameContext;
+import com.dremio.exec.catalog.VersionContext;
+import com.dremio.service.users.User;
+import com.dremio.service.users.UserNotFoundException;
+import com.dremio.service.users.UserService;
+import com.dremio.service.users.proto.UID;
+
+public class TestUsernameAwareNessieClientImpl {
+  @Rule
+  public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
+
+  @Mock
+  private NessieClient nessieClient;
+
+  @Mock
+  private UserService userService;
+
+  @Mock
+  private User user;
+
+  @InjectMocks
+  private UsernameAwareNessieClientImpl usernameAwareNessieClient;
+
+  private static final String USER_NAME = "testUser";
+
+  @Test
+  public void createNamespace() throws Exception {
+    List<String> namespaceList = Collections.singletonList("folder1");
+    VersionContext versionContext = VersionContext.ofBranch("main");
+    String userId = UUID.randomUUID().toString();
+
+    when(userService.getUser(new UID(userId))).thenReturn(user);
+    when(user.getUserName()).thenReturn(USER_NAME);
+    doAnswer((unused) -> {
+      assertThat(RequestContext.current().get(UsernameContext.CTX_KEY).getUserName()).isEqualTo(USER_NAME);
+      return null;
+    }).when(nessieClient).createNamespace(namespaceList, versionContext);
+
+    RequestContext.current()
+      .with(UserContext.CTX_KEY, new UserContext(userId))
+      .run(() -> usernameAwareNessieClient.createNamespace(namespaceList, versionContext));
+  }
+
+  @Test
+  public void getDefaultBranch() throws Exception {
+    String userId = UUID.randomUUID().toString();
+
+    when(userService.getUser(new UID(userId))).thenReturn(user);
+    when(user.getUserName()).thenReturn(USER_NAME);
+    doAnswer((unused) -> {
+      assertThat(RequestContext.current().get(UsernameContext.CTX_KEY).getUserName()).isEqualTo(USER_NAME);
+      return null;
+    }).when(nessieClient).getDefaultBranch();
+
+    RequestContext.current()
+      .with(UserContext.CTX_KEY, new UserContext(userId))
+      .call(usernameAwareNessieClient::getDefaultBranch);
+  }
+
+  @Test
+  public void deleteNamespace() throws Exception {
+    List<String> namespaceList = Collections.singletonList("folder1");
+    VersionContext versionContext = VersionContext.ofBranch("main");
+    String userId = UUID.randomUUID().toString();
+
+    when(userService.getUser(new UID(userId))).thenReturn(user);
+    when(user.getUserName()).thenReturn(USER_NAME);
+    doAnswer((unused) -> {
+      assertThat(RequestContext.current().get(UsernameContext.CTX_KEY).getUserName()).isEqualTo(USER_NAME);
+      return null;
+    }).when(nessieClient).deleteNamespace(namespaceList, versionContext);
+
+    RequestContext.current()
+      .with(UserContext.CTX_KEY, new UserContext(userId))
+      .run(() -> usernameAwareNessieClient.deleteNamespace(namespaceList, versionContext));
+  }
+
+  @Test
+  public void createNamespaceWhenUserNotFoundException() throws Exception {
+    List<String> namespaceList = Collections.singletonList("folder1");
+    VersionContext versionContext = VersionContext.ofBranch("main");
+    String userId =
UUID.randomUUID().toString(); + + when(userService.getUser(new UID(userId))).thenThrow(UserNotFoundException.class); + doAnswer((unused) -> { + assertThat(RequestContext.current().get(UsernameContext.CTX_KEY)).isNull(); + return null; + }).when(nessieClient).createNamespace(namespaceList, versionContext); + + RequestContext.current() + .with(UserContext.CTX_KEY, new UserContext(userId)) + .run(() -> usernameAwareNessieClient.createNamespace(namespaceList, versionContext)); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/BaseTestFunction.java b/sabot/kernel/src/test/java/com/dremio/sabot/BaseTestFunction.java index 9fdc7f82c9..37a5401595 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/BaseTestFunction.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/BaseTestFunction.java @@ -103,7 +103,6 @@ private void testFunctionInner(String stringExpression, boolean runInterpretedMo try { validateSingle(p, ProjectOperator.class, input.toGenerator(getTestAllocator()), output, DEFAULT_BATCH); } catch(AssertionError | Exception e) { - e.printStackTrace(); throw new RuntimeException("Failure while testing function using code compilation.", e); } @@ -118,9 +117,6 @@ private void testFunctionInner(String stringExpression, boolean runInterpretedMo }catch(AssertionError | Exception e){ throw new RuntimeException(String.format("Failure while attempting to evaluate expr [%s] using inputs/outputs of %s.", stringExpression, Arrays.toString(fieldsArr)), e); } - - - System.out.println("Passed: " + stringExpression); } private void testInterp(Project project, LogicalExpression expr, Table input, Table expected) throws Exception { diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/Fixtures.java b/sabot/kernel/src/test/java/com/dremio/sabot/Fixtures.java index d36a19bcbb..5ac6ae5f08 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/Fixtures.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/Fixtures.java @@ -45,6 +45,7 @@ import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.UnionVector; import org.apache.arrow.vector.complex.impl.UnionListWriter; import org.apache.arrow.vector.complex.writer.BaseWriter; import org.apache.arrow.vector.types.FloatingPointPrecision; @@ -649,8 +650,7 @@ public static HeaderRow toHeader(final Map> fieldNameToChild allHeaders[headerIndex] = struct(header, fieldNameToChildren.get(header)); - } - else { + } else { allHeaders[headerIndex] = convertToHeader(header); } @@ -691,9 +691,7 @@ public static DataRow toRow(final List dataTypes, final Object... object if (objects[i] instanceof List){ cells[i] = listToCell((List) objects[i], dataTypes.get(i)); - } - //struct field - else if (objects[i] instanceof Map){ + } else if (objects[i] instanceof Map){ final Cell[] mapValueCells = new Cell[((Map) objects[i]).size()]; int cellIndex =0; for (final Object value : ((Map) objects[i]).values()){ @@ -701,8 +699,7 @@ else if (objects[i] instanceof Map){ cellIndex++; } cells[i] = toCell(mapValueCells); - } - else { + } else { cells[i] = toCell(objects[i]); } @@ -725,7 +722,7 @@ public static DataRow tr(Object... 
objects){ * @return */ private static Cell listToCell(final List list, final String dataType){ - if(dataType.equalsIgnoreCase("varchar")){ + if("varchar".equalsIgnoreCase(dataType)){ final VarCharList varcharList = new VarCharList(); varcharList.addAll(list); return new ListCell(varcharList); @@ -770,18 +767,15 @@ private static Cell toCell(Object obj){ IntList list = new IntList(); list.addAll((List) obj); return new ListCell(list); - } - else if(((List) obj).get(0) instanceof Long) { + } else if(((List) obj).get(0) instanceof Long) { BigIntList list = new BigIntList(); list.addAll((List) obj); return new ListCell(list); - } - else if(((List) obj).get(0) instanceof String) { + } else if(((List) obj).get(0) instanceof String || ((List) obj).get(0) instanceof Text) { VarCharList list = new VarCharList(); list.addAll((List) obj); return new ListCell(list); - } - else { + } else { throw new UnsupportedOperationException("Unknown list type"); } }else if(obj instanceof BigDecimal) { @@ -923,6 +917,7 @@ public Field toField(ColumnHeader header) { } + @Override @SuppressWarnings("unchecked") public CellCompare compare(ValueVector vector, int index, boolean isValid) { V obj = isValid ? (V)getVectorObject(vector, index) : null; @@ -1278,6 +1273,7 @@ boolean evaluateEquality(byte[] obj1, byte[] obj2) { return Arrays.equals(obj1, obj2); } + @Override public String toString(byte[] obj){ return BaseEncoding.base16().encode(obj); } @@ -1326,6 +1322,131 @@ boolean evaluateEquality(BigDecimal val1, BigDecimal val2) { } } + /** + * Represents a cell of Union data type in tabular query results + */ + public static class UnionCell implements Cell { + final Cell cell; + final Map types; + + public UnionCell(final Cell cells, final Map types) { + this.cell = cells; + this.types = types; + } + + @Override + public Field toField(final ColumnHeader header) { + Preconditions.checkArgument(header instanceof ComplexColumnHeader, "Header for complex(union) cell not provided."); + ComplexColumnHeader complexHeader = (ComplexColumnHeader) header; + Preconditions.checkArgument(complexHeader.fields.length == types.size(), "Union cell field count does not match header."); + + ArrayList children = new ArrayList<>(); + int i = 0; + for (Map.Entry entry : this.types.entrySet()) { + Field field = new Field(complexHeader.fields[i].name, new FieldType(true, entry.getKey(), null), null); + children.add(field); + i++; + } + final Field unionField = CompleteType.union(children).toField(complexHeader.name); + return unionField; + } + + @Override + public CellCompare compare(final ValueVector vector,final int index, final boolean isValid) { + final UnionVector uv = (UnionVector) vector; + final int nFields = uv.getField().getChildren().size(); + + Object val = null; + + if (isValid) { + val = uv.getObject(index); + } + + if (cell == null && val == null) { + return new CellCompare(true, "null"); + } + if (cell == null) { + return new CellCompare(false, valsToString(val) + " (null)"); + } + if (val == null) { + return new CellCompare(false, "null (" + cellsToString(cell) + ")"); + } + + if (types.size() != nFields) { + return new CellCompare(false, valsToString(val) + " (" + cellsToString(cell) + ")"); + } + + boolean isEqual = true; + + if (cell instanceof ValueCell) { + ValueCell valueCell = (ValueCell) cell; + isEqual = isEqual && valueCell.evaluateEquality(valueCell.obj, val); + } else { + throw new UnsupportedOperationException("Nested unions not supported"); + } + + if (isEqual) { + return new CellCompare(true, 
cellsToString(cell)); + } else { + return new CellCompare(false, valsToString(val) + " (" + cellsToString(cell) + ")"); + } + } + + @Override + public void set(final ValueVector v, final int index, final ArrowBuf workBuffer) { + final UnionVector structVector = (UnionVector) v; + int i = 0; + for (Map.Entry entry : types.entrySet()) { + if (entry.getValue()) { + cell.set(structVector.getChildrenFromFields().get(i), index, workBuffer); + } + i++; + + } + } + + @Override + public Object unwrap() { + return cell; + } + + private String cellsToString(final Cell... cells) { + StringBuilder builder = new StringBuilder(); + builder.append("{ "); + for (int i = 0; i < cells.length; i++) { + if (i > 0) { + builder.append(", "); + } + if (cells[i] == null) { + builder.append("null"); + } else { + builder.append(cells[i].unwrap().toString()); + } + } + builder.append(" }"); + + return builder.toString(); + } + + private String valsToString(final Object... vals) { + StringBuilder builder = new StringBuilder(); + builder.append("{ "); + for (int i = 0; i < vals.length; i++) { + if (i > 0) { + builder.append(", "); + } + if (vals[i] == null) { + builder.append("null"); + } else { + builder.append(vals[i].toString()); + } + } + builder.append(" }"); + + return builder.toString(); + } + } + public static class StructCell implements Cell { Cell[] cells; @@ -1445,6 +1566,16 @@ public static Cell[] tuple(Object... vals) { return Arrays.stream(vals).map(Fixtures::toCell).toArray(Cell[]::new); } + /** + * Convert value to a union cell for tabular result comparison + * @param val + * @param types + * @return + */ + public static UnionCell toUnionCell(final Object val, final Map types) { + Cell cell = toCell(val); + return new UnionCell(cell, types); + } public abstract static class ValueList extends ArrayList { public abstract CompleteType getValueType(); @@ -1468,10 +1599,12 @@ public void writeToVector(ValueVector v, int index, ArrowBuf workBuffer) { public static class IntList extends ValueList { + @Override public CompleteType getValueType() { return CompleteType.INT; } + @Override public void write(BaseWriter.ListWriter writer, Integer val, ArrowBuf workBuffer) { writer.integer().writeInt(val); } @@ -1479,10 +1612,12 @@ public void write(BaseWriter.ListWriter writer, Integer val, ArrowBuf workBuffer public static class BigIntList extends ValueList { + @Override public CompleteType getValueType() { return CompleteType.BIGINT; } + @Override public void write(BaseWriter.ListWriter writer, Long val, ArrowBuf workBuffer) { writer.bigInt().writeBigInt(val); } @@ -1496,10 +1631,12 @@ public static IntList intList(Integer... 
values) { public static class VarCharList extends ValueList { + @Override public CompleteType getValueType() { return CompleteType.VARCHAR; } + @Override public void write(BaseWriter.ListWriter writer, Text val, ArrowBuf workBuffer) { byte[] bytes = val.toString().getBytes(); workBuffer.setBytes(0, bytes); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/aggregate/hash/TestSpillingHashAgg.java b/sabot/kernel/src/test/java/com/dremio/sabot/aggregate/hash/TestSpillingHashAgg.java index cf0cf31550..6143b44715 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/aggregate/hash/TestSpillingHashAgg.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/aggregate/hash/TestSpillingHashAgg.java @@ -192,8 +192,7 @@ public void testVeryLargeVarcharKey() throws Exception { exceptionThrown = true; } catch (Exception e) { e.printStackTrace(); - } - finally { + } finally { Assert.assertEquals(true, shortLenSuccess); Assert.assertEquals(true, exceptionThrown); } diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/copier/TestConditionalCopier6RoundTrip.java b/sabot/kernel/src/test/java/com/dremio/sabot/copier/TestConditionalCopier6RoundTrip.java index c931880670..a11b40554b 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/copier/TestConditionalCopier6RoundTrip.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/copier/TestConditionalCopier6RoundTrip.java @@ -530,8 +530,11 @@ public void intAppendWithEmpty(){ List copiers = new FieldBufferCopierFactory(testContext.getOptions()).getSixByteConditionalCopiers(ImmutableList.of(in), ImmutableList.of(out)); List emptyCopiers = ConditionalFieldBufferCopier6Util.getEmptySourceFourByteCopiers(ImmutableList.of(out)); try( + @SuppressWarnings("checkstyle:LocalFinalVariableName") final ArrowBuf sv6_0 = allocator.buffer(SV6_SIZE * countPerBatch); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final ArrowBuf sv6_1 = allocator.buffer(SV6_SIZE * countPerBatch); + @SuppressWarnings("checkstyle:LocalFinalVariableName") final ArrowBuf sv6_2 = allocator.buffer(SV6_SIZE * countPerBatch); ){ fillSV6Full(sv6_0, 0, countPerBatch); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestQueriesClerkBase.java b/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestQueriesClerkBase.java index 3fbbecbc6f..f93bf72da6 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestQueriesClerkBase.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestQueriesClerkBase.java @@ -132,38 +132,49 @@ private TestAllocator(BufferAllocator parent, String name, long limit) { numAllocators++; } + @Override public BufferAllocator newChildAllocator(String childName, long dummy1, long limit) { return newChildAllocator(childName, null, dummy1, limit); } + @Override public BufferAllocator newChildAllocator(String childName, AllocationListener listener, long dummy1, long limit) { return new TestAllocator(this, childName, limit); } + @Override public void close() { numAllocators--; } // NB: None of the functions below are implemented + @Override public ArrowBuf buffer(long var1) { throw new UnsupportedOperationException(); } + @Override public ArrowBuf buffer(long var1, BufferManager var2) { throw new UnsupportedOperationException(); } + @Override public long getAllocatedMemory() { throw new UnsupportedOperationException(); } + @Override public long getLimit() { return limit; } + @Override public long getInitReservation() { throw new UnsupportedOperationException(); } + @Override public void setLimit(long var1) { throw new 
UnsupportedOperationException(); } + @Override public long getPeakMemoryAllocation() { return 0; } + @Override public long getHeadroom() { throw new UnsupportedOperationException(); } @@ -178,21 +189,27 @@ public Collection getChildAllocators() { throw new UnsupportedOperationException(); } + @Override public AllocationReservation newReservation() { throw new UnsupportedOperationException(); } + @Override public ArrowBuf getEmpty() { throw new UnsupportedOperationException(); } + @Override public String getName() { return name; } + @Override public boolean isOverLimit() { throw new UnsupportedOperationException(); } + @Override public String toVerboseString() { throw new UnsupportedOperationException(); } + @Override public void assertOpen() { throw new UnsupportedOperationException(); } diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestThreadsStatsCollector.java b/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestThreadsStatsCollector.java index 7705f79bd0..2355451ccc 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestThreadsStatsCollector.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/exec/TestThreadsStatsCollector.java @@ -26,6 +26,7 @@ public class TestThreadsStatsCollector { public void testOldThreadsArePruned() throws InterruptedException { Thread t = new Thread() { + @Override public void run () { try { sleep(400L); @@ -35,6 +36,7 @@ public void run () { }; Thread t1 = new Thread() { + @Override public void run () { try { sleep(400L); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/exec/fragment/TestFragmentExecutor.java b/sabot/kernel/src/test/java/com/dremio/sabot/exec/fragment/TestFragmentExecutor.java index bc5c6c60ba..b072ab6ffa 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/exec/fragment/TestFragmentExecutor.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/exec/fragment/TestFragmentExecutor.java @@ -15,7 +15,6 @@ */ package com.dremio.sabot.exec.fragment; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; @@ -63,7 +62,7 @@ public class TestFragmentExecutor { private BufferAllocator allocator = new RootAllocator(); @Test - public void testWorkOnOOBAfterSetup() { + public void testWorkOnOOBAfterSetup() throws Exception { try(ArrowBuf messageBuf = allocator.buffer(10L)) { FragmentExecutor exec = spy(getTestFragmentExecutor()); doNothing().when(exec).setupExecution(); @@ -86,9 +85,6 @@ public void testWorkOnOOBAfterSetup() { listener.overrideIsSetup(true); asyncTask.run(); verify(pipeline).workOnOOB(any(OutOfBandMessage.class)); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/exec/heap/TestSpillingOperatorHeapController.java b/sabot/kernel/src/test/java/com/dremio/sabot/exec/heap/TestSpillingOperatorHeapController.java index 951c8836e8..94226d0fe4 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/exec/heap/TestSpillingOperatorHeapController.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/exec/heap/TestSpillingOperatorHeapController.java @@ -28,11 +28,9 @@ import java.util.Random; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; import javax.management.ObjectName; -import org.junit.Assert; import org.junit.Test; public class TestSpillingOperatorHeapController { @@ -73,14 +71,7 @@ public void testSpillingOperatorMultiThreaded() { 
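+      // The tasks below run concurrently; join() surfaces any task failure as an
+      // unchecked CompletionException, failing the test without manual unwrapping.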
allFutures.add(CompletableFuture.supplyAsync(() -> generateAndRemove(100 + rand.nextInt(100), 1000 + rand.nextInt(1000), sut, false))); } - allFutures.forEach((f) -> { - try { - f.get(); - } catch (InterruptedException ignored) { - } catch (ExecutionException e) { - Assert.fail(e.getMessage()); - } - }); + allFutures.forEach(CompletableFuture::join); assertEquals(0, sut.maxParticipantsPerSlot()); assertEquals(0, sut.numParticipants()); assertEquals(0, sut.computeTotalOverhead()); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/ListColumnsGenerator.java b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/ListColumnsGenerator.java new file mode 100644 index 0000000000..5638b0b81b --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/ListColumnsGenerator.java @@ -0,0 +1,223 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.sabot.join.hash; + + +import static com.dremio.common.expression.CompleteType.LIST; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.BaseFixedWidthVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import com.dremio.common.expression.CompleteType; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.record.VectorContainer; +import com.dremio.sabot.Generator; +import com.google.common.collect.ImmutableList; + +/** + * A vector container containing the following types of vectors - + * IntVector, ListVector of Int, ListVector of String.
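+ * Each generated row carries a five-element list in each of the two list columns.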
+ * Used for unit tests in {@link TestVHashJoinSpillBuildAndReplay} + * @param <T> + */ +class ListColumnsGenerator<T extends FieldVector> implements Generator { + + private final int rows; + private final VectorContainer result; + private final List<T> vectors; + + private int offset; + + private final String postFix; + + private final BufferAllocator allocator; + + + public ListColumnsGenerator(final BufferAllocator allocator, final int rows, final int offset, final String postFix) { + + this.allocator = allocator; + this.rows = rows; + result = new VectorContainer(allocator); + + this.offset = offset; + this.postFix = postFix; + + final ImmutableList.Builder<T> vectorsBuilder = ImmutableList.builder(); + + final Field fieldId = new Field("id_"+ this.postFix, new FieldType(true, + new ArrowType.Int(32, true), null), null); + + final T idLeftVector = result.addOrGet(fieldId); + vectorsBuilder.add(idLeftVector); + + + + final List<Field> childrenField1 = ImmutableList.of(CompleteType.INT.toField("integerCol")); + + final Field fieldIntList = new Field("ints_" + this.postFix, FieldType.nullable(LIST.getType()), childrenField1); + + final T intListVector = result.addOrGet(fieldIntList); + vectorsBuilder.add(intListVector); + + final List<Field> childrenField2 = ImmutableList.of(CompleteType.VARCHAR.toField("strCol")); + + final Field fieldStringList = new Field("strings_" + this.postFix, FieldType.nullable(LIST.getType()), childrenField2); + + final T stringListVector = result.addOrGet(fieldStringList); + vectorsBuilder.add(stringListVector); + + this.vectors = vectorsBuilder.build(); + + result.buildSchema(BatchSchema.SelectionVectorMode.NONE); + } + + /** + * Generate a new record. + * @param records + * @return + */ + @Override + public int next(final int records) { + + + final int count = Math.min(rows - offset, records); + + if (count <= 0) { + return 0; + } + + final List<List<Integer>> intValues = getIntList(count); + final List<List<String>> stringValues = getStringList(count); + + result.allocateNew(); + for (int i = 0; i < count; i++) { + + insertIntoIntVector(offset, offset, (BaseFixedWidthVector) vectors.get(0)); + + + insertIntoIntListVector(intValues); + + insertIntoStringListVector(stringValues); + + offset++; + } + + result.setAllCount(count); + result.buildSchema(); + return count; + } + + private void insertIntoIntListVector(List<List<Integer>> intValues) { + final UnionListWriter listWriter = new UnionListWriter((ListVector) vectors.get(1)); + + listWriter.setPosition(offset); + listWriter.startList(); + + for (List<Integer> intList : intValues) { + for (int num : intList) { + listWriter.writeInt(num); + } + + } + + listWriter.endList(); + } + + private void insertIntoStringListVector(final List<List<String>> strValues) { + final UnionListWriter listWriter = new UnionListWriter((ListVector) vectors.get(2)); + try (final ArrowBuf tempBuf = allocator.buffer(1024)) { + listWriter.setPosition(offset); + listWriter.startList(); + for (List<String> strList : strValues) { + for (String str : strList) { + final byte[] varCharVal = str.getBytes(); + tempBuf.setBytes(0, varCharVal); + listWriter.writeVarChar(0, varCharVal.length, tempBuf); + } + + } + + listWriter.endList(); + + } + } + + private static List<List<Integer>> getIntList(final int size) { + final List<List<Integer>> listOfLists = new JsonStringArrayList<>(size); + final int listSize = 5; + for (int i = 0; i < size; i++) { + + final List<Integer> list = new JsonStringArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + list.add(j); + } + listOfLists.add(list); + } + return listOfLists; + } + + private static List<List<String>> getStringList(final int size) { + final List<List<String>>
listOfLists = new ArrayList<>(size); + final int listSize = 5; + + for (int i = 0; i < size; i++) { + final List<String> list = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + list.add(randomString(j)); + } + listOfLists.add(list); + } + return listOfLists; + } + + private static String randomString(final int num) { + + final StringBuilder builder = new StringBuilder(); + + builder.append((char)('0' + num)); + + return builder.toString(); + } + + @Override + public VectorAccessible getOutput() { + + return result; + } + + @Override + public void close() throws Exception { + result.close(); + } + + private static void insertIntoIntVector(final int index, final int value, final BaseFixedWidthVector vector) { + final IntVector vec = (IntVector)vector; + vec.setSafe(index, value); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/MixedColumnGenerator.java b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/MixedColumnGenerator.java new file mode 100644 index 0000000000..3028a74f0a --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/MixedColumnGenerator.java @@ -0,0 +1,155 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.sabot.join.hash; + + +import java.util.List; + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.BaseFixedWidthVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.NullableStructWriter; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; + +import com.dremio.common.expression.CompleteType; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.record.VectorContainer; +import com.dremio.sabot.Generator; +import com.google.common.collect.ImmutableList; + +/** + * A vector container containing the following types of vectors - + * IntVector, IntVector, StructVector + * + * Used for unit tests in {@link TestVHashJoinSpillBuildAndReplay} + * @param <T> + */ +class MixedColumnGenerator<T extends FieldVector> implements Generator { + private final int rows; + private final VectorContainer result; + private final List<T> vectors; + private int offset; + private final String postFix; + private final BufferAllocator allocator; + + public MixedColumnGenerator(final BufferAllocator allocator, final int rows, final int offset, final String postFix) { + + this.allocator = allocator; + this.rows = rows; + result = new VectorContainer(allocator); + + this.offset = offset; + this.postFix = postFix; + + final ImmutableList.Builder<T> vectorsBuilder = ImmutableList.builder(); + + final Field fieldIdLeft = new Field("id_"+ this.postFix, new FieldType(true, + new ArrowType.Int(32, true), null), null); + + final T
idLeftVector = result.addOrGet(fieldIdLeft); + vectorsBuilder.add(idLeftVector); + + final Field fieldInt = new Field("int_" + this.postFix, new FieldType(true, + new ArrowType.Int(32, true), null), null); + + final T intVector = result.addOrGet(fieldInt); + vectorsBuilder.add(intVector); + + final Field fieldStruct = CompleteType.struct( + CompleteType.VARCHAR.toField("child_string"), + CompleteType.INT.toField("child_int") + ).toField("struct_" + this.postFix); + + final T structVector = result.addOrGet(fieldStruct); + vectorsBuilder.add(structVector); + + this.vectors = vectorsBuilder.build(); + + result.buildSchema(BatchSchema.SelectionVectorMode.NONE); + } + + /** + * Generate a new record. + * @param records + * @return + */ + @Override + public int next(final int records) { + final int count = Math.min(rows - offset, records); + + if (count <= 0) { + return 0; + } + + result.allocateNew(); + for (int i = 0; i < count; i++) { + + insertIntoIntVector(offset, offset, (BaseFixedWidthVector) vectors.get(0)); + + int value = "left".equalsIgnoreCase(postFix) ? offset : offset + rows; + insertIntoIntVector( offset, value, (BaseFixedWidthVector) vectors.get(1)); + + insertIntoStructVector(allocator, offset, value, (StructVector)vectors.get(2)); + + offset++; + } + result.setAllCount(count); + result.buildSchema(); + return count; + } + + + @Override + public VectorAccessible getOutput() { + return result; + } + + @Override + public void close() throws Exception { + result.close(); + } + + private static void insertIntoIntVector(final int index, final int value, final BaseFixedWidthVector vector) { + IntVector vec = (IntVector)vector; + vec.setSafe(index, value); + } + + private static void insertIntoStructVector(final BufferAllocator allocator, final int index, final int value, final StructVector vector) { + + final NullableStructWriter structWriter = vector.getWriter(); + + try (final ArrowBuf tempBuf = allocator.buffer(1024)) { + + structWriter.setPosition(index); + + structWriter.start(); + + structWriter.integer("child_int").writeInt(value); + + final byte[] varCharVal = Integer.toString(value).getBytes(); + tempBuf.setBytes(0, varCharVal); + structWriter.varChar("child_string").writeVarChar(0, varCharVal.length,tempBuf); + + structWriter.end(); + } + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillBuildAndReplay.java b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillBuildAndReplay.java index b690c3885d..92d31550c6 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillBuildAndReplay.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillBuildAndReplay.java @@ -15,19 +15,42 @@ */ package com.dremio.sabot.join.hash; +import static com.dremio.sabot.Fixtures.struct; +import static com.dremio.sabot.Fixtures.t; +import static com.dremio.sabot.Fixtures.th; +import static com.dremio.sabot.Fixtures.toUnionCell; +import static com.dremio.sabot.Fixtures.tr; +import static com.dremio.sabot.Fixtures.tuple; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.arrow.vector.types.FloatingPointPrecision; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.util.Text; +import org.apache.calcite.rel.core.JoinRelType; import org.junit.After; import org.junit.Before; +import org.junit.Test; +import 
com.dremio.common.logical.data.JoinCondition; import com.dremio.exec.ExecConstants; import com.dremio.options.OptionManager; import com.dremio.options.OptionValue; +import com.dremio.sabot.Fixtures; import com.dremio.sabot.op.join.hash.HashJoinOperator; import com.dremio.sabot.op.join.vhash.spill.VectorizedSpillingHashJoinOperator; +import com.google.common.collect.ImmutableList; // Test join with build, followed by spill and then replay. public class TestVHashJoinSpillBuildAndReplay extends TestVHashJoinSpill { private final OptionManager options = testContext.getOptions(); private final int minReserve = VectorizedSpillingHashJoinOperator.MIN_RESERVE; + @Override @Before public void before() { options.setOption(OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, HashJoinOperator.ENABLE_SPILL.getOptionName(), true)); @@ -39,6 +62,7 @@ public void before() { VectorizedSpillingHashJoinOperator.MIN_RESERVE = 7 * 1024 * 1024; } + @Override @After public void after() { options.setOption(HashJoinOperator.ENABLE_SPILL.getDefault()); @@ -47,4 +71,182 @@ public void after() { options.setOption(HashJoinOperator.NUM_PARTITIONS.getDefault()); VectorizedSpillingHashJoinOperator.MIN_RESERVE = minReserve; } + + @Override + @Test + public void manyColumns() throws Exception { + baseManyColumns(); + } + /** + * Test Hash Join Spill for two tables containing the following columns - integer, list of + * integers, list of strings + * @throws Exception + */ + @Test + public void testListColumns() throws Exception { + + final JoinInfo joinInfo = getJoinInfo(Collections.singletonList(new JoinCondition("EQUALS", f("id_left"), f("id_right"))), JoinRelType.LEFT); + + //header of expected joined table + final Fixtures.HeaderRow joinedHeader = th("id_right", "ints_right", "strings_right", "id_left", "ints_left", "strings_left"); + + final int numberOfRows = 1; + final Fixtures.DataRow[] joinedData = getDataWithListVector(numberOfRows); + + //expected joined table. + final Fixtures.Table expected = t(joinedHeader, false, joinedData); + + //validate joined data against expected table + validateDual(joinInfo.operator, joinInfo.clazz, + new ListColumnsGenerator<>(getTestAllocator(), numberOfRows, 0, "left"), + new ListColumnsGenerator<>(getTestAllocator(), numberOfRows, 0, "right"), + 50, expected); + } + + @Test + public void testUnionColumns() throws Exception { + + final JoinInfo joinInfo = getJoinInfo(Collections.singletonList(new JoinCondition("EQUALS", f("id_left"), f("id_right"))), JoinRelType.LEFT); + + //header of expected joined table + //union column header in probe table + final Fixtures.ComplexColumnHeader leftUnionHeader = struct("union_left", ImmutableList.of("int", "float")); + //union column header in build table + final Fixtures.ComplexColumnHeader rightUnionHeader = struct("union_right", ImmutableList.of("int", "float")); + + final Fixtures.HeaderRow joinedHeader = th("id_right", rightUnionHeader, "id_left", leftUnionHeader); + + final int numberOfRows = 1; + final Fixtures.DataRow[] joinedData = getDataWithUnionVector(numberOfRows); + + //expected joined table.
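+ //union cells alternate members by row parity: int for even rows, float for odd rows (see getDataWithUnionVector below)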
+ final Fixtures.Table expected = t(joinedHeader, false, joinedData); + + //validate joined data against expected table + validateDual(joinInfo.operator, joinInfo.clazz, + new UnionColumnGenerator<>(getTestAllocator(), numberOfRows, 0, "left"), + new UnionColumnGenerator<>(getTestAllocator(), numberOfRows, 0, "right"), + 50, expected); + } + + /** + * Test Hash Join Spill for tables containing struct columns + * @throws Exception + */ + @Test + public void testStructColumns() throws Exception { + + final JoinInfo joinInfo = getJoinInfo(Arrays.asList(new JoinCondition("EQUALS", f("id_left"), f("id_right"))), JoinRelType.LEFT); + + //struct column header in probe table + final Fixtures.ComplexColumnHeader leftStructHeader = struct("struct_left", ImmutableList.of("child_string", "child_int")); + //struct column header in build table + final Fixtures.ComplexColumnHeader rightStructHeader = struct("struct_right", ImmutableList.of("child_string", "child_int")); + + //header of expected joined table + Fixtures.HeaderRow joinedHeader = th("id_right", "int_right", rightStructHeader, "id_left", "int_left", leftStructHeader); + + final int numberOfRows = 1; + final Fixtures.DataRow[] joinedData = getDataWithStructVector(numberOfRows); + + //expected joined table. + final Fixtures.Table expected = t(joinedHeader, false, joinedData); + + //validate joined data against expected table + validateDual(joinInfo.operator, joinInfo.clazz, + new MixedColumnGenerator<>(getTestAllocator(), numberOfRows, 0, "left"), + new MixedColumnGenerator<>(getTestAllocator(), numberOfRows, 0, "right"), + 50, expected); + } + + /** + * Creates data rows with the following columns - + * int, int, struct{int, string}, int, int, struct{int, string} + * @param numberOfRows + * @return + */ + protected Fixtures.DataRow[] getDataWithStructVector(final int numberOfRows) { + final Fixtures.DataRow[] rows = new Fixtures.DataRow[numberOfRows]; + for (int i = 0; i < numberOfRows; i++) { + final Fixtures.Cell[] leftStructCell = tuple(Integer.toString(i), i); + final Fixtures.Cell[] rightStructCell = tuple( Integer.toString(i + numberOfRows), i + numberOfRows); + rows[i] = tr( i, i + numberOfRows, rightStructCell, i, i, leftStructCell); + } + return rows; + } + + /** + * Creates data rows with the following sequence of columns - + * Int, Union, Int, Union + * @param numberOfRows + * @return + */ + protected Fixtures.DataRow[] getDataWithUnionVector(final int numberOfRows) { + final Fixtures.DataRow[] rows = new Fixtures.DataRow[numberOfRows]; + final Map<ArrowType, Boolean> unionTypes = new HashMap<>(); + for (int i = 0; i < numberOfRows; i++) { + unionTypes.clear(); + if (i % 2 == 0) { + + unionTypes.put(new ArrowType.Int(32, true), true); + unionTypes.put(new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE), false); + + Fixtures.UnionCell uCell = toUnionCell(i, unionTypes); + rows[i] = tr(i, uCell, i, uCell); + } else { + unionTypes.put(new ArrowType.Int(32, true), false); + unionTypes.put(new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE), true); + Fixtures.UnionCell uCell = toUnionCell((float)i, unionTypes); + rows[i] = tr(i, uCell, i, uCell); + } + + } + return rows; + } + + /** + * Generates data rows containing the following columns - integer, list of integers, + * list of strings, integer, list of integers, list of strings + * @param numberOfRows + * @return + */ + protected Fixtures.DataRow[] getDataWithListVector(final int numberOfRows) { + + final List<List<Integer>> intValues = getIntList(numberOfRows); + final List<List<Text>> stringValues =
getStringList(numberOfRows); + final Fixtures.DataRow[] rows = new Fixtures.DataRow[numberOfRows]; + for (int i = 0; i < numberOfRows; i++) { + + rows[i] = tr( i, intValues.get(i), stringValues.get(i), i, intValues.get(i), stringValues.get(i)); + } + return rows; + } + + private static List<List<Integer>> getIntList(final int size) { + final List<List<Integer>> listOfLists = new ArrayList<>(size); + final int listSize = 5; + for (int i = 0; i < size; i++) { + + final List<Integer> list = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + list.add(j); + } + listOfLists.add(list); + } + return listOfLists; + } + + private static List<List<Text>> getStringList(int size) { + final List<List<Text>> listOfLists = new ArrayList<>(size); + final int listSize = 5; + + for (int i = 0; i < size; i++) { + final List<Text> list = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + list.add(new Text(Integer.toString(j))); + } + listOfLists.add(list); + } + return listOfLists; + } } diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillReplay.java b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillReplay.java index 235aec4428..1abdb5d0f7 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillReplay.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/TestVHashJoinSpillReplay.java @@ -32,6 +32,7 @@ public class TestVHashJoinSpillReplay extends TestVHashJoinSpill { private final OptionManager options = testContext.getOptions(); private final int minReserve = VectorizedSpillingHashJoinOperator.MIN_RESERVE; + @Override @Before public void before() { options.setOption(OptionValue.createBoolean(OptionType.SYSTEM, HashJoinOperator.ENABLE_SPILL.getOptionName(), true)); @@ -42,6 +43,7 @@ public void before() { VectorizedSpillingHashJoinOperator.MIN_RESERVE = 9 * 1024 * 1024; } + @Override @After public void after() { options.setOption(HashJoinOperator.ENABLE_SPILL.getDefault()); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/UnionColumnGenerator.java b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/UnionColumnGenerator.java new file mode 100644 index 0000000000..646d96d1ae --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/sabot/join/hash/UnionColumnGenerator.java @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.sabot.join.hash; + +import java.util.List; + +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.BaseFixedWidthVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.complex.UnionVector; +import org.apache.arrow.vector.complex.impl.UnionWriter; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; + +import com.dremio.common.expression.CompleteType; +import com.dremio.exec.record.BatchSchema; +import com.dremio.exec.record.VectorAccessible; +import com.dremio.exec.record.VectorContainer; +import com.dremio.sabot.Generator; +import com.google.common.collect.ImmutableList; + +/** + * A vector container containing the following types of vectors - + * IntVector, UnionVector + * + * Used for unit tests in {@link TestVHashJoinSpillBuildAndReplay} + * @param <T> + */ +class UnionColumnGenerator<T extends FieldVector> implements Generator { + + private final int rows; + private final VectorContainer result; + private final List<T> vectors; + private int offset; + private final String postFix; + private final BufferAllocator allocator; + + public UnionColumnGenerator(final BufferAllocator allocator, final int rows, final int offset, final String postFix) { + + this.allocator = allocator; + this.rows = rows; + result = new VectorContainer(allocator); + + this.offset = offset; + this.postFix = postFix; + + final ImmutableList.Builder<T> vectorsBuilder = ImmutableList.builder(); + + final Field fieldIdLeft = new Field("id_"+ this.postFix, new FieldType(true, + new ArrowType.Int(32, true), null), null); + + final T idLeftVector = result.addOrGet(fieldIdLeft); + vectorsBuilder.add(idLeftVector); + + final Field unionField = CompleteType.union( + CompleteType.INT.toField("int"), + CompleteType.FLOAT.toField("float") + ).toField("union_" + this.postFix); + + final T unionVector = result.addOrGet(unionField); + vectorsBuilder.add(unionVector); + + this.vectors = vectorsBuilder.build(); + + result.buildSchema(BatchSchema.SelectionVectorMode.NONE); + } + + /** + * Generate a new record.
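+ * Returns the number of records written in this call, or 0 once the generator is exhausted.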
+ * @param records + * @return + */ + @Override + public int next(final int records) { + final int count = Math.min(rows - offset, records); + if (count <= 0) { + return 0; + } + + result.allocateNew(); + for (int i = 0; i < count; i++) { + insertIntoIntVector(offset, offset, (BaseFixedWidthVector) vectors.get(0)); + if (i % 2 == 0) { + insertIntIntoUnionVector(offset, i, (UnionVector) vectors.get(1)); + } else { + insertFloatIntoUnionVector(offset, (float) i, (UnionVector) vectors.get(1)); + } + offset++; + } + + result.setAllCount(count); + result.buildSchema(); + return count; + } + + @Override + public VectorAccessible getOutput() { + return result; + } + + @Override + public void close() throws Exception { + result.close(); + } + + private static void insertIntoIntVector(final int index, final int value, final BaseFixedWidthVector vector) { + IntVector vec = (IntVector)vector; + vec.setSafe(index, value); + } + + private static void insertIntIntoUnionVector(final int index, final int value, final UnionVector vector) { + final UnionWriter unionWriter = new UnionWriter(vector); + + unionWriter.setPosition(index); + + unionWriter.writeInt(value); + + } + + private static void insertFloatIntoUnionVector(final int index, final float value, final UnionVector vector) { + final UnionWriter unionWriter = new UnionWriter(vector); + + unionWriter.setPosition(index); + + unionWriter.writeFloat4(value); + + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/nlje/TestNLJE.java b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/nlje/TestNLJE.java index 09753d7cc6..636e94feaf 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/nlje/TestNLJE.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/nlje/TestNLJE.java @@ -177,24 +177,28 @@ public void nljBatchBoundary() throws Exception { 2047, expected); } + @Override @Test public void noNullEquivalenceWithNullsLeft() { // disable since ordering is different. Assume.assumeFalse(true); } + @Override @Test public void noNullEquivalenceWithZeroKeyLeft() { // disable since ordering is different. Assume.assumeFalse(true); } + @Override @Test public void hugeBatch() { // disable as this takes too long in unit tests (4B comparisons are required) Assume.assumeFalse(true); } + @Override @Test public void regionNationInner() { // disable since ordering is different.
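[Editor's note] The two generators above drive Arrow union columns through UnionWriter: position the writer on a row, then call the member-specific write method (writeInt/writeFloat4), which records both the value and the row's type id. A minimal self-contained sketch of that write path, assuming only Arrow itself (the class and vector names are illustrative, not part of this change):

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.complex.UnionVector;
import org.apache.arrow.vector.complex.impl.UnionWriter;

// Writes alternating int/float rows, mirroring UnionColumnGenerator.next() above.
public final class UnionWriteSketch {
  public static void main(String[] args) {
    try (BufferAllocator allocator = new RootAllocator();
         UnionVector union = UnionVector.empty("union_demo", allocator)) {
      union.allocateNew();
      final UnionWriter writer = new UnionWriter(union);
      for (int row = 0; row < 4; row++) {
        writer.setPosition(row);
        if (row % 2 == 0) {
          writer.writeInt(row);            // even rows populate the int member
        } else {
          writer.writeFloat4((float) row); // odd rows populate the float member
        }
      }
      union.setValueCount(4);
      for (int row = 0; row < 4; row++) {
        System.out.println(union.getObject(row)); // 0, 1.0, 2, 3.0
      }
    }
  }
}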
diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperatorTest.java b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperatorTest.java index 03ec8f117a..70eb67d6ed 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperatorTest.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/VectorizedHashJoinOperatorTest.java @@ -19,7 +19,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyList; @@ -658,9 +657,6 @@ public void testTryPushRuntimeFilterBooleanColumnDrop() throws Exception { assertTrue(valueListFiltersCopy.get(0).isContainsFalse()); AutoCloseables.close(valueListFiltersCopy); } - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/spill/slicer/TestMerger.java b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/spill/slicer/TestMerger.java index a9ae4b6095..8b8bb904c4 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/spill/slicer/TestMerger.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/op/join/vhash/spill/slicer/TestMerger.java @@ -330,8 +330,7 @@ private UnionVector getFilledUnionVector(final List intList, final List if (index % 2 == 0) { unionWriter.writeInt(intList.get(intIndex)); intIndex++; - } - else { + } else { final byte[] varCharVal = stringList.get(stringIndex).getBytes(); tempBuf.setBytes(0, varCharVal); unionWriter.writeVarChar(0, varCharVal.length, tempBuf); diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/op/scan/ScanOperatorTest.java b/sabot/kernel/src/test/java/com/dremio/sabot/op/scan/ScanOperatorTest.java index ca00344ae9..c04e3e715f 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/op/scan/ScanOperatorTest.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/op/scan/ScanOperatorTest.java @@ -19,7 +19,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; @@ -83,8 +82,7 @@ public void cleanupAfterTest() { } @Test - public void testWorkOnOOBRuntimeFilter() { - try { + public void testWorkOnOOBRuntimeFilter() throws Exception { // Send 6 messages. 1/2 are independent filters, 3 is dup of 1 from a different minor frag and should be dropped, 4 comes from // a different sender but filter structure is similar to 2/3, 5 comes from same sender as 4 but has one extra column. // 6th comes from same sender as 4 but with one less non-partition column. 
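[Editor's note] This hunk repeats a cleanup applied throughout the test changes in this diff (TestFragmentExecutor, VectorizedHashJoinOperatorTest, ScanOperatorTest): the catch { e.printStackTrace(); fail(e.getMessage()); } wrapper is dropped in favor of declaring throws Exception, so JUnit reports the original exception with its full stack trace rather than a flattened message. The resulting shape, with an illustrative test name:

@Test
public void testSomething() throws Exception {
  // exercise the operator directly; an unexpected exception fails the test
  // on its own, so no catch-and-fail wrapper is needed
}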
@@ -204,10 +202,6 @@ public void testWorkOnOOBRuntimeFilter() { assertEquals(Lists.newArrayList("npCol3"), f5NonPartitionCols); AutoCloseables.close(scanOp.getRuntimeFilters()); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } } @Test diff --git a/sabot/kernel/src/test/java/com/dremio/sabot/rpc/user/TestUserRpcServer.java b/sabot/kernel/src/test/java/com/dremio/sabot/rpc/user/TestUserRpcServer.java index b97f007082..a041657496 100644 --- a/sabot/kernel/src/test/java/com/dremio/sabot/rpc/user/TestUserRpcServer.java +++ b/sabot/kernel/src/test/java/com/dremio/sabot/rpc/user/TestUserRpcServer.java @@ -30,6 +30,7 @@ import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.UUID; import javax.inject.Provider; @@ -44,6 +45,7 @@ import com.dremio.common.memory.DremioRootAllocator; import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; +import com.dremio.exec.proto.UserBitShared; import com.dremio.exec.rpc.Response; import com.dremio.exec.rpc.ResponseSender; import com.dremio.exec.rpc.RpcConfig; @@ -53,7 +55,10 @@ import com.dremio.exec.work.protector.UserWorker; import com.dremio.options.OptionValidatorListing; import com.dremio.sabot.rpc.user.UserRPCServer.UserClientConnectionImpl; +import com.dremio.service.users.User; +import com.dremio.service.users.UserNotFoundException; import com.dremio.service.users.UserService; +import com.dremio.service.users.proto.UID; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelFuture; @@ -72,6 +77,7 @@ public class TestUserRpcServer { @Mock Provider<UserService> userServiceProvider; @Mock Provider<NodeEndpoint> nodeEndpointProvider; @Mock WorkIngestor ingestor; + @Mock UserBitShared.UserCredentials userCredentials; @Mock Provider<UserWorker> worker; final BufferAllocator allocator = DremioRootAllocator.create(10, 1); final EventLoopGroup loopGroup = new DefaultEventLoop(); @@ -81,6 +87,9 @@ public class TestUserRpcServer { @Mock UserClientConnectionImpl connection; @Mock UserSession userSession; // Session is returned from the connection @Mock SocketChannel socketChannel; + @Mock UserService userService; + @Mock User user; + @Mock UID uid; final byte[] pBody = "abc".getBytes(UTF_8); @Mock ByteBuf dBody; @@ -91,6 +100,8 @@ public class TestUserRpcServer { private MockTracer tracer = new MockTracer(); private UserRPCServer server; + private static final String USERNAME = "test_user"; + @Captor ArgumentCaptor<ResponseSender> captorSender; public void setup(boolean tracingEnabled, boolean[] closed) { @@ -102,6 +113,17 @@ public void setup(WorkIngestor ingestor, boolean enabled, boolean[] closed) { // Simply connect the session to the connection. // It's up to the individual tests to stub returns from calls to the session. when(connection.getSession()).thenReturn(userSession); + when(userSession.getCredentials()).thenReturn(userCredentials); + when(userCredentials.getUserName()).thenReturn(USERNAME); + when(userServiceProvider.get()).thenReturn(userService); + try { + when(userService.getUser(USERNAME)).thenReturn(user); + } catch (UserNotFoundException e) { + // This doesn't actually happen because the mock is stubbed to return a real value, but it + // prevents us from needing to declare the exception in the method signature.
+ } + when(user.getUID()).thenReturn(uid); + when(uid.getId()).thenReturn(UUID.randomUUID().toString()); when(userSession.isTracingEnabled()).thenReturn(enabled); when(connection.newRequestHandle(anyInt())).thenReturn(() -> { closed[0] = true; }); diff --git a/sabot/kernel/src/test/java/com/dremio/test/dsl/RexDsl.java b/sabot/kernel/src/test/java/com/dremio/test/dsl/RexDsl.java new file mode 100644 index 0000000000..86bcb9940f --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/test/dsl/RexDsl.java @@ -0,0 +1,161 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.test.dsl; + +import static com.dremio.test.scaffolding.ScaffoldingRel.DATE_TYPE; +import static com.dremio.test.scaffolding.ScaffoldingRel.FLOAT_TYPE; +import static com.dremio.test.scaffolding.ScaffoldingRel.INT_NULL_TYPE; +import static com.dremio.test.scaffolding.ScaffoldingRel.INT_TYPE; +import static com.dremio.test.scaffolding.ScaffoldingRel.REX_BUILDER; +import static com.dremio.test.scaffolding.ScaffoldingRel.VARCHAR_NULL_TYPE; +import static com.dremio.test.scaffolding.ScaffoldingRel.VARCHAR_TYPE; + +import java.util.List; + +import org.apache.calcite.avatica.util.ByteString; +import org.apache.calcite.rel.core.CorrelationId; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rex.RexInputRef; +import org.apache.calcite.rex.RexLiteral; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.util.DateString; + +/** + * Convenience functions for building inline {@link RexNode}s. + */ +public class RexDsl { + public static RexNode and(RexNode... exprs) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.AND, exprs); + } + + public static RexNode or(RexNode... exprs) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.OR, exprs); + } + + public static RexNode or(List<RexNode> exprs) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.OR, exprs); + } + + /** + * Equals (=). + */ + public static RexNode eq(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.EQUALS, expr1, expr2); + } + + /** + * Not equals (!=). + */ + public static RexNode notEq(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.NOT_EQUALS, expr1, expr2); + } + + /** + * Less than or equal to (<=). + */ + public static RexNode lte(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, expr1, expr2); + } + + /** + * Less than (<). + */ + public static RexNode lt(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.LESS_THAN, expr1, expr2); + } + + /** + * Greater than (>). + */ + public static RexNode gt(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.GREATER_THAN, expr1, expr2); + } + + /** + * Greater than or equal to (>=).
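+ * Example: {@code gte(intInput(0), literal(5))} builds the expression {@code >=($0, 5)}.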
+ */ + public static RexNode gte(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, expr1, expr2); + } + + /** + * Modulus (expr1 % expr2). + */ + public static RexNode mod(RexNode expr1, RexNode expr2) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.MOD, expr1, expr2); + } + + public static RexNode literal(int value) { + return REX_BUILDER.makeLiteral(value, INT_TYPE, false); + } + + public static RexNode literal(float value) { + return REX_BUILDER.makeLiteral(value, FLOAT_TYPE, false); + } + + public static RexNode literalDate(String value) { + return REX_BUILDER.makeDateLiteral(new DateString(value)); + } + + public static RexLiteral literalBinary(String base16Value) { + return REX_BUILDER.makeBinaryLiteral(ByteString.of(base16Value, 16)); + } + + public static RexNode literalNullable(int value) { + return REX_BUILDER.makeLiteral(value, INT_NULL_TYPE, false); + } + + public static RexNode literal(String value) { + return REX_BUILDER.makeLiteral(value); + } + + public static RexNode literal(boolean value) { + return REX_BUILDER.makeLiteral(value); + } + + public static RexInputRef intNullInput(int i) { + return REX_BUILDER.makeInputRef(INT_NULL_TYPE, i); + } + + public static RexInputRef intInput(int i) { + return REX_BUILDER.makeInputRef(INT_TYPE, i); + } + + public static RexInputRef dateInput(int i) { + return REX_BUILDER.makeInputRef(DATE_TYPE, i); + } + + public static RexNode intCorrel(CorrelationId correlationId) { + return REX_BUILDER.makeCorrel(INT_TYPE, correlationId); + } + + public static RexInputRef varcharInput(int i) { + return REX_BUILDER.makeInputRef(VARCHAR_TYPE, i); + } + + public static RexInputRef varcharNullInput(int i) { + return REX_BUILDER.makeInputRef(VARCHAR_NULL_TYPE, i); + } + + public static RexNode cast(RelDataType type, RexNode exp) { + return REX_BUILDER.makeCast(type, exp); + } + + public static RexNode caseExpr(RexNode... exp) { + return REX_BUILDER.makeCall(SqlStdOperatorTable.CASE, exp); + } +} diff --git a/sabot/kernel/src/test/java/com/dremio/test/scaffolding/ScaffoldingRel.java b/sabot/kernel/src/test/java/com/dremio/test/scaffolding/ScaffoldingRel.java new file mode 100644 index 0000000000..cb5a3509a7 --- /dev/null +++ b/sabot/kernel/src/test/java/com/dremio/test/scaffolding/ScaffoldingRel.java @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.test.scaffolding; + +import static org.apache.calcite.sql.type.SqlTypeName.ANY; +import static org.apache.calcite.sql.type.SqlTypeName.BIGINT; +import static org.apache.calcite.sql.type.SqlTypeName.BINARY; +import static org.apache.calcite.sql.type.SqlTypeName.BOOLEAN; +import static org.apache.calcite.sql.type.SqlTypeName.CHAR; +import static org.apache.calcite.sql.type.SqlTypeName.FLOAT; +import static org.apache.calcite.sql.type.SqlTypeName.INTEGER; +import static org.apache.calcite.sql.type.SqlTypeName.SMALLINT; +import static org.apache.calcite.sql.type.SqlTypeName.TINYINT; +import static org.apache.calcite.sql.type.SqlTypeName.VARCHAR; +import static org.apache.calcite.sql.type.SqlTypeUtil.createArrayType; + +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.sql.type.SqlTypeName; + +import com.dremio.exec.planner.types.JavaTypeFactoryImpl; + +/** + * Immutable global variables for RelNodes and RexNodes. + */ +public final class ScaffoldingRel { + public static final RelDataTypeFactory TYPE_FACTORY = JavaTypeFactoryImpl.INSTANCE; + public static final RelDataType VARCHAR_TYPE = TYPE_FACTORY.createSqlType(VARCHAR); + public static final RelDataType ANY_TYPE = TYPE_FACTORY.createSqlType(ANY); + public static final RelDataType VARCHAR_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(VARCHAR_TYPE, true); + public static final RelDataType CHAR_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(TYPE_FACTORY.createSqlType(CHAR), true); + public static final RelDataType FLOAT_TYPE = TYPE_FACTORY.createSqlType(FLOAT); + public static final RelDataType BIG_INT_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(TYPE_FACTORY.createSqlType(BIGINT), true); + public static final RelDataType INT_TYPE = TYPE_FACTORY.createSqlType(INTEGER); + public static final RelDataType INT_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(INT_TYPE, true); + public static final RelDataType INT_NULL_ARRAY_NULL_COLUMN_TYPE = + TYPE_FACTORY.createTypeWithNullability( + TYPE_FACTORY.createArrayType(INT_NULL_TYPE, -1), true); + public static final RelDataType INT_ARRAY_COLUMN_TYPE = createArrayType(TYPE_FACTORY,INT_TYPE,false); + public static final RelDataType VARCHAR_ARRAY_COLUMN_TYPE = createArrayType(TYPE_FACTORY,VARCHAR_TYPE,false); + public static final RelDataType SMALL_INT_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(TYPE_FACTORY.createSqlType(SMALLINT), true); + public static final RelDataType TINY_INT_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(TYPE_FACTORY.createSqlType(TINYINT), true); + public static final RelDataType DATE_TYPE = + TYPE_FACTORY.createSqlType(SqlTypeName.DATE); + public static final RelDataType DATE_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(DATE_TYPE, true); + public static final RelDataType BOOLEAN_TYPE = TYPE_FACTORY.createSqlType(BOOLEAN); + public static final RelDataType BOOLEAN_NULL_TYPE = + TYPE_FACTORY.createTypeWithNullability(BOOLEAN_TYPE, true); + public static final RelDataType BINARY_TYPE = TYPE_FACTORY.createSqlType(BINARY); + + public static final RexBuilder REX_BUILDER = new RexBuilder(TYPE_FACTORY); + + private ScaffoldingRel() { + } +} diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/CustomerGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/CustomerGenerator.java index 05b8f83828..8bb592c512 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/CustomerGenerator.java +++ 
b/sabot/kernel/src/test/java/io/airlift/tpch/CustomerGenerator.java @@ -105,6 +105,7 @@ private String getRandomTime(int seed) { return sb.toString(); } + @Override protected void generateRecord(long globalRecordIndex, int outputIndex){ final long customerKey = globalRecordIndex; final long nationKey = nationKeyRandom.nextValue(); diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/DistributionLoader.java b/sabot/kernel/src/test/java/io/airlift/tpch/DistributionLoader.java index 9587bc3b12..1e62e6feb3 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/DistributionLoader.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/DistributionLoader.java @@ -88,15 +88,13 @@ private static Distribution loadDistribution(Iterator lines, String name int weight; try { weight = Integer.parseInt(parts.get(1)); - } - catch (NumberFormatException e) { + } catch (NumberFormatException e) { throw new IllegalStateException(String.format("Invalid distribution %s: invalid weight on line %s", name, line)); } - if (value.equalsIgnoreCase("count")) { + if ("count".equalsIgnoreCase(value)) { count = weight; - } - else { + } else { members.put(value, weight); } } diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/Distributions.java b/sabot/kernel/src/test/java/io/airlift/tpch/Distributions.java index 3071bc1ef0..593ebfaa27 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/Distributions.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/Distributions.java @@ -51,8 +51,7 @@ public static synchronized Distributions getDefaultDistributions() URL resource = Resources.getResource("dists.dss"); checkState(resource != null, "Distribution file 'dists.dss' not found"); DEFAULT_DISTRIBUTIONS = new Distributions(loadDistribution(Resources.asCharSource(resource, UTF_8))); - } - catch (IOException e) { + } catch (IOException e) { throw Throwables.propagate(e); } } diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/RandomBoundedLong.java b/sabot/kernel/src/test/java/io/airlift/tpch/RandomBoundedLong.java index e21cdb6a33..ee55848571 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/RandomBoundedLong.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/RandomBoundedLong.java @@ -46,8 +46,7 @@ public RandomBoundedLong(long seed, boolean use64Bits, long lowValue, long highV if (use64Bits) { this.randomLong = new RandomLong(seed, expectedRowCount); this.randomInt = null; - } - else { + } else { this.randomLong = null; this.randomInt = new RandomInt(seed, expectedRowCount); } @@ -60,8 +59,7 @@ public long nextValue() { if (randomLong != null) { return randomLong.nextLong(lowValue, highValue); - } - else { + } else { return randomInt.nextInt((int) lowValue, (int) highValue); } } @@ -70,8 +68,7 @@ public void rowFinished() { if (randomLong != null) { randomLong.rowFinished(); - } - else { + } else { randomInt.rowFinished(); } } @@ -80,8 +77,7 @@ public void advanceRows(long rowCount) { if (randomLong != null) { randomLong.advanceRows(rowCount); - } - else { + } else { randomInt.advanceRows(rowCount); } } diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/SparseUnionGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/SparseUnionGenerator.java index 802de07754..58a8565f22 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/SparseUnionGenerator.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/SparseUnionGenerator.java @@ -80,8 +80,7 @@ protected void generateRecord(final long globalRecordIndex, final int outputInde unionWriter.writeVarChar(0, varCharVal.length, tempBuf); } - } - else 
{ + } else { unionWriter.writeInt(intRandom.nextValue()); } diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/TemperatureGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/TemperatureGenerator.java index 01e49007d0..a0741cb9d0 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/TemperatureGenerator.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/TemperatureGenerator.java @@ -40,6 +40,7 @@ public TemperatureGenerator(final BufferAllocator allocator, final GenerationDef } + @Override protected void generateRecord(final long globalRecordIndex, final int outputIndex){ final UnionListWriter listWriter = new UnionListWriter(temperature); diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/TextPool.java b/sabot/kernel/src/test/java/io/airlift/tpch/TextPool.java index 85b25c415b..b21e5ce3ec 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/TextPool.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/TextPool.java @@ -54,6 +54,7 @@ public static synchronized TextPool getDefaultTestPool() { public TextPool(int size, Distributions distributions) { this(size, distributions, new TextGenerationProgressMonitor() { + @Override public void updateProgress(double progress) { } }); @@ -170,6 +171,7 @@ private static void generateNounPhrase(Distributions distributions, ByteArrayBui case ',': builder.erase(1); builder.append(", "); + continue; case ' ': continue; default: diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/TextPoolGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/TextPoolGenerator.java index 8862ec97a0..a5cbeb08ce 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/TextPoolGenerator.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/TextPoolGenerator.java @@ -55,6 +55,7 @@ public class TextPoolGenerator { public TextPoolGenerator(int size, Distributions distributions) { this(size, distributions, new TextGenerationProgressMonitor() { + @Override public void updateProgress(double progress) { } }); diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/TpchGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/TpchGenerator.java index 84714a4c89..ca966559eb 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/TpchGenerator.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/TpchGenerator.java @@ -85,6 +85,7 @@ public VectorAccessible getOutput() { return returned; } + @Override public int next(int desiredCount){ final long termination = Math.min(startIndex + rowCount, index + desiredCount); final int recordsGenerated = (int) (termination - index); @@ -116,6 +117,7 @@ public int next(int desiredCount){ protected abstract void generateRecord(long globalRecordIndex, int outputIndex); + @Override public void close() throws Exception { AutoCloseables.close((AutoCloseable) all, returned); } diff --git a/sabot/kernel/src/test/java/io/airlift/tpch/WordGroupsGenerator.java b/sabot/kernel/src/test/java/io/airlift/tpch/WordGroupsGenerator.java index 4a7167a8a7..dcfd584cc5 100644 --- a/sabot/kernel/src/test/java/io/airlift/tpch/WordGroupsGenerator.java +++ b/sabot/kernel/src/test/java/io/airlift/tpch/WordGroupsGenerator.java @@ -49,6 +49,7 @@ public WordGroupsGenerator(final BufferAllocator allocator, final GenerationDefi } + @Override protected void generateRecord(final long globalRecordIndex, final int outputIndex) { final UnionListWriter listWriter = new UnionListWriter(wordGroups); diff --git a/sabot/kernel/src/test/java/org/apache/arrow/vector/TestFixedListVarcharVector.java 
b/sabot/kernel/src/test/java/org/apache/arrow/vector/TestFixedListVarcharVector.java
index 06ffe71260..814bc6d19e 100644
--- a/sabot/kernel/src/test/java/org/apache/arrow/vector/TestFixedListVarcharVector.java
+++ b/sabot/kernel/src/test/java/org/apache/arrow/vector/TestFixedListVarcharVector.java
@@ -66,16 +66,16 @@ public void cleanupAfterTest() throws Exception {
   }
 
   @Test
-  public void TestBasic() {
-    TestBasic(false, false, false);
-    TestBasic(true, false, false);
-    TestBasic(false, true, false);
-    TestBasic(false, true, true);
-    TestBasic(true, true, false);
-    TestBasic(true, true, true);
+  public void testBasic() {
+    testBasic(false, false, false);
+    testBasic(true, false, false);
+    testBasic(false, true, false);
+    testBasic(false, true, true);
+    testBasic(true, true, false);
+    testBasic(true, true, true);
   }
 
-  private void TestBasic(boolean distinct, boolean orderby, boolean asc) {
+  private void testBasic(boolean distinct, boolean orderby, boolean asc) {
     int batchSize = 100;
     FixedListVarcharVector flv = new FixedListVarcharVector("TestCompactionThreshold", testAllocator,
@@ -108,8 +108,9 @@ private void TestBasic(boolean distinct, boolean orderby, boolean asc) {
     }
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestDistinctBasic() {
+  public void testDistinctBasic() {
     // Input Provided:
     // 0: "AA", "AA", BB
     // 1: "AAA", "BBB", "AAA"
@@ -150,7 +151,7 @@ public void TestDistinctBasic() {
   }
 
   @Test
-  public void TestMoveValuesAndFreeSpace() {
+  public void testMoveValuesAndFreeSpace() {
     VarCharVector v1 = new VarCharVector("TestCopyOutVarchar", testAllocator);
     VarCharVector v2 = new VarCharVector("TestCopyOutVarchar", testAllocator);
     VarCharVector v3 = new VarCharVector("TestCopyOutVarchar", testAllocator);
@@ -538,12 +539,13 @@ public void testCompactWithDistinctOrderByLimitSizeAndPhysicalRearrangement() {
   }
 
   @Test
-  public void TestLimitSizeBasic() {
+  public void testLimitSizeBasic() {
     // Input Provided:
     // 0: "AA", "AA", "BB"
     // 1: "AAA", "BBB", "AAA"
     // 2: "CC", "DDD"
     // 3: "CCC", "DDDD"
+    @SuppressWarnings("checkstyle:LocalFinalVariableName")
     final int SMALL_MAX_LIST_AGG_SIZE = 11;
     final String delimiter = ",";
     final ListVector tempSpace = FixedListVarcharVector.allocListVector(testAllocator, SMALL_MAX_LIST_AGG_SIZE);
@@ -583,8 +585,9 @@ public void TestLimitSizeBasic() {
     flv.close();
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestCompactWithDistinctAndLimitSize() {
+  public void testCompactWithDistinctAndLimitSize() {
     // Input Provided:
     // 0: "AA", "AA", "BB", "BB", "A", "B"
     // 1: "a", "b", "a", "b"
@@ -629,8 +632,9 @@ public void TestCompactWithDistinctAndLimitSize() {
     flv.close();
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestOutputToVectorBasic() {
+  public void testOutputToVectorBasic() {
     // Input Provided:
     // 0: "AA", "AA", "BB"
     // 1: "AAA", "BBB", "AAA"
@@ -673,8 +677,9 @@ public void TestOutputToVectorBasic() {
     v1.close();
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestAddValueToRowGroup() {
+  public void testAddValueToRowGroup() {
     // Input Provided:
     // 0: "AA", "AA", "BB"
     // 1: "aaaa", "bbbb", "aaaa"
@@ -737,8 +742,9 @@ public void TestAddValueToRowGroup() {
     flv.close();
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestAddValueToRowGroupWithOrderby() {
+  public void testAddValueToRowGroupWithOrderby() {
     // Input Provided:
     // 0: "AA", "BB", "AA"
     // 1: "aaa", "bbb", "aaa"
@@ -843,7 +849,7 @@ private ListVector makeAndPrepareListVector(final int numberOfGroups, final int
   }
 
   @Test
-  public void TestAddListVectorToRowGroup() {
+  public void testAddListVectorToRowGroup() {
     int batchSize = 100;
     final String delimiter = ",";
    VarCharVector v1 = new VarCharVector("TestCopyOutVarchar", testAllocator);
@@ -915,8 +921,9 @@ private static void assertListVector(final String[][] expected, final ListVector
     }
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestWriteRowGroupValues() {
+  public void testWriteRowGroupValues() {
     // ARRANGE
     final int VALUES_PER_BATCH = 64;
     final int MAX_LIST_AGG_SIZE = 1024 * 1024; // A high limit because this test isn't concerned with overflow behavior
@@ -946,8 +953,9 @@ public void TestWriteRowGroupValues() {
     flv.close();
   }
 
+  @SuppressWarnings("checkstyle:LocalFinalVariableName")
   @Test
-  public void TestOutputToListVector() {
+  public void testOutputToListVector() {
     // ARRANGE
     final int VALUES_PER_BATCH = 64;
     final int MAX_LIST_AGG_SIZE = 1024 * 1024; // A high limit because this test isn't concerned with overflow behavior
@@ -993,7 +1001,7 @@ private void addListVectorToFlv(final ListVector listVector, FixedListVarcharVec
   }
 
   @Test
-  public void TestFlvWithAndWithoutAllowOneOverFlow() {
+  public void testFlvWithAndWithoutAllowOneOverFlow() {
     /**
      * The test does the following.
      * 1. Insert values into flv which has allowOneOverFlow set to true.
@@ -1010,6 +1018,7 @@ public void TestFlvWithAndWithoutAllowOneOverFlow() {
     // 3: "CC", "DDD"
     // 4: null
     // 5: "CCC", "DDDD"
+    @SuppressWarnings("checkstyle:LocalFinalVariableName")
     final int SMALL_MAX_LIST_AGG_SIZE = 11;
     final String delimiter = ",";
     final ListVector tempSpace = FixedListVarcharVector.allocListVector(testAllocator, MAX_VALUES_PER_BATCH);
@@ -1089,10 +1098,11 @@ public void TestFlvWithAndWithoutAllowOneOverFlow() {
   }
 
   @Test
-  public void TestAdjustMaxListAggSize() {
+  public void testAdjustMaxListAggSize() {
     // Input Provided:
     // 0: "123456789012345", "123", "12345678901234567890123456789"
     // 1: "123", "123456789012345", "12345678901234567890123456789"
+    @SuppressWarnings("checkstyle:LocalFinalVariableName")
     final int SMALL_MAX_LIST_AGG_SIZE = 10;
     final String delimiter = ",";
     final ListVector tempSpace = FixedListVarcharVector.allocListVector(testAllocator, SMALL_MAX_LIST_AGG_SIZE);
@@ -1128,9 +1138,10 @@ public void TestAdjustMaxListAggSize() {
   }
 
   @Test
-  public void TestOrderbyMultipleCompacts() {
+  public void testOrderbyMultipleCompacts() {
     // Input Provided:
     // 0: "12345678", "12345678", "12345678"
+    @SuppressWarnings("checkstyle:LocalFinalVariableName")
     final int SMALL_MAX_LIST_AGG_SIZE = 25;
     final String delimiter = "";
     final ListVector tempSpace = FixedListVarcharVector.allocListVector(testAllocator, SMALL_MAX_LIST_AGG_SIZE);
@@ -1181,10 +1192,11 @@ public void TestOrderbyMultipleCompacts() {
   }
 
   @Test
-  public void TestOutputToVector() {
+  public void testOutputToVector() {
     // Input Provided:
     // 0: "01234567890123456789"
     // 1: "12345678", "12345678"
+    @SuppressWarnings("checkstyle:LocalFinalVariableName")
     final int SMALL_MAX_LIST_AGG_SIZE = 10;
     final String delimiter = "";
     final ListVector tempSpace = FixedListVarcharVector.allocListVector(testAllocator, SMALL_MAX_LIST_AGG_SIZE);
diff --git a/sabot/kernel/src/test/java/org/apache/arrow/vector/TestMutableVarcharVector.java b/sabot/kernel/src/test/java/org/apache/arrow/vector/TestMutableVarcharVector.java
index a820b97a73..02fac963b3 100644
--- a/sabot/kernel/src/test/java/org/apache/arrow/vector/TestMutableVarcharVector.java
+++ b/sabot/kernel/src/test/java/org/apache/arrow/vector/TestMutableVarcharVector.java
@@ -424,8 +424,7 @@ public void TestCopyOut()
       //counters must match
       Assert.assertEquals(startIdx, j);
       Assert.assertEquals(TOTAL_STRINGS + firstIndex, v1.getNullCount());
-    }
-    finally {
+    } finally {
       m1.close();
       v1.close();
     }
@@ -487,8 +486,7 @@ public void TestCopyOutPostCompaction()
 
       Assert.assertEquals(startIdx, j);
       Assert.assertEquals(TOTAL_STRINGS + firstIndex, v2.getNullCount());
-    }
-    finally {
+    } finally {
       m1.close();
       v2.close();
     }
diff --git a/sabot/kernel/src/test/resources-nessie/META-INF/nessie-compatibility.properties b/sabot/kernel/src/test/resources-nessie/META-INF/nessie-compatibility.properties
new file mode 100644
index 0000000000..86d97a1328
--- /dev/null
+++ b/sabot/kernel/src/test/resources-nessie/META-INF/nessie-compatibility.properties
@@ -0,0 +1,18 @@
+#
+# Copyright (C) 2017-2019 Dremio Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Test against the current (build) version of Nessie
+nessie.versions=@nessie.version@
diff --git a/sabot/kernel/src/test/resources/car-ownership.parquet b/sabot/kernel/src/test/resources/car-ownership.parquet
new file mode 100644
index 0000000000..af24b6d2f1
Binary files /dev/null and b/sabot/kernel/src/test/resources/car-ownership.parquet differ
diff --git a/sabot/kernel/src/test/resources/csv/regexp_col_like_test.csv b/sabot/kernel/src/test/resources/csv/regexp_col_like_test.csv
new file mode 100644
index 0000000000..23168ede55
--- /dev/null
+++ b/sabot/kernel/src/test/resources/csv/regexp_col_like_test.csv
@@ -0,0 +1,21 @@
+/cat/,.*(product|pdp).*,false
+/cat/,.*(cat).*,true
+/cat/,.*(gear|greek).*,false
+/cat/rat/,.*(product|pdp).*,false
+/cat/rat/,.*(cat).*,true
+/cat/rat/,.*(gear|greek).*,false
+/greek.com/,.*(product|pdp).*,false
+/greek.com/,.*(cat).*,false
+/greek.com/,.*(gear|greek).*,true
+/greekgear.com/,.*(product|pdp).*,false
+/greekgear.com/,.*(cat).*,false
+/greekgear.com/,.*(gear|greek).*,true
+/gear.com/,.*(product|pdp).*,false
+/gear.com/,.*(cat).*,false
+/gear.com/,.*(gear|greek).*,true
+/rat/greekCart,.*(product|pdp).*,false
+/rat/greekCart,.*(cat).*,false
+/rat/greekCart,.*(gear|greek).*,true
+/product/pdp,.*(product|pdp).*,true
+/product/pdp,.*(cat).*,false
+/product/pdp,.*(gear|greek).*,false
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000000.json.crc b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000000.json.crc
new file mode 100644
index 0000000000..7f3c15d958
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000000.json.crc differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000001.json.crc b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000001.json.crc
new file mode 100644
index 0000000000..59d97c5baf
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/.00000000000000000001.json.crc differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000000.json b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000000.json
new file mode 100644
index 0000000000..34f1f8ea9a
--- /dev/null
+++ b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000000.json
@@ -0,0 +1,3 @@
+{"protocol":{"minReaderVersion":1,"minWriterVersion":2}}
+{"metaData":{"id":"f51936b4-37ad-43e9-9cd1-8383fb498755","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"col1\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"c1+c2/c3\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["c1+c2/c3"],"configuration":{},"createdTime":1668686310092}}
+{"commitInfo":{"timestamp":1668686310122,"operation":"CREATE TABLE","operationParameters":{"isManaged":"true","description":null,"partitionBy":"[\"c1+c2/c3\"]","properties":"{}"},"isolationLevel":"Serializable","isBlindAppend":true,"operationMetrics":{},"engineInfo":"Apache-Spark/3.3.0 Delta-Lake/2.1.1","txnId":"7193378e-5d61-4d91-9832-a7c236e30094"}}
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000001.json b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000001.json
new file mode 100644
index 0000000000..9c7eefbff9
--- /dev/null
+++ b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/_delta_log/00000000000000000001.json
@@ -0,0 +1,4 @@
+{"add":{"path":"c1+c2%252Fc3=a%20b+c/part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet","partitionValues":{"c1+c2/c3":"a b+c"},"size":458,"modificationTime":1668686399714,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"col1\":1},\"maxValues\":{\"col1\":1},\"nullCount\":{\"col1\":0}}"}}
+{"add":{"path":"c1+c2%252Fc3=a%253Fb%2525c/part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet","partitionValues":{"c1+c2/c3":"a?b%c"},"size":458,"modificationTime":1668686399714,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"col1\":2},\"maxValues\":{\"col1\":2},\"nullCount\":{\"col1\":0}}"}}
+{"add":{"path":"c1+c2%252Fc3=a%253Db/part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet","partitionValues":{"c1+c2/c3":"a=b"},"size":458,"modificationTime":1668686399714,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"col1\":3},\"maxValues\":{\"col1\":3},\"nullCount\":{\"col1\":0}}"}}
+{"commitInfo":{"timestamp":1668686399722,"operation":"WRITE","operationParameters":{"mode":"Append","partitionBy":"[]"},"readVersion":0,"isolationLevel":"Serializable","isBlindAppend":true,"operationMetrics":{"numFiles":"3","numOutputRows":"3","numOutputBytes":"1374"},"engineInfo":"Apache-Spark/3.3.0 Delta-Lake/2.1.1","txnId":"ec5ee80e-1994-4428-8796-5616a5a8db6d"}}
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/.part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet.crc b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/.part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet.crc
new file mode 100644
index 0000000000..2316410dff
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/.part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet.crc differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet
new file mode 100644
index 0000000000..6d8827b483
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a b+c/part-00000-d6a9f174-5737-43db-b3f9-ca1bb0b7df66.c000.snappy.parquet differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/.part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet.crc b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/.part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet.crc
new file mode 100644
index 0000000000..b5db51914d
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/.part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet.crc differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet
new file mode 100644
index 0000000000..4e30bf74b4
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Db/part-00002-703c53f9-18db-4718-90ce-5aa59196f72c.c000.snappy.parquet differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/.part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet.crc b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/.part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet.crc
new file mode 100644
index 0000000000..cfb7262268
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/.part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet.crc differ
diff --git a/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet
new file mode 100644
index 0000000000..4ddeae794f
Binary files /dev/null and b/sabot/kernel/src/test/resources/deltalake/deltaMixCharsName/c1+c2%2Fc3=a%3Fb%25c/part-00001-3bfa98e0-4a8f-48ab-bd26-14a93aef5110.c000.snappy.parquet differ
diff --git a/sabot/kernel/src/test/resources/deltalake/empty_last_checkpoint b/sabot/kernel/src/test/resources/deltalake/empty_last_checkpoint
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sabot/kernel/src/test/resources/goldenfiles/expected/FilterSplitTest.goldenTest.yaml b/sabot/kernel/src/test/resources/goldenfiles/expected/FilterSplitTest.goldenTest.yaml
new file mode 100644
index 0000000000..39a121dcfe
--- /dev/null
+++ b/sabot/kernel/src/test/resources/goldenfiles/expected/FilterSplitTest.goldenTest.yaml
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2017-2019 Dremio Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+---
+  -
+    description: "simpleCompound: (a < 1 AND dir0 in (2,3))"
+    input: "AND(<($0, 1), OR(=($1, 2), =($1, 3)))"
+    output: "OR(=($1, 2), =($1, 3))"
+  -
+    description: "badFunc (dir0 || 1)"
+    input: "||($0, $1)"
+  -
+    description: "twoLevelDir: (dir0 = 1 and dir1 = 2) OR (dir0 = 3 and dir1 = 4)"
+    input: "OR(AND(=($1, 1), =($2, 2)), AND(=($1, 3), =($2, 4)))"
+    output: "OR(AND(=($1, 1), =($2, 2)), AND(=($1, 3), =($2, 4)))"
+  -
+    description: "badOr: (dir0 = 1 and dir1 = 2) OR (a < 5)"
+    input: "OR(AND(=($1, 1), =($2, 2)), <($0, 5))"
+  -
+    description: "disjunctiveNormalForm (a, dir0) IN ((0, 1), (2, 3))"
+    input: "OR(AND(=($0, 0), =($1, 1)), AND(=($0, 2), =($1, 3)))"
+    output: "OR(=($1, 1), =($1, 3))"
+  -
+    description: "Large DNF (a, dir0) IN (....)"
+    input: "OR(AND(=($0, 0), =($1, 0)), AND(=($0, 1), =($1, 1)), AND(=($0, 2), =($1, 2)), AND(=($0, 3), =($1, 3)), AND(=($0, 4), =($1, 4)), AND(=($0, 5), =($1, 5)), AND(=($0, 6), =($1, 6)), AND(=($0, 7), =($1, 7)), AND(=($0, 8), =($1, 8)), AND(=($0, 9), =($1, 9)), AND(=($0, 10), =($1, 10)), AND(=($0, 11), =($1, 11)), AND(=($0, 12), =($1, 12)), AND(=($0, 13), =($1, 13)), AND(=($0, 14), =($1, 14)), AND(=($0, 15), =($1, 15)), AND(=($0, 16), =($1, 16)), AND(=($0, 17), =($1, 17)), AND(=($0, 18), =($1, 18)), AND(=($0, 19), =($1, 19)), AND(=($0, 20), =($1, 20)), AND(=($0, 21), =($1, 21)), AND(=($0, 22), =($1, 22)), AND(=($0, 23), =($1, 23)), AND(=($0, 24), =($1, 24)), AND(=($0, 25), =($1, 25)), AND(=($0, 26), =($1, 26)), AND(=($0, 27), =($1, 27)), AND(=($0, 28), =($1, 28)), AND(=($0, 29), =($1, 29)), AND(=($0, 30), =($1, 30)), AND(=($0, 31), =($1, 31)), AND(=($0, 32), =($1, 32)), AND(=($0, 33), =($1, 33)), AND(=($0, 34), =($1, 34)), AND(=($0, 35), =($1, 35)), AND(=($0, 36), =($1, 36)), AND(=($0, 37), =($1, 37)), AND(=($0, 38), =($1, 38)), AND(=($0, 39), =($1, 39)), AND(=($0, 40), =($1, 40)), AND(=($0, 41), =($1, 41)), AND(=($0, 42), =($1, 42)), AND(=($0, 43), =($1, 43)), AND(=($0, 44), =($1, 44)), AND(=($0, 45), =($1, 45)), AND(=($0, 46), =($1, 46)), AND(=($0, 47), =($1, 47)), AND(=($0, 48), =($1, 48)), AND(=($0, 49), =($1, 49)), AND(=($0, 50), =($1, 50)), AND(=($0, 51), =($1, 51)), AND(=($0, 52), =($1, 52)), AND(=($0, 53), =($1, 53)), AND(=($0, 54), =($1, 54)), AND(=($0, 55), =($1, 55)), AND(=($0, 56), =($1, 56)), AND(=($0, 57), =($1, 57)), AND(=($0, 58), =($1, 58)), AND(=($0, 59), =($1, 59)), AND(=($0, 60), =($1, 60)), AND(=($0, 61), =($1, 61)), AND(=($0, 62), =($1, 62)), AND(=($0, 63), =($1, 63)), AND(=($0, 64), =($1, 64)), AND(=($0, 65), =($1, 65)), AND(=($0, 66), =($1, 66)), AND(=($0, 67), =($1, 67)), AND(=($0, 68), =($1, 68)), AND(=($0, 69), =($1, 69)), AND(=($0, 70), =($1, 70)), AND(=($0, 71), =($1, 71)), AND(=($0, 72), =($1, 72)), AND(=($0, 73), =($1, 73)), AND(=($0, 74), =($1, 74)), AND(=($0, 75), =($1, 75)), AND(=($0, 76), =($1, 76)), AND(=($0, 77), =($1, 77)), AND(=($0, 78), =($1, 78)), AND(=($0, 79), =($1, 79)), AND(=($0, 80), =($1, 80)), AND(=($0, 81), =($1, 81)), AND(=($0, 82), =($1, 82)), AND(=($0, 83), =($1, 83)), AND(=($0, 84), =($1, 84)), AND(=($0, 85), =($1, 85)), AND(=($0, 86), =($1, 86)), AND(=($0, 87), =($1, 87)), AND(=($0, 88), =($1, 88)), AND(=($0, 89), =($1, 89)), AND(=($0, 90), =($1, 90)), AND(=($0, 91), =($1, 91)), AND(=($0, 92), =($1, 92)), AND(=($0, 93), =($1, 93)), AND(=($0, 94), =($1, 94)), AND(=($0, 95), =($1, 95)), AND(=($0, 96), =($1, 96)), AND(=($0, 97), =($1, 97)), AND(=($0, 98), =($1, 98)), AND(=($0, 99), =($1, 99)))"
+    output: "OR(=($1, 0), =($1, 1), =($1, 2), =($1, 3), =($1, 4), =($1, 5), =($1, 6), =($1, 7), =($1, 8), =($1, 9), =($1, 10), =($1, 11), =($1, 12), =($1, 13), =($1, 14), =($1, 15), =($1, 16), =($1, 17), =($1, 18), =($1, 19), =($1, 20), =($1, 21), =($1, 22), =($1, 23), =($1, 24), =($1, 25), =($1, 26), =($1, 27), =($1, 28), =($1, 29), =($1, 30), =($1, 31), =($1, 32), =($1, 33), =($1, 34), =($1, 35), =($1, 36), =($1, 37), =($1, 38), =($1, 39), =($1, 40), =($1, 41), =($1, 42), =($1, 43), =($1, 44), =($1, 45), =($1, 46), =($1, 47), =($1, 48), =($1, 49), =($1, 50), =($1, 51), =($1, 52), =($1, 53), =($1, 54), =($1, 55), =($1, 56), =($1, 57), =($1, 58), =($1, 59), =($1, 60), =($1, 61), =($1, 62), =($1, 63), =($1, 64), =($1, 65), =($1, 66), =($1, 67), =($1, 68), =($1, 69), =($1, 70), =($1, 71), =($1, 72), =($1, 73), =($1, 74), =($1, 75), =($1, 76), =($1, 77), =($1, 78), =($1, 79), =($1, 80), =($1, 81), =($1, 82), =($1, 83), =($1, 84), =($1, 85), =($1, 86), =($1, 87), =($1, 88), =($1, 89), =($1, 90), =($1, 91), =($1, 92), =($1, 93), =($1, 94), =($1, 95), =($1, 96), =($1, 97), =($1, 98), =($1, 99))"
diff --git a/sabot/kernel/src/test/resources/goldenfiles/header.txt b/sabot/kernel/src/test/resources/goldenfiles/header.txt
new file mode 100644
index 0000000000..ee372f9031
--- /dev/null
+++ b/sabot/kernel/src/test/resources/goldenfiles/header.txt
@@ -0,0 +1,16 @@
+#
+# Copyright (C) 2017-2019 Dremio Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
diff --git a/sabot/kernel/src/test/resources/iceberg/table_with_delete/v2_table_files_out.csv b/sabot/kernel/src/test/resources/iceberg/table_with_delete/v2_table_files_out.csv
new file mode 100644
index 0000000000..233e7e3f48
--- /dev/null
+++ b/sabot/kernel/src/test/resources/iceberg/table_with_delete/v2_table_files_out.csv
@@ -0,0 +1,8 @@
+content,file_path,file_format,partition,record_count,file_size_in_bytes,spec_id
+DATA,/tmp/iceberg/data/foo/foo-0.parquet,PARQUET,{category=foo},100,1196,0
+DATA,/tmp/iceberg/data/bar/bar-0.parquet,PARQUET,{category=bar},100,1197,0
+DATA,/tmp/iceberg/data/baz/baz-0.parquet,PARQUET,{category=baz},100,1198,0
+DATA,/tmp/iceberg/data/qux/qux-0.parquet,PARQUET,{category=qux},100,1198,0
+DATA,/tmp/iceberg/data/quux/quux-0.parquet,PARQUET,{category=quux},100,1206,0
+DATA,/tmp/iceberg/data/quuz/quuz-0.parquet,PARQUET,{category=quuz},100,1206,0
+POSITION_DELETES,/tmp/iceberg/data/qux/qux-delete-0.parquet,PARQUET,{category=qux},6,2097,0
diff --git a/sabot/kernel/src/test/resources/metadatarefresh/bogus.csv b/sabot/kernel/src/test/resources/metadatarefresh/bogus.csv
new file mode 100644
index 0000000000..91378d4fbd
--- /dev/null
+++ b/sabot/kernel/src/test/resources/metadatarefresh/bogus.csv
@@ -0,0 +1,2 @@
+c1,c2
+a,1
diff --git a/sabot/kernel/src/test/resources/null_str_int_array_map.parquet b/sabot/kernel/src/test/resources/null_str_int_array_map.parquet
new file mode 100644
index 0000000000..8a9e8cdf13
Binary files /dev/null and b/sabot/kernel/src/test/resources/null_str_int_array_map.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/like_test.parquet b/sabot/kernel/src/test/resources/parquet/like_test.parquet
new file mode 100644
index 0000000000..f639c1cf10
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/like_test.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/like_test_2.parquet b/sabot/kernel/src/test/resources/parquet/like_test_2.parquet
new file mode 100644
index 0000000000..204eca2d86
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/like_test_2.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_all.parquet b/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_all.parquet
new file mode 100644
index 0000000000..e1a1318856
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_all.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_bounds.parquet b/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_bounds.parquet
new file mode 100644
index 0000000000..4c7e066939
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/microseconds/v2/rowwise_timestamp_time_micro_bounds.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_all.parquet b/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_all.parquet
new file mode 100644
index 0000000000..8715303462
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_all.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_bounds.parquet b/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_bounds.parquet
new file mode 100644
index 0000000000..a63eb1217a
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/microseconds/v2/vector_timestamp_time_micro_bounds.parquet differ
diff --git a/sabot/kernel/src/test/resources/parquet/rowise_microseconds/rowise_micros.parquet b/sabot/kernel/src/test/resources/parquet/rowise_microseconds/rowise_micros.parquet
new file mode 100644
index 0000000000..18a2445f6d
Binary files /dev/null and b/sabot/kernel/src/test/resources/parquet/rowise_microseconds/rowise_micros.parquet differ
diff --git a/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt
new file mode 100644
index 0000000000..a8925b4f28
--- /dev/null
+++ b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt
@@ -0,0 +1,3 @@
+A,B
+C,D
+E,F
\ No newline at end of file
diff --git a/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt1 b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt1
new file mode 100644
index 0000000000..a8925b4f28
--- /dev/null
+++ b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt1
@@ -0,0 +1,3 @@
+A,B
+C,D
+E,F
\ No newline at end of file
diff --git a/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt2 b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt2
new file mode 100644
index 0000000000..a8925b4f28
--- /dev/null
+++ b/sabot/kernel/src/test/resources/store/text/FolderWithoutExtension/noExt2
@@ -0,0 +1,3 @@
+A,B
+C,D
+E,F
\ No newline at end of file
diff --git a/sabot/kernel/src/test/resources/store/text/WithQuotedCrLf.tbl b/sabot/kernel/src/test/resources/store/text/WithQuotedCrLf.tbl
index 9ee73d8006..f1b5d01dd4 100644
--- a/sabot/kernel/src/test/resources/store/text/WithQuotedCrLf.tbl
+++ b/sabot/kernel/src/test/resources/store/text/WithQuotedCrLf.tbl
@@ -1,6 +1,6 @@
-"a^M
-1"|a|a^M
-a|"a^M
-2"|a^M
-a|a|"a^M
-3"^M
+"a
+1"|a|a
+a|"a
+2"|a
+a|a|"a
+3"
diff --git a/sabot/kernel/src/test/resources/store/text/custom_quote_escape.csv b/sabot/kernel/src/test/resources/store/text/custom_quote_escape.csv
index 8f2daa5403..1fac98bb62 100644
--- a/sabot/kernel/src/test/resources/store/text/custom_quote_escape.csv
+++ b/sabot/kernel/src/test/resources/store/text/custom_quote_escape.csv
@@ -1,3 +1,3 @@
 c1,"c2","c3"
-"\"r1c1",r1c2,"\"r1c3"
-r2c1,"\"r2c2",r2c3
+"\"r1\"\c1\"",r1c2,"\"r1c3\""
+r2c1,"\"r2c2\"",r2c3
diff --git a/sabot/kernel/src/test/resources/store/text/double_double_quote.csv b/sabot/kernel/src/test/resources/store/text/double_double_quote.csv
new file mode 100644
index 0000000000..7aa3c99861
--- /dev/null
+++ b/sabot/kernel/src/test/resources/store/text/double_double_quote.csv
@@ -0,0 +1,3 @@
+c1,"c2","c3"
+"r1c1","This is value field value with an \"embedded\" quoted word using backslash-quote","r1c3"
+r2c1,"This is value field value with an ""embedded"" quoted word using double-double-quote",r2c3
diff --git a/sabot/kernel/src/test/resources/store/text/quote_escape.csv b/sabot/kernel/src/test/resources/store/text/quote_escape.csv
index a02da7618a..c69f2ee4ea 100644
--- a/sabot/kernel/src/test/resources/store/text/quote_escape.csv
+++ b/sabot/kernel/src/test/resources/store/text/quote_escape.csv
@@ -1,3 +1,3 @@
 c1,"c2","c3"
-"'"r1c1",r1c2,"'"r1c3"
-r2c1,"'"r2c2",r2c3
+"""r1""c1""",r1c2,"""r1"c3"""
+r2c1,"""r2c2""",r2c3
diff --git a/sabot/kernel/src/test/resources/store/text/testWithoutExtension b/sabot/kernel/src/test/resources/store/text/testWithoutExtension
new file mode 100644
index 0000000000..a8925b4f28
--- /dev/null
+++ b/sabot/kernel/src/test/resources/store/text/testWithoutExtension
@@ -0,0 +1,3 @@
+A,B
+C,D
+E,F
\ No newline at end of file
diff --git a/sabot/kernel/src/test/resources/store/text/unescaped_quote.csv b/sabot/kernel/src/test/resources/store/text/unescaped_quote.csv
index 67d5ed7601..2e14e2be89 100644
--- a/sabot/kernel/src/test/resources/store/text/unescaped_quote.csv
+++ b/sabot/kernel/src/test/resources/store/text/unescaped_quote.csv
@@ -1,3 +1,3 @@
 c1,"c2","c3"
-"r1c1"",r1c2,"r1c3""
+"r1"c1",r1c2,"r1"c3"
 r2c1,"r2c2"",r2c3
diff --git a/sabot/logical/pom.xml b/sabot/logical/pom.xml
index dafe0d9794..21dca8b237 100644
--- a/sabot/logical/pom.xml
+++ b/sabot/logical/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>com.dremio.sabot</groupId>
     <artifactId>dremio-sabot-parent</artifactId>
-    <version>24.0.0-202302100528110223-3a169b7c</version>
+    <version>24.1.0-202306130653310132-d30779f6</version>
   </parent>
 
   <artifactId>dremio-sabot-logical</artifactId>
diff --git a/sabot/logical/src/main/java/com/dremio/common/exceptions/ExpressionParsingException.java b/sabot/logical/src/main/java/com/dremio/common/exceptions/ExpressionParsingException.java
index 4b460193dd..707aaf928e 100644
--- a/sabot/logical/src/main/java/com/dremio/common/exceptions/ExpressionParsingException.java
+++ b/sabot/logical/src/main/java/com/dremio/common/exceptions/ExpressionParsingException.java
@@ -16,7 +16,6 @@
 package com.dremio.common.exceptions;
 
 public class ExpressionParsingException extends LogicalPlanParsingException {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExpressionParsingException.class);
 
   public ExpressionParsingException() {
     super();
diff --git a/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalOperatorValidationException.java b/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalOperatorValidationException.java
deleted file mode 100644
index 3d08c3685a..0000000000
--- a/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalOperatorValidationException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2017-2019 Dremio Corporation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.dremio.common.exceptions;
-
-public class LogicalOperatorValidationException extends LogicalPlanParsingException {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LogicalOperatorValidationException.class);
-
-  public LogicalOperatorValidationException() {
-    super();
-  }
-
-  public LogicalOperatorValidationException(String message, Throwable cause, boolean enableSuppression,
-      boolean writableStackTrace) {
-    super(message, cause, enableSuppression, writableStackTrace);
-  }
-
-  public LogicalOperatorValidationException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public LogicalOperatorValidationException(String message) {
-    super(message);
-  }
-
-  public LogicalOperatorValidationException(Throwable cause) {
-    super(cause);
-  }
-
-}
diff --git a/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalPlanParsingException.java b/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalPlanParsingException.java
index b501c6b906..65fc6bcb47 100644
--- a/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalPlanParsingException.java
+++ b/sabot/logical/src/main/java/com/dremio/common/exceptions/LogicalPlanParsingException.java
@@ -14,12 +14,8 @@
  * limitations under the License.
  */
 package com.dremio.common.exceptions;
-import com.dremio.common.logical.data.LogicalOperator;
 
 public class LogicalPlanParsingException extends RuntimeException{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LogicalPlanParsingException.class);
-
-  private LogicalOperator operator;
 
   public LogicalPlanParsingException() {
     super();
@@ -42,5 +38,4 @@ public LogicalPlanParsingException(Throwable cause) {
     super(cause);
   }
 
-
 }
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/CodeModelArrowHelper.java b/sabot/logical/src/main/java/com/dremio/common/expression/CodeModelArrowHelper.java
index 76873b7dd1..ca00a26d4c 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/CodeModelArrowHelper.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/CodeModelArrowHelper.java
@@ -63,14 +63,19 @@
 import com.sun.codemodel.JCodeModel;
 import com.sun.codemodel.JType;
 
-public class CodeModelArrowHelper {
+public final class CodeModelArrowHelper {
+
+  private CodeModelArrowHelper() {
+    // utility class
+  }
+
   public static JClass getHolderType(CompleteType type, final JCodeModel model) {
     return model.ref(type.getHolderClass());
   }
 
   public static JType getHolderType(JCodeModel model, com.dremio.common.types.TypeProtos.MinorType type,
       com.dremio.common.types.TypeProtos.DataMode mode) {
-    outside: switch (type) {
+    switch (type) {
     case UNION:
       return model._ref(UnionHolder.class);
     case STRUCT:
@@ -85,8 +90,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableTinyIntHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case UINT1:
       switch (mode) {
       case REQUIRED:
@@ -94,8 +100,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableUInt1Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case UINT2:
       switch (mode) {
       case REQUIRED:
@@ -103,8 +110,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableUInt2Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case SMALLINT:
       switch (mode) {
      case REQUIRED:
@@ -112,8 +120,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableSmallIntHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case INT:
       switch (mode) {
       case REQUIRED:
@@ -121,8 +130,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableIntHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case UINT4:
       switch (mode) {
       case REQUIRED:
@@ -130,8 +140,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableUInt4Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case FLOAT4:
       switch (mode) {
       case REQUIRED:
@@ -139,8 +150,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableFloat4Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case INTERVALYEAR:
       switch (mode) {
       case REQUIRED:
@@ -148,8 +160,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableIntervalYearHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case TIME:
       switch (mode) {
       case REQUIRED:
@@ -157,8 +170,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableTimeMilliHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case BIGINT:
       switch (mode) {
       case REQUIRED:
@@ -166,8 +180,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableBigIntHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case UINT8:
       switch (mode) {
       case REQUIRED:
@@ -175,8 +190,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableUInt8Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case FLOAT8:
       switch (mode) {
       case REQUIRED:
@@ -184,8 +200,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableFloat8Holder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case DATE:
       switch (mode) {
       case REQUIRED:
@@ -193,8 +210,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableDateMilliHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case TIMESTAMP:
       switch (mode) {
       case REQUIRED:
@@ -202,8 +220,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableTimeStampMilliHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case INTERVALDAY:
       switch (mode) {
       case REQUIRED:
@@ -211,8 +230,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableIntervalDayHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case DECIMAL:
       switch (mode) {
       case REQUIRED:
@@ -220,8 +240,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableDecimalHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case FIXEDSIZEBINARY:
       switch (mode) {
       case REQUIRED:
@@ -229,22 +250,29 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableFixedSizeBinaryHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case VARBINARY:
       switch (mode) {
       case REQUIRED:
         return model._ref(VarBinaryHolder.class);
       case OPTIONAL:
         return model._ref(NullableVarBinaryHolder.class);
+      default:
+        break;
       }
+      break;
     case VARCHAR:
       switch (mode) {
       case REQUIRED:
         return model._ref(VarCharHolder.class);
       case OPTIONAL:
         return model._ref(NullableVarCharHolder.class);
+      default:
+        break;
       }
+      break;
     case BIT:
       switch (mode) {
       case REQUIRED:
@@ -252,8 +280,9 @@ public static JType getHolderType(JCodeModel model, com.dremio.common.types.Type
       case OPTIONAL:
         return model._ref(NullableBitHolder.class);
       default:
-        break outside;
+        break;
       }
+      break;
     case GENERIC_OBJECT: {
       return model._ref(ObjectHolder.class);
     }
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/CompleteTypeInLogicalExpression.java b/sabot/logical/src/main/java/com/dremio/common/expression/CompleteTypeInLogicalExpression.java
index d7eae091bb..c32a3b444d 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/CompleteTypeInLogicalExpression.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/CompleteTypeInLogicalExpression.java
@@ -41,6 +41,7 @@ public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V valu
     throw new UnsupportedOperationException();
   }
 
+  @Override
   public int getSelfCost() {
     throw new UnsupportedOperationException();
   }
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollector.java b/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollector.java
index 83a0b15519..27a343ba6e 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollector.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollector.java
@@ -42,6 +42,7 @@ public interface ErrorCollector extends AutoCloseable {
 
   public int getErrorCount();
 
+  @Override
   public void close();
 
   String toErrorString();
 }
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollectorImpl.java b/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollectorImpl.java
index a09dc51d5d..222c78e02a 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollectorImpl.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/ErrorCollectorImpl.java
@@ -94,6 +94,7 @@ public String toErrorString() {
     return "\n" + Joiner.on("\n").join(errors);
   }
 
+  @Override
   public void close(){
     if(!errors.isEmpty()){
       throw UserException.functionError().message(Joiner.on("\n").join(errors)).build(logger);
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/ExpressionStringBuilder.java b/sabot/logical/src/main/java/com/dremio/common/expression/ExpressionStringBuilder.java
index d9bacb989a..f0edcf27be 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/ExpressionStringBuilder.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/ExpressionStringBuilder.java
@@ -302,6 +302,7 @@ public Void visitCastExpression(CastExpression e, StringBuilder sb) throws Runti
       case STRUCT:
       case LIST:
       case MAP:
+      case UNION:
         // do nothing else.
        break;
       case VAR16CHAR:
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/LogicalExpressionBase.java b/sabot/logical/src/main/java/com/dremio/common/expression/LogicalExpressionBase.java
index ceea8ff8a3..b56077be86 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/LogicalExpressionBase.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/LogicalExpressionBase.java
@@ -41,11 +41,13 @@ public CompleteType getCompleteType() {
     throw new UnsupportedOperationException(String.format("The type of %s doesn't currently support LogicalExpression.getCompleteType().", this.getClass().getName()));
   }
 
+  @Override
   @JsonIgnore
   public int getSelfCost() {
     return 0;
   }
 
+  @Override
   @JsonIgnore
   public int getCumulativeCost() {
     int cost = this.getSelfCost();
@@ -57,6 +59,7 @@ public int getCumulativeCost() {
     return cost;
   }
 
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
     ExpressionStringBuilder esb = new ExpressionStringBuilder();
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/NullExpression.java b/sabot/logical/src/main/java/com/dremio/common/expression/NullExpression.java
index 7c52bd87ac..a2a2f4b1d5 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/NullExpression.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/NullExpression.java
@@ -38,8 +38,10 @@ public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V valu
     return visitor.visitNullExpression(this, value);
   }
 
+  @Override
   public int getSelfCost() { return 0 ; }
 
+  @Override
   public int getCumulativeCost() { return 0; }
 }
diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/SchemaPath.java b/sabot/logical/src/main/java/com/dremio/common/expression/SchemaPath.java
index 60166577fa..bc9ef94ddb 100644
--- a/sabot/logical/src/main/java/com/dremio/common/expression/SchemaPath.java
+++ b/sabot/logical/src/main/java/com/dremio/common/expression/SchemaPath.java
@@ -52,6 +52,7 @@ public static SchemaPath getCompoundPath(String... strings) {
     return new SchemaPath(s);
   }
 
+  @Override
   public PathSegment getLastSegment() {
     PathSegment s= rootSegment;
     while (s.getChild() != null) {
@@ -77,6 +78,7 @@ public static SchemaPath create(NamePart namePart) {
    * A simple is a path where there are no repeated elements outside the lowest level of the path.
    * @return Whether this path is a simple path.
*/ + @Override public boolean isSimplePath() { PathSegment seg = rootSegment; while (seg != null) { @@ -217,6 +219,7 @@ public int compareTo(SchemaPath o) { return this.getAsUnescapedPath().compareTo(o.getAsUnescapedPath()); } + @Override @JsonIgnore public int getSelfCost() { return 0; diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/fn/impl/MurmurHash3.java b/sabot/logical/src/main/java/com/dremio/common/expression/fn/impl/MurmurHash3.java index 0eb0c596a0..1da77f2be5 100644 --- a/sabot/logical/src/main/java/com/dremio/common/expression/fn/impl/MurmurHash3.java +++ b/sabot/logical/src/main/java/com/dremio/common/expression/fn/impl/MurmurHash3.java @@ -45,6 +45,7 @@ public static long murmur3_64(long bStart, long bEnd, ArrowBuf buffer, int seed) return murmur3_128(bStart, bEnd, buffer, seed).getHash1(); } + @SuppressWarnings({"FallThrough", "checkstyle:MissingSwitchDefault"}) public static HashValPair murmur3_128(long bStart, long bEnd, ArrowBuf buffer, int seed) { long h1 = seed & 0x00000000FFFFFFFFL; long h2 = seed & 0x00000000FFFFFFFFL; @@ -153,6 +154,7 @@ public static long murmur3_64(long val, int seed) { } + @SuppressWarnings({"FallThrough", "checkstyle:MissingSwitchDefault"}) public static int murmur3_32(int bStart, int bEnd, ArrowBuf buffer, int seed) { final long c1 = 0xcc9e2d51L; diff --git a/sabot/logical/src/main/java/com/dremio/common/expression/visitors/AggregateChecker.java b/sabot/logical/src/main/java/com/dremio/common/expression/visitors/AggregateChecker.java index 77fc4ac647..2b6a35ea21 100644 --- a/sabot/logical/src/main/java/com/dremio/common/expression/visitors/AggregateChecker.java +++ b/sabot/logical/src/main/java/com/dremio/common/expression/visitors/AggregateChecker.java @@ -136,6 +136,7 @@ public Boolean visitDoubleConstant(DoubleExpression dExpr, ErrorCollector errors public Boolean visitBooleanConstant(BooleanExpression e, ErrorCollector errors) { return false; } + @Override public Boolean visitDecimalConstant(DecimalExpression decExpr, ErrorCollector errors) { return false; } diff --git a/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyList.java b/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyList.java index acb614f1c4..01db70a1be 100644 --- a/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyList.java +++ b/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyList.java @@ -155,6 +155,7 @@ public Node(final V operator) { this.nodeValue = operator; } + @Override public int compareTo(final Node argNode) { // just do an identity compare since elsewhere you should ensure that only one node exists for each nodeValue. return argNode == this ? 
0 : -1; diff --git a/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyListBuilder.java b/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyListBuilder.java index 7b53cfa369..9a5cb13073 100644 --- a/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyListBuilder.java +++ b/sabot/logical/src/main/java/com/dremio/common/graph/AdjacencyListBuilder.java @@ -32,6 +32,7 @@ protected boolean requireDirected() { return true; } + @Override public boolean enter(V o) { visit(o); return true; diff --git a/sabot/logical/src/main/java/com/dremio/common/graph/Edge.java b/sabot/logical/src/main/java/com/dremio/common/graph/Edge.java index f7eed70b22..1f95c9751e 100644 --- a/sabot/logical/src/main/java/com/dremio/common/graph/Edge.java +++ b/sabot/logical/src/main/java/com/dremio/common/graph/Edge.java @@ -27,6 +27,7 @@ public Edge(final N argFrom, final N argTo, final int argWeight) { weight = argWeight; } + @Override public int compareTo(final Edge argEdge) { return weight - argEdge.weight; } diff --git a/sabot/logical/src/main/java/com/dremio/common/graph/GraphAlgos.java b/sabot/logical/src/main/java/com/dremio/common/graph/GraphAlgos.java index d2adc28257..4410d0a696 100644 --- a/sabot/logical/src/main/java/com/dremio/common/graph/GraphAlgos.java +++ b/sabot/logical/src/main/java/com/dremio/common/graph/GraphAlgos.java @@ -107,10 +107,10 @@ public static class Tarjan> { private int index = 0; private List.Node> stack = new LinkedList.Node>(); - private List.Node>> SCC = new LinkedList.Node>>(); + private List.Node>> scc = new LinkedList<>(); public List.Node>> executeTarjan(AdjacencyList graph) { - SCC.clear(); + scc.clear(); index = 0; stack.clear(); if (graph != null) { @@ -121,7 +121,7 @@ public List.Node>> executeTarjan(AdjacencyList graph) { } } } - return SCC; + return scc; } private List.Node>> tarjan(AdjacencyList.Node v, AdjacencyList list) { @@ -148,9 +148,9 @@ private List.Node>> tarjan(AdjacencyList.Node v, Adjace n = stack.remove(0); component.add(n); } while (n != v); - SCC.add(component); + scc.add(component); } - return SCC; + return scc; } } diff --git a/sabot/logical/src/main/java/com/dremio/common/logical/data/Order.java b/sabot/logical/src/main/java/com/dremio/common/logical/data/Order.java index c31b346770..86d1eb5a61 100644 --- a/sabot/logical/src/main/java/com/dremio/common/logical/data/Order.java +++ b/sabot/logical/src/main/java/com/dremio/common/logical/data/Order.java @@ -153,11 +153,9 @@ private static NullDirection getNullOrderingFromString( String strNullOrdering ) private static Direction filterSupportedDirections(Direction direction) { if (direction == null || direction == Direction.ASCENDING) { return Direction.ASCENDING; - } - else if (Direction.DESCENDING.equals( direction) ) { + } else if (Direction.DESCENDING.equals( direction) ) { return direction; - } - else { + } else { throw new IllegalArgumentException( "Unknown string (not \"ASC\", \"DESC\", " + "or null): \"" + direction + "\"" ); diff --git a/sabot/logical/src/main/java/com/dremio/common/logical/data/Window.java b/sabot/logical/src/main/java/com/dremio/common/logical/data/Window.java index 0e89789870..6d04e9cbe0 100644 --- a/sabot/logical/src/main/java/com/dremio/common/logical/data/Window.java +++ b/sabot/logical/src/main/java/com/dremio/common/logical/data/Window.java @@ -100,6 +100,7 @@ public Builder addWithin(FieldReference within, LogicalExpression expr) { return this; } + @Override public Window internalBuild() { //TODO withins can actually be empty: over(), over(order 
by ), ... checkState(!withins.isEmpty(), "Withins in window must not be empty."); diff --git a/sabot/pom.xml b/sabot/pom.xml index 2405ccd47d..f8c4e906f2 100644 --- a/sabot/pom.xml +++ b/sabot/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.sabot diff --git a/sabot/serializer/pom.xml b/sabot/serializer/pom.xml index 7e1edbb7be..9d4a9a34a9 100644 --- a/sabot/serializer/pom.xml +++ b/sabot/serializer/pom.xml @@ -24,7 +24,7 @@ com.dremio.sabot dremio-sabot-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-sabot-serializer @@ -64,12 +64,6 @@ test - - org.assertj - assertj-core - test - - com.fasterxml.jackson.datatype jackson-datatype-guava diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/ProtoRelSerializerFactory.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/ProtoRelSerializerFactory.java index 1a658f7334..9d6be15c20 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/ProtoRelSerializerFactory.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/ProtoRelSerializerFactory.java @@ -24,6 +24,9 @@ import com.dremio.common.exceptions.UserException; import com.dremio.common.scanner.persistence.ScanResult; import com.dremio.exec.catalog.DremioCatalogReader; +import com.dremio.exec.catalog.DremioPrepareTable; +import com.dremio.exec.catalog.DremioTranslatableTable; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.expr.fn.FunctionImplementationRegistry; import com.dremio.exec.planner.logical.DremioRelFactories; import com.dremio.exec.planner.serialization.DeserializationException; @@ -37,6 +40,7 @@ import com.dremio.plan.serialization.PRelNodeTypes; import com.dremio.plan.serialization.PRexInputRef; import com.dremio.plan.serialization.PRexNodeTypes; +import com.dremio.service.namespace.NamespaceKey; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.TypeRegistry; @@ -98,7 +102,19 @@ public String serializeToJson(RelNode plan) { @Override public LogicalPlanDeserializer getDeserializer(RelOptCluster cluster, DremioCatalogReader catalogReader, FunctionImplementationRegistry registry, CatalogService catalogService) { - final TableRetriever tableRetriever = t -> catalogReader.getTable(t.getPathComponents()); + final TableRetriever tableRetriever = new TableRetriever() { + + @Override + public DremioPrepareTable getTable(NamespaceKey key) { + return catalogReader.getTable(key.getPathComponents()); + } + + @Override + public DremioTranslatableTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + return catalogReader.getTableSnapshot(key, context); + } + }; + final PluginRetriever pluginRetriever = t -> catalogService.getSource(t); final SqlOperatorConverter sqlOperatorConverter = new SqlOperatorConverter(registry); return new LogicalPlanDeserializer() { diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RelNodeSerde.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RelNodeSerde.java index 91e9d1453c..d865e3d9b3 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RelNodeSerde.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RelNodeSerde.java @@ -24,6 +24,8 @@ import org.apache.calcite.tools.RelBuilder; import 
com.dremio.exec.catalog.DremioPrepareTable; +import com.dremio.exec.catalog.DremioTranslatableTable; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.expr.fn.FunctionImplementationRegistry; import com.dremio.exec.store.StoragePlugin; import com.dremio.plan.serialization.PRelDataType; @@ -212,6 +214,8 @@ public interface TableRetriever { * @return Table object. */ DremioPrepareTable getTable(NamespaceKey key); + + DremioTranslatableTable getTableSnapshot(NamespaceKey key, TableVersionContext context); } /** diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RexDeserializer.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RexDeserializer.java index 18da667f21..e5b2415326 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RexDeserializer.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/RexDeserializer.java @@ -24,6 +24,8 @@ import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; + import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.CorrelationId; @@ -169,6 +171,7 @@ private RexNode convertSubQuery(PRexSubQuery subQuery) { list, cluster, sqlOperatorConverter); + CorrelationId correlationId = convertCorrelationId(subQuery.getCorrelationId()); switch (operator.getKind()) { case IN: case NOT_IN: @@ -179,16 +182,22 @@ private RexNode convertSubQuery(PRexSubQuery subQuery) { .getOperandsList() .stream() .map(this::convert) - .collect(Collectors.toList()))); + .collect(Collectors.toList())), + correlationId); case EXISTS: - return RexSubQuery.exists(rel); + return RexSubQuery.exists(rel, correlationId); case SCALAR_QUERY: - return RexSubQuery.scalar(rel); - case SOME: + return RexSubQuery.scalar(rel, correlationId); default: - break; + throw new IllegalStateException("Unsupported type case: " + operator.getKind()); } - throw new IllegalStateException("Unsupported type case: " + operator.getKind()); + } + + private CorrelationId convertCorrelationId(@Nullable Integer correlationId) { + if(correlationId == null) { + return null; + } + return new CorrelationId(correlationId); } private RexNode convertCorrelVariable(PRexCorrelVariable fieldAccess) { @@ -430,7 +439,9 @@ private RexNode convertLiteral(PRexLiteral literal) { } return rexBuilder.makeNullLiteral(type); } + break; default: + break; } if(literal.getDataType().getTypeName() == PSqlTypeName.ANY) { diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/SqlOperatorConverter.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/SqlOperatorConverter.java index ce41914ea9..ba7895ea95 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/SqlOperatorConverter.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/SqlOperatorConverter.java @@ -22,15 +22,19 @@ import java.util.Set; import java.util.stream.Stream; +import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.exec.catalog.udf.UserDefinedFunctionArgumentOperator; import com.dremio.exec.expr.fn.FunctionImplementationRegistry; import com.dremio.exec.planner.sql.DremioSqlOperatorTable; import com.dremio.exec.planner.sql.OperatorTable; import com.dremio.exec.planner.sql.SqlFunctionImpl; +import 
com.dremio.exec.planner.types.JavaTypeFactoryImpl; +import com.dremio.plan.serialization.PFunctionParameter; import com.dremio.plan.serialization.PSqlOperator; import com.google.common.base.Preconditions; import com.google.common.collect.BiMap; @@ -80,34 +84,58 @@ public SqlOperator fromProto(PSqlOperator o) { } public PSqlOperator toProto(SqlOperator o) { + PSqlOperator.Builder builder = PSqlOperator.newBuilder(); SyEq wrapped = new SyEq<>(o); String key = SQL_STD_OPERATORS.inverse().get(wrapped); - if(key != null) { - return PSqlOperator.newBuilder().setName(key).build(); + + if (key != null) { + return builder.setName(key).build(); } final String functionName = o.getName(); + builder.setDname(functionName); + final String className = o.getClass().getName(); + builder.setClassName(className); + int minOperands = -1; int maxOperands = -1; if (o.getOperandTypeChecker() != null) { minOperands = o.getOperandTypeChecker().getOperandCountRange().getMin(); maxOperands = o.getOperandTypeChecker().getOperandCountRange().getMax(); } - if(operators.get(new FunctionKey(functionName, className, minOperands, maxOperands)) != null) { - return PSqlOperator.newBuilder() - .setDname(functionName) - .setClassName(className) - .setMinOperands(minOperands) - .setMaxOperands(maxOperands).build(); + + builder + .setMinOperands(minOperands) + .setMaxOperands(maxOperands); + + if (operators.get(new FunctionKey(functionName, className, minOperands, maxOperands)) != null) { + return builder.build(); + } + + if (o instanceof UserDefinedFunctionArgumentOperator.ArgumentOperator) { + UserDefinedFunctionArgumentOperator.ArgumentOperator argument = (UserDefinedFunctionArgumentOperator.ArgumentOperator) o; + int ordinal = argument.getOrdinal(); + String name = argument.getName(); + RelDataType type = argument.getReturnRelDataType(); + + TypeSerde typeSerde = new TypeSerde(JavaTypeFactoryImpl.INSTANCE); + PFunctionParameter pFunctionParameter = PFunctionParameter.newBuilder() + .setOrdinal(ordinal) + .setName(name) + .setType(typeSerde.toProto(type)) + .build(); + + return builder + .setFunctionParameter(pFunctionParameter) + .build(); } - if( !(o instanceof SqlFunctionImpl)) { + if (!(o instanceof SqlFunctionImpl)) { throw new UnsupportedOperationException(String.format("Unable to serialize operator [%s] of type [%s]", o.getClass().getName(), o.getName())); } throw new UnsupportedOperationException("Unable to support Dremio functions yet."); - } private static BiMap> populate() { diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/DefaultExpansionNodeSerde.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/DefaultExpansionNodeSerde.java index c4025f5814..626d0d16b7 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/DefaultExpansionNodeSerde.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/DefaultExpansionNodeSerde.java @@ -20,10 +20,12 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.DefaultExpansionNode; import com.dremio.exec.planner.serializer.RelNodeSerde; import com.dremio.plan.serialization.PDefaultExpansionNode; import com.dremio.service.namespace.NamespaceKey; +import com.google.common.base.Strings; /** * Serde for {@link DefaultExpansionNode} @@ -32,17 +34,21 @@ public final class DefaultExpansionNodeSerde implements RelNodeSerde path = expansionNode.getPath().getPathComponents(); - 
return PDefaultExpansionNode.newBuilder() + PDefaultExpansionNode.Builder builder = PDefaultExpansionNode.newBuilder() .setInput(s.toProto(expansionNode.getInput())) .addAllPath(path) - .setContextSensitive(expansionNode.isContextSensitive()) - .build(); + .setContextSensitive(expansionNode.isContextSensitive()); + if (expansionNode.getVersionContext() != null) { + builder.setVersionContext(expansionNode.getVersionContext().serialize()); + } + return builder.build(); } @Override public DefaultExpansionNode deserialize(PDefaultExpansionNode node, RelFromProto s) { List path = new ArrayList<>(node.getPathList()); RelNode input = s.toRel(node.getInput()); - return (DefaultExpansionNode) DefaultExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), node.getContextSensitive(), true); + return (DefaultExpansionNode) DefaultExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), + node.getContextSensitive(), Strings.isNullOrEmpty(node.getVersionContext()) ? null : TableVersionContext.deserialize(node.getVersionContext())); } } diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java index 350b8ee904..300c0e8726 100644 --- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java +++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java @@ -20,6 +20,7 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.serializer.RelNodeSerde; import com.dremio.plan.serialization.PExpansionNode; @@ -32,18 +33,22 @@ public final class ExpansionNodeSerde implements RelNodeSerde path = expansionNode.getPath().getPathComponents(); - return PExpansionNode.newBuilder() + PExpansionNode.Builder builder = PExpansionNode.newBuilder() .setInput(s.toProto(expansionNode.getInput())) .addAllPath(path) .setContextSensitive(expansionNode.isContextSensitive()) - .setIsDefault(expansionNode.isDefault()) - .build(); + .setIsDefault(expansionNode.isDefault()); + if (expansionNode.getVersionContext() != null) { + builder.setVersionContext(expansionNode.getVersionContext().serialize()); + } + return builder.build(); } @Override public ExpansionNode deserialize(PExpansionNode node, RelFromProto s) { List path = new ArrayList<>(node.getPathList()); RelNode input = s.toRel(node.getInput()); - return (ExpansionNode) ExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), node.getContextSensitive(), node.getIsDefault()); + return (ExpansionNode) ExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), node.getContextSensitive(), + node.getIsDefault(), node.getVersionContext() == null ? 
diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java
index 350b8ee904..300c0e8726 100644
--- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java
+++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ExpansionNodeSerde.java
@@ -20,6 +20,7 @@
 
 import org.apache.calcite.rel.RelNode;
 
+import com.dremio.exec.catalog.TableVersionContext;
 import com.dremio.exec.planner.acceleration.ExpansionNode;
 import com.dremio.exec.planner.serializer.RelNodeSerde;
 import com.dremio.plan.serialization.PExpansionNode;
@@ -32,18 +33,22 @@ public final class ExpansionNodeSerde implements RelNodeSerde<ExpansionNode, PExpansionNode> {
   @Override
   public PExpansionNode serialize(ExpansionNode expansionNode, RelToProto s) {
     List<String> path = expansionNode.getPath().getPathComponents();
-    return PExpansionNode.newBuilder()
+    PExpansionNode.Builder builder = PExpansionNode.newBuilder()
       .setInput(s.toProto(expansionNode.getInput()))
       .addAllPath(path)
       .setContextSensitive(expansionNode.isContextSensitive())
-      .setIsDefault(expansionNode.isDefault())
-      .build();
+      .setIsDefault(expansionNode.isDefault());
+    if (expansionNode.getVersionContext() != null) {
+      builder.setVersionContext(expansionNode.getVersionContext().serialize());
+    }
+    return builder.build();
   }
 
   @Override
   public ExpansionNode deserialize(PExpansionNode node, RelFromProto s) {
     List<String> path = new ArrayList<>(node.getPathList());
     RelNode input = s.toRel(node.getInput());
-    return (ExpansionNode) ExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), node.getContextSensitive(), node.getIsDefault());
+    return (ExpansionNode) ExpansionNode.wrap(new NamespaceKey(path), input, input.getRowType(), node.getContextSensitive(),
+      node.getIsDefault(), node.getVersionContext().isEmpty() ? null : TableVersionContext.deserialize(node.getVersionContext()));
   }
 }
diff --git a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ScanCrelSerde.java b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ScanCrelSerde.java
index 73ceb6d392..b271a38972 100644
--- a/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ScanCrelSerde.java
+++ b/sabot/serializer/src/main/java/com/dremio/exec/planner/serializer/logical/ScanCrelSerde.java
@@ -17,9 +17,12 @@
 
 import com.dremio.exec.calcite.logical.ScanCrel;
 import com.dremio.exec.catalog.DremioPrepareTable;
+import com.dremio.exec.catalog.DremioTranslatableTable;
+import com.dremio.exec.catalog.TableVersionContext;
 import com.dremio.exec.planner.serializer.RelNodeSerde;
 import com.dremio.plan.serialization.PScanCrel;
 import com.dremio.service.namespace.NamespaceKey;
+import com.google.common.base.Strings;
 
 /**
  * Serde for ScanCrel
@@ -39,6 +42,9 @@ public PScanCrel serialize(ScanCrel scan, RelToProto s) {
     if (scan.getTableMetadata().getVersion() != null) {
       builder.setDatasetVersion(scan.getTableMetadata().getVersion());
     }
+    if (scan.getTableMetadata().getVersionContext() != null) {
+      builder.setVersionContext(scan.getTableMetadata().getVersionContext().serialize());
+    }
     return builder
       .addAllPath(scan.getTableMetadata().getName().getPathComponents())
       .build();
@@ -46,11 +52,16 @@ public PScanCrel serialize(ScanCrel scan, RelToProto s) {
 
   @Override
   public ScanCrel deserialize(PScanCrel node, RelFromProto s) {
-    DremioPrepareTable table = s.tables().getTable(new NamespaceKey(node.getPathList()));
-    if(table == null) {
-      throw new UnsupportedOperationException("Unable to find table.");
+    if (Strings.isNullOrEmpty(node.getVersionContext())) {
+      DremioPrepareTable table = s.tables().getTable(new NamespaceKey(node.getPathList()));
+      if(table == null) {
+        throw new UnsupportedOperationException("Unable to find table.");
+      }
+      return (ScanCrel) table.toRel(s.toRelContext());
+    } else {
+      DremioTranslatableTable table = s.tables().getTableSnapshot(new NamespaceKey(node.getPathList()),
+        node.getVersionContext() == null ?
null : TableVersionContext.deserialize(node.getVersionContext())); + return (ScanCrel) table.toRel(s.toRelContext(), null); } - - return (ScanCrel) table.toRel(s.toRelContext()); } } diff --git a/sabot/serializer/src/main/protobuf3/PRelNode.proto b/sabot/serializer/src/main/protobuf3/PRelNode.proto index 0e95f93c40..d540c403a5 100644 --- a/sabot/serializer/src/main/protobuf3/PRelNode.proto +++ b/sabot/serializer/src/main/protobuf3/PRelNode.proto @@ -67,6 +67,7 @@ message PScanCrel { int64 record_count = 5; repeated string partition_fields = 6; string dataset_version = 7; + string version_context = 8; } message PExternalQueryScanCrel { @@ -81,12 +82,14 @@ message PExpansionNode { repeated string path = 2; bool context_sensitive = 3; bool isDefault = 4; + string version_context = 5; } message PDefaultExpansionNode { int32 input = 1; repeated string path = 2; bool context_sensitive = 3; + string version_context = 4; } message PRelList { diff --git a/sabot/serializer/src/main/protobuf3/PRexNode.proto b/sabot/serializer/src/main/protobuf3/PRexNode.proto index 588a77b673..4793993dec 100644 --- a/sabot/serializer/src/main/protobuf3/PRexNode.proto +++ b/sabot/serializer/src/main/protobuf3/PRexNode.proto @@ -145,6 +145,8 @@ message PRexSubQuery { PSqlOperator sql_operator = 2; repeated PRexNode operands = 3; repeated google.protobuf.Any details = 4; // this is a PRelNode but is kept as bytes to avoid circular references in protobuf. + int32 correlation_id = 5; + } message PRexVariable { @@ -171,6 +173,7 @@ message PSqlOperator { string class_name = 4; int32 min_operands = 5; int32 max_operands = 6; + optional PFunctionParameter function_parameter = 7; } enum PSqlSyntax { @@ -194,3 +197,9 @@ message PRexPatternFieldRef { PRexInputRef rex_input_ref = 1; string alpha = 2; } + +message PFunctionParameter { + int32 ordinal = 1; + string name = 2; + PRelDataType type = 3; +} diff --git a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockCatalog.java b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockCatalog.java index 5123ec34ef..22746dff2e 100644 --- a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockCatalog.java +++ b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockCatalog.java @@ -79,6 +79,11 @@ public DremioTable getTableForQuery(NamespaceKey key) { return getTable(key); } + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + throw new UnsupportedOperationException(); + } + @Override public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { throw new UnsupportedOperationException(); diff --git a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockDremioQueryParser.java b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockDremioQueryParser.java index 3cd08ed783..ca2e573e6e 100644 --- a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockDremioQueryParser.java +++ b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/MockDremioQueryParser.java @@ -36,7 +36,6 @@ import org.apache.calcite.sql.validate.SqlValidatorCatalogReader; import org.apache.calcite.sql2rel.SqlToRelConverter; -import com.dremio.common.exceptions.UserException; import com.dremio.exec.catalog.DremioCatalogReader; import com.dremio.exec.catalog.SimpleCatalog; import com.dremio.exec.context.AdditionalContext; @@ -115,6 +114,7 @@ public MockDremioQueryParser(SqlOperatorTable operatorTable, SimpleCatalog ca * @param sql Sql to parse. 
* @return The validated SqlTree tree. */ + @Override public SqlNode parse(String sql) { try { SqlParser parser = SqlParser.create(sql, parserConfig); @@ -124,12 +124,13 @@ public SqlNode parse(String sql) { SqlNode sqlNode = validator.validate(node); return sqlNode; } catch (SqlParseException e) { - UserException.Builder builder = SqlExceptionHelper.parseError(sql, e); - builder.message(SqlExceptionHelper.QUERY_PARSING_ERROR); - throw builder.build(logger); + throw SqlExceptionHelper + .parseError(sql, e) + .build(logger); } } + @Override public RelNode toRel(String query) { final SqlNode sqlNode = parse(query); return convertSqlNodeToRel(sqlNode); @@ -139,6 +140,7 @@ public RelNode toRel(String query) { * Get the rel from a previously parsed sql tree. * @return The RelNode tree. */ + @Override public RelNode convertSqlNodeToRel(SqlNode sqlNode) { final SqlToRelConverter.Config config = SqlToRelConverter.configBuilder() .withInSubQueryThreshold((int) 1024) diff --git a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/TestSerializerRoundtrip.java b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/TestSerializerRoundtrip.java index b0132a490b..1147ca9a7b 100644 --- a/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/TestSerializerRoundtrip.java +++ b/sabot/serializer/src/test/java/com/dremio/exec/planner/sql/TestSerializerRoundtrip.java @@ -96,7 +96,7 @@ public class TestSerializerRoundtrip { @Test public void testQueries() { - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .add("SELECT", "select a,b,c from t1") .add("SELECT AS", "select T.a as A, T.b as B, T.c as C from t1 as T") .add("SELECT DISTINCT", "select distinct a from t1 where b > 10") @@ -133,7 +133,7 @@ public void testQueries() { @Test public void testColumnAliases() { - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .add("SELECT", "SELECT ename as employee_name, LOWER(employee_name) as lower_employee_name FROM emp") // This fails since the alias extender only does one layer of unaliasing //.add("Double aliasing", "SELECT ename as employee_name, LOWER(employee_name) as lower_employee_name, UPPER(lower_employee_name) as upper_employee_name FROM emp") @@ -145,7 +145,7 @@ public void testColumnAliases() { @Test public void testQualify() { - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .add("QUALIFY WITHOUT REFERENCES", "SELECT empno, ename, deptno FROM emp QUALIFY ROW_NUMBER() over (partition by ename order by deptno) = 1") .add("QUALIFY WITHOUT REFERENCES AND FILTER", "SELECT empno, ename, deptno FROM emp WHERE deptno > 5 QUALIFY ROW_NUMBER() over (partition by ename order by deptno) = 1") .add("QUALIFY WITH REFERENCES", "SELECT empno, ename, deptno, ROW_NUMBER() over (partition by ename order by deptno) as row_num FROM emp QUALIFY row_num = 1") @@ -168,7 +168,7 @@ public void testQualify() { @Test public void testSqlFunction() { - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .add("ROUND", "SELECT ROUND(CAST(9.9 AS DECIMAL(2,1)))FROM (VALUES (1)) AS t(a)") .add("TRUNCATE", "SELECT TRUNCATE(CAST(9.9 AS DECIMAL(2,1))) FROM (VALUES (1)) AS t(a)") .add("MEDAIN", "SELECT MEDIAN(A) OVER (PARTITION BY b) FROM (VALUES(1, 2)) AS t(a, b)") @@ -179,7 +179,7 @@ public void 
testSqlFunction() { @Test public void testSqlToRelConvertTests() { - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .allowExceptions() .add( "Integer Literal", @@ -997,8 +997,8 @@ public void testSqlToRelConvertTests() { @Test public void testTpchQueries() throws URISyntaxException, IOException { - GoldenFileTestBuilder builder = - new GoldenFileTestBuilder(TestSerializerRoundtrip::executeTest) + GoldenFileTestBuilder builder = + GoldenFileTestBuilder.create(TestSerializerRoundtrip::executeTest) .allowExceptions(); for (Path path : getQueryFilePaths()) { diff --git a/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testSqlToRelConvertTests.yaml b/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testSqlToRelConvertTests.yaml index 1c6ea0f6a7..92eef2f8ac 100644 --- a/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testSqlToRelConvertTests.yaml +++ b/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testSqlToRelConvertTests.yaml @@ -2615,7 +2615,7 @@ description: "Lateral Decorrelate" input: "select * from emp, LATERAL (select * from dept where emp.deptno=dept.deptno)" output: - queryPlanBinary: "CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQKuQMKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKNAxABGogDKoUDCgQIARABEggKBkVRVUFMUxroAkLlAgrQAjrNAgrDAhAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAFSEQoHREVQVE5PMBAJGgQIARAEUjoKBE5BTUUQChowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBIQCgZERVBUTk8QBxoECAEQBBoIEgYKBAgBEAQKfgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBJSCAISBkRFUFROTxIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQAQo8Cip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSDhADIgUaAwoBByoDCgEHCoQDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EtcCCAQSBUVNUE5PEgVFTkFNRRIDSk9CEgNNR1ISCEhJUkVEQVRFEgNTQUwSBENPTU0SBkRFUFROTxIHU0xBQ0tFUhIHREVQVE5PMBIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIBQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRACGgoSCAoECAEQBBADGgwSCgoGCAEQDSADEAQaChIICgQIARAEEAUaChIICgQIARAEEAYaChIICgQIARAEEAcaChIICgQIARABEAgaChIICgQIARAEEAkaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCg==" + queryPlanBinary: 
"CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQK6gIKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchK+AhABGrkCKrYCCgQIARABEggKBkVRVUFMUxqZAkKWAgqBAjr+AQr0ARAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAEiBSRjb3IwEhAKBkRFUFROTxAHGgQIARAEGggSBgoECAEQBAp+Cih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0ElIIAhIGREVQVE5PEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABCjwKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbENvcnJlbGF0ZRIOEAMiBRoDCgEHKgMKAQcKhAMKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QS1wIIBBIFRU1QTk8SBUVOQU1FEgNKT0ISA01HUhIISElSRURBVEUSA1NBTBIEQ09NTRIGREVQVE5PEgdTTEFDS0VSEgdERVBUTk8wEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gFCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABGjYSNAowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaChIICgQIARAEEAMaDBIKCgYIARANIAMQBBoKEggKBAgBEAQQBRoKEggKBAgBEAQQBhoKEggKBAgBEAQQBxoKEggKBAgBEAEQCBoKEggKBAgBEAQQCRo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAK" queryPlanBinaryHexDump: - "00000000 0A 33 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .3.*type.googlea" - "00000010 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F pis.com/plan.PLo" @@ -2623,13 +2623,13 @@ - "00000030 0A 03 45 4D 50 0A 34 0A 2A 74 79 70 65 2E 67 6F ..EMP.4.*type.go" - "00000040 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 ogleapis.com/pla" - "00000050 6E 2E 50 4C 6F 67 69 63 61 6C 54 61 62 6C 65 53 n.PLogicalTableS" - - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A B9 03 0A 27 can....DEPT.¹..'" + - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A EA 02 0A 27 can....DEPT.ê..'" - "00000070 74 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E type.googleapis." - "00000080 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 com/plan.PLogica" - - "00000090 6C 46 69 6C 74 65 72 12 8D 03 10 01 1A 88 03 2A lFilter........*" - - "000000A0 85 03 0A 04 08 01 10 01 12 08 0A 06 45 51 55 41 ............EQUA" - - "000000B0 4C 53 1A E8 02 42 E5 02 0A D0 02 3A CD 02 0A C3 LS.è.Bå..Ð.:Í..Ã" - - "000000C0 02 10 28 48 02 52 0D 0A 05 45 4D 50 4E 4F 1A 04 ..(H.R...EMPNO.." + - "00000090 6C 46 69 6C 74 65 72 12 BE 02 10 01 1A B9 02 2A lFilter.¾....¹.*" + - "000000A0 B6 02 0A 04 08 01 10 01 12 08 0A 06 45 51 55 41 ¶...........EQUA" + - "000000B0 4C 53 1A 99 02 42 96 02 0A 81 02 3A FE 01 0A F4 LS...B.....:þ..ô" + - "000000C0 01 10 28 48 02 52 0D 0A 05 45 4D 50 4E 4F 1A 04 ..(H.R...EMPNO.." - "000000D0 08 01 10 04 52 3B 0A 05 45 4E 41 4D 45 10 01 1A ....R;..ENAME..." - "000000E0 30 08 01 10 1D 20 14 2A 1C 0A 18 49 53 4F 2D 38 0.... .*...ISO-8" - "000000F0 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 72 69 6D 859-1$en_US$prim" @@ -2644,50 +2644,45 @@ - "00000180 52 0E 0A 04 43 4F 4D 4D 10 06 1A 04 08 01 10 04 R...COMM........" - "00000190 52 10 0A 06 44 45 50 54 4E 4F 10 07 1A 04 08 01 R...DEPTNO......" - "000001A0 10 04 52 11 0A 07 53 4C 41 43 4B 45 52 10 08 1A ..R...SLACKER..." - - "000001B0 04 08 01 10 01 52 11 0A 07 44 45 50 54 4E 4F 30 .....R...DEPTNO0" - - "000001C0 10 09 1A 04 08 01 10 04 52 3A 0A 04 4E 41 4D 45 ........R:..NAME" - - "000001D0 10 0A 1A 30 08 01 10 1D 20 0A 2A 1C 0A 18 49 53 ...0.... 
.*...IS" - - "000001E0 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 O-8859-1$en_US$p" - - "000001F0 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 38 rimary(.2.ISO-88" - - "00000200 35 39 2D 31 22 05 24 63 6F 72 30 12 10 0A 06 44 59-1\".$cor0....D" - - "00000210 45 50 54 4E 4F 10 07 1A 04 08 01 10 04 1A 08 12 EPTNO..........." - - "00000220 06 0A 04 08 01 10 04 0A 7E 0A 28 74 79 70 65 2E ........~.(type." - - "00000230 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" - - "00000240 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A lan.PLogicalProj" - - "00000250 65 63 74 12 52 08 02 12 06 44 45 50 54 4E 4F 12 ect.R....DEPTNO." - - "00000260 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A .NAME..........." - - "00000270 36 12 34 0A 30 08 01 10 1D 20 0A 2A 1C 0A 18 49 6.4.0.... .*...I" - - "00000280 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 SO-8859-1$en_US$" - - "00000290 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 primary(.2.ISO-8" - - "000002A0 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 79 70 65 2E 859-1...<.*type." - - "000002B0 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" - - "000002C0 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 43 6F 72 72 lan.PLogicalCorr" - - "000002D0 65 6C 61 74 65 12 0E 10 03 22 05 1A 03 0A 01 07 elate....\"......" - - "000002E0 2A 03 0A 01 07 0A 84 03 0A 28 74 79 70 65 2E 67 *........(type.g" - - "000002F0 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C oogleapis.com/pl" - - "00000300 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A 65 an.PLogicalProje" - - "00000310 63 74 12 D7 02 08 04 12 05 45 4D 50 4E 4F 12 05 ct.×.....EMPNO.." - - "00000320 45 4E 41 4D 45 12 03 4A 4F 42 12 03 4D 47 52 12 ENAME..JOB..MGR." - - "00000330 08 48 49 52 45 44 41 54 45 12 03 53 41 4C 12 04 .HIREDATE..SAL.." - - "00000340 43 4F 4D 4D 12 06 44 45 50 54 4E 4F 12 07 53 4C COMM..DEPTNO..SL" - - "00000350 41 43 4B 45 52 12 07 44 45 50 54 4E 4F 30 12 04 ACKER..DEPTNO0.." - - "00000360 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A 36 NAME...........6" - - "00000370 12 34 0A 30 08 01 10 1D 20 14 2A 1C 0A 18 49 53 .4.0.... .*...IS" - - "00000380 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 O-8859-1$en_US$p" - - "00000390 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 38 rimary(.2.ISO-88" - - "000003A0 35 39 2D 31 10 01 1A 36 12 34 0A 30 08 01 10 1D 59-1...6.4.0...." - - "000003B0 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 .*...ISO-8859-1" - - "000003C0 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 $en_US$primary(." - - "000003D0 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 02 1A 0A 2.ISO-8859-1...." - - "000003E0 12 08 0A 04 08 01 10 04 10 03 1A 0C 12 0A 0A 06 ................" - - "000003F0 08 01 10 0D 20 03 10 04 1A 0A 12 08 0A 04 08 01 .... ..........." - - "00000400 10 04 10 05 1A 0A 12 08 0A 04 08 01 10 04 10 06 ................" - - "00000410 1A 0A 12 08 0A 04 08 01 10 04 10 07 1A 0A 12 08 ................" - - "00000420 0A 04 08 01 10 01 10 08 1A 0A 12 08 0A 04 08 01 ................" - - "00000430 10 04 10 09 1A 36 12 34 0A 30 08 01 10 1D 20 0A .....6.4.0.... ." - - "00000440 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 *...ISO-8859-1$e" - - "00000450 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 32 0A n_US$primary(.2." - - "00000460 49 53 4F 2D 38 38 35 39 2D 31 10 0A ISO-8859-1.." + - "000001B0 04 08 01 10 01 22 05 24 63 6F 72 30 12 10 0A 06 .....\".$cor0...." + - "000001C0 44 45 50 54 4E 4F 10 07 1A 04 08 01 10 04 1A 08 DEPTNO.........." 
+ - "000001D0 12 06 0A 04 08 01 10 04 0A 7E 0A 28 74 79 70 65 .........~.(type" + - "000001E0 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F .googleapis.com/" + - "000001F0 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F plan.PLogicalPro" + - "00000200 6A 65 63 74 12 52 08 02 12 06 44 45 50 54 4E 4F ject.R....DEPTNO" + - "00000210 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 ..NAME.........." + - "00000220 1A 36 12 34 0A 30 08 01 10 1D 20 0A 2A 1C 0A 18 .6.4.0.... .*..." + - "00000230 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 ISO-8859-1$en_US" + - "00000240 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D $primary(.2.ISO-" + - "00000250 38 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 79 70 65 8859-1...<.*type" + - "00000260 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F .googleapis.com/" + - "00000270 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 43 6F 72 plan.PLogicalCor" + - "00000280 72 65 6C 61 74 65 12 0E 10 03 22 05 1A 03 0A 01 relate....\"....." + - "00000290 07 2A 03 0A 01 07 0A 84 03 0A 28 74 79 70 65 2E .*........(type." + - "000002A0 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" + - "000002B0 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A lan.PLogicalProj" + - "000002C0 65 63 74 12 D7 02 08 04 12 05 45 4D 50 4E 4F 12 ect.×.....EMPNO." + - "000002D0 05 45 4E 41 4D 45 12 03 4A 4F 42 12 03 4D 47 52 .ENAME..JOB..MGR" + - "000002E0 12 08 48 49 52 45 44 41 54 45 12 03 53 41 4C 12 ..HIREDATE..SAL." + - "000002F0 04 43 4F 4D 4D 12 06 44 45 50 54 4E 4F 12 07 53 .COMM..DEPTNO..S" + - "00000300 4C 41 43 4B 45 52 12 07 44 45 50 54 4E 4F 30 12 LACKER..DEPTNO0." + - "00000310 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A .NAME..........." + - "00000320 36 12 34 0A 30 08 01 10 1D 20 14 2A 1C 0A 18 49 6.4.0.... .*...I" + - "00000330 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 SO-8859-1$en_US$" + - "00000340 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 primary(.2.ISO-8" + - "00000350 38 35 39 2D 31 10 01 1A 36 12 34 0A 30 08 01 10 859-1...6.4.0..." + - "00000360 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D . .*...ISO-8859-" + - "00000370 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 1$en_US$primary(" + - "00000380 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 02 1A .2.ISO-8859-1..." + - "00000390 0A 12 08 0A 04 08 01 10 04 10 03 1A 0C 12 0A 0A ................" + - "000003A0 06 08 01 10 0D 20 03 10 04 1A 0A 12 08 0A 04 08 ..... .........." + - "000003B0 01 10 04 10 05 1A 0A 12 08 0A 04 08 01 10 04 10 ................" + - "000003C0 06 1A 0A 12 08 0A 04 08 01 10 04 10 07 1A 0A 12 ................" + - "000003D0 08 0A 04 08 01 10 01 10 08 1A 0A 12 08 0A 04 08 ................" + - "000003E0 01 10 04 10 09 1A 36 12 34 0A 30 08 01 10 1D 20 ......6.4.0.... " + - "000003F0 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 .*...ISO-8859-1$" + - "00000400 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 32 en_US$primary(.2" + - "00000410 0A 49 53 4F 2D 38 38 35 39 2D 31 10 0A .ISO-8859-1.." queryPlanText: - "LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10])" - " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}])" @@ -7379,31 +7374,115 @@ input: "update emp set sal = ? where slacker = false" - description: "Update Extended Column Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 19." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." 
+ - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update empdefaults(empno INTEGER NOT NULL, deptno INTEGER) set deptno = 1, empno = 20, ename = 'Bob' where deptno = 10" - description: "Update Extended Column Case Sensitive Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 19." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update empdefaults(\"slacker\" INTEGER, deptno INTEGER) set deptno = 1, \"slacker\" = 100 where ename = 'Bob'" - description: "Update Extended Column Modifiable View Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 27." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update EMP_MODIFIABLEVIEW3(empno INTEGER NOT NULL, deptno INTEGER) set deptno = 20, empno = 20, ename = 'Bob' where empno = 10" - description: "Update Extended Column Modifiable View Case Sensitive Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 27." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update EMP_MODIFIABLEVIEW2(\"slacker\" INTEGER, deptno INTEGER) set deptno = 20, \"slacker\" = 100 where ename = 'Bob'" - description: "Update Extended Column Modifiable View Extended Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 27." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update EMP_MODIFIABLEVIEW2(\"slacker\" INTEGER, extra BOOLEAN) set deptno = 20, \"slacker\" = 100, extra = true where ename = 'Bob'" - description: "Update Extended Column Modifiable View Underlying Collision" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 27." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update EMP_MODIFIABLEVIEW3(extra BOOLEAN, comm INTEGER) set empno = 20, comm = true, extra = true where ename = 'Bob'" - description: "Update Extended Column" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 19." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." + - " " input: "update empdefaults(updated TIMESTAMP) set deptno = 1, updated = timestamp '2017-03-12 13:03:05', empno = 20, ename = 'Bob' where deptno = 10" - description: "Update Modifiable View" @@ -7411,7 +7490,19 @@ input: "update EMP_MODIFIABLEVIEW2 set sal = sal + 5000 where slacker = false" - description: "Update Extended Column Modifiable View" - exceptionMessage: "Failure parsing the query." + exceptionMessage: + - "Encountered \"(\" at line 1, column 27." + - "Was expecting one of:" + - " \"SET\" ..." + - " \"AS\" ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " ..." + - " \".\" ..." + - " \"[\" ..." 
+ - " " input: "update EMP_MODIFIABLEVIEW2(updated TIMESTAMP) set updated = timestamp '2017-03-12 13:03:05', sal = sal + 5000 where slacker = false" - description: "Delete" @@ -8529,7 +8620,7 @@ description: "Lateral Decorrelate Rex" input: "select * from emp, LATERAL (select * from dept where emp.deptno=dept.deptno)" output: - queryPlanBinary: "CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQKuQMKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKNAxABGogDKoUDCgQIARABEggKBkVRVUFMUxroAkLlAgrQAjrNAgrDAhAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAFSEQoHREVQVE5PMBAJGgQIARAEUjoKBE5BTUUQChowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBIQCgZERVBUTk8QBxoECAEQBBoIEgYKBAgBEAQKfgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBJSCAISBkRFUFROTxIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQAQo8Cip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSDhADIgUaAwoBByoDCgEHCoQDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EtcCCAQSBUVNUE5PEgVFTkFNRRIDSk9CEgNNR1ISCEhJUkVEQVRFEgNTQUwSBENPTU0SBkRFUFROTxIHU0xBQ0tFUhIHREVQVE5PMBIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIBQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRACGgoSCAoECAEQBBADGgwSCgoGCAEQDSADEAQaChIICgQIARAEEAUaChIICgQIARAEEAYaChIICgQIARAEEAcaChIICgQIARABEAgaChIICgQIARAEEAkaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCg==" + queryPlanBinary: "CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQK6gIKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchK+AhABGrkCKrYCCgQIARABEggKBkVRVUFMUxqZAkKWAgqBAjr+AQr0ARAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAEiBSRjb3IwEhAKBkRFUFROTxAHGgQIARAEGggSBgoECAEQBAp+Cih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0ElIIAhIGREVQVE5PEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABCjwKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbENvcnJlbGF0ZRIOEAMiBRoDCgEHKgMKAQcKhAMKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QS1wIIBBIFRU1QTk8SBUVOQU1FEgNKT0ISA01HUhIISElSRURBVEUSA1NBTBIEQ09NTRIGREVQVE5PEgdTTEFDS0VSEgdERVBUTk8wEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gFCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABGjYSNAowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaChIICgQIARAEEAMaDBIKCgYIARANIAMQBBoKEggKBAgBEAQQBRoKEggKBAgBEAQQBhoKEggKBAgBEAQQBxoKEggKBAgBEAEQCBoKEggKBAgBEAQQCRo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAK" queryPlanBinaryHexDump: - "00000000 0A 33 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .3.*type.googlea" - "00000010 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F pis.com/plan.PLo" @@ -8537,13 +8628,13 @@ - "00000030 0A 03 45 4D 50 0A 34 0A 2A 74 79 70 65 2E 67 6F 
..EMP.4.*type.go" - "00000040 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 ogleapis.com/pla" - "00000050 6E 2E 50 4C 6F 67 69 63 61 6C 54 61 62 6C 65 53 n.PLogicalTableS" - - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A B9 03 0A 27 can....DEPT.¹..'" + - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A EA 02 0A 27 can....DEPT.ê..'" - "00000070 74 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E type.googleapis." - "00000080 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 com/plan.PLogica" - - "00000090 6C 46 69 6C 74 65 72 12 8D 03 10 01 1A 88 03 2A lFilter........*" - - "000000A0 85 03 0A 04 08 01 10 01 12 08 0A 06 45 51 55 41 ............EQUA" - - "000000B0 4C 53 1A E8 02 42 E5 02 0A D0 02 3A CD 02 0A C3 LS.è.Bå..Ð.:Í..Ã" - - "000000C0 02 10 28 48 02 52 0D 0A 05 45 4D 50 4E 4F 1A 04 ..(H.R...EMPNO.." + - "00000090 6C 46 69 6C 74 65 72 12 BE 02 10 01 1A B9 02 2A lFilter.¾....¹.*" + - "000000A0 B6 02 0A 04 08 01 10 01 12 08 0A 06 45 51 55 41 ¶...........EQUA" + - "000000B0 4C 53 1A 99 02 42 96 02 0A 81 02 3A FE 01 0A F4 LS...B.....:þ..ô" + - "000000C0 01 10 28 48 02 52 0D 0A 05 45 4D 50 4E 4F 1A 04 ..(H.R...EMPNO.." - "000000D0 08 01 10 04 52 3B 0A 05 45 4E 41 4D 45 10 01 1A ....R;..ENAME..." - "000000E0 30 08 01 10 1D 20 14 2A 1C 0A 18 49 53 4F 2D 38 0.... .*...ISO-8" - "000000F0 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 72 69 6D 859-1$en_US$prim" @@ -8558,50 +8649,45 @@ - "00000180 52 0E 0A 04 43 4F 4D 4D 10 06 1A 04 08 01 10 04 R...COMM........" - "00000190 52 10 0A 06 44 45 50 54 4E 4F 10 07 1A 04 08 01 R...DEPTNO......" - "000001A0 10 04 52 11 0A 07 53 4C 41 43 4B 45 52 10 08 1A ..R...SLACKER..." - - "000001B0 04 08 01 10 01 52 11 0A 07 44 45 50 54 4E 4F 30 .....R...DEPTNO0" - - "000001C0 10 09 1A 04 08 01 10 04 52 3A 0A 04 4E 41 4D 45 ........R:..NAME" - - "000001D0 10 0A 1A 30 08 01 10 1D 20 0A 2A 1C 0A 18 49 53 ...0.... .*...IS" - - "000001E0 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 O-8859-1$en_US$p" - - "000001F0 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 38 rimary(.2.ISO-88" - - "00000200 35 39 2D 31 22 05 24 63 6F 72 30 12 10 0A 06 44 59-1\".$cor0....D" - - "00000210 45 50 54 4E 4F 10 07 1A 04 08 01 10 04 1A 08 12 EPTNO..........." - - "00000220 06 0A 04 08 01 10 04 0A 7E 0A 28 74 79 70 65 2E ........~.(type." - - "00000230 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" - - "00000240 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A lan.PLogicalProj" - - "00000250 65 63 74 12 52 08 02 12 06 44 45 50 54 4E 4F 12 ect.R....DEPTNO." - - "00000260 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A .NAME..........." - - "00000270 36 12 34 0A 30 08 01 10 1D 20 0A 2A 1C 0A 18 49 6.4.0.... .*...I" - - "00000280 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 SO-8859-1$en_US$" - - "00000290 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 primary(.2.ISO-8" - - "000002A0 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 79 70 65 2E 859-1...<.*type." - - "000002B0 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" - - "000002C0 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 43 6F 72 72 lan.PLogicalCorr" - - "000002D0 65 6C 61 74 65 12 0E 10 03 22 05 1A 03 0A 01 07 elate....\"......" - - "000002E0 2A 03 0A 01 07 0A 84 03 0A 28 74 79 70 65 2E 67 *........(type.g" - - "000002F0 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C oogleapis.com/pl" - - "00000300 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A 65 an.PLogicalProje" - - "00000310 63 74 12 D7 02 08 04 12 05 45 4D 50 4E 4F 12 05 ct.×.....EMPNO.." - - "00000320 45 4E 41 4D 45 12 03 4A 4F 42 12 03 4D 47 52 12 ENAME..JOB..MGR." 
- - "00000330 08 48 49 52 45 44 41 54 45 12 03 53 41 4C 12 04 .HIREDATE..SAL.." - - "00000340 43 4F 4D 4D 12 06 44 45 50 54 4E 4F 12 07 53 4C COMM..DEPTNO..SL" - - "00000350 41 43 4B 45 52 12 07 44 45 50 54 4E 4F 30 12 04 ACKER..DEPTNO0.." - - "00000360 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A 36 NAME...........6" - - "00000370 12 34 0A 30 08 01 10 1D 20 14 2A 1C 0A 18 49 53 .4.0.... .*...IS" - - "00000380 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 O-8859-1$en_US$p" - - "00000390 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 38 rimary(.2.ISO-88" - - "000003A0 35 39 2D 31 10 01 1A 36 12 34 0A 30 08 01 10 1D 59-1...6.4.0...." - - "000003B0 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 .*...ISO-8859-1" - - "000003C0 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 $en_US$primary(." - - "000003D0 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 02 1A 0A 2.ISO-8859-1...." - - "000003E0 12 08 0A 04 08 01 10 04 10 03 1A 0C 12 0A 0A 06 ................" - - "000003F0 08 01 10 0D 20 03 10 04 1A 0A 12 08 0A 04 08 01 .... ..........." - - "00000400 10 04 10 05 1A 0A 12 08 0A 04 08 01 10 04 10 06 ................" - - "00000410 1A 0A 12 08 0A 04 08 01 10 04 10 07 1A 0A 12 08 ................" - - "00000420 0A 04 08 01 10 01 10 08 1A 0A 12 08 0A 04 08 01 ................" - - "00000430 10 04 10 09 1A 36 12 34 0A 30 08 01 10 1D 20 0A .....6.4.0.... ." - - "00000440 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 *...ISO-8859-1$e" - - "00000450 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 32 0A n_US$primary(.2." - - "00000460 49 53 4F 2D 38 38 35 39 2D 31 10 0A ISO-8859-1.." + - "000001B0 04 08 01 10 01 22 05 24 63 6F 72 30 12 10 0A 06 .....\".$cor0...." + - "000001C0 44 45 50 54 4E 4F 10 07 1A 04 08 01 10 04 1A 08 DEPTNO.........." + - "000001D0 12 06 0A 04 08 01 10 04 0A 7E 0A 28 74 79 70 65 .........~.(type" + - "000001E0 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F .googleapis.com/" + - "000001F0 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F plan.PLogicalPro" + - "00000200 6A 65 63 74 12 52 08 02 12 06 44 45 50 54 4E 4F ject.R....DEPTNO" + - "00000210 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 ..NAME.........." + - "00000220 1A 36 12 34 0A 30 08 01 10 1D 20 0A 2A 1C 0A 18 .6.4.0.... .*..." + - "00000230 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 ISO-8859-1$en_US" + - "00000240 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D $primary(.2.ISO-" + - "00000250 38 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 79 70 65 8859-1...<.*type" + - "00000260 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F .googleapis.com/" + - "00000270 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 43 6F 72 plan.PLogicalCor" + - "00000280 72 65 6C 61 74 65 12 0E 10 03 22 05 1A 03 0A 01 relate....\"....." + - "00000290 07 2A 03 0A 01 07 0A 84 03 0A 28 74 79 70 65 2E .*........(type." + - "000002A0 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 googleapis.com/p" + - "000002B0 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 6F 6A lan.PLogicalProj" + - "000002C0 65 63 74 12 D7 02 08 04 12 05 45 4D 50 4E 4F 12 ect.×.....EMPNO." + - "000002D0 05 45 4E 41 4D 45 12 03 4A 4F 42 12 03 4D 47 52 .ENAME..JOB..MGR" + - "000002E0 12 08 48 49 52 45 44 41 54 45 12 03 53 41 4C 12 ..HIREDATE..SAL." + - "000002F0 04 43 4F 4D 4D 12 06 44 45 50 54 4E 4F 12 07 53 .COMM..DEPTNO..S" + - "00000300 4C 41 43 4B 45 52 12 07 44 45 50 54 4E 4F 30 12 LACKER..DEPTNO0." + - "00000310 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 04 1A .NAME..........." + - "00000320 36 12 34 0A 30 08 01 10 1D 20 14 2A 1C 0A 18 49 6.4.0.... 
.*...I" + - "00000330 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 SO-8859-1$en_US$" + - "00000340 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F 2D 38 primary(.2.ISO-8" + - "00000350 38 35 39 2D 31 10 01 1A 36 12 34 0A 30 08 01 10 859-1...6.4.0..." + - "00000360 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D . .*...ISO-8859-" + - "00000370 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 1$en_US$primary(" + - "00000380 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 02 1A .2.ISO-8859-1..." + - "00000390 0A 12 08 0A 04 08 01 10 04 10 03 1A 0C 12 0A 0A ................" + - "000003A0 06 08 01 10 0D 20 03 10 04 1A 0A 12 08 0A 04 08 ..... .........." + - "000003B0 01 10 04 10 05 1A 0A 12 08 0A 04 08 01 10 04 10 ................" + - "000003C0 06 1A 0A 12 08 0A 04 08 01 10 04 10 07 1A 0A 12 ................" + - "000003D0 08 0A 04 08 01 10 01 10 08 1A 0A 12 08 0A 04 08 ................" + - "000003E0 01 10 04 10 09 1A 36 12 34 0A 30 08 01 10 1D 20 ......6.4.0.... " + - "000003F0 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 .*...ISO-8859-1$" + - "00000400 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 02 32 en_US$primary(.2" + - "00000410 0A 49 53 4F 2D 38 38 35 39 2D 31 10 0A .ISO-8859-1.." queryPlanText: - "LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10])" - " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}])" @@ -8613,7 +8699,7 @@ description: "Lateral Decorrelate Theta Rex" input: "select * from emp, LATERAL (select * from dept where emp.deptno < dept.deptno)" output: - queryPlanBinary: "CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQKvAMKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKQAxABGosDKogDCgQIARABEgsKCUxFU1NfVEhBThroAkLlAgrQAjrNAgrDAhAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAFSEQoHREVQVE5PMBAJGgQIARAEUjoKBE5BTUUQChowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBIQCgZERVBUTk8QBxoECAEQBBoIEgYKBAgBEAQKfgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBJSCAISBkRFUFROTxIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQAQo8Cip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSDhADIgUaAwoBByoDCgEHCoQDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EtcCCAQSBUVNUE5PEgVFTkFNRRIDSk9CEgNNR1ISCEhJUkVEQVRFEgNTQUwSBENPTU0SBkRFUFROTxIHU0xBQ0tFUhIHREVQVE5PMBIETkFNRRoIEgYKBAgBEAQaNhI0CjAIARAdIBQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRACGgoSCAoECAEQBBADGgwSCgoGCAEQDSADEAQaChIICgQIARAEEAUaChIICgQIARAEEAYaChIICgQIARAEEAcaChIICgQIARABEAgaChIICgQIARAEEAkaNhI0CjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCg==" + queryPlanBinary: 
"CjMKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIFCgNFTVAKNAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEgYKBERFUFQK7QIKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchLBAhABGrwCKrkCCgQIARABEgsKCUxFU1NfVEhBThqZAkKWAgqBAjr+AQr0ARAoSAJSDQoFRU1QTk8aBAgBEARSOwoFRU5BTUUQARowCAEQHSAUKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUjkKA0pPQhACGjAIARAdIAoqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSDQoDTUdSEAMaBAgBEARSFAoISElSRURBVEUQBBoGCAEQDSADUg0KA1NBTBAFGgQIARAEUg4KBENPTU0QBhoECAEQBFIQCgZERVBUTk8QBxoECAEQBFIRCgdTTEFDS0VSEAgaBAgBEAEiBSRjb3IwEhAKBkRFUFROTxAHGgQIARAEGggSBgoECAEQBAp+Cih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0ElIIAhIGREVQVE5PEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABCjwKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbENvcnJlbGF0ZRIOEAMiBRoDCgEHKgMKAQcKhAMKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QS1wIIBBIFRU1QTk8SBUVOQU1FEgNKT0ISA01HUhIISElSRURBVEUSA1NBTBIEQ09NTRIGREVQVE5PEgdTTEFDS0VSEgdERVBUTk8wEgROQU1FGggSBgoECAEQBBo2EjQKMAgBEB0gFCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABGjYSNAowCAEQHSAKKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaChIICgQIARAEEAMaDBIKCgYIARANIAMQBBoKEggKBAgBEAQQBRoKEggKBAgBEAQQBhoKEggKBAgBEAQQBxoKEggKBAgBEAEQCBoKEggKBAgBEAQQCRo2EjQKMAgBEB0gCiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAK" queryPlanBinaryHexDump: - "00000000 0A 33 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .3.*type.googlea" - "00000010 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F pis.com/plan.PLo" @@ -8621,13 +8707,13 @@ - "00000030 0A 03 45 4D 50 0A 34 0A 2A 74 79 70 65 2E 67 6F ..EMP.4.*type.go" - "00000040 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 ogleapis.com/pla" - "00000050 6E 2E 50 4C 6F 67 69 63 61 6C 54 61 62 6C 65 53 n.PLogicalTableS" - - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A BC 03 0A 27 can....DEPT.¼..'" + - "00000060 63 61 6E 12 06 0A 04 44 45 50 54 0A ED 02 0A 27 can....DEPT.í..'" - "00000070 74 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E type.googleapis." - "00000080 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 com/plan.PLogica" - - "00000090 6C 46 69 6C 74 65 72 12 90 03 10 01 1A 8B 03 2A lFilter........*" - - "000000A0 88 03 0A 04 08 01 10 01 12 0B 0A 09 4C 45 53 53 ............LESS" - - "000000B0 5F 54 48 41 4E 1A E8 02 42 E5 02 0A D0 02 3A CD _THAN.è.Bå..Ð.:Í" - - "000000C0 02 0A C3 02 10 28 48 02 52 0D 0A 05 45 4D 50 4E ..Ã..(H.R...EMPN" + - "00000090 6C 46 69 6C 74 65 72 12 C1 02 10 01 1A BC 02 2A lFilter.Á....¼.*" + - "000000A0 B9 02 0A 04 08 01 10 01 12 0B 0A 09 4C 45 53 53 ¹...........LESS" + - "000000B0 5F 54 48 41 4E 1A 99 02 42 96 02 0A 81 02 3A FE _THAN...B.....:þ" + - "000000C0 01 0A F4 01 10 28 48 02 52 0D 0A 05 45 4D 50 4E ..ô..(H.R...EMPN" - "000000D0 4F 1A 04 08 01 10 04 52 3B 0A 05 45 4E 41 4D 45 O......R;..ENAME" - "000000E0 10 01 1A 30 08 01 10 1D 20 14 2A 1C 0A 18 49 53 ...0.... .*...IS" - "000000F0 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 53 24 70 O-8859-1$en_US$p" @@ -8642,50 +8728,45 @@ - "00000180 01 10 04 52 0E 0A 04 43 4F 4D 4D 10 06 1A 04 08 ...R...COMM....." - "00000190 01 10 04 52 10 0A 06 44 45 50 54 4E 4F 10 07 1A ...R...DEPTNO..." - "000001A0 04 08 01 10 04 52 11 0A 07 53 4C 41 43 4B 45 52 .....R...SLACKER" - - "000001B0 10 08 1A 04 08 01 10 01 52 11 0A 07 44 45 50 54 ........R...DEPT" - - "000001C0 4E 4F 30 10 09 1A 04 08 01 10 04 52 3A 0A 04 4E NO0........R:..N" - - "000001D0 41 4D 45 10 0A 1A 30 08 01 10 1D 20 0A 2A 1C 0A AME...0.... .*.." 
- - "000001E0 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 .ISO-8859-1$en_U" - - "000001F0 53 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F S$primary(.2.ISO" - - "00000200 2D 38 38 35 39 2D 31 22 05 24 63 6F 72 30 12 10 -8859-1\".$cor0.." - - "00000210 0A 06 44 45 50 54 4E 4F 10 07 1A 04 08 01 10 04 ..DEPTNO........" - - "00000220 1A 08 12 06 0A 04 08 01 10 04 0A 7E 0A 28 74 79 ...........~.(ty" - - "00000230 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F pe.googleapis.co" - - "00000240 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 m/plan.PLogicalP" - - "00000250 72 6F 6A 65 63 74 12 52 08 02 12 06 44 45 50 54 roject.R....DEPT" - - "00000260 4E 4F 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 NO..NAME........" - - "00000270 10 04 1A 36 12 34 0A 30 08 01 10 1D 20 0A 2A 1C ...6.4.0.... .*." - - "00000280 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F ..ISO-8859-1$en_" - - "00000290 55 53 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 US$primary(.2.IS" - - "000002A0 4F 2D 38 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 79 O-8859-1...<.*ty" - - "000002B0 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F pe.googleapis.co" - - "000002C0 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 43 m/plan.PLogicalC" - - "000002D0 6F 72 72 65 6C 61 74 65 12 0E 10 03 22 05 1A 03 orrelate....\"..." - - "000002E0 0A 01 07 2A 03 0A 01 07 0A 84 03 0A 28 74 79 70 ...*........(typ" - - "000002F0 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D e.googleapis.com" - - "00000300 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 72 /plan.PLogicalPr" - - "00000310 6F 6A 65 63 74 12 D7 02 08 04 12 05 45 4D 50 4E oject.×.....EMPN" - - "00000320 4F 12 05 45 4E 41 4D 45 12 03 4A 4F 42 12 03 4D O..ENAME..JOB..M" - - "00000330 47 52 12 08 48 49 52 45 44 41 54 45 12 03 53 41 GR..HIREDATE..SA" - - "00000340 4C 12 04 43 4F 4D 4D 12 06 44 45 50 54 4E 4F 12 L..COMM..DEPTNO." - - "00000350 07 53 4C 41 43 4B 45 52 12 07 44 45 50 54 4E 4F .SLACKER..DEPTNO" - - "00000360 30 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 10 0..NAME........." - - "00000370 04 1A 36 12 34 0A 30 08 01 10 1D 20 14 2A 1C 0A ..6.4.0.... .*.." - - "00000380 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F 55 .ISO-8859-1$en_U" - - "00000390 53 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 4F S$primary(.2.ISO" - - "000003A0 2D 38 38 35 39 2D 31 10 01 1A 36 12 34 0A 30 08 -8859-1...6.4.0." - - "000003B0 01 10 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 ... .*...ISO-885" - - "000003C0 39 2D 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 9-1$en_US$primar" - - "000003D0 79 28 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 y(.2.ISO-8859-1." - - "000003E0 02 1A 0A 12 08 0A 04 08 01 10 04 10 03 1A 0C 12 ................" - - "000003F0 0A 0A 06 08 01 10 0D 20 03 10 04 1A 0A 12 08 0A ....... ........" - - "00000400 04 08 01 10 04 10 05 1A 0A 12 08 0A 04 08 01 10 ................" - - "00000410 04 10 06 1A 0A 12 08 0A 04 08 01 10 04 10 07 1A ................" - - "00000420 0A 12 08 0A 04 08 01 10 01 10 08 1A 0A 12 08 0A ................" - - "00000430 04 08 01 10 04 10 09 1A 36 12 34 0A 30 08 01 10 ........6.4.0..." - - "00000440 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 2D . .*...ISO-8859-" - - "00000450 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 28 1$en_US$primary(" - - "00000460 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 0A .2.ISO-8859-1.." + - "000001B0 10 08 1A 04 08 01 10 01 22 05 24 63 6F 72 30 12 ........\".$cor0." + - "000001C0 10 0A 06 44 45 50 54 4E 4F 10 07 1A 04 08 01 10 ...DEPTNO......." 
+ - "000001D0 04 1A 08 12 06 0A 04 08 01 10 04 0A 7E 0A 28 74 ............~.(t" + - "000001E0 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 ype.googleapis.c" + - "000001F0 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C om/plan.PLogical" + - "00000200 50 72 6F 6A 65 63 74 12 52 08 02 12 06 44 45 50 Project.R....DEP" + - "00000210 54 4E 4F 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 TNO..NAME......." + - "00000220 01 10 04 1A 36 12 34 0A 30 08 01 10 1D 20 0A 2A ....6.4.0.... .*" + - "00000230 1C 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E ...ISO-8859-1$en" + - "00000240 5F 55 53 24 70 72 69 6D 61 72 79 28 02 32 0A 49 _US$primary(.2.I" + - "00000250 53 4F 2D 38 38 35 39 2D 31 10 01 0A 3C 0A 2A 74 SO-8859-1...<.*t" + - "00000260 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 ype.googleapis.c" + - "00000270 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C om/plan.PLogical" + - "00000280 43 6F 72 72 65 6C 61 74 65 12 0E 10 03 22 05 1A Correlate....\".." + - "00000290 03 0A 01 07 2A 03 0A 01 07 0A 84 03 0A 28 74 79 ....*........(ty" + - "000002A0 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F pe.googleapis.co" + - "000002B0 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 50 m/plan.PLogicalP" + - "000002C0 72 6F 6A 65 63 74 12 D7 02 08 04 12 05 45 4D 50 roject.×.....EMP" + - "000002D0 4E 4F 12 05 45 4E 41 4D 45 12 03 4A 4F 42 12 03 NO..ENAME..JOB.." + - "000002E0 4D 47 52 12 08 48 49 52 45 44 41 54 45 12 03 53 MGR..HIREDATE..S" + - "000002F0 41 4C 12 04 43 4F 4D 4D 12 06 44 45 50 54 4E 4F AL..COMM..DEPTNO" + - "00000300 12 07 53 4C 41 43 4B 45 52 12 07 44 45 50 54 4E ..SLACKER..DEPTN" + - "00000310 4F 30 12 04 4E 41 4D 45 1A 08 12 06 0A 04 08 01 O0..NAME........" + - "00000320 10 04 1A 36 12 34 0A 30 08 01 10 1D 20 14 2A 1C ...6.4.0.... .*." + - "00000330 0A 18 49 53 4F 2D 38 38 35 39 2D 31 24 65 6E 5F ..ISO-8859-1$en_" + - "00000340 55 53 24 70 72 69 6D 61 72 79 28 02 32 0A 49 53 US$primary(.2.IS" + - "00000350 4F 2D 38 38 35 39 2D 31 10 01 1A 36 12 34 0A 30 O-8859-1...6.4.0" + - "00000360 08 01 10 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 .... .*...ISO-88" + - "00000370 35 39 2D 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 59-1$en_US$prima" + - "00000380 72 79 28 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 ry(.2.ISO-8859-1" + - "00000390 10 02 1A 0A 12 08 0A 04 08 01 10 04 10 03 1A 0C ................" + - "000003A0 12 0A 0A 06 08 01 10 0D 20 03 10 04 1A 0A 12 08 ........ ......." + - "000003B0 0A 04 08 01 10 04 10 05 1A 0A 12 08 0A 04 08 01 ................" + - "000003C0 10 04 10 06 1A 0A 12 08 0A 04 08 01 10 04 10 07 ................" + - "000003D0 1A 0A 12 08 0A 04 08 01 10 01 10 08 1A 0A 12 08 ................" + - "000003E0 0A 04 08 01 10 04 10 09 1A 36 12 34 0A 30 08 01 .........6.4.0.." + - "000003F0 10 1D 20 0A 2A 1C 0A 18 49 53 4F 2D 38 38 35 39 .. .*...ISO-8859" + - "00000400 2D 31 24 65 6E 5F 55 53 24 70 72 69 6D 61 72 79 -1$en_US$primary" + - "00000410 28 02 32 0A 49 53 4F 2D 38 38 35 39 2D 31 10 0A (.2.ISO-8859-1.." 
queryPlanText: - "LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10])" - " LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}])" diff --git a/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testTpchQueries.yaml b/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testTpchQueries.yaml index 91c9ebddb0..b6b2db031b 100644 --- a/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testTpchQueries.yaml +++ b/sabot/serializer/src/test/resources/goldenfiles/expected/TestSerializerRoundtrip.testTpchQueries.yaml @@ -43,12 +43,6 @@ - " l_returnflag," - " l_linestatus " output: - queryPlanText: - - "LogicalSort(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC])" - - " LogicalAggregate(group=[{0, 1}], sum_qty=[SUM($2)], sum_base_price=[SUM($3)], sum_disc_price=[SUM($4)], sum_charge=[SUM($5)], avg_qty=[AVG($2)], avg_price=[AVG($3)], avg_disc=[AVG($6)], count_order=[COUNT()])" - - " LogicalProject(l_returnflag=[$8], l_linestatus=[$9], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], $f4=[*($5, -(1, $6))], $f5=[*(*($5, -(1, $6)), +(1, $7))], L_DISCOUNT=[$6])" - - " LogicalFilter(condition=[<=($10, -(1998-12-01, 10368000000:INTERVAL DAY(3)))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CpEBCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISZhpkKmIKBAgBEAESFAoSTEVTU19USEFOX09SX0VRVUFMGgoSCAoECAEQChAKGjgqNgoCEAoSDAoKTUlOVVNfREFURRoLIgkKAhAKMMFSUAoaFSITCgYQEhgGIAMSBxIFAmn7IABQEgr7AwoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBLOAwgBEgxsX3JldHVybmZsYWcSDGxfbGluZXN0YXR1cxIKTF9RVUFOVElUWRIPTF9FWFRFTkRFRFBSSUNFEgMkZjQSAyRmNRIKTF9ESVNDT1VOVBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAgaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAJGgoSCAoECAEQBBAEGgoSCAoECAEQCRAFGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQBRosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEAYakgEqjwEKBAgBEAkSCgoITVVMVElQTFkaTipMCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRAFGiwqKgoECAEQCRIHCgVNSU5VUxoNIgsKAhAEEgMSAQFQBhoKEggKBAgBEAkQBhorKikKBAgBEAkSBgoEUExVUxoNIgsKAhAEEgMSAQFQBhoKEggKBAgBEAkQBxoKEggKBAgBEAkQBgqEAwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEtUCCAISJBoFCgNTVU0qCQoHc3VtX3F0eTIBAjj///////////8BSgIKABIrGgUKA1NVTSoQCg5zdW1fYmFzZV9wcmljZTIBAzj///////////8BSgIKABIrGgUKA1NVTSoQCg5zdW1fZGlzY19wcmljZTIBBDj///////////8BSgIKABInGgUKA1NVTSoMCgpzdW1fY2hhcmdlMgEFOP///////////wFKAgoAEiQaBQoDQVZHKgkKB2F2Z19xdHkyAQI4////////////AUoCCgASJhoFCgNBVkcqCwoJYXZnX3ByaWNlMgEDOP///////////wFKAgoAEiUaBQoDQVZHKgoKCGF2Z19kaXNjMgEGOP///////////wFKAgoAEicaBwoFQ09VTlQqDQoLY291bnRfb3JkZXI4////////////AUoCCgAiBAoCAAEqBAoCAAEKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIQCAMiBBABGAMiBggBEAEYAw==" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -125,6 +119,12 @@ - "00000470 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E gleapis.com/plan" - "00000480 2E 50 4C 6F 67 69 63 61 6C 53 6F 72 74 12 10 08 .PLogicalSort..." - "00000490 03 22 04 10 01 18 03 22 06 08 01 10 01 18 03 .\".....\"......." 
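The shrinking PLogicalFilter payloads in the hunks above can be read straight off the wire bytes: 0A is the tag for field 1 with wire type 2 (length-delimited), and the varint that follows is the payload size, so the old B9 03 prefix announces a 441-byte filter body while the new EA 02 announces 362 bytes. A minimal varint reader for decoding such prefixes by hand; VarintPeek is a hypothetical helper using only the standard protobuf wire format, with no Dremio or protobuf classes:

public final class VarintPeek {
  // Reads one little-endian base-128 varint starting at pos.
  static long readVarint(byte[] bytes, int pos) {
    long value = 0;
    int shift = 0;
    while (true) {
      byte b = bytes[pos++];
      value |= (long) (b & 0x7F) << shift;
      if ((b & 0x80) == 0) {
        return value;
      }
      shift += 7;
    }
  }

  public static void main(String[] args) {
    // 0A B9 03, as seen before the old PLogicalFilter payload in the dumps above.
    byte[] prefix = {(byte) 0x0A, (byte) 0xB9, 0x03};
    long tag = readVarint(prefix, 0);
    long length = readVarint(prefix, 1);
    // Prints: field=1 wiretype=2 length=441
    System.out.println("field=" + (tag >>> 3) + " wiretype=" + (tag & 7) + " length=" + length);
  }
}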
+ queryPlanText: + - "LogicalSort(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC])" + - " LogicalAggregate(group=[{0, 1}], sum_qty=[SUM($2)], sum_base_price=[SUM($3)], sum_disc_price=[SUM($4)], sum_charge=[SUM($5)], avg_qty=[AVG($2)], avg_price=[AVG($3)], avg_disc=[AVG($6)], count_order=[COUNT()])" + - " LogicalProject(l_returnflag=[$8], l_linestatus=[$9], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], $f4=[*($5, -(1, $6))], $f5=[*(*($5, -(1, $6)), +(1, $7))], L_DISCOUNT=[$6])" + - " LogicalFilter(condition=[<=($10, -(1998-12-01, 10368000000:INTERVAL DAY(3)))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "02.sql" input: @@ -177,30 +177,6 @@ - " p.p_partkey" - "limit 100 " output: - queryPlanText: - - "LogicalSort(sort0=[$0], sort1=[$2], sort2=[$1], sort3=[$3], dir0=[DESC], dir1=[ASC], dir2=[ASC], dir3=[ASC], fetch=[100])" - - " LogicalProject(s_acctbal=[$14], s_name=[$10], n_name=[$22], p_partkey=[$0], p_mfgr=[$2], s_address=[$11], s_phone=[$13], s_comment=[$15])" - - " LogicalFilter(condition=[AND(=($0, $16), =($9, $17), =($5, 41), LIKE($4, '%NICKEL':VARCHAR(7)), =($12, $21), =($23, $25), =($26, 'EUROPE'), =($19, $28))])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" - - " LogicalAggregate(group=[{}], EXPR$0=[MIN($0)])" - - " LogicalProject(PS_SUPPLYCOST=[$3])" - - " LogicalFilter(condition=[AND(=($cor0.P_PARTKEY, $0), =($5, $1), =($8, $12), =($14, $16), =($17, 'EUROPE'))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" queryPlanBinary: 
"CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKSQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhsKAmNwChV0cGNoL3N1cHBsaWVyLnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvcGFydHN1cHAucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIAhADGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggEEAUYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvcmVnaW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAYQBxgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9wYXJ0c3VwcC5wYXJxdWV0CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggJEAoYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvbmF0aW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAsQDBgBIgoiCAoCEAEgAVABCkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9yZWdpb24ucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIDRAOGAEiCiIICgIQASABUAEKnA0KJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchLwDBAPGusMKugMCgQIARABEgUKA0FORBrECirBCgoECAEQARIICgZFUVVBTFMapApCoQoKiwo6iAoK/gkQKEgCUhEKCVBfUEFSVEtFWRoECAEQBFI+CgZQX05BTUUQARoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSPgoGUF9NRkdSEAIaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUj8KB1BfQlJBTkQQAxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSPgoGUF9UWVBFEAQaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhAKBlBfU0laRRAFGgQIARAEUkMKC1BfQ09OVEFJTkVSEAYaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhcKDVBfUkVUQUlMUFJJQ0UQBxoECAEQCVJBCglQX0NPTU1FTlQQCBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSEwoJU19TVVBQS0VZEAkaBAgBEARSPgoGU19OQU1FEAoaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCVNfQUREUkVTUxALGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtTX05BVElPTktFWRAMGgQIARAEUj8KB1NfUEhPTkUQDRoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSEwoJU19BQ0NUQkFMEA4aBAgBEAlSQQoJU19DT01NRU5UEA8aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhQKClBTX1BBUlRLRVkQEBoECAEQBFIUCgpQU19TVVBQS0VZEBEaBAgBEARSFQoLUFNfQVZBSUxRVFkQEhoECAEQBFIXCg1QU19TVVBQTFlDT1NUEBMaBAgBEAlSQgoKUFNfQ09NTUVOVBAUGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX05BVElPTktFWRAVGgQIARAEUj4KBk5fTkFNRRAWGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX1JFR0lPTktFWRAXGgQIARAEUkEKCU5fQ09NTUVOVBAYGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtSX1JFR0lPTktFWRAZGgQIARAEUj4KBlJfTkFNRRAaGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglSX0NPTU1FTlQQGxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEiBSRjb3IwEhEKCVBfUEFSVEtFWRoECAEQBBoIEgYKBAgBEAQaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQBRoKEggKBAgBEAQQARoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAIGgoSCAoECAEQBBAMGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEA4aChIICgQIARAEEBAajQEqigEKBAgBEAESCAoGRVFVQUxTGjgSNg
oyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQERo+IjwKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoGRVVST1BFUBwKSQoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIdCBASDVBTX1NVUFBMWUNPU1QaChIICgQIARAJEAMKWQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEisIERIjGgUKA01JTioICgZFWFBSJDAyAQA4////////////AUoCCgAiACoACkAKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbENvcnJlbGF0ZRISCAgQEhgBIgUaAwoBACoDCgEACucECid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISuwQQExq2BCqzBAoECAEQARIFCgNBTkQaKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEBAaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQCRoKEggKBAgBEAQQERotKisKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAFGg0iCwoCEAQSAxIBKVAGGooBKocBCgQIARABEgYKBExJS0UaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHJU5JQ0tFTFAcGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAwaChIICgQIARAEEBUaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQFxoKEggKBAgBEAQQGRqNASqKAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAaGj4iPAowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgZFVVJPUEVQHBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQCRATGgoSCAoECAEQCRAcCu4DCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EsEDCBQSCXNfYWNjdGJhbBIGc19uYW1lEgZuX25hbWUSCXBfcGFydGtleRIGcF9tZmdyEglzX2FkZHJlc3MSB3NfcGhvbmUSCXNfY29tbWVudBoKEggKBAgBEAkQDho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAoaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGggSBgoECAEQBBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRALGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA8KWAoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIvCBUaDSILCgIQBBIDEgFkUAYiBBADGAIiBggCEAEYAyIGCAEQARgDIgYIAxABGAM=" queryPlanBinaryHexDump: - "00000000 0A 45 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .E.*type.googlea" @@ -467,6 +443,30 @@ - "00001050 0D 22 0B 0A 02 10 04 12 03 12 01 64 50 06 22 04 .\".........dP.\"." - "00001060 10 03 18 02 22 06 08 02 10 01 18 03 22 06 08 01 ....\".......\"..." - "00001070 10 01 18 03 22 06 08 03 10 01 18 03 ....\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$0], sort1=[$2], sort2=[$1], sort3=[$3], dir0=[DESC], dir1=[ASC], dir2=[ASC], dir3=[ASC], fetch=[100])" + - " LogicalProject(s_acctbal=[$14], s_name=[$10], n_name=[$22], p_partkey=[$0], p_mfgr=[$2], s_address=[$11], s_phone=[$13], s_comment=[$15])" + - " LogicalFilter(condition=[AND(=($0, $16), =($9, $17), =($5, 41), LIKE($4, '%NICKEL':VARCHAR(7)), =($12, $21), =($23, $25), =($26, 'EUROPE'), =($19, $28))])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" + - " LogicalAggregate(group=[{}], EXPR$0=[MIN($0)])" + - " LogicalProject(PS_SUPPLYCOST=[$3])" + - " LogicalFilter(condition=[AND(=($cor0.P_PARTKEY, $0), =($5, $1), =($8, $12), =($14, $16), =($17, 'EUROPE'))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" - description: "03.sql" input: @@ -498,17 +498,6 @@ - " o.o_orderdate" - "limit 10 " output: - queryPlanText: - - "LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC], fetch=[10])" - - " LogicalProject(l_orderkey=[$0], revenue=[$3], o_orderdate=[$1], o_shippriority=[$2])" - - " LogicalAggregate(group=[{0, 1, 2}], revenue=[SUM($3)])" - - " LogicalProject(l_orderkey=[$17], o_orderdate=[$12], o_shippriority=[$15], $f3=[*($22, -(1, $23))])" - - " LogicalFilter(condition=[AND(=($6, 'HOUSEHOLD'), =($0, $9), =($17, $8), <($12, 1995-03-25), >($27, 1995-03-25))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQqNAwondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEuECEAQa3AIq2QIKBAgBEAESBQoDQU5EGpABKo0BCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAYaQSI/CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCUhPVVNFSE9MRFAcGigqJgoECAEQARIICgZFUVVBTFMaCBIGCgQIARAEGgoSCAoECAEQBBAJGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEBEaChIICgQIARAEEAgaLiosCgQIARABEgsKCUxFU1NfVEhBThoKEggKBAgBEAoQDBoLIgkKAhAKMP5HUAoaMSovCgQIARABEg4KDEdSRUFURVJfVEhBThoKEggKBAgBEAoQGxoLIgkKAhAKMP5HUAoK/wEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QS0gEIBRIKbF9vcmRlcmtleRILb19vcmRlcmRhdGUSDm9fc2hpcHByaW9yaXR5EgMkZjMaChIICgQIARAEEBEaChIICgQIARAKEAwaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAPGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQFhosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEBcKZAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEjYIBhIkGgUKA1NVTSoJCgdyZXZlbnVlMgEDOP///////////wFKAgoAIgUKAwABAioFCgMAAQIKvQEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSkAEIBxIKbF9vcmRlcmtleRIHcmV2ZW51ZRILb19vcmRlcmRhdGUSDm9fc2hpcHByaW9yaXR5GggSBgoECAEQBBoKEggKBAgBEAkQAxoKEggKBAgBEAoQARo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIKSgoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIhCAgaDSILCgIQBBIDEgEKUAYiBggBEAMYAiIGCAIQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -597,6 +586,17 @@ - "00000530 69 63 61 6C 53 6F 72 74 12 21 08 08 1A 0D 22 0B icalSort.!....\"." - "00000540 0A 02 10 04 12 03 12 01 0A 50 06 22 06 08 01 10 .........P.\"...." - "00000550 03 18 02 22 06 08 02 10 01 18 03 ...\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC], fetch=[10])" + - " LogicalProject(l_orderkey=[$0], revenue=[$3], o_orderdate=[$1], o_shippriority=[$2])" + - " LogicalAggregate(group=[{0, 1, 2}], revenue=[SUM($3)])" + - " LogicalProject(l_orderkey=[$17], o_orderdate=[$12], o_shippriority=[$15], $f3=[*($22, -(1, $23))])" + - " LogicalFilter(condition=[AND(=($6, 'HOUSEHOLD'), =($0, $9), =($17, $8), <($12, 1995-03-25), >($27, 1995-03-25))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "04.sql" input: @@ -625,18 +625,6 @@ - "order by" - " o.o_orderpriority " output: - queryPlanText: - - "LogicalSort(sort0=[$0], dir0=[ASC])" - - " LogicalAggregate(group=[{0}], order_count=[COUNT()])" - - " LogicalProject(o_orderpriority=[$5])" - - " LogicalFilter(condition=[AND(>=($4, 1996-10-01), <($4, +(1996-10-01, 3:INTERVAL MONTH)), IS NOT NULL($9))])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" - - " LogicalProject($f0=[1:BIGINT])" - - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" - - " LogicalFilter(condition=[AND(=($0, $cor0.O_ORDERKEY), <($11, $12))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldApJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldAryBAondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEsYEEAEawQQqvgQKBAgBEAESBQoDQU5EGv8DKvwDCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQa3wNC3AMKxQM6wgMKuAMQKEgCUhIKCk9fT1JERVJLRVkaBAgBEARSEwoJT19DVVNUS0VZEAEaBAgBEARSRQoNT19PUkRFUlNUQVRVUxACGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIWCgxPX1RPVEFMUFJJQ0UQAxoECAEQCVIVCgtPX09SREVSREFURRAEGgQIARAKUkcKD09fT1JERVJQUklPUklUWRAFGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVI/CgdPX0NMRVJLEAYaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkYKDk9fU0hJUFBSSU9SSVRZEAcaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCU9fQ09NTUVOVBAIGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMSIFJGNvcjASEgoKT19PUkRFUktFWRoECAEQBBotKisKBAgBEAESCwoJTEVTU19USEFOGgoSCAoECAEQChALGgoSCAoECAEQChAMCp8FCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EvIECAISCkxfT1JERVJLRVkSCUxfUEFSVEtFWRIJTF9TVVBQS0VZEgxMX0xJTkVOVU1CRVISCkxfUVVBTlRJVFkSD0xfRVhURU5ERURQUklDRRIKTF9ESVNDT1VOVBIFTF9UQVgSDExfUkVUVVJORkxBRxIMTF9MSU5FU1RBVFVTEgpMX1NISVBEQVRFEgxMX0NPTU1JVERBVEUSDUxfUkVDRUlQVERBVEUSDkxfU0hJUElOU1RSVUNUEgpMX1NISVBNT0RFEglMX0NPTU1FTlQaCBIGCgQIARAEGgoSCAoECAEQBBABGgoSCAoECAEQBBACGgoSCAoECAEQBBADGgoSCAoECAEQBBAEGgoSCAoECAEQCRAFGgoSCAoECAEQCRAGGgoSCAoECAEQCRAHGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAkaChIICgQIARAKEAoaChIICgQIARAKEAsaChIICgQIARAKEAwaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA8KQgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIWCAMSAyRmMBoNIgsKAhAFEgMSAQFQBgpPCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSIQgEEhkaBQoDTUlOMgEAOP///////////wFKAgoAIgAqAAo+Cip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSEBAFGAEiBRoDCgEAKgMKAQAK+AEKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchLMARAGGscBKsQBCgQIARABEgUKA0FORBo6KjgKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQChAEGgsiCQoCEAowqkxQChpWKlQKBAgBEAESCwoJTEVTU19USEFOGgoSCAoECAEQChAEGjMqMQoCEAoSDwoNREFURVRJTUVfUExVUxoLIgkKAhAKMKpMUAoaDSILCgIQERIDEgEDUBEaISofCgIQARINCgtJU19OT1RfTlVMTBoKEggKBAgBEAUQCQp5Cih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0Ek0IBxIPb19vcmRlcnByaW9yaXR5GjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBQpjCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSNQgIEicaBwoFQ09VTlQqDQoLb3JkZXJfY291bnQ4////////////AUoCCgAiAwoBACoDCgEACjEKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFNvcnQSCAgJIgQQARgD" queryPlanBinaryHexDump: - "00000000 0A 47 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .G.*type.googlea" @@ -776,6 +764,18 @@ - "00000860 79 70 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 ype.googleapis.c" - "00000870 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C om/plan.PLogical" - "00000880 53 6F 72 74 12 08 08 09 22 04 10 01 18 03 Sort....\"....." 
+ queryPlanText: + - "LogicalSort(sort0=[$0], dir0=[ASC])" + - " LogicalAggregate(group=[{0}], order_count=[COUNT()])" + - " LogicalProject(o_orderpriority=[$5])" + - " LogicalFilter(condition=[AND(>=($4, 1996-10-01), <($4, +(1996-10-01, 3:INTERVAL MONTH)), IS NOT NULL($9))])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" + - " LogicalProject($f0=[1:BIGINT])" + - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" + - " LogicalFilter(condition=[AND(=($0, $cor0.O_ORDERKEY), <($11, $12))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "05.sql" input: @@ -808,22 +808,6 @@ - "order by" - " revenue desc " output: - queryPlanText: - - "LogicalSort(sort0=[$1], dir0=[DESC])" - - " LogicalAggregate(group=[{0}], revenue=[SUM($1)])" - - " LogicalProject(n_name=[$41], $f1=[*($22, -(1, $23))])" - - " LogicalFilter(condition=[AND(=($0, $9), =($17, $8), =($19, $33), =($3, $36), =($36, $40), =($42, $44), =($45, 'EUROPE'), >=($12, 1997-01-01), <($12, +(1997-01-01, 12:INTERVAL YEAR)))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvc3VwcGxpZXIucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIBBAFGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggGEAcYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvcmVnaW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAgQCRgBIgoiCAoCEAEgAVABCusECid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISvwQQChq6BCq3BAoECAEQARIFCgNBTkQaKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEAkaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQERoKEggKBAgBEAQQCBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBATGgoSCAoECAEQBBAhGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAMaChIICgQIARAEECQaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQJBoKEggKBAgBEAQQKBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAqGgoSCAoECAEQBBAsGo0BKooBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEC0aPiI8CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkVVUk9QRVAcGjoqOAoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAKEAwaCyIJCgIQCjCGTVAKGlYqVAoECAEQARILCglMRVNTX1RIQU4aChIICgQIARAKEAwaMyoxCgIQChIPCg1EQVRFVElNRV9QTFVTGgsiCQoCEAowhk1QChoNIgsKAhAPEgMSAQxQDwrGAQoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBKZAQgLEgZuX25hbWUSAyRmMRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xECkaTipMCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRAWGiwqKgoECAEQCRIHCgVNSU5VUxoNIgsKAhAEEgMSAQFQBhoKEggKBAgBEAkQFwpgCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSMggMEiQaBQoDU1VNKgkKB3JldmVudWUyAQE4////////////AUoCCgAiAwoBACoDCgEACjMKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFNvcnQSCggNIgYIARADGAI=" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -934,6 +918,22 @@ - "00000690 67 6C 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E gleapis.com/plan" - "000006A0 2E 50 4C 6F 67 69 63 61 6C 53 6F 72 74 12 0A 08 .PLogicalSort..." - "000006B0 0D 22 06 08 01 10 03 18 02 .\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$1], dir0=[DESC])" + - " LogicalAggregate(group=[{0}], revenue=[SUM($1)])" + - " LogicalProject(n_name=[$41], $f1=[*($22, -(1, $23))])" + - " LogicalFilter(condition=[AND(=($0, $9), =($17, $8), =($19, $33), =($3, $36), =($36, $40), =($42, $44), =($45, 'EUROPE'), >=($12, 1997-01-01), <($12, +(1997-01-01, 12:INTERVAL YEAR)))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" - description: "06.sql" input: @@ -950,11 +950,6 @@ - " l_discount between 0.03 - 0.01 and 0.03 + 0.01" - " and l_quantity < 24 " output: - queryPlanText: - - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" - - " LogicalProject($f0=[*($5, $6)])" - - " LogicalFilter(condition=[AND(>=($10, 1997-01-01), <($10, +(1997-01-01, 12:INTERVAL YEAR)), >=($6, -(0.03:DECIMAL(3, 2), 0.01:DECIMAL(3, 2))), <=($6, +(0.03:DECIMAL(3, 2), 0.01:DECIMAL(3, 2))), <($4, 24))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0Ct0DCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISsQMargMqqwMKBAgBEAESBQoDQU5EGjoqOAoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAKEAoaCyIJCgIQCjCGTVAKGlYqVAoECAEQARILCglMRVNTX1RIQU4aChIICgQIARAKEAoaMyoxCgIQChIPCg1EQVRFVElNRV9QTFVTGgsiCQoCEAowhk1QChoNIgsKAhAPEgMSAQxQDxpsKmoKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQCRAGGj0qOwoGEAYYAiAEEgcKBU1JTlVTGhMiEQoGEAYYAiADEgUIAhIBA1AGGhMiEQoGEAYYAiADEgUIAhIBAVAGGmgqZgoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAJEAYaPCo6CgYQBhgCIAQSBgoEUExVUxoTIhEKBhAGGAIgAxIFCAISAQNQBhoTIhEKBhAGGAIgAxIFCAISAQFQBhowKi4KBAgBEAESCwoJTEVTU19USEFOGgoSCAoECAEQBBAEGg0iCwoCEAQSAxIBGFAGCmEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSNQgBEgMkZjAaLCoqCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRAFGgoSCAoECAEQCRAGCloKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIsCAISJBoFCgNTVU0qCQoHcmV2ZW51ZTIBADj///////////8BSgIKACIAKgA=" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -1004,6 +999,11 @@ - "000002C0 12 24 1A 05 0A 03 53 55 4D 2A 09 0A 07 72 65 76 .$....SUM*...rev" - "000002D0 65 6E 75 65 32 01 00 38 FF FF FF FF FF FF FF FF enue2..8........" - "000002E0 FF 01 4A 02 0A 00 22 00 2A 00 ..J...\".*." 
+ queryPlanText: + - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" + - " LogicalProject($f0=[*($5, $6)])" + - " LogicalFilter(condition=[AND(>=($10, 1997-01-01), <($10, +(1997-01-01, 12:INTERVAL YEAR)), >=($6, -(0.03:DECIMAL(3, 2), 0.01:DECIMAL(3, 2))), <=($6, +(0.03:DECIMAL(3, 2), 0.01:DECIMAL(3, 2))), <($4, 24))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "07.sql" input: @@ -1048,22 +1048,6 @@ - " cust_nation," - " l_year " output: - queryPlanText: - - "LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[ASC], dir1=[ASC], dir2=[ASC])" - - " LogicalAggregate(group=[{0, 1, 2}], revenue=[SUM($3)])" - - " LogicalProject(supp_nation=[$41], cust_nation=[$45], l_year=[EXTRACT(FLAG(YEAR), $17)], volume=[*($12, -(1, $13))])" - - " LogicalFilter(condition=[AND(=($0, $9), =($23, $7), =($32, $24), =($3, $40), =($35, $44), OR(AND(=($41, 'EGYPT'), =($45, 'UNITED STATES')), AND(=($41, 'UNITED STATES'), =($45, 'EGYPT'))), >=($17, 1995-01-01), <=($17, 1996-12-31))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEBABGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL29yZGVycy5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvY3VzdG9tZXIucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIBBAFGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggGEAcYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvbmF0aW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAgQCRgBIgoiCAoCEAEgAVABCpQICid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIS6AcQChrjByrgBwoECAEQARIFCgNBTkQaKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEAkaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQFxoKEggKBAgBEAQQBxoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAgGgoSCAoECAEQBBAYGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAMaChIICgQIARAEECgaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQIxoKEggKBAgBEAQQLBqBBSr+BAoECAEQARIECgJPUhq2AiqzAgoECAEQARIFCgNBTkQajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQKRo9IjsKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoFRUdZUFRQHBqUASqRAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAtGkUiQwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGg1VTklURUQgU1RBVEVTUBwatgIqswIKBAgBEAESBQoDQU5EGpQBKpEBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKh
wKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xECkaRSJDCjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaDVVOSVRFRCBTVEFURVNQHBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAtGj0iOwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgVFR1lQVFAcGjoqOAoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAKEBEaCyIJCgIQCjCrR1AKGjcqNQoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAKEBEaCyIJCgIQCjCFTVAKCv8CCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EtICCAsSC3N1cHBfbmF0aW9uEgtjdXN0X25hdGlvbhIGbF95ZWFyEgZ2b2x1bWUaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRApGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQLRpgKl4KBAgBEAUSCQoHRVhUUkFDVBo/Ij0KAhAiSjUKLW9yZy5hcGFjaGUuY2FsY2l0ZS5hdmF0aWNhLnV0aWwuVGltZVVuaXRSYW5nZRIEWUVBUlAiGgoSCAoECAEQChARGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQDBosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEA0KZAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEjYIDBIkGgUKA1NVTSoJCgdyZXZlbnVlMgEDOP///////////wFKAgoAIgUKAwABAioFCgMAAQIKQQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIYCA0iBBABGAMiBggBEAEYAyIGCAIQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -1213,6 +1197,22 @@ - "00000900 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 53 m/plan.PLogicalS" - "00000910 6F 72 74 12 18 08 0D 22 04 10 01 18 03 22 06 08 ort....\".....\".." - "00000920 01 10 01 18 03 22 06 08 02 10 01 18 03 .....\"......." + queryPlanText: + - "LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[ASC], dir1=[ASC], dir2=[ASC])" + - " LogicalAggregate(group=[{0, 1, 2}], revenue=[SUM($3)])" + - " LogicalProject(supp_nation=[$41], cust_nation=[$45], l_year=[EXTRACT(FLAG(YEAR), $17)], volume=[*($12, -(1, $13))])" + - " LogicalFilter(condition=[AND(=($0, $9), =($23, $7), =($32, $24), =($3, $40), =($35, $44), OR(AND(=($41, 'EGYPT'), =($45, 'UNITED STATES')), AND(=($41, 'UNITED STATES'), =($45, 'EGYPT'))), >=($17, 1995-01-01), <=($17, 1996-12-31))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - description: "08.sql" input: @@ -1255,28 +1255,6 @@ - "order by" - " o_year " output: - queryPlanText: - - "LogicalSort(sort0=[$0], dir0=[ASC])" - - " LogicalProject(o_year=[$0], mkt_share=[/($1, $2)])" - - " LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[SUM($2)])" - - " LogicalProject(o_year=[$0], $f1=[CASE(=($2, 'EGYPT'), $1, 0:DOUBLE)], volume=[$1])" - - " LogicalProject(o_year=[EXTRACT(FLAG(YEAR), $36)], volume=[*($21, -(1, $22))], nation=[$54])" - - " LogicalFilter(condition=[AND(=($0, $17), =($9, $18), =($16, $32), =($33, $41), =($44, $49), =($51, $57), =($58, 'MIDDLE EAST'), =($12, $53), >=($36, 1995-01-01), <=($36, 1996-12-31), =($4, 'PROMO BRUSHED COPPER'))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" 
- - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" queryPlanBinary: "CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKSQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhsKAmNwChV0cGNoL3N1cHBsaWVyLnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIAhADGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL29yZGVycy5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggEEAUYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvY3VzdG9tZXIucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIBhAHGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggIEAkYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvbmF0aW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAoQCxgBIgoiCAoCEAEgAVABCkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9yZWdpb24ucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIDBANGAEiCiIICgIQASABUAEKmwYKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchLvBRAOGuoFKucFCgQIARABEgUKA0FORBooKiYKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBoKEggKBAgBEAQQERoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAJGgoSCAoECAEQBBASGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEBAaChIICgQIARAEECAaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQIRoKEggKBAgBEAQQKRoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAsGgoSCAoECAEQBBAxGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEDMaChIICgQIARAEEDkakgEqjwEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQOhpDIkEKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoLTUlERExFIEVBU1RQHBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAMGgoSCAoECAEQBBA1GjoqOAoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAKECQaCyIJCgIQCjCrR1AKGjcqNQoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAKECQaCyIJCgIQCjCFTVAKGpsBKpgBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaTCJKCjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaFFBST01PIEJSVVNIRUQgQ09QUEVSUBwKswIKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QShgIIDxIGb195ZWFyEgZ2b2x1bWUSBm5hdGlvbhpgKl4KBAgBEAUSCQoHRVhUUkFDVBo/Ij0KAhAiSjUKLW9yZy5hcGFjaGUuY2FsY2l0ZS5hdmF0aWNhLnV0aWwuVGltZVVuaXRSYW5nZRIEWUVBUlAiGgoSCAoECAEQChAkGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQFRosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEBYaOBI2CjIIARAdIICABCocChhJU08tODg1OS
0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRA2CpgCCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EusBCBASBm9feWVhchIDJGYxEgZ2b2x1bWUaCBIGCgQIARAFGrsBKrgBCgQIARAJEgYKBENBU0UajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQAho9IjsKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoFRUdZUFRQHBoKEggKBAgBEAkQARoNIgsKAhAJEgMSAQBQBhoKEggKBAgBEAkQAQpwCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSQggREhkaBQoDU1VNMgEBOP///////////wFKAgoAEhkaBQoDU1VNMgECOP///////////wFKAgoAIgMKAQAqAwoBAAp3Cih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EksIEhIGb195ZWFyEglta3Rfc2hhcmUaCBIGCgQIARAFGioqKAoECAEQCRIICgZESVZJREUaChIICgQIARAJEAEaChIICgQIARAJEAIKMQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIICBMiBBABGAM=" queryPlanBinaryHexDump: - "00000000 0A 45 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .E.*type.googlea" @@ -1448,6 +1426,28 @@ - "00000A60 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 is.com/plan.PLog" - "00000A70 69 63 61 6C 53 6F 72 74 12 08 08 13 22 04 10 01 icalSort....\"..." - "00000A80 18 03 .." + queryPlanText: + - "LogicalSort(sort0=[$0], dir0=[ASC])" + - " LogicalProject(o_year=[$0], mkt_share=[/($1, $2)])" + - " LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[SUM($2)])" + - " LogicalProject(o_year=[$0], $f1=[CASE(=($2, 'EGYPT'), $1, 0:DOUBLE)], volume=[$1])" + - " LogicalProject(o_year=[EXTRACT(FLAG(YEAR), $36)], volume=[*($21, -(1, $22))], nation=[$54])" + - " LogicalFilter(condition=[AND(=($0, $17), =($9, $18), =($16, $32), =($33, $41), =($44, $49), =($51, $57), =($58, 'MIDDLE EAST'), =($12, $53), >=($36, 1995-01-01), <=($36, 1996-12-31), =($4, 'PROMO BRUSHED COPPER'))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/region.parquet]])" - description: "09.sql" input: @@ -1485,22 +1485,6 @@ - " nation," - " o_year desc " output: - queryPlanText: - - "LogicalSort(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[DESC])" - - " LogicalAggregate(group=[{0, 1}], sum_profit=[SUM($2)])" - - " LogicalProject(nation=[$47], o_year=[EXTRACT(FLAG(YEAR), $41)], amount=[-(*($21, -(1, $22)), *($35, $20))])" - - " LogicalFilter(condition=[AND(=($9, $18), =($33, $18), =($32, $17), =($0, $17), =($37, $16), =($12, $46), LIKE($1, '%yellow%':VARCHAR(8)))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " 
LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" queryPlanBinary: "CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKSQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhsKAmNwChV0cGNoL3N1cHBsaWVyLnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIAhADGAEiCiIICgIQASABUAEKSQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhsKAmNwChV0cGNoL3BhcnRzdXBwLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAQQBRgBIgoiCAoCEAEgAVABCkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIBhAHGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggIEAkYASIKIggKAhABIAFQAQrVAwondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEqkDEAoapAMqoQMKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAkaChIICgQIARAEEBIaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQIRoKEggKBAgBEAQQEhoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAgGgoSCAoECAEQBBARGigqJgoECAEQARIICgZFUVVBTFMaCBIGCgQIARAEGgoSCAoECAEQBBARGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEECUaChIICgQIARAEEBAaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQDBoKEggKBAgBEAQQLhqLASqIAQoECAEQARIGCgRMSUtFGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARo+IjwKLhAdIAgqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCCV5ZWxsb3clUBwK9gIKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSyQIICxIGbmF0aW9uEgZvX3llYXISBmFtb3VudBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEC8aYCpeCgQIARAFEgkKB0VYVFJBQ1QaPyI9CgIQIko1Ci1vcmcuYXBhY2hlLmNhbGNpdGUuYXZhdGljYS51dGlsLlRpbWVVbml0UmFuZ2USBFlFQVJQIhoKEggKBAgBEAoQKRqQASqNAQoECAEQCRIHCgVNSU5VUxpOKkwKBAgBEAkSCgoITVVMVElQTFkaChIICgQIARAJEBUaLCoqCgQIARAJEgcKBU1JTlVTGg0iCwoCEAQSAxIBAVAGGgoSCAoECAEQCRAWGiwqKgoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQIxoKEggKBAgBEAQQFAplCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSNwgMEicaBQoDU1VNKgwKCnN1bV9wcm9maXQyAQI4////////////AUoCCgAiBAoCAAEqBAoCAAEKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIQCA0iBBABGAMiBggBEAMYAg==" queryPlanBinaryHexDump: - "00000000 0A 45 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .E.*type.googlea" @@ -1613,6 +1597,22 @@ - "000006B0 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C apis.com/plan.PL" - "000006C0 6F 67 69 63 61 6C 53 6F 72 74 12 10 08 0D 22 04 ogicalSort....\"." - "000006D0 10 01 18 03 22 06 08 01 10 03 18 02 ....\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[DESC])" + - " LogicalAggregate(group=[{0, 1}], sum_profit=[SUM($2)])" + - " LogicalProject(nation=[$47], o_year=[EXTRACT(FLAG(YEAR), $41)], amount=[-(*($21, -(1, $22)), *($35, $20))])" + - " LogicalFilter(condition=[AND(=($9, $18), =($33, $18), =($32, $17), =($0, $17), =($37, $16), =($12, $46), LIKE($1, '%yellow%':VARCHAR(8)))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - description: "10.sql" input: @@ -1650,19 +1650,6 @@ - " revenue desc" - "limit 20 " output: - queryPlanText: - - "LogicalSort(sort0=[$2], dir0=[DESC], fetch=[20])" - - " LogicalProject(c_custkey=[$0], c_name=[$1], revenue=[$7], c_acctbal=[$2], n_name=[$4], c_address=[$5], c_phone=[$3], c_comment=[$6])" - - " LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6}], revenue=[SUM($7)])" - - " LogicalProject(c_custkey=[$0], c_name=[$1], c_acctbal=[$5], c_phone=[$4], n_name=[$34], c_address=[$2], c_comment=[$7], $f7=[*($22, -(1, $23))])" - - " LogicalFilter(condition=[AND(=($0, $9), =($17, $8), >=($12, 1994-03-01), <($12, +(1994-03-01, 3:INTERVAL MONTH)), =($25, 'R'), =($3, $33))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvbmF0aW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAQQBRgBIgoiCAoCEAEgAVABCuIDCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIStgMQBhqxAyquAwoECAEQARIFCgNBTkQaKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEAkaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQERoKEggKBAgBEAQQCBo6KjgKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQChAMGgsiCQoCEAow+URQChpWKlQKBAgBEAESCwoJTEVTU19USEFOGgoSCAoECAEQChAMGjMqMQoCEAoSDwoNREFURVRJTUVfUExVUxoLIgkKAhAKMPlEUAoaDSILCgIQERIDEgEDUBEaiAEqhQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQGRo5IjcKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoBUlAcGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAMaChIICgQIARAEECEKgQQKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QS1AMIBxIJY19jdXN0a2V5EgZjX25hbWUSCWNfYWNjdGJhbBIHY19waG9uZRIGbl9uYW1lEgljX2FkZHJlc3MSCWNfY29tbWVudBIDJGY3GggSBgoECAEQBBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAEaChIICgQIARAJEAUaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQIho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAHGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQFhosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEBcKbAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEj4ICBIkGgUKA1NVTSoJCgdyZXZlbnVlMgEHOP///////////wFKAgoAIgkKBwABAgMEBQYqCQoHAAECAwQFBgrBAwoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBKUAwgJEgljX2N1c3RrZXkSBmNfbmFtZRIHcmV2ZW51ZRIJY19hY2N0YmFsEgZuX25hbWUSCWNfYWRkcmVzcxIHY19waG9uZRIJY19jb21tZW50GggSBgoECAEQBBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAEaChIICgQIARAJEAcaChIICgQIARAJEAIaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAGCkIKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFNvcnQSGQgKGg0iCwoCEAQSAxIBFFAGIgYIAhADGAI=" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -1797,6 +1784,19 @@ - "00000810 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 53 6F 72 plan.PLogicalSor" - "00000820 74 12 19 08 0A 1A 0D 22 0B 0A 02 10 04 12 03 12 t......\"........" - "00000830 01 14 50 06 22 06 08 02 10 03 18 02 ..P.\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$2], dir0=[DESC], fetch=[20])" + - " LogicalProject(c_custkey=[$0], c_name=[$1], revenue=[$7], c_acctbal=[$2], n_name=[$4], c_address=[$5], c_phone=[$3], c_comment=[$6])" + - " LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6}], revenue=[SUM($7)])" + - " LogicalProject(c_custkey=[$0], c_name=[$1], c_acctbal=[$5], c_phone=[$4], n_name=[$34], c_address=[$2], c_comment=[$7], $f7=[*($22, -(1, $23))])" + - " LogicalFilter(condition=[AND(=($0, $9), =($17, $8), >=($12, 1994-03-01), <($12, +(1994-03-01, 3:INTERVAL MONTH)), =($25, 'R'), =($3, $33))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - description: "11.sql" input: @@ -1829,28 +1829,6 @@ - "order by" - " \"value\" desc " output: - queryPlanText: - - "LogicalSort(sort0=[$1], dir0=[DESC])" - - " LogicalProject(ps_partkey=[$0], value=[$1])" - - " LogicalFilter(condition=[>($1, $2)])" - - " LogicalJoin(condition=[true], joinType=[left])" - - " LogicalAggregate(group=[{0}], value=[SUM($1)])" - - " LogicalProject(ps_partkey=[$0], $f1=[*($3, $2)])" - - " LogicalFilter(condition=[AND(=($1, $5), =($8, $12), =($13, 'JAPAN'))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalProject(EXPR$0=[*($0, 0.0001000000:DECIMAL(11, 10))])" - - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" - - " LogicalProject($f0=[*($3, $2)])" - - " LogicalFilter(condition=[AND(=($1, $5), =($8, $12), =($13, 'JAPAN'))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9wYXJ0c3VwcC5wYXJxdWV0CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CjkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEBABGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQqoAgondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEvwBEAQa9wEq9AEKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAEaChIICgQIARAEEAUaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQCBoKEggKBAgBEAQQDBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGj0iOwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgVKQVBBTlAcCncKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSSwgFEgpwc19wYXJ0a2V5EgMkZjEaCBIGCgQIARAEGiwqKgoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQAxoKEggKBAgBEAQQAgpeCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSMAgGEiIaBQoDU1VNKgcKBXZhbHVlMgEBOP///////////wFKAgoAIgMKAQAqAwoBAApJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvcGFydHN1cHAucGFycXVldApJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvc3VwcGxpZXIucGFycXVldAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIICBAJGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL25hdGlvbi5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggKEAsYASIKIggKAhABIAFQAQqoAgondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEvwBEAwa9wEq9AEKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAEaChIICgQIARAEEAUaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQCBoKEggKBAgBEAQQDBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGj0iOwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgVKQVBBTlAcCmEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSNQgNEgMkZjAaLCoqCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRADGgoSCAoECAEQBBACCk8KKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIhCA4SGRoFCgNTVU0yAQA4////////////AUoCCgAiACoACm0KKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSQQgPEgZFWFBSJDAaNSozCgQIARAJEgoKCE1VTFRJUExZGggSBgoECAEQCRoVIhMKBhAGGAogCxIHCAoSAw9CQFAGCjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggHEBAYAiIKIggKAhABIAFQAQpfCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISNBARGjAqLgoECAEQARIOCgxHUkVBVEVSX1RIQU4aChIICgQIARAJEAEaChIICgQIARAJEAIKVwoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIrCBISCnBzX3BhcnRrZXkSBXZhbHVlGggSBgoECAEQBBoKEggKBAgBEAkQAQozCiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxTb3J0EgoIEyIGCAEQAxgC" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -1984,6 +1962,28 @@ - "00000800 65 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D e.googleapis.com" - "00000810 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 53 6F /plan.PLogicalSo" - "00000820 72 74 12 0A 08 13 22 06 08 01 10 03 18 02 rt....\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$1], dir0=[DESC])" + - " LogicalProject(ps_partkey=[$0], value=[$1])" + - " LogicalFilter(condition=[>($1, $2)])" + - " LogicalJoin(condition=[true], joinType=[left])" + - " LogicalAggregate(group=[{0}], value=[SUM($1)])" + - " LogicalProject(ps_partkey=[$0], $f1=[*($3, $2)])" + - " LogicalFilter(condition=[AND(=($1, $5), =($8, $12), =($13, 'JAPAN'))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalProject(EXPR$0=[*($0, 0.0001000000:DECIMAL(11, 10))])" + - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" + - " LogicalProject($f0=[*($3, $2)])" + - " LogicalFilter(condition=[AND(=($1, $5), =($8, $12), =($13, 'JAPAN'))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - description: "12.sql" input: @@ -2017,14 +2017,6 @@ - "order by" - " l.l_shipmode " output: - queryPlanText: - - "LogicalSort(sort0=[$0], dir0=[ASC])" - - " LogicalAggregate(group=[{0}], high_line_count=[SUM($1)], low_line_count=[SUM($2)])" - - " LogicalProject(l_shipmode=[$23], $f1=[CASE(OR(=($5, '1-URGENT'), =($5, '2-HIGH')), 1, 0)], $f2=[CASE(AND(<>($5, '1-URGENT'), <>($5, '2-HIGH')), 1, 0)])" - - " LogicalFilter(condition=[AND(=($0, $9), OR(=($23, 'TRUCK':VARCHAR(5)), =($23, 'REG AIR':VARCHAR(7))), <($20, $21), <($19, $20), >=($21, 1994-01-01), <($21, +(1994-01-01, 12:INTERVAL YEAR)))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldApJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCosFCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIS3wQQAhraBCrXBAoECAEQARIFCgNBTkQaKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEAkaqwIqqAIKBAgBEAESBAoCT1IaigEqhwEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFxo7IjkKLhAdIAUqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBVRSVUNLUBwajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFxo9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB1JFRyBBSVJQHBotKisKBAgBEAESCwoJTEVTU19USEFOGgoSCAoECAEQChAUGgoSCAoECAEQChAVGi0qKwoECAEQARILCglMRVNTX1RIQU4aChIICgQIARAKEBMaChIICgQIARAKEBQaOio4CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAoQFRoLIgkKAhAKML5EUAoaVipUCgQIARABEgsKCUxFU1NfVEhBThoKEggKBAgBEAoQFRozKjEKAhAKEg8KDURBVEVUSU1FX1BMVVMaCyIJCgIQCjC+RFAKGg0iCwoCEA8SAxIBDFAPCtAGCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EqMGCAMSCmxfc2hpcG1vZGUSAyRmMRIDJGYyGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFxrhAireAgoCEAQSBgoEQ0FTRRqxAiquAgoECAEQARIECgJPUhqPASqMAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAFGkAiPgowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGggxLVVSR0VOVFAcGo0BKooBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAUaPiI8CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBjItSElHSFAcGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAFAGGuoCKucCCgIQBBIGCgRDQVNFGroCKrcCCgQIARABEgUKA0FORBqTASqQAQoECAEQARIMCgpOT1RfRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBRpAIj4KMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoIMS1VUkdFTlRQHBqRASqOAQoECAEQARIMCgpOT1RfRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBRo+IjwKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoGMi1ISUdIUBwaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgEAUAYKlQEKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRJnCAQSLBoFCgNTVU0qEQoPaGlnaF9saW5lX2NvdW50MgEBOP///////////wFKAgoAEisaBQoDU1VNKhAKDmxvd19saW5lX2NvdW50MgECOP///////////wFKAgoAIgMKAQAqAwoBAAoxCiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxTb3J0EggIBSIEEAEYAw==" queryPlanBinaryHexDump: - "00000000 0A 47 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .G.*type.googlea" @@ -2147,6 +2139,14 @@ - "00000750 2E 67 6F 6F 67 6C 65 61 70 69 73 2E 63 6F 6D 2F .googleapis.com/" - "00000760 70 6C 61 6E 2E 50 4C 6F 67 69 63 61 6C 53 6F 72 plan.PLogicalSor" - "00000770 74 12 08 08 05 22 04 10 01 18 03 t....\"....." 
+ queryPlanText: + - "LogicalSort(sort0=[$0], dir0=[ASC])" + - " LogicalAggregate(group=[{0}], high_line_count=[SUM($1)], low_line_count=[SUM($2)])" + - " LogicalProject(l_shipmode=[$23], $f1=[CASE(OR(=($5, '1-URGENT'), =($5, '2-HIGH')), 1, 0)], $f2=[CASE(AND(<>($5, '1-URGENT'), <>($5, '2-HIGH')), 1, 0)])" + - " LogicalFilter(condition=[AND(=($0, $9), OR(=($23, 'TRUCK':VARCHAR(5)), =($23, 'REG AIR':VARCHAR(7))), <($20, $21), <($19, $20), >=($21, 1994-01-01), <($21, +(1994-01-01, 12:INTERVAL YEAR)))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "13.sql" input: @@ -2173,17 +2173,6 @@ - " custdist desc," - " c_count desc " output: - queryPlanText: - - "LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[DESC])" - - " LogicalAggregate(group=[{0}], custdist=[COUNT()])" - - " LogicalProject(c_count=[$1])" - - " LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)])" - - " LogicalProject(c_custkey=[$0], O_ORDERKEY=[$8])" - - " LogicalProject(C_CUSTKEY=[$0], C_NAME=[$1], C_ADDRESS=[$2], C_NATIONKEY=[$3], C_PHONE=[$4], C_ACCTBAL=[$5], C_MKTSEGMENT=[$6], C_COMMENT=[$7], O_ORDERKEY=[$8], O_CUSTKEY=[$9], O_ORDERSTATUS=[$10], O_TOTALPRICE=[$11], O_ORDERDATE=[$12], O_ORDERPRIORITY=[$13], O_CLERK=[$14], O_SHIPPRIORITY=[$15], O_COMMENT=[$16])" - - " LogicalJoin(condition=[AND(=($0, $9), $17)], joinType=[left])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalProject(O_ORDERKEY=[$0], O_CUSTKEY=[$1], O_ORDERSTATUS=[$2], O_TOTALPRICE=[$3], O_ORDERDATE=[$4], O_ORDERPRIORITY=[$5], O_CLERK=[$6], O_SHIPPRIORITY=[$7], O_COMMENT=[$8], $f9=[NOT(LIKE($8, '%special%requests%':VARCHAR(18)))])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAqlBQoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBL4BAgBEgpPX09SREVSS0VZEglPX0NVU1RLRVkSDU9fT1JERVJTVEFUVVMSDE9fVE9UQUxQUklDRRILT19PUkRFUkRBVEUSD09fT1JERVJQUklPUklUWRIHT19DTEVSSxIOT19TSElQUFJJT1JJVFkSCU9fQ09NTUVOVBIDJGY5GggSBgoECAEQBBoKEggKBAgBEAQQARo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaChIICgQIARAJEAMaChIICgQIARAKEAQaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAFGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAIGqgBKqUBCgQIARABEgUKA05PVBqVASqSAQoECAEQARIGCgRMSUtFGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBpIIkYKLhAdIBIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaEiVzcGVjaWFsJXJlcXVlc3RzJVAcCnQKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SSxACGAIiRSpDCgQIARABEgUKA0FORBooKiYKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBoKEggKBAgBEAQQCRoKEggKBAgBEAEQEQqTBwoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBLmBggDEglDX0NVU1RLRVkSBkNfTkFNRRIJQ19BRERSRVNTEgtDX05BVElPTktFWRIHQ19QSE9ORRIJQ19BQ0NUQkFMEgxDX01LVFNFR01FTlQSCUNfQ09NTUVOVBIKT19PUkRFUktFWRIJT19DVVNUS0VZEg1PX09SREVSU1RBVFVTEgxPX1RPVEFMUFJJQ0USC09fT1JERVJEQVRFEg9PX09SREVSUFJJT1JJVFkSB09fQ0xFUksSDk9fU0hJUFBSSU9SSVRZEglPX0NPTU1FTlQaCBIGCgQIARAEGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAIaChIICgQIARAEEAMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGgoSCAoECAEQCRAFGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAcaChIICgQIARAEEAgaChIICgQIARAEEAkaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAKGgoSCAoECAEQCRALGgoSCAoECAEQChAMGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA4aOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAPGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQEApbCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0Ei8IBBIJY19jdXN0a2V5EgpPX09SREVSS0VZGggSBgoECAEQBBoKEggKBAgBEAQQCAphCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSMwgFEiUaBwoFQ09VTlQqCAoGRVhQUiQxMgEBOP///////////wFKAgoAIgMKAQAqAwoBAApBCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EhUIBhIHY19jb3VudBoIEgYKAhAFEAEKYAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEjIIBxIkGgcKBUNPVU5UKgoKCGN1c3RkaXN0OP///////////wFKAgoAIgMKAQAqAwoBAAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxTb3J0EhAICCIGCAEQAxgCIgQQAxgC" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -2329,6 +2318,17 @@ - "000008C0 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C 6F 67 69 63 .com/plan.PLogic" - "000008D0 61 6C 53 6F 72 74 12 10 08 08 22 06 08 01 10 03 alSort....\"....." - "000008E0 18 02 22 04 10 03 18 02 ..\"....." 
+ queryPlanText: + - "LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[DESC])" + - " LogicalAggregate(group=[{0}], custdist=[COUNT()])" + - " LogicalProject(c_count=[$1])" + - " LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)])" + - " LogicalProject(c_custkey=[$0], O_ORDERKEY=[$8])" + - " LogicalProject(C_CUSTKEY=[$0], C_NAME=[$1], C_ADDRESS=[$2], C_NATIONKEY=[$3], C_PHONE=[$4], C_ACCTBAL=[$5], C_MKTSEGMENT=[$6], C_COMMENT=[$7], O_ORDERKEY=[$8], O_CUSTKEY=[$9], O_ORDERSTATUS=[$10], O_TOTALPRICE=[$11], O_ORDERDATE=[$12], O_ORDERPRIORITY=[$13], O_CLERK=[$14], O_SHIPPRIORITY=[$15], O_COMMENT=[$16])" + - " LogicalJoin(condition=[AND(=($0, $9), $17)], joinType=[left])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalProject(O_ORDERKEY=[$0], O_CUSTKEY=[$1], O_ORDERSTATUS=[$2], O_TOTALPRICE=[$3], O_ORDERDATE=[$4], O_ORDERPRIORITY=[$5], O_CLERK=[$6], O_SHIPPRIORITY=[$7], O_COMMENT=[$8], $f9=[NOT(LIKE($8, '%special%requests%':VARCHAR(18)))])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - description: "14.sql" input: @@ -2347,14 +2347,6 @@ - " and l.l_shipdate >= date '1994-08-01'" - " and l.l_shipdate < date '1994-08-01' + interval '1' month " output: - queryPlanText: - - "LogicalProject(promo_revenue=[/(*(100.00:DECIMAL(5, 2), $0), $1)])" - - " LogicalAggregate(group=[{}], agg#0=[SUM($0)], agg#1=[SUM($1)])" - - " LogicalProject($f0=[CASE(LIKE($20, 'PROMO%':VARCHAR(6)), *($5, -(1, $6)), 0:DOUBLE)], $f1=[*($5, -(1, $6))])" - - " LogicalFilter(condition=[AND(=($1, $16), >=($10, 1994-08-01), <($10, +(1994-08-01, 1:INTERVAL MONTH)))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQqBAgondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEtUBEAIa0AEqzQEKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAEaChIICgQIARAEEBAaOio4CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAoQChoLIgkKAhAKMJJGUAoaVipUCgQIARABEgsKCUxFU1NfVEhBThoKEggKBAgBEAoQChozKjEKAhAKEg8KDURBVEVUSU1FX1BMVVMaCyIJCgIQCjCSRlAKGg0iCwoCEBESAxIBAVARCogDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EtsCCAMSAyRmMBIDJGYxGvwBKvkBCgQIARAJEgYKBENBU0UaiQEqhgEKBAgBEAESBgoETElLRRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBQaPCI6Ci4QHSAGKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgZQUk9NTyVQHBpOKkwKBAgBEAkSCgoITVVMVElQTFkaChIICgQIARAJEAUaLCoqCgQIARAJEgcKBU1JTlVTGg0iCwoCEAQSAxIBAVAGGgoSCAoECAEQCRAGGg0iCwoCEAkSAxIBAFAGGk4qTAoECAEQCRIKCghNVUxUSVBMWRoKEggKBAgBEAkQBRosKioKBAgBEAkSBwoFTUlOVVMaDSILCgIQBBIDEgEBUAYaChIICgQIARAJEAYKagoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEjwIBBIZGgUKA1NVTTIBADj///////////8BSgIKABIZGgUKA1NVTTIBATj///////////8BSgIKACIAKgAKkwEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSZwgFEg1wcm9tb19yZXZlbnVlGlQqUgoECAEQCRIICgZESVZJREUaNCoyCgQIARAJEgoKCE1VTFRJUExZGhQiEgoGEAYYAiAFEgYIAhICJxBQBhoIEgYKBAgBEAkaChIICgQIARAJEAE=" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -2427,8 +2419,23 @@ - "00000430 4C 59 1A 14 22 12 0A 06 10 06 18 02 20 05 12 06 LY..\"....... ..." 
- "00000440 08 02 12 02 27 10 50 06 1A 08 12 06 0A 04 08 01 ....'.P........." - "00000450 10 09 1A 0A 12 08 0A 04 08 01 10 09 10 01 .............." + queryPlanText: + - "LogicalProject(promo_revenue=[/(*(100.00:DECIMAL(5, 2), $0), $1)])" + - " LogicalAggregate(group=[{}], agg#0=[SUM($0)], agg#1=[SUM($1)])" + - " LogicalProject($f0=[CASE(LIKE($20, 'PROMO%':VARCHAR(6)), *($5, -(1, $6)), 0:DOUBLE)], $f1=[*($5, -(1, $6))])" + - " LogicalFilter(condition=[AND(=($1, $16), >=($10, 1994-08-01), <($10, +(1994-08-01, 1:INTERVAL MONTH)))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - description: "15.sql" + exceptionMessage: + - "Encountered \"create\" at line 4, column 1." + - "Was expecting one of:" + - " " + - " \".\" ..." + - " \"[\" ..." + - " " input: - "-- tpch15 using 1395599672 as a seed to the RNG" - "use dfs_test -- views can only be created in dfs schema" @@ -2466,7 +2473,6 @@ - " s.s_suppkey " - "" - "drop view revenue0 " - exceptionMessage: "Failure parsing the query." - description: "16.sql" input: @@ -2485,44 +2491,23 @@ - " and p.p_type not like 'MEDIUM PLATED%'" - " and p.p_size in (38, 2, 8, 31, 44, 5, 14, 24)" - " and ps.ps_suppkey not in (" - - " select" - - " s.s_suppkey" - - " from" - - " cp.\"tpch/supplier.parquet\" s" - - " where" - - " s.s_comment like '%Customer%Complaints%'" - - " )" - - "group by" - - " p.p_brand," - - " p.p_type," - - " p.p_size" - - "order by" - - " supplier_cnt desc," - - " p.p_brand," - - " p.p_type," - - " p.p_size " - output: - queryPlanText: - - "LogicalSort(sort0=[$3], sort1=[$0], sort2=[$1], sort3=[$2], dir0=[DESC], dir1=[ASC], dir2=[ASC], dir3=[ASC])" - - " LogicalAggregate(group=[{0, 1, 2}], supplier_cnt=[COUNT(DISTINCT $3)])" - - " LogicalProject(p_brand=[$8], p_type=[$9], p_size=[$10], PS_SUPPKEY=[$1])" - - " LogicalFilter(condition=[AND(=($5, $0), <>($8, 'Brand#21'), NOT(LIKE($9, 'MEDIUM PLATED%':VARCHAR(14))), OR(=($10, 38), =($10, 2), =($10, 8), =($10, 31), =($10, 44), =($10, 5), =($10, 14), =($10, 24)), NOT(CASE(=($14, 0), false, IS NOT NULL($18), true, IS NULL($16), null:BOOLEAN, <($15, $14), null:BOOLEAN, false)))])" - - " LogicalJoin(condition=[=($16, $17)], joinType=[left])" - - " LogicalProject(PS_PARTKEY=[$0], PS_SUPPKEY=[$1], PS_AVAILQTY=[$2], PS_SUPPLYCOST=[$3], PS_COMMENT=[$4], P_PARTKEY=[$5], P_NAME=[$6], P_MFGR=[$7], P_BRAND=[$8], P_TYPE=[$9], P_SIZE=[$10], P_CONTAINER=[$11], P_RETAILPRICE=[$12], P_COMMENT=[$13], $f0=[$14], $f1=[$15], PS_SUPPKEY0=[$1])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)])" - - " LogicalProject(s_suppkey=[$0], $f1=[1:BIGINT])" - - " LogicalProject(s_suppkey=[$0])" - - " LogicalFilter(condition=[LIKE($6, '%Customer%Complaints%':VARCHAR(21))])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalAggregate(group=[{0}], agg#0=[MIN($1)])" - - " LogicalProject(s_suppkey=[$0], $f1=[1:BIGINT])" - - " LogicalProject(s_suppkey=[$0])" - - " LogicalFilter(condition=[LIKE($6, '%Customer%Complaints%':VARCHAR(21))])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " select" + - " s.s_suppkey" + - " from" + - " cp.\"tpch/supplier.parquet\" s" + - " where" + - " s.s_comment like '%Customer%Complaints%'" 
+ - " )" + - "group by" + - " p.p_brand," + - " p.p_type," + - " p.p_size" + - "order by" + - " supplier_cnt desc," + - " p.p_brand," + - " p.p_type," + - " p.p_size " + output: queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9wYXJ0c3VwcC5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvc3VwcGxpZXIucGFycXVldArJAQondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEp0BEAMamAEqlQEKBAgBEAESBgoETElLRRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAYaSyJJCi4QHSAVKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGhUlQ3VzdG9tZXIlQ29tcGxhaW50cyVQHApDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EhcIBBIJc19zdXBwa2V5GggSBgoECAEQBApXCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EisIBRIJc19zdXBwa2V5EgMkZjEaCBIGCgQIARAEGg0iCwoCEAUSAxIBAVAGCmsKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRI9CAYSGBoHCgVDT1VOVDj///////////8BSgIKABIbGgcKBUNPVU5UMgEAOP///////////wFKAgoAIgAqAAo7CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhIIAhAHGAEiCiIICgIQASABUAEK6QUKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSvAUICBIKUFNfUEFSVEtFWRIKUFNfU1VQUEtFWRILUFNfQVZBSUxRVFkSDVBTX1NVUFBMWUNPU1QSClBTX0NPTU1FTlQSCVBfUEFSVEtFWRIGUF9OQU1FEgZQX01GR1ISB1BfQlJBTkQSBlBfVFlQRRIGUF9TSVpFEgtQX0NPTlRBSU5FUhINUF9SRVRBSUxQUklDRRIJUF9DT01NRU5UEgMkZjASAyRmMRILUFNfU1VQUEtFWTAaCBIGCgQIARAEGgoSCAoECAEQBBABGgoSCAoECAEQBBACGgoSCAoECAEQCRADGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoKEggKBAgBEAQQBRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAYaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAHGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAkaChIICgQIARAEEAoaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRALGgoSCAoECAEQCRAMGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDRoIEgYKAhAFEA4aCBIGCgIQBRAPGgoSCAoECAEQBBABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CskBCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISnQEQChqYASqVAQoECAEQARIGCgRMSUtFGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBhpLIkkKLhAdIBUqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaFSVDdXN0b21lciVDb21wbGFpbnRzJVAcCkMKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSFwgLEglzX3N1cHBrZXkaCBIGCgQIARAEClcKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSKwgMEglzX3N1cHBrZXkSAyRmMRoIEgYKBAgBEAQaDSILCgIQBRIDEgEBUAYKVQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEicIDRIZGgUKA01JTjIBATj///////////8BSgIKACIDCgEAKgMKAQAKWwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIyCAkQDhgCIioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEBAaChIICgQIARAEEBEKqwgKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchL/BxAPGvoHKvcHCgQIARABEgUKA0FORBooKiYKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAFGggSBgoECAEQBBqTASqQAQoECAEQARIMCgpOT1RfRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBpAIj4KMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoIQnJhbmQjMjFQHBqkASqhAQoECAEQARIFCgNOT1QakQEqjgEKBAgBEAESBgoETElLRRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSg
CMgpJU08tODg1OS0xEAkaRCJCCi4QHSAOKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGg5NRURJVU0gUExBVEVEJVAcGocDKoQDCgQIARABEgQKAk9SGi0qKwoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAoaDSILCgIQBBIDEgEmUAYaLSorCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQChoNIgsKAhAEEgMSAQJQBhotKisKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAKGg0iCwoCEAQSAxIBCFAGGi0qKwoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAoaDSILCgIQBBIDEgEfUAYaLSorCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQChoNIgsKAhAEEgMSASxQBhotKisKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAKGg0iCwoCEAQSAxIBBVAGGi0qKwoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAoaDSILCgIQBBIDEgEOUAYaLSorCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQChoNIgsKAhAEEgMSARhQBhr2ASrzAQoECAEQARIFCgNOT1Qa4wEq4AEKBAgBEAESBgoEQ0FTRRopKicKAhABEggKBkVRVUFMUxoIEgYKAhAFEA4aDSILCgIQBRIDEgEAUAYaCiIICgIQASAAUAEaISofCgIQARINCgtJU19OT1RfTlVMTBoKEggKBAgBEAUQEhoKIggKAhABIAFQARodKhsKAhABEgkKB0lTX05VTEwaChIICgQIARAEEBAaCiIICgQIARABUCAaJyolCgIQARILCglMRVNTX1RIQU4aCBIGCgIQBRAPGggSBgoCEAUQDhoKIggKBAgBEAFQIBoKIggKAhABIABQAQrgAQoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBKzAQgQEgdwX2JyYW5kEgZwX3R5cGUSBnBfc2l6ZRIKUFNfU1VQUEtFWRo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAgaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAJGgoSCAoECAEQBBAKGgoSCAoECAEQBBABCm0KKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRI/CBESLQgBGgcKBUNPVU5UKg4KDHN1cHBsaWVyX2NudDIBAzj///////////8BSgIKACIFCgMAAQIqBQoDAAECCkkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFNvcnQSIAgSIgYIAxADGAIiBBABGAMiBggBEAEYAyIGCAIQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -2754,6 +2739,27 @@ - "00000E20 6F 67 69 63 61 6C 53 6F 72 74 12 20 08 12 22 06 ogicalSort. ..\"." - "00000E30 08 03 10 03 18 02 22 04 10 01 18 03 22 06 08 01 ......\".....\"..." - "00000E40 10 01 18 03 22 06 08 02 10 01 18 03 ....\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$3], sort1=[$0], sort2=[$1], sort3=[$2], dir0=[DESC], dir1=[ASC], dir2=[ASC], dir3=[ASC])" + - " LogicalAggregate(group=[{0, 1, 2}], supplier_cnt=[COUNT(DISTINCT $3)])" + - " LogicalProject(p_brand=[$8], p_type=[$9], p_size=[$10], PS_SUPPKEY=[$1])" + - " LogicalFilter(condition=[AND(=($5, $0), <>($8, 'Brand#21'), NOT(LIKE($9, 'MEDIUM PLATED%':VARCHAR(14))), OR(=($10, 38), =($10, 2), =($10, 8), =($10, 31), =($10, 44), =($10, 5), =($10, 14), =($10, 24)), NOT(CASE(=($14, 0), false, IS NOT NULL($18), true, IS NULL($16), null:BOOLEAN, <($15, $14), null:BOOLEAN, false)))])" + - " LogicalJoin(condition=[=($16, $17)], joinType=[left])" + - " LogicalProject(PS_PARTKEY=[$0], PS_SUPPKEY=[$1], PS_AVAILQTY=[$2], PS_SUPPLYCOST=[$3], PS_COMMENT=[$4], P_PARTKEY=[$5], P_NAME=[$6], P_MFGR=[$7], P_BRAND=[$8], P_TYPE=[$9], P_SIZE=[$10], P_CONTAINER=[$11], P_RETAILPRICE=[$12], P_COMMENT=[$13], $f0=[$14], $f1=[$15], PS_SUPPKEY0=[$1])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)])" + - " LogicalProject(s_suppkey=[$0], $f1=[1:BIGINT])" + - " LogicalProject(s_suppkey=[$0])" + - " LogicalFilter(condition=[LIKE($6, '%Customer%Complaints%':VARCHAR(21))])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalAggregate(group=[{0}], agg#0=[MIN($1)])" + - " LogicalProject(s_suppkey=[$0], $f1=[1:BIGINT])" + - " LogicalProject(s_suppkey=[$0])" + - " LogicalFilter(condition=[LIKE($6, '%Customer%Complaints%':VARCHAR(21))])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - description: "17.sql" input: @@ -2776,20 +2782,6 @@ - " l2.l_partkey = p.p_partkey" - " ) " output: - queryPlanText: - - "LogicalProject(avg_yearly=[/($0, 7.0:DECIMAL(2, 1))])" - - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" - - " LogicalProject(L_EXTENDEDPRICE=[$5])" - - " LogicalFilter(condition=[AND(=($16, $1), =($19, 'Brand#13'), =($22, 'JUMBO CAN'), <($4, $25))])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{16}])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalProject(EXPR$0=[*(0.2:DECIMAL(2, 1), $0)])" - - " LogicalAggregate(group=[{}], agg#0=[AVG($0)])" - - " LogicalProject(L_QUANTITY=[$4])" - - " LogicalFilter(condition=[=($1, $cor0.P_PARTKEY)])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldAqYCQondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEuwIEAMa5wgq5AgKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBABGsUIQsIICqoIOqcICp0IEChIAlISCgpMX09SREVSS0VZGgQIARAEUhMKCUxfUEFSVEtFWRABGgQIARAEUhMKCUxfU1VQUEtFWRACGgQIARAEUhYKDExfTElORU5VTUJFUhADGgQIARAEUhQKCkxfUVVBTlRJVFkQBBoECAEQBFIZCg9MX0VYVEVOREVEUFJJQ0UQBRoECAEQCVIUCgpMX0RJU0NPVU5UEAYaBAgBEAlSDwoFTF9UQVgQBxoECAEQCVJECgxMX1JFVFVSTkZMQUcQCBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSRAoMTF9MSU5FU1RBVFVTEAkaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhQKCkxfU0hJUERBVEUQChoECAEQClIWCgxMX0NPTU1JVERBVEUQCxoECAEQClIXCg1MX1JFQ0VJUFREQVRFEAwaBAgBEApSRgoOTF9TSElQSU5TVFJVQ1QQDRoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSQgoKTF9TSElQTU9ERRAOGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglMX0NPTU1FTlQQDxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSEwoJUF9QQVJUS0VZEBAaBAgBEARSPgoGUF9OQU1FEBEaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUj4KBlBfTUZHUhASGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVI/CgdQX0JSQU5EEBMaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUj4KBlBfVFlQRRAUGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIQCgZQX1NJWkUQFRoECAEQBFJDCgtQX0NPTlRBSU5FUhAWGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIXCg1QX1JFVEFJTFBSSUNFEBcaBAgBEAlSQQoJUF9DT01NRU5UEBgaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBITCglQX1BBUlRLRVkQEBoECAEQBApGCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EhoIBBIKTF9RVUFOVElUWRoKEggKBAgBEAQQBApPCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSIQgFEhkaBQoDQVZHMgEAOP///////////wFKAgoAIgAqAAprCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0Ej8IBhIGRVhQUiQwGjMqMQoECAEQCRIKCghNVUxUSVBMWRoTIhEKBhAGGAEgAhIFCAESAQJQBhoIEgYKBAgBEAkKQAoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQ29ycmVsYXRlEhIIAhAHGAEiBRoDCgEQKgMKARAKwQMKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKVAxAIGpADKo0DCgQIARABEgUKA0FORBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAQGgoSCAoECAEQBBABGo8BKowBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBMaQCI+CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCEJyYW5kIzEzUBwakAEqjQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFhpBIj8KMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoJSlVNQk8gQ0FOUBwaLSorCgQIARABEgsKCUxFU1NfVEhBThoKEggKBAgBEAQQBBoKEggKBAgBEAkQGQpLCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0Eh8ICRIPTF9FWFRFTkRFRFBSSUNFGgoSCAoECAEQCRAFCk8KKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIhCAoSGRoFCgNTVU0yAQA4////////////AUoCCgAiACoACm0KKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSQQgLEgphdmdfeWVhcmx5GjEqLwoECAEQCRIICgZESVZJREUaCBIGCgQIARAJGhMiEQoGEAYYASACEgUIARIBRlAG" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -2949,6 +2941,20 @@ - "000009A0 01 10 09 12 08 0A 06 44 49 56 49 44 45 1A 08 12 .......DIVIDE..." 
- "000009B0 06 0A 04 08 01 10 09 1A 13 22 11 0A 06 10 06 18 .........\"......" - "000009C0 01 20 02 12 05 08 01 12 01 46 50 06 . .......FP." + queryPlanText: + - "LogicalProject(avg_yearly=[/($0, 7.0:DECIMAL(2, 1))])" + - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" + - " LogicalProject(L_EXTENDEDPRICE=[$5])" + - " LogicalFilter(condition=[AND(=($16, $1), =($19, 'Brand#13'), =($22, 'JUMBO CAN'), <($4, $25))])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{16}])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalProject(EXPR$0=[*(0.2:DECIMAL(2, 1), $0)])" + - " LogicalAggregate(group=[{}], agg#0=[AVG($0)])" + - " LogicalProject(L_QUANTITY=[$4])" + - " LogicalFilter(condition=[=($1, $cor0.P_PARTKEY)])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "18.sql" input: @@ -2987,23 +2993,6 @@ - " o.o_orderdate" - "limit 100 " output: - queryPlanText: - - "LogicalSort(sort0=[$4], sort1=[$3], dir0=[DESC], dir1=[ASC], fetch=[100])" - - " LogicalAggregate(group=[{0, 1, 2, 3, 4}], EXPR$5=[SUM($5)])" - - " LogicalProject(c_name=[$1], c_custkey=[$0], o_orderkey=[$8], o_orderdate=[$12], o_totalprice=[$11], L_QUANTITY=[$21])" - - " LogicalFilter(condition=[AND(true, =($0, $9), =($8, $17))])" - - " LogicalJoin(condition=[=($8, $33)], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalAggregate(group=[{0}])" - - " LogicalProject(l_orderkey=[$0])" - - " LogicalFilter(condition=[>($1, 300)])" - - " LogicalAggregate(group=[{0}], agg#0=[SUM($1)])" - - " LogicalProject(l_orderkey=[$0], L_QUANTITY=[$4])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9vcmRlcnMucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQpJCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGwoCY3AKFXRwY2gvbGluZWl0ZW0ucGFycXVldApcCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EjAIBRIKbF9vcmRlcmtleRIKTF9RVUFOVElUWRoIEgYKBAgBEAQaChIICgQIARAEEAQKVQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEicIBhIZGgUKA1NVTTIBATj///////////8BSgIKACIDCgEAKgMKAQAKYwondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEjgQBxo0KjIKBAgBEAESDgoMR1JFQVRFUl9USEFOGgoSCAoECAEQBRABGg4iDAoCEAQSBBICASxQBgpECih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EhgICBIKbF9vcmRlcmtleRoIEgYKBAgBEAQKOgoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEgwICSIDCgEAKgMKAQAKWwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIyCAQQChgBIioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAgaChIICgQIARAEECEKoAEKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchJ1EAsacSpvCgQIARABEgUKA0FORBoKIggKAhABIAFQARooKiYKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBoKEggKBAgBEAQQCRoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAIGgoSCAoECAEQBBARCukBCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0ErwBCAwSBmNfbmFtZRIJY19jdXN0a2V5EgpvX29yZGVya2V5EgtvX29yZGVyZGF0ZRIMb190b3RhbHByaWNlEgpMX1FVQU5USVRZGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQARoIEgYKBAgBEAQaChIICgQIARAEEAgaChIICgQIARAKEAwaChIICgQIARAJEAsaChIICgQIARAEEBUKZwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEjkIDRIjGgUKA1NVTSoICgZFWFBSJDUyAQU4////////////AUoCCgAiBwoFAAECAwQqBwoFAAECAwQKSgoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsU29ydBIhCA4aDSILCgIQBBIDEgFkUAYiBggEEAMYAiIGCAMQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -3100,6 +3089,23 @@ - "000005B0 50 4C 6F 67 69 63 61 6C 53 6F 72 74 12 21 08 0E PLogicalSort.!.." - "000005C0 1A 0D 22 0B 0A 02 10 04 12 03 12 01 64 50 06 22 ..\".........dP.\"" - "000005D0 06 08 04 10 03 18 02 22 06 08 03 10 01 18 03 .......\"......." 
+ queryPlanText: + - "LogicalSort(sort0=[$4], sort1=[$3], dir0=[DESC], dir1=[ASC], fetch=[100])" + - " LogicalAggregate(group=[{0, 1, 2, 3, 4}], EXPR$5=[SUM($5)])" + - " LogicalProject(c_name=[$1], c_custkey=[$0], o_orderkey=[$8], o_orderdate=[$12], o_totalprice=[$11], L_QUANTITY=[$21])" + - " LogicalFilter(condition=[AND(true, =($0, $9), =($8, $17))])" + - " LogicalJoin(condition=[=($8, $33)], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalAggregate(group=[{0}])" + - " LogicalProject(l_orderkey=[$0])" + - " LogicalFilter(condition=[>($1, 300)])" + - " LogicalAggregate(group=[{0}], agg#0=[SUM($1)])" + - " LogicalProject(l_orderkey=[$0], L_QUANTITY=[$4])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "19.sql" input: @@ -3140,13 +3146,6 @@ - " and l.l_shipinstruct = 'DELIVER IN PERSON'" - " ) " output: - queryPlanText: - - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" - - " LogicalProject($f0=[*($5, -(1, $6))])" - - " LogicalFilter(condition=[OR(AND(=($16, $1), =($19, 'Brand#41'), OR(=($22, 'SM CASE':VARCHAR(7)), =($22, 'SM BOX':VARCHAR(6)), =($22, 'SM PACK':VARCHAR(7)), =($22, 'SM PKG':VARCHAR(6))), >=($4, 2), <=($4, +(2, 10)), >=($21, 1), <=($21, 5), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($16, $1), =($19, 'Brand#13'), OR(=($22, 'MED BAG':VARCHAR(7)), =($22, 'MED BOX':VARCHAR(7)), =($22, 'MED PKG':VARCHAR(7)), =($22, 'MED PACK':VARCHAR(8))), >=($4, 14), <=($4, +(14, 10)), >=($21, 1), <=($21, 10), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($16, $1), =($19, 'Brand#55'), OR(=($22, 'LG CASE':VARCHAR(7)), =($22, 'LG BOX':VARCHAR(6)), =($22, 'LG PACK':VARCHAR(7)), =($22, 'LG PKG':VARCHAR(6))), >=($4, 23), <=($4, +(23, 10)), >=($21, 1), <=($21, 15), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQqiJAondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEvYjEAIa8SMq7iMKBAgBEAESBAoCT1Ia8gsq7wsKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEBAaChIICgQIARAEEAEajwEqjAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQExpAIj4KMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoIQnJhbmQjNDFQHBrJBCrGBAoECAEQARIECgJPUhqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHU00gQ0FTRVAcGosBKogBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPCI6Ci4QHSAGKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgZTTSBCT1hQHBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHU00gUEFDS1AcGosBKogBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPCI6Ci4QHSAGKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgZTTSBQS0dQHBo8KjoKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQBBAEGg0iCwoCEAQSAxIBAlAGGlgqVgoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEAQaLCoqCgIQBBIGCgRQTFVTGg0iCwoCEAQSAxIBAlAGGg0iCwoCEAQSAxIBClAGGjwqOgoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEBUaDSILCgIQBBIDEgEBUAYaOSo3CgQIARABEhQKEkxFU1NfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQFRoNIgsKAhAEEgMSAQVQBhqpAiqmAgoECAEQARIECgJPUhqIASqFAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAOGjkiNwouEB0gAyocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoDQUlSUBwajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB0FJUiBSRUdQHBqYASqVAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGkkiRwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGhFERUxJVkVSIElOIFBFUlNPTlAcGvULKvILCgQIARABEgUKA0FORBoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAQGgoSCAoECAEQBBABGo8BKowBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBMaQCI+CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCEJyYW5kIzEzUBwazAQqyQQKBAgBEAESBAoCT1IajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB01FRCBCQUdQHBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHTUVEIEJPWFAcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdNRUQgUEtHUBwajQEqigEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho+IjwKLhAdIAgqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCE1FRCBQQUNLUBwaPCo6CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQBBoNIgsKAhAEEgMSAQ5QBhpYKlYKBA
gBEAESFAoSTEVTU19USEFOX09SX0VRVUFMGgoSCAoECAEQBBAEGiwqKgoCEAQSBgoEUExVUxoNIgsKAhAEEgMSAQ5QBhoNIgsKAhAEEgMSAQpQBho8KjoKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQBBAVGg0iCwoCEAQSAxIBAVAGGjkqNwoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEBUaDSILCgIQBBIDEgEKUAYaqQIqpgIKBAgBEAESBAoCT1IaiAEqhQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho5IjcKLhAdIAMqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaA0FJUlAcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA4aPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdBSVIgUkVHUBwamAEqlQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDRpJIkcKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoRREVMSVZFUiBJTiBQRVJTT05QHBryCyrvCwoECAEQARIFCgNBTkQaKiooCgQIARABEggKBkVRVUFMUxoKEggKBAgBEAQQEBoKEggKBAgBEAQQARqPASqMAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRATGkAiPgowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGghCcmFuZCM1NVAcGskEKsYECgQIARABEgQKAk9SGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdMRyBDQVNFUBwaiwEqiAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho8IjoKLhAdIAYqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkxHIEJPWFAcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdMRyBQQUNLUBwaiwEqiAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho8IjoKLhAdIAYqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkxHIFBLR1AcGjwqOgoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEAQaDSILCgIQBBIDEgEXUAYaWCpWCgQIARABEhQKEkxFU1NfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQBBosKioKAhAEEgYKBFBMVVMaDSILCgIQBBIDEgEXUAYaDSILCgIQBBIDEgEKUAYaPCo6CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQFRoNIgsKAhAEEgMSAQFQBho5KjcKBAgBEAESFAoSTEVTU19USEFOX09SX0VRVUFMGgoSCAoECAEQBBAVGg0iCwoCEAQSAxIBD1AGGqkCKqYCCgQIARABEgQKAk9SGogBKoUBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA4aOSI3Ci4QHSADKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgNBSVJQHBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAOGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHQUlSIFJFR1AcGpgBKpUBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA0aSSJHCjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaEURFTElWRVIgSU4gUEVSU09OUBwKgwEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSVwgDEgMkZjAaTipMCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRAFGiwqKgoECAEQCRIHCgVNSU5VUxoNIgsKAhAEEgMSAQFQBhoKEggKBAgBEAkQBgpaCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSLAgEEiQaBQoDU1VNKgkKB3JldmVudWUyAQA4////////////AUoCCgAiACoA" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -3467,6 +3466,13 @@ - "000013B0 53 55 4D 2A 09 0A 07 72 65 76 65 6E 75 65 32 01 SUM*...revenue2." - "000013C0 00 38 FF FF FF FF FF FF FF FF FF 01 4A 02 0A 00 .8..........J..." - "000013D0 22 00 2A 00 \".*." 
+ queryPlanText: + - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" + - " LogicalProject($f0=[*($5, -(1, $6))])" + - " LogicalFilter(condition=[OR(AND(=($16, $1), =($19, 'Brand#41'), OR(=($22, 'SM CASE':VARCHAR(7)), =($22, 'SM BOX':VARCHAR(6)), =($22, 'SM PACK':VARCHAR(7)), =($22, 'SM PKG':VARCHAR(6))), >=($4, 2), <=($4, +(2, 10)), >=($21, 1), <=($21, 5), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($16, $1), =($19, 'Brand#13'), OR(=($22, 'MED BAG':VARCHAR(7)), =($22, 'MED BOX':VARCHAR(7)), =($22, 'MED PKG':VARCHAR(7)), =($22, 'MED PACK':VARCHAR(8))), >=($4, 14), <=($4, +(14, 10)), >=($21, 1), <=($21, 10), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($16, $1), =($19, 'Brand#55'), OR(=($22, 'LG CASE':VARCHAR(7)), =($22, 'LG BOX':VARCHAR(6)), =($22, 'LG PACK':VARCHAR(7)), =($22, 'LG PKG':VARCHAR(6))), >=($4, 23), <=($4, +(23, 10)), >=($21, 1), <=($21, 15), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - description: "19_1.sql" input: @@ -3506,13 +3512,6 @@ - " and l.l_shipinstruct = 'DELIVER IN PERSON'" - " ) ) " output: - queryPlanText: - - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" - - " LogicalProject($f0=[*($5, -(1, $6))])" - - " LogicalFilter(condition=[AND(=($16, $1), OR(AND(=($19, 'Brand#41'), OR(=($22, 'SM CASE':VARCHAR(7)), =($22, 'SM BOX':VARCHAR(6)), =($22, 'SM PACK':VARCHAR(7)), =($22, 'SM PKG':VARCHAR(6))), >=($4, 2), <=($4, +(2, 10)), >=($21, 1), <=($21, 5), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($19, 'Brand#13'), OR(=($22, 'MED BAG':VARCHAR(7)), =($22, 'MED BOX':VARCHAR(7)), =($22, 'MED PKG':VARCHAR(7)), =($22, 'MED PACK':VARCHAR(8))), >=($4, 14), <=($4, +(14, 10)), >=($21, 1), <=($21, 10), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($19, 'Brand#55'), OR(=($22, 'LG CASE':VARCHAR(7)), =($22, 'LG BOX':VARCHAR(6)), =($22, 'LG PACK':VARCHAR(7)), =($22, 'LG PKG':VARCHAR(6))), >=($4, 23), <=($4, +(23, 10)), >=($21, 1), <=($21, 15), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON'))))])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKOQoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhIQEAEYASIKIggKAhABIAFQAQrdIwondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyErEjEAIarCMqqSMKBAgBEAESBQoDQU5EGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEBAaChIICgQIARAEEAEa7SIq6iIKBAgBEAESBAoCT1IaxgsqwwsKBAgBEAESBQoDQU5EGo8BKowBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBMaQCI+CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaCEJyYW5kIzQxUBwayQQqxgQKBAgBEAESBAoCT1IajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB1NNIENBU0VQHBqLASqIAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGjwiOgouEB0gBiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoGU00gQk9YUBwajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB1NNIFBBQ0tQHBqLASqIAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGjwiOgouEB0gBiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoGU00gUEtHUBwaPCo6CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQBBoNIgsKAhAEEgMSAQJQBhpYKlYKBAgBEAESFAoSTEVTU19USEFOX09SX0VRVUFMGgoSCAoECAEQBBAEGiwqKgoCEAQSBgoEUExVUxoNIgsKAhAEEgMSAQJQBhoNIgsKAhAEEgMSAQpQBho8KjoKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQBBAVGg0iCwoCEAQSAxIBAVAGGjkqNwoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEBUaDSILCgIQBBIDEgEFUAYaqQIqpgIKBAgBEAESBAoCT1IaiAEqhQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho5IjcKLhAdIAMqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaA0FJUlAcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA4aPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdBSVIgUkVHUBwamAEqlQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDRpJIkcKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoRREVMSVZFUiBJTiBQRVJTT05QHBrJCyrGCwoECAEQARIFCgNBTkQajwEqjAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQExpAIj4KMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoIQnJhbmQjMTNQHBrMBCrJBAoECAEQARIECgJPUhqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHTUVEIEJBR1AcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdNRUQgQk9YUBwajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB01FRCBQS0dQHBqNASqKAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAWGj4iPAouEB0gCCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoITUVEIFBBQ0tQHBo8KjoKBAgBEAESFwoVR1JFQVRFUl9USEFOX09SX0VRVUFMGgoSCAoECAEQBBAEGg0iCwoCEAQSAxIBDlAGGlgqVgoECAEQARIUChJMRVNTX1RIQU5fT1JfRVFVQU
waChIICgQIARAEEAQaLCoqCgIQBBIGCgRQTFVTGg0iCwoCEAQSAxIBDlAGGg0iCwoCEAQSAxIBClAGGjwqOgoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEBUaDSILCgIQBBIDEgEBUAYaOSo3CgQIARABEhQKEkxFU1NfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQFRoNIgsKAhAEEgMSAQpQBhqpAiqmAgoECAEQARIECgJPUhqIASqFAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAOGjkiNwouEB0gAyocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoDQUlSUBwajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho9IjsKLhAdIAcqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaB0FJUiBSRUdQHBqYASqVAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGkkiRwowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGhFERUxJVkVSIElOIFBFUlNPTlAcGsYLKsMLCgQIARABEgUKA0FORBqPASqMAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRATGkAiPgowEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGghCcmFuZCM1NVAcGskEKsYECgQIARABEgQKAk9SGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdMRyBDQVNFUBwaiwEqiAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho8IjoKLhAdIAYqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkxHIEJPWFAcGowBKokBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBYaPSI7Ci4QHSAHKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgdMRyBQQUNLUBwaiwEqiAEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQFho8IjoKLhAdIAYqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkxHIFBLR1AcGjwqOgoECAEQARIXChVHUkVBVEVSX1RIQU5fT1JfRVFVQUwaChIICgQIARAEEAQaDSILCgIQBBIDEgEXUAYaWCpWCgQIARABEhQKEkxFU1NfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQBBosKioKAhAEEgYKBFBMVVMaDSILCgIQBBIDEgEXUAYaDSILCgIQBBIDEgEKUAYaPCo6CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAQQFRoNIgsKAhAEEgMSAQFQBho5KjcKBAgBEAESFAoSTEVTU19USEFOX09SX0VRVUFMGgoSCAoECAEQBBAVGg0iCwoCEAQSAxIBD1AGGqkCKqYCCgQIARABEgQKAk9SGogBKoUBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA4aOSI3Ci4QHSADKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgNBSVJQHBqMASqJAQoECAEQARIICgZFUVVBTFMaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAOGj0iOwouEB0gByocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoHQUlSIFJFR1AcGpgBKpUBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA0aSSJHCjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaEURFTElWRVIgSU4gUEVSU09OUBwKgwEKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSVwgDEgMkZjAaTipMCgQIARAJEgoKCE1VTFRJUExZGgoSCAoECAEQCRAFGiwqKgoECAEQCRIHCgVNSU5VUxoNIgsKAhAEEgMSAQFQBhoKEggKBAgBEAkQBgpaCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSLAgEEiQaBQoDU1VNKgkKB3JldmVudWUyAQA4////////////AUoCCgAiACoA" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -3828,6 +3827,13 @@ - "00001360 65 12 2C 08 04 12 24 1A 05 0A 03 53 55 4D 2A 09 e.,...$....SUM*." - "00001370 0A 07 72 65 76 65 6E 75 65 32 01 00 38 FF FF FF ..revenue2..8..." - "00001380 FF FF FF FF FF FF 01 4A 02 0A 00 22 00 2A 00 .......J...\".*." 
+ queryPlanText: + - "LogicalAggregate(group=[{}], revenue=[SUM($0)])" + - " LogicalProject($f0=[*($5, -(1, $6))])" + - " LogicalFilter(condition=[AND(=($16, $1), OR(AND(=($19, 'Brand#41'), OR(=($22, 'SM CASE':VARCHAR(7)), =($22, 'SM BOX':VARCHAR(6)), =($22, 'SM PACK':VARCHAR(7)), =($22, 'SM PKG':VARCHAR(6))), >=($4, 2), <=($4, +(2, 10)), >=($21, 1), <=($21, 5), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($19, 'Brand#13'), OR(=($22, 'MED BAG':VARCHAR(7)), =($22, 'MED BOX':VARCHAR(7)), =($22, 'MED PKG':VARCHAR(7)), =($22, 'MED PACK':VARCHAR(8))), >=($4, 14), <=($4, +(14, 10)), >=($21, 1), <=($21, 10), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON')), AND(=($19, 'Brand#55'), OR(=($22, 'LG CASE':VARCHAR(7)), =($22, 'LG BOX':VARCHAR(6)), =($22, 'LG PACK':VARCHAR(7)), =($22, 'LG PKG':VARCHAR(6))), >=($4, 23), <=($4, +(23, 10)), >=($21, 1), <=($21, 15), OR(=($14, 'AIR':VARCHAR(3)), =($14, 'AIR REG':VARCHAR(7))), =($13, 'DELIVER IN PERSON'))))])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - description: "20.sql" input: @@ -3870,29 +3876,6 @@ - "order by" - " s.s_name " output: - queryPlanText: - - "LogicalSort(sort0=[$0], dir0=[ASC])" - - " LogicalProject(s_name=[$1], s_address=[$2])" - - " LogicalFilter(condition=[AND(true, =($3, $7), =($8, 'KENYA'))])" - - " LogicalJoin(condition=[=($0, $11)], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalAggregate(group=[{0}])" - - " LogicalProject(ps_suppkey=[$1])" - - " LogicalFilter(condition=[AND(true, >($2, $6))])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0, 1}])" - - " LogicalJoin(condition=[=($0, $5)], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" - - " LogicalAggregate(group=[{0}])" - - " LogicalProject(p_partkey=[$0])" - - " LogicalFilter(condition=[LIKE($1, 'antique%':VARCHAR(8))])" - - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" - - " LogicalProject(EXPR$0=[*(0.5:DECIMAL(2, 1), $0)])" - - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" - - " LogicalProject(L_QUANTITY=[$4])" - - " LogicalFilter(condition=[AND(=($1, $cor0.PS_PARTKEY), =($2, $cor0.PS_SUPPKEY), >=($10, 1993-01-01), <($10, +(1993-01-01, 12:INTERVAL YEAR)))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: 
"CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CkcKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIZCgJjcAoTdHBjaC9uYXRpb24ucGFycXVldAo5CiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEhAQARgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9wYXJ0c3VwcC5wYXJxdWV0CkUKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIXCgJjcAoRdHBjaC9wYXJ0LnBhcnF1ZXQKvAEKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKQARAEGosBKogBCgQIARABEgYKBExJS0UaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABGj4iPAouEB0gCCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoIYW50aXF1ZSVQHApDCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EhcIBRIJcF9wYXJ0a2V5GggSBgoECAEQBAo6Cip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSDAgGIgMKAQAqAwoBAApZCiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxKb2luEjAIAxAHGAEiKComCgQIARABEggKBkVRVUFMUxoIEgYKBAgBEAQaChIICgQIARAEEAUKSQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhsKAmNwChV0cGNoL2xpbmVpdGVtLnBhcnF1ZXQKswUKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchKHBRAJGoIFKv8ECgQIARABEgUKA0FORBrrASroAQoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAEayQFCxgEKrwE6rAEKogEQKEgCUhIKClBTX1BBUlRLRVkaBAgBEARSFAoKUFNfU1VQUEtFWRABGgQIARAEUhUKC1BTX0FWQUlMUVRZEAIaBAgBEARSFwoNUFNfU1VQUExZQ09TVBADGgQIARAJUkIKClBTX0NPTU1FTlQQBBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEiBSRjb3IwEhIKClBTX1BBUlRLRVkaBAgBEAQa7QEq6gEKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBACGssBQsgBCq8BOqwBCqIBEChIAlISCgpQU19QQVJUS0VZGgQIARAEUhQKClBTX1NVUFBLRVkQARoECAEQBFIVCgtQU19BVkFJTFFUWRACGgQIARAEUhcKDVBTX1NVUFBMWUNPU1QQAxoECAEQCVJCCgpQU19DT01NRU5UEAQaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBIUCgpQU19TVVBQS0VZEAEaBAgBEAQaOio4CgQIARABEhcKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBoKEggKBAgBEAoQChoLIgkKAhAKMNFBUAoaVipUCgQIARABEgsKCUxFU1NfVEhBThoKEggKBAgBEAoQChozKjEKAhAKEg8KDURBVEVUSU1FX1BMVVMaCyIJCgIQCjDRQVAKGg0iCwoCEA8SAxIBDFAPCkYKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSGggKEgpMX1FVQU5USVRZGgoSCAoECAEQBBAECk8KKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIhCAsSGRoFCgNTVU0yAQA4////////////AUoCCgAiACoACm8KKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSQwgMEgZFWFBSJDAaNyo1CggIARAGGAEgFhIKCghNVUxUSVBMWRoTIhEKBhAGGAEgAhIFCAESAQVQBhoIEgYKBAgBEAUKQgoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQ29ycmVsYXRlEhQICBANGAEiBhoECgIAASoECgIAAQqAAQondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyElUQDhpRKk8KBAgBEAESBQoDQU5EGgoiCAoCEAEgAVABGjQqMgoECAEQARIOCgxHUkVBVEVSX1RIQU4aChIICgQIARAEEAIaDhIMCggIARAGGAEgFhAGCkYKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSGggPEgpwc19zdXBwa2V5GgoSCAoECAEQBBABCjoKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIMCBAiAwoBACoDCgEAClkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SMAgCEBEYASIoKiYKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBoKEggKBAgBEAQQCwqIAgondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEtwBEBIa1wEq1AEKBAgBEAESBQoDQU5EGgoiCAoCEAEgAVABGioqKAoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAMaChIICgQIARAEEAcajAEqiQEKBAgBEAESCAoGRVFVQUxTGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBo9IjsKMBAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoFS0VOWUFQHAq2AQoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBKJAQgTEgZzX25hbWUSCXNfYWRkcmVzcxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAEaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLT
g4NTktMRACCjEKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFNvcnQSCAgUIgQQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -4066,6 +4049,29 @@ - "00000A80 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 4C apis.com/plan.PL" - "00000A90 6F 67 69 63 61 6C 53 6F 72 74 12 08 08 14 22 04 ogicalSort....\"." - "00000AA0 10 01 18 03 ...." + queryPlanText: + - "LogicalSort(sort0=[$0], dir0=[ASC])" + - " LogicalProject(s_name=[$1], s_address=[$2])" + - " LogicalFilter(condition=[AND(true, =($3, $7), =($8, 'KENYA'))])" + - " LogicalJoin(condition=[=($0, $11)], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalAggregate(group=[{0}])" + - " LogicalProject(ps_suppkey=[$1])" + - " LogicalFilter(condition=[AND(true, >($2, $6))])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0, 1}])" + - " LogicalJoin(condition=[=($0, $5)], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/partsupp.parquet]])" + - " LogicalAggregate(group=[{0}])" + - " LogicalProject(p_partkey=[$0])" + - " LogicalFilter(condition=[LIKE($1, 'antique%':VARCHAR(8))])" + - " LogicalTableScan(table=[[cp, tpch/part.parquet]])" + - " LogicalProject(EXPR$0=[*(0.5:DECIMAL(2, 1), $0)])" + - " LogicalAggregate(group=[{}], agg#0=[SUM($0)])" + - " LogicalProject(L_QUANTITY=[$4])" + - " LogicalFilter(condition=[AND(=($1, $cor0.PS_PARTKEY), =($2, $cor0.PS_SUPPKEY), >=($10, 1993-01-01), <($10, +(1993-01-01, 12:INTERVAL YEAR)))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "21.sql" input: @@ -4111,30 +4117,6 @@ - " s.s_name" - "limit 100 " output: - queryPlanText: - - "LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC], fetch=[100])" - - " LogicalAggregate(group=[{0}], numwait=[COUNT()])" - - " LogicalProject(s_name=[$1])" - - " LogicalFilter(condition=[AND(=($0, $9), =($23, $7), =($25, 'F'), >($19, $18), IS NOT NULL($36), NOT(IS NOT NULL($37)), =($3, $32), =($33, 'BRAZIL'))])" - - " LogicalCorrelate(correlation=[$cor2], joinType=[left], requiredColumns=[{7, 9}])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7, 9}])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalJoin(condition=[true], joinType=[inner])" - - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" - - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" - - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" - - " LogicalProject($f0=[1:BIGINT])" - - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" - - " LogicalFilter(condition=[AND(=($0, $cor0.L_ORDERKEY), <>($2, $cor0.L_SUPPKEY))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" - - " LogicalProject($f0=[1:BIGINT])" - - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], 
L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" - - " LogicalFilter(condition=[AND(=($0, $cor2.L_ORDERKEY), <>($2, $cor2.L_SUPPKEY), >($12, $11))])" - - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9zdXBwbGllci5wYXJxdWV0CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CjkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEBABGAEiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL29yZGVycy5wYXJxdWV0CjsKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEggCEAMYASIKIggKAhABIAFQAQpHCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxUYWJsZVNjYW4SGQoCY3AKE3RwY2gvbmF0aW9uLnBhcnF1ZXQKOwoldHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsSm9pbhISCAQQBRgBIgoiCAoCEAEgAVABCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CuQZCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXISuBkQBxqzGSqwGQoECAEQARIFCgNBTkQazAwqyQwKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBqsDEKpDAqQDDqNDAqDDBAoSAJSEQoJU19TVVBQS0VZGgQIARAEUj4KBlNfTkFNRRABGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglTX0FERFJFU1MQAhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFQoLU19OQVRJT05LRVkQAxoECAEQBFI/CgdTX1BIT05FEAQaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhMKCVNfQUNDVEJBTBAFGgQIARAJUkEKCVNfQ09NTUVOVBAGGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpMX09SREVSS0VZEAcaBAgBEARSEwoJTF9QQVJUS0VZEAgaBAgBEARSEwoJTF9TVVBQS0VZEAkaBAgBEARSFgoMTF9MSU5FTlVNQkVSEAoaBAgBEARSFAoKTF9RVUFOVElUWRALGgQIARAEUhkKD0xfRVhURU5ERURQUklDRRAMGgQIARAJUhQKCkxfRElTQ09VTlQQDRoECAEQCVIPCgVMX1RBWBAOGgQIARAJUkQKDExfUkVUVVJORkxBRxAPGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJECgxMX0xJTkVTVEFUVVMQEBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFAoKTF9TSElQREFURRARGgQIARAKUhYKDExfQ09NTUlUREFURRASGgQIARAKUhcKDUxfUkVDRUlQVERBVEUQExoECAEQClJGCg5MX1NISVBJTlNUUlVDVBAUGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJCCgpMX1NISVBNT0RFEBUaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCUxfQ09NTUVOVBAWGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpPX09SREVSS0VZEBcaBAgBEARSEwoJT19DVVNUS0VZEBgaBAgBEARSRQoNT19PUkRFUlNUQVRVUxAZGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIWCgxPX1RPVEFMUFJJQ0UQGhoECAEQCVIVCgtPX09SREVSREFURRAbGgQIARAKUkcKD09fT1JERVJQUklPUklUWRAcGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVI/CgdPX0NMRVJLEB0aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkYKDk9fU0hJUFBSSU9SSVRZEB4aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCU9fQ09NTUVOVBAfGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX05BVElPTktFWRAgGgQIARAEUj4KBk5fTkFNRRAhGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX1JFR0lPTktFWRAiGgQIARAEUkEKCU5fQ09NTUVOVBAjGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMSIFJGNvcjASFAoKTF9PUkRFUktFWRAHGgQIARAEGtEMKs4MCgQIARABEgwKCk5PVF9FUVVBTFMaChIICgQIARAEEAIaqwxCqAwKkAw6jQwKgwwQKEgCUhEKCVNfU1VQUEtFWRoECAEQBFI+CgZTX05BTUUQARoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSQQoJU19BRERSRVNTEAIaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSg
CMgpJU08tODg1OS0xUhUKC1NfTkFUSU9OS0VZEAMaBAgBEARSPwoHU19QSE9ORRAEGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVITCglTX0FDQ1RCQUwQBRoECAEQCVJBCglTX0NPTU1FTlQQBhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFAoKTF9PUkRFUktFWRAHGgQIARAEUhMKCUxfUEFSVEtFWRAIGgQIARAEUhMKCUxfU1VQUEtFWRAJGgQIARAEUhYKDExfTElORU5VTUJFUhAKGgQIARAEUhQKCkxfUVVBTlRJVFkQCxoECAEQBFIZCg9MX0VYVEVOREVEUFJJQ0UQDBoECAEQCVIUCgpMX0RJU0NPVU5UEA0aBAgBEAlSDwoFTF9UQVgQDhoECAEQCVJECgxMX1JFVFVSTkZMQUcQDxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSRAoMTF9MSU5FU1RBVFVTEBAaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhQKCkxfU0hJUERBVEUQERoECAEQClIWCgxMX0NPTU1JVERBVEUQEhoECAEQClIXCg1MX1JFQ0VJUFREQVRFEBMaBAgBEApSRgoOTF9TSElQSU5TVFJVQ1QQFBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSQgoKTF9TSElQTU9ERRAVGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglMX0NPTU1FTlQQFhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFAoKT19PUkRFUktFWRAXGgQIARAEUhMKCU9fQ1VTVEtFWRAYGgQIARAEUkUKDU9fT1JERVJTVEFUVVMQGRoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFgoMT19UT1RBTFBSSUNFEBoaBAgBEAlSFQoLT19PUkRFUkRBVEUQGxoECAEQClJHCg9PX09SREVSUFJJT1JJVFkQHBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSPwoHT19DTEVSSxAdGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJGCg5PX1NISVBQUklPUklUWRAeGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglPX0NPTU1FTlQQHxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFQoLTl9OQVRJT05LRVkQIBoECAEQBFI+CgZOX05BTUUQIRoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFQoLTl9SRUdJT05LRVkQIhoECAEQBFJBCglOX0NPTU1FTlQQIxoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEiBSRjb3IwEhMKCUxfU1VQUEtFWRAJGgQIARAECp8FCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EvIECAgSCkxfT1JERVJLRVkSCUxfUEFSVEtFWRIJTF9TVVBQS0VZEgxMX0xJTkVOVU1CRVISCkxfUVVBTlRJVFkSD0xfRVhURU5ERURQUklDRRIKTF9ESVNDT1VOVBIFTF9UQVgSDExfUkVUVVJORkxBRxIMTF9MSU5FU1RBVFVTEgpMX1NISVBEQVRFEgxMX0NPTU1JVERBVEUSDUxfUkVDRUlQVERBVEUSDkxfU0hJUElOU1RSVUNUEgpMX1NISVBNT0RFEglMX0NPTU1FTlQaCBIGCgQIARAEGgoSCAoECAEQBBABGgoSCAoECAEQBBACGgoSCAoECAEQBBADGgoSCAoECAEQBBAEGgoSCAoECAEQCRAFGgoSCAoECAEQCRAGGgoSCAoECAEQCRAHGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAkaChIICgQIARAKEAoaChIICgQIARAKEAsaChIICgQIARAKEAwaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA8KQgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIWCAkSAyRmMBoNIgsKAhAFEgMSAQFQBgpPCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSIQgKEhkaBQoDTUlOMgEAOP///////////wFKAgoAIgAqAApCCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSFAgGEAsYASIGGgQKAgcJKgQKAgcJCkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9saW5laXRlbS5wYXJxdWV0CpoaCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIS7hkQDRrpGSrmGQoECAEQARIFCgNBTkQazgwqywwKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBquDEKrDAqSDDqPDAqDDBAoSAJSEQoJU19TVVBQS0VZGgQIARAEUj4KBlNfTkFNRRABGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglTX0FERFJFU1MQAhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFQoLU1
9OQVRJT05LRVkQAxoECAEQBFI/CgdTX1BIT05FEAQaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhMKCVNfQUNDVEJBTBAFGgQIARAJUkEKCVNfQ09NTUVOVBAGGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpMX09SREVSS0VZEAcaBAgBEARSEwoJTF9QQVJUS0VZEAgaBAgBEARSEwoJTF9TVVBQS0VZEAkaBAgBEARSFgoMTF9MSU5FTlVNQkVSEAoaBAgBEARSFAoKTF9RVUFOVElUWRALGgQIARAEUhkKD0xfRVhURU5ERURQUklDRRAMGgQIARAJUhQKCkxfRElTQ09VTlQQDRoECAEQCVIPCgVMX1RBWBAOGgQIARAJUkQKDExfUkVUVVJORkxBRxAPGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJECgxMX0xJTkVTVEFUVVMQEBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFAoKTF9TSElQREFURRARGgQIARAKUhYKDExfQ09NTUlUREFURRASGgQIARAKUhcKDUxfUkVDRUlQVERBVEUQExoECAEQClJGCg5MX1NISVBJTlNUUlVDVBAUGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJCCgpMX1NISVBNT0RFEBUaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCUxfQ09NTUVOVBAWGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpPX09SREVSS0VZEBcaBAgBEARSEwoJT19DVVNUS0VZEBgaBAgBEARSRQoNT19PUkRFUlNUQVRVUxAZGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIWCgxPX1RPVEFMUFJJQ0UQGhoECAEQCVIVCgtPX09SREVSREFURRAbGgQIARAKUkcKD09fT1JERVJQUklPUklUWRAcGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVI/CgdPX0NMRVJLEB0aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkYKDk9fU0hJUFBSSU9SSVRZEB4aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCU9fQ09NTUVOVBAfGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX05BVElPTktFWRAgGgQIARAEUj4KBk5fTkFNRRAhGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX1JFR0lPTktFWRAiGgQIARAEUkEKCU5fQ09NTUVOVBAjGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRgCIgUkY29yMhIUCgpMX09SREVSS0VZEAcaBAgBEAQa0wwq0AwKBAgBEAESDAoKTk9UX0VRVUFMUxoKEggKBAgBEAQQAhqtDEKqDAqSDDqPDAqDDBAoSAJSEQoJU19TVVBQS0VZGgQIARAEUj4KBlNfTkFNRRABGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJBCglTX0FERFJFU1MQAhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFQoLU19OQVRJT05LRVkQAxoECAEQBFI/CgdTX1BIT05FEAQaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhMKCVNfQUNDVEJBTBAFGgQIARAJUkEKCVNfQ09NTUVOVBAGGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpMX09SREVSS0VZEAcaBAgBEARSEwoJTF9QQVJUS0VZEAgaBAgBEARSEwoJTF9TVVBQS0VZEAkaBAgBEARSFgoMTF9MSU5FTlVNQkVSEAoaBAgBEARSFAoKTF9RVUFOVElUWRALGgQIARAEUhkKD0xfRVhURU5ERURQUklDRRAMGgQIARAJUhQKCkxfRElTQ09VTlQQDRoECAEQCVIPCgVMX1RBWBAOGgQIARAJUkQKDExfUkVUVVJORkxBRxAPGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJECgxMX0xJTkVTVEFUVVMQEBoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSFAoKTF9TSElQREFURRARGgQIARAKUhYKDExfQ09NTUlUREFURRASGgQIARAKUhcKDUxfUkVDRUlQVERBVEUQExoECAEQClJGCg5MX1NISVBJTlNUUlVDVBAUGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVJCCgpMX1NISVBNT0RFEBUaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCUxfQ09NTUVOVBAWGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIUCgpPX09SREVSS0VZEBcaBAgBEARSEwoJT19DVVNUS0VZEBgaBAgBEARSRQoNT19PUkRFUlNUQVRVUxAZGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIWCgxPX1RPVEFMUFJJQ0UQGhoECAEQCVIVCgtPX09SREVSREFURRAbGgQIARAKUkcKD09fT1JERVJQUklPUklUWRAcGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVI/CgdPX0NMRVJLEB0aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkc
HJpbWFyeSgCMgpJU08tODg1OS0xUkYKDk9fU0hJUFBSSU9SSVRZEB4aMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUkEKCU9fQ09NTUVOVBAfGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX05BVElPTktFWRAgGgQIARAEUj4KBk5fTkFNRRAhGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVIVCgtOX1JFR0lPTktFWRAiGgQIARAEUkEKCU5fQ09NTUVOVBAjGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRgCIgUkY29yMhITCglMX1NVUFBLRVkQCRoECAEQBBowKi4KBAgBEAESDgoMR1JFQVRFUl9USEFOGgoSCAoECAEQChAMGgoSCAoECAEQChALCp8FCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EvIECA4SCkxfT1JERVJLRVkSCUxfUEFSVEtFWRIJTF9TVVBQS0VZEgxMX0xJTkVOVU1CRVISCkxfUVVBTlRJVFkSD0xfRVhURU5ERURQUklDRRIKTF9ESVNDT1VOVBIFTF9UQVgSDExfUkVUVVJORkxBRxIMTF9MSU5FU1RBVFVTEgpMX1NISVBEQVRFEgxMX0NPTU1JVERBVEUSDUxfUkVDRUlQVERBVEUSDkxfU0hJUElOU1RSVUNUEgpMX1NISVBNT0RFEglMX0NPTU1FTlQaCBIGCgQIARAEGgoSCAoECAEQBBABGgoSCAoECAEQBBACGgoSCAoECAEQBBADGgoSCAoECAEQBBAEGgoSCAoECAEQCRAFGgoSCAoECAEQCRAGGgoSCAoECAEQCRAHGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQCBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAkaChIICgQIARAKEAoaChIICgQIARAKEAsaChIICgQIARAKEAwaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRANGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQDho4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEA8KQgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIWCA8SAyRmMBoNIgsKAhAFEgMSAQFQBgpPCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSIQgQEhkaBQoDTUlOMgEAOP///////////wFKAgoAIgAqAApECip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSFggMEBEYASIICAIaBAoCBwkqBAoCBwkK5QQKJ3R5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEZpbHRlchK5BBASGrQEKrEECgQIARABEgUKA0FORBooKiYKBAgBEAESCAoGRVFVQUxTGggSBgoECAEQBBoKEggKBAgBEAQQCRoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBAXGgoSCAoECAEQBBAHGogBKoUBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEBkaOSI3CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAUZQHBowKi4KBAgBEAESDgoMR1JFQVRFUl9USEFOGgoSCAoECAEQChATGgoSCAoECAEQChASGiEqHwoCEAESDQoLSVNfTk9UX05VTEwaChIICgQIARAFECQaMCouCgIQARIFCgNOT1QaISofCgIQARINCgtJU19OT1RfTlVMTBoKEggKBAgBEAUQJRoqKigKBAgBEAESCAoGRVFVQUxTGgoSCAoECAEQBBADGgoSCAoECAEQBBAgGo0BKooBCgQIARABEggKBkVRVUFMUxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xECEaPiI8CjAQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaBkJSQVpJTFAcCnAKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSRAgTEgZzX25hbWUaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRABCl8KKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEFnZ3JlZ2F0ZRIxCBQSIxoHCgVDT1VOVCoJCgdudW13YWl0OP///////////wFKAgoAIgMKAQAqAwoBAApICiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxTb3J0Eh8IFRoNIgsKAhAEEgMSAWRQBiIGCAEQAxgCIgQQARgD" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -4761,6 +4743,30 @@ - "000026D0 6F 72 74 12 1F 08 15 1A 0D 22 0B 0A 02 10 04 12 ort......\"......" - "000026E0 03 12 01 64 50 06 22 06 08 01 10 03 18 02 22 04 ...dP.\".......\"." - "000026F0 10 01 18 03 ...." 
+ queryPlanText: + - "LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC], fetch=[100])" + - " LogicalAggregate(group=[{0}], numwait=[COUNT()])" + - " LogicalProject(s_name=[$1])" + - " LogicalFilter(condition=[AND(=($0, $9), =($23, $7), =($25, 'F'), >($19, $18), IS NOT NULL($36), NOT(IS NOT NULL($37)), =($3, $32), =($33, 'BRAZIL'))])" + - " LogicalCorrelate(correlation=[$cor2], joinType=[left], requiredColumns=[{7, 9}])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7, 9}])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalJoin(condition=[true], joinType=[inner])" + - " LogicalTableScan(table=[[cp, tpch/supplier.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" + - " LogicalTableScan(table=[[cp, tpch/nation.parquet]])" + - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" + - " LogicalProject($f0=[1:BIGINT])" + - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" + - " LogicalFilter(condition=[AND(=($0, $cor0.L_ORDERKEY), <>($2, $cor0.L_SUPPKEY))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" + - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" + - " LogicalProject($f0=[1:BIGINT])" + - " LogicalProject(L_ORDERKEY=[$0], L_PARTKEY=[$1], L_SUPPKEY=[$2], L_LINENUMBER=[$3], L_QUANTITY=[$4], L_EXTENDEDPRICE=[$5], L_DISCOUNT=[$6], L_TAX=[$7], L_RETURNFLAG=[$8], L_LINESTATUS=[$9], L_SHIPDATE=[$10], L_COMMITDATE=[$11], L_RECEIPTDATE=[$12], L_SHIPINSTRUCT=[$13], L_SHIPMODE=[$14], L_COMMENT=[$15])" + - " LogicalFilter(condition=[AND(=($0, $cor2.L_ORDERKEY), <>($2, $cor2.L_SUPPKEY), >($12, $11))])" + - " LogicalTableScan(table=[[cp, tpch/lineitem.parquet]])" - description: "22.sql" input: @@ -4803,23 +4809,6 @@ - "order by" - " cntrycode " output: - queryPlanText: - - "LogicalSort(sort0=[$0], dir0=[ASC])" - - " LogicalAggregate(group=[{0}], numcust=[COUNT()], totacctbal=[SUM($1)])" - - " LogicalProject(cntrycode=[SUBSTRING($4, 1, 2)], c_acctbal=[$5])" - - " LogicalFilter(condition=[AND(OR(=(SUBSTRING($4, 1, 2), '24':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '31':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '11':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '16':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '21':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '20':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '34':VARCHAR(2))), >($5, $8), NOT(IS NOT NULL($9)))])" - - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" - - " LogicalJoin(condition=[true], joinType=[left])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalAggregate(group=[{}], EXPR$0=[AVG($0)])" - - " LogicalProject(C_ACCTBAL=[$5])" - - " LogicalFilter(condition=[AND(>($5, 0.00:DECIMAL(3, 2)), OR(=(SUBSTRING($4, 1, 2), '24':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '31':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '11':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '16':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '21':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '20':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '34':VARCHAR(2))))])" - - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" - - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" - - " LogicalProject($f0=[1:BIGINT])" - - " LogicalProject(O_ORDERKEY=[$0], 
O_CUSTKEY=[$1], O_ORDERSTATUS=[$2], O_TOTALPRICE=[$3], O_ORDERDATE=[$4], O_ORDERPRIORITY=[$5], O_CLERK=[$6], O_SHIPPRIORITY=[$7], O_COMMENT=[$8])" - - " LogicalFilter(condition=[=($1, $cor0.C_CUSTKEY)])" - - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" queryPlanBinary: "CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CkkKKnR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFRhYmxlU2NhbhIbCgJjcAoVdHBjaC9jdXN0b21lci5wYXJxdWV0CpcOCid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIS6w0QARrmDSrjDQoECAEQARIFCgNBTkQaOSo3CgQIARABEg4KDEdSRUFURVJfVEhBThoKEggKBAgBEAkQBRoTIhEKBhAGGAIgAxIFCAISAQBQBhqYDSqVDQoECAEQARIECgJPUhrsASrpAQoECAEQARIICgZFUVVBTFManAEqmQEKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEgsKCVNVQlNUUklORxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgECUAYaOCI2Ci4QHSACKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgIyNFAcGuwBKukBCgQIARABEggKBkVRVUFMUxqcASqZAQoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTESCwoJU1VCU1RSSU5HGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoNIgsKAhAEEgMSAQFQBhoNIgsKAhAEEgMSAQJQBho4IjYKLhAdIAIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAjMxUBwa7AEq6QEKBAgBEAESCAoGRVFVQUxTGpwBKpkBCjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRILCglTVUJTVFJJTkcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAlAGGjgiNgouEB0gAiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoCMTFQHBrsASrpAQoECAEQARIICgZFUVVBTFManAEqmQEKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEgsKCVNVQlNUUklORxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgECUAYaOCI2Ci4QHSACKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgIxNlAcGuwBKukBCgQIARABEggKBkVRVUFMUxqcASqZAQoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTESCwoJU1VCU1RSSU5HGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoNIgsKAhAEEgMSAQFQBhoNIgsKAhAEEgMSAQJQBho4IjYKLhAdIAIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAjIxUBwa7AEq6QEKBAgBEAESCAoGRVFVQUxTGpwBKpkBCjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRILCglTVUJTVFJJTkcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAlAGGjgiNgouEB0gAiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoCMjBQHBrsASrpAQoECAEQARIICgZFUVVBTFManAEqmQEKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEgsKCVNVQlNUUklORxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgECUAYaOCI2Ci4QHSACKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgIzNFAcCkUKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSGQgCEglDX0FDQ1RCQUwaChIICgQIARAJEAUKWQoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsQWdncmVnYXRlEisIAxIjGgUKA0FWRyoICgZFWFBSJDAyAQA4////////////AUoCCgAiACoACjkKJXR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbEpvaW4SEBAEGAIiCiIICgIQASABUAEKRwoqdHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsVGFibGVTY2FuEhkKAmNwChN0cGNoL29yZGVycy5wYXJxdWV0CokECid0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxGaWx0ZXIS3QMQBhrYAyrVAwoECAEQARIICgZFUVVBTFMaChIICgQIARAEEAEatgNCswMKnQM6mgMKkAMQKEgCUhEKCUNfQ1VTVEtFWRoECAEQBFI+CgZDX05BTUUQARoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSQQoJQ19BRE
RSRVNTEAIaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xUhUKC0NfTkFUSU9OS0VZEAMaBAgBEARSPwoHQ19QSE9ORRAEGjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMVITCglDX0FDQ1RCQUwQBRoECAEQCVJECgxDX01LVFNFR01FTlQQBhoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTFSQQoJQ19DT01NRU5UEAcaMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xIgUkY29yMBIRCglDX0NVU1RLRVkaBAgBEAQK9QMKKHR5cGUuZ29vZ2xlYXBpcy5jb20vcGxhbi5QTG9naWNhbFByb2plY3QSyAMIBxIKT19PUkRFUktFWRIJT19DVVNUS0VZEg1PX09SREVSU1RBVFVTEgxPX1RPVEFMUFJJQ0USC09fT1JERVJEQVRFEg9PX09SREVSUFJJT1JJVFkSB09fQ0xFUksSDk9fU0hJUFBSSU9SSVRZEglPX0NPTU1FTlQaCBIGCgQIARAEGgoSCAoECAEQBBABGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQAhoKEggKBAgBEAkQAxoKEggKBAgBEAoQBBo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAUaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAGGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAgKQgoodHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsUHJvamVjdBIWCAgSAyRmMBoNIgsKAhAFEgMSAQFQBgpPCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSIQgJEhkaBQoDTUlOMgEAOP///////////wFKAgoAIgAqAApACip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxDb3JyZWxhdGUSEggFEAoYASIFGgMKAQAqAwoBAArADgondHlwZS5nb29nbGVhcGlzLmNvbS9wbGFuLlBMb2dpY2FsRmlsdGVyEpQOEAsajw4qjA4KBAgBEAESBQoDQU5EGpgNKpUNCgQIARABEgQKAk9SGuwBKukBCgQIARABEggKBkVRVUFMUxqcASqZAQoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTESCwoJU1VCU1RSSU5HGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoNIgsKAhAEEgMSAQFQBhoNIgsKAhAEEgMSAQJQBho4IjYKLhAdIAIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAjI0UBwa7AEq6QEKBAgBEAESCAoGRVFVQUxTGpwBKpkBCjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRILCglTVUJTVFJJTkcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAlAGGjgiNgouEB0gAiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoCMzFQHBrsASrpAQoECAEQARIICgZFUVVBTFManAEqmQEKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEgsKCVNVQlNUUklORxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgECUAYaOCI2Ci4QHSACKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgIxMVAcGuwBKukBCgQIARABEggKBkVRVUFMUxqcASqZAQoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTESCwoJU1VCU1RSSU5HGjgSNgoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoNIgsKAhAEEgMSAQFQBhoNIgsKAhAEEgMSAQJQBho4IjYKLhAdIAIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAjE2UBwa7AEq6QEKBAgBEAESCAoGRVFVQUxTGpwBKpkBCjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRILCglTVUJTVFJJTkcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAlAGGjgiNgouEB0gAiocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRoCMjFQHBrsASrpAQoECAEQARIICgZFUVVBTFManAEqmQEKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEgsKCVNVQlNUUklORxo4EjYKMggBEB0ggIAEKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xEAQaDSILCgIQBBIDEgEBUAYaDSILCgIQBBIDEgECUAYaOCI2Ci4QHSACKhwKGElTTy04ODU5LTEkZW5fVVMkcHJpbWFyeSgCMgpJU08tODg1OS0xGgIyMFAcGuwBKukBCgQIARABEggKBkVRVUFMUxqcASqZAQoyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTESCwoJU1VCU1RSSU5HGjgSN
goyCAEQHSCAgAQqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEQBBoNIgsKAhAEEgMSAQFQBhoNIgsKAhAEEgMSAQJQBho4IjYKLhAdIAIqHAoYSVNPLTg4NTktMSRlbl9VUyRwcmltYXJ5KAIyCklTTy04ODU5LTEaAjM0UBwaMCouCgQIARABEg4KDEdSRUFURVJfVEhBThoKEggKBAgBEAkQBRoKEggKBAgBEAkQCBowKi4KAhABEgUKA05PVBohKh8KAhABEg0KC0lTX05PVF9OVUxMGgoSCAoECAEQBRAJCvABCih0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxQcm9qZWN0EsMBCAwSCWNudHJ5Y29kZRIJY19hY2N0YmFsGpwBKpkBCjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRILCglTVUJTVFJJTkcaOBI2CjIIARAdIICABCocChhJU08tODg1OS0xJGVuX1VTJHByaW1hcnkoAjIKSVNPLTg4NTktMRAEGg0iCwoCEAQSAxIBAVAGGg0iCwoCEAQSAxIBAlAGGgoSCAoECAEQCRAFCogBCip0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxBZ2dyZWdhdGUSWggNEiMaBwoFQ09VTlQqCQoHbnVtY3VzdDj///////////8BSgIKABInGgUKA1NVTSoMCgp0b3RhY2N0YmFsMgEBOP///////////wFKAgoAIgMKAQAqAwoBAAoxCiV0eXBlLmdvb2dsZWFwaXMuY29tL3BsYW4uUExvZ2ljYWxTb3J0EggIDiIEEAEYAw==" queryPlanBinaryHexDump: - "00000000 0A 49 0A 2A 74 79 70 65 2E 67 6F 6F 67 6C 65 61 .I.*type.googlea" @@ -5185,3 +5174,20 @@ - "00001680 65 61 70 69 73 2E 63 6F 6D 2F 70 6C 61 6E 2E 50 eapis.com/plan.P" - "00001690 4C 6F 67 69 63 61 6C 53 6F 72 74 12 08 08 0E 22 LogicalSort....\"" - "000016A0 04 10 01 18 03 ....." + queryPlanText: + - "LogicalSort(sort0=[$0], dir0=[ASC])" + - " LogicalAggregate(group=[{0}], numcust=[COUNT()], totacctbal=[SUM($1)])" + - " LogicalProject(cntrycode=[SUBSTRING($4, 1, 2)], c_acctbal=[$5])" + - " LogicalFilter(condition=[AND(OR(=(SUBSTRING($4, 1, 2), '24':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '31':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '11':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '16':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '21':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '20':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '34':VARCHAR(2))), >($5, $8), NOT(IS NOT NULL($9)))])" + - " LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}])" + - " LogicalJoin(condition=[true], joinType=[left])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalAggregate(group=[{}], EXPR$0=[AVG($0)])" + - " LogicalProject(C_ACCTBAL=[$5])" + - " LogicalFilter(condition=[AND(>($5, 0.00:DECIMAL(3, 2)), OR(=(SUBSTRING($4, 1, 2), '24':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '31':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '11':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '16':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '21':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '20':VARCHAR(2)), =(SUBSTRING($4, 1, 2), '34':VARCHAR(2))))])" + - " LogicalTableScan(table=[[cp, tpch/customer.parquet]])" + - " LogicalAggregate(group=[{}], agg#0=[MIN($0)])" + - " LogicalProject($f0=[1:BIGINT])" + - " LogicalProject(O_ORDERKEY=[$0], O_CUSTKEY=[$1], O_ORDERSTATUS=[$2], O_TOTALPRICE=[$3], O_ORDERDATE=[$4], O_ORDERPRIORITY=[$5], O_CLERK=[$6], O_SHIPPRIORITY=[$7], O_COMMENT=[$8])" + - " LogicalFilter(condition=[=($1, $cor0.C_CUSTKEY)])" + - " LogicalTableScan(table=[[cp, tpch/orders.parquet]])" diff --git a/sabot/vector-tools/pom.xml b/sabot/vector-tools/pom.xml index 26d47c12fd..af95032ebc 100644 --- a/sabot/vector-tools/pom.xml +++ b/sabot/vector-tools/pom.xml @@ -22,7 +22,7 @@ com.dremio.sabot dremio-sabot-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-sabot-vector-tools diff --git a/sabot/vector-tools/src/main/java/com/dremio/common/expression/PathSegment.java b/sabot/vector-tools/src/main/java/com/dremio/common/expression/PathSegment.java index 7ffd77eb45..de04d83e60 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/common/expression/PathSegment.java +++ 
b/sabot/vector-tools/src/main/java/com/dremio/common/expression/PathSegment.java @@ -82,6 +82,7 @@ public ArraySegment getArraySegment() { return this; } + @Override public OUT accept(SchemaPathVisitor visitor, IN in){ return visitor.visitArray(this, in); } @@ -179,6 +180,7 @@ public NameSegment getNameSegment() { return this; } + @Override public OUT accept(SchemaPathVisitor visitor, IN in){ return visitor.visitName(this, in); } diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/exception/NoSupportedUpPromotionOrCoercionException.java b/sabot/vector-tools/src/main/java/com/dremio/exec/exception/NoSupportedUpPromotionOrCoercionException.java index c29f19ac72..9b0373ee2f 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/exception/NoSupportedUpPromotionOrCoercionException.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/exception/NoSupportedUpPromotionOrCoercionException.java @@ -30,7 +30,9 @@ public class NoSupportedUpPromotionOrCoercionException extends RuntimeException private final CompleteType fileType; private final CompleteType tableType; private final List columns; + @SuppressWarnings("checkstyle:MutableException") private List datasetPath; + @SuppressWarnings("checkstyle:MutableException") private String filePath; public NoSupportedUpPromotionOrCoercionException(CompleteType fileType, CompleteType tableType) { diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/record/ArrowRecordBatchLoader.java b/sabot/vector-tools/src/main/java/com/dremio/exec/record/ArrowRecordBatchLoader.java index b1094de560..89a0a86ea9 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/record/ArrowRecordBatchLoader.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/record/ArrowRecordBatchLoader.java @@ -247,6 +247,7 @@ public void clear() { close(); } + @Override public void close(){ container.clear(); resetRecordCount(); diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/record/DeadBuf.java b/sabot/vector-tools/src/main/java/com/dremio/exec/record/DeadBuf.java index 29b9b2eedf..65a517cac5 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/record/DeadBuf.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/record/DeadBuf.java @@ -85,6 +85,7 @@ public long getLongLE(int index) { throw new UnsupportedOperationException(ERROR_MESSAGE); } + @Override public ByteBuf writeLongLE(long value) { throw new UnsupportedOperationException(ERROR_MESSAGE); } diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/record/RecordBatchLoader.java b/sabot/vector-tools/src/main/java/com/dremio/exec/record/RecordBatchLoader.java index 66ab6d9008..de5a53bd58 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/record/RecordBatchLoader.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/record/RecordBatchLoader.java @@ -231,6 +231,7 @@ public void clear() { close(); } + @Override public void close(){ container.clear(); resetRecordCount(); diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/record/VectorWrapper.java b/sabot/vector-tools/src/main/java/com/dremio/exec/record/VectorWrapper.java index 85fe6408cf..09fa7ba1d6 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/record/VectorWrapper.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/record/VectorWrapper.java @@ -31,6 +31,7 @@ public interface VectorWrapper extends AutoCloseable { public T[] getValueVectors(); public boolean isHyper(); public void clear(); + @Override public void close(); public VectorWrapper 
cloneAndTransfer(BufferAllocator allocator, CallBack callback); public VectorWrapper getChildWrapper(int[] ids); diff --git a/sabot/vector-tools/src/main/java/com/dremio/exec/vector/ObjectVector.java b/sabot/vector-tools/src/main/java/com/dremio/exec/vector/ObjectVector.java index 24a31349f6..06ab4d9c9d 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/exec/vector/ObjectVector.java +++ b/sabot/vector-tools/src/main/java/com/dremio/exec/vector/ObjectVector.java @@ -100,6 +100,7 @@ public void setValueCount(int valueCount) { count = valueCount; } + @Override public void reset() { count = 0; maxCount = 0; diff --git a/sabot/vector-tools/src/main/java/com/dremio/sabot/op/receiver/RawFragmentBatch.java b/sabot/vector-tools/src/main/java/com/dremio/sabot/op/receiver/RawFragmentBatch.java index 960475046b..c90b39835f 100644 --- a/sabot/vector-tools/src/main/java/com/dremio/sabot/op/receiver/RawFragmentBatch.java +++ b/sabot/vector-tools/src/main/java/com/dremio/sabot/op/receiver/RawFragmentBatch.java @@ -53,6 +53,7 @@ public String toString() { return "RawFragmentBatch [header=" + header + ", body=" + body + "]"; } + @Override public void close() { if (body != null) { body.close(); diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/FixedWidthVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/FixedWidthVectorHelper.java index b6f2be7dfd..6adbc12c23 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/FixedWidthVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/FixedWidthVectorHelper.java @@ -31,6 +31,7 @@ public FixedWidthVectorHelper(T vector) { this.bitVector = vector instanceof BitVector; } + @Override public SerializedField.Builder getMetadataBuilder() { return super.getMetadataBuilder() .addChild(buildValidityMetadata()) diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/NullVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/NullVectorHelper.java index ca8932b7ac..327dbe7e55 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/NullVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/NullVectorHelper.java @@ -29,6 +29,7 @@ public NullVectorHelper(NullVector vector) { this.vector = vector; } + @Override public SerializedField getMetadata() { return SerializedField.newBuilder() .setMajorType(com.dremio.common.types.Types.optional(com.dremio.common.types.TypeProtos.MinorType.NULL)) @@ -37,6 +38,7 @@ public SerializedField getMetadata() { .build(); } + @Override public void load(SerializedField metadata, ArrowBuf buffer) { } diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/ValidityVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/ValidityVectorHelper.java index e1553aab76..d7e3304b00 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/ValidityVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/ValidityVectorHelper.java @@ -35,6 +35,7 @@ public ValidityVectorHelper(BitVector vector) { this.vector = vector; } + @Override public void load(SerializedField metadata, ArrowBuf buffer) { Preconditions.checkArgument(vector.getName().equals(metadata.getNamePart().getName()), "The " + "field %s doesn't match the provided metadata %s.", vector.getName(), metadata); diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/VariableWidthVectorHelper.java 
b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/VariableWidthVectorHelper.java index 020d7f3a2a..8770ba41db 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/VariableWidthVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/VariableWidthVectorHelper.java @@ -27,6 +27,7 @@ public VariableWidthVectorHelper(T vector) { super(vector); } + @Override public SerializedField.Builder getMetadataBuilder() { return super.getMetadataBuilder() .addChild(buildValidityMetadata()) diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/BaseRepeatedValueVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/BaseRepeatedValueVectorHelper.java index a9fcdf380a..7f56cbba01 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/BaseRepeatedValueVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/BaseRepeatedValueVectorHelper.java @@ -37,6 +37,7 @@ public BaseRepeatedValueVectorHelper(T vector) { this.vector = vector; } + @Override public SerializedField.Builder getMetadataBuilder() { SerializedField offsetField = buildOffsetMetadata(); return super.getMetadataBuilder() diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/ListVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/ListVectorHelper.java index 27ffcb49cf..16a0f4c465 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/ListVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/ListVectorHelper.java @@ -83,6 +83,7 @@ private void loadValidityBuffer(SerializedField metadata, ArrowBuf buffer) { listVector.validityBuffer.getReferenceManager().retain(1); } + @Override public void materialize(Field field) { if (field.getChildren().size() == 0) { return; @@ -92,6 +93,7 @@ public void materialize(Field field) { TypeHelper.getHelper(innerVector).ifPresent(t -> t.materialize(innerField)); } + @Override public SerializedField.Builder getMetadataBuilder() { return SerializedField.newBuilder() .setMajorType(MajorType.newBuilder().setMinorType(MinorType.LIST).setMode(DataMode.OPTIONAL).build()) @@ -103,6 +105,7 @@ public SerializedField.Builder getMetadataBuilder() { .addChild(TypeHelper.getMetadata(listVector.vector)); } + @Override protected SerializedField buildValidityMetadata() { SerializedField.Builder validityBuilder = SerializedField.newBuilder() .setNamePart(NamePart.newBuilder().setName("$bits$").build()) diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/NonNullableStructVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/NonNullableStructVectorHelper.java index ce686be8e6..8386b9f167 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/NonNullableStructVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/NonNullableStructVectorHelper.java @@ -44,6 +44,7 @@ public NonNullableStructVectorHelper(NonNullableStructVector vector) { this.structVector = vector; } + @Override public void load(SerializedField metadata, ArrowBuf buf) { final List fields = metadata.getChildList(); structVector.valueCount = metadata.getValueCount(); @@ -69,6 +70,7 @@ public void load(SerializedField metadata, ArrowBuf buf) { Preconditions.checkState(bufOffset == buf.capacity()); } + @Override public void materialize(Field field) { List children = field.getChildren(); @@ -79,6 +81,7 
@@ public void materialize(Field field) { } } + @Override public SerializedField getMetadata() { SerializedField.Builder b = SerializedField.newBuilder() .setNamePart(NamePart.newBuilder().setName(structVector.getField().getName())) diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/StructVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/StructVectorHelper.java index f9c8d387bd..bdff6747ef 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/StructVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/StructVectorHelper.java @@ -44,6 +44,7 @@ public StructVectorHelper(StructVector vector) { this.structVector = vector; } + @Override public void load(SerializedField metadata, ArrowBuf buf) { /* clear the current buffers (if any) */ structVector.clear(); @@ -118,6 +119,7 @@ private int load(ArrowBuf buf, int bufOffset, final SerializedField child, Field return bufOffset; } + @Override public void materialize(Field field) { List children = field.getChildren(); @@ -128,6 +130,7 @@ public void materialize(Field field) { } } + @Override public SerializedField getMetadata() { int bufferSize = structVector.getBufferSize(); SerializedField.Builder b = SerializedField.newBuilder() diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/UnionVectorHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/UnionVectorHelper.java index 5adb51792e..c4f20f3059 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/UnionVectorHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/complex/UnionVectorHelper.java @@ -37,6 +37,7 @@ public UnionVectorHelper(UnionVector vector) { this.unionVector = vector; } + @Override public void load(UserBitShared.SerializedField metadata, ArrowBuf buffer) { /* clear the current buffers (if any) */ unionVector.clear(); @@ -62,6 +63,7 @@ private void loadTypeBuffer(SerializedField metadata, ArrowBuf buffer) { unionVector.typeBuffer .getReferenceManager().retain(1); } + @Override public void materialize(Field field) { for (Field child : field.getChildren()) { FieldVector v = TypeHelper.getNewVector(child, unionVector.getAllocator()); @@ -70,6 +72,7 @@ public void materialize(Field field) { } } + @Override public SerializedField getMetadata() { SerializedField.Builder b = SerializedField.newBuilder() .setNamePart(NamePart.newBuilder().setName(unionVector.getField().getName())) diff --git a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/util/BasicTypeHelper.java b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/util/BasicTypeHelper.java index b559239bb2..c3d3302bc6 100644 --- a/sabot/vector-tools/src/main/java/org/apache/arrow/vector/util/BasicTypeHelper.java +++ b/sabot/vector-tools/src/main/java/org/apache/arrow/vector/util/BasicTypeHelper.java @@ -789,103 +789,103 @@ public static void setNotNull(ValueHolder holder) { case TINYINT: if (holder instanceof NullableTinyIntHolder) { ((NullableTinyIntHolder) holder).isSet = 1; - return; } + return; case UINT1: if (holder instanceof NullableUInt1Holder) { ((NullableUInt1Holder) holder).isSet = 1; - return; } + return; case UINT2: if (holder instanceof NullableUInt2Holder) { ((NullableUInt2Holder) holder).isSet = 1; - return; } + return; case SMALLINT: if (holder instanceof NullableSmallIntHolder) { ((NullableSmallIntHolder) holder).isSet = 1; - return; } + return; case INT: if (holder instanceof 
NullableIntHolder) { ((NullableIntHolder) holder).isSet = 1; - return; } + return; case UINT4: if (holder instanceof NullableUInt4Holder) { ((NullableUInt4Holder) holder).isSet = 1; - return; } + return; case FLOAT4: if (holder instanceof NullableFloat4Holder) { ((NullableFloat4Holder) holder).isSet = 1; - return; } + return; case INTERVALYEAR: if (holder instanceof NullableIntervalYearHolder) { ((NullableIntervalYearHolder) holder).isSet = 1; - return; } + return; case TIMEMILLI: if (holder instanceof NullableTimeMilliHolder) { ((NullableTimeMilliHolder) holder).isSet = 1; - return; } + return; case BIGINT: if (holder instanceof NullableBigIntHolder) { ((NullableBigIntHolder) holder).isSet = 1; - return; } + return; case UINT8: if (holder instanceof NullableUInt8Holder) { ((NullableUInt8Holder) holder).isSet = 1; - return; } + return; case FLOAT8: if (holder instanceof NullableFloat8Holder) { ((NullableFloat8Holder) holder).isSet = 1; - return; } + return; case DATEMILLI: if (holder instanceof NullableDateMilliHolder) { ((NullableDateMilliHolder) holder).isSet = 1; - return; } + return; case TIMESTAMPMILLI: if (holder instanceof NullableTimeStampMilliHolder) { ((NullableTimeStampMilliHolder) holder).isSet = 1; - return; } + return; case INTERVALDAY: if (holder instanceof NullableIntervalDayHolder) { ((NullableIntervalDayHolder) holder).isSet = 1; - return; } + return; case DECIMAL: if (holder instanceof NullableDecimalHolder) { ((NullableDecimalHolder) holder).isSet = 1; - return; } + return; case FIXEDSIZEBINARY: if (holder instanceof NullableFixedSizeBinaryHolder) { ((NullableFixedSizeBinaryHolder) holder).isSet = 1; - return; } + return; case VARBINARY: if (holder instanceof NullableVarBinaryHolder) { ((NullableVarBinaryHolder) holder).isSet = 1; - return; } + return; case VARCHAR: if (holder instanceof NullableVarCharHolder) { ((NullableVarCharHolder) holder).isSet = 1; - return; } + return; case BIT: if (holder instanceof NullableBitHolder) { ((NullableBitHolder) holder).isSet = 1; - return; } + return; default: throw new UnsupportedOperationException(buildErrorMessage("set not null", type)); } @@ -1020,7 +1020,6 @@ public static boolean isNull(ValueHolder holder) { } } - @SuppressWarnings("checkstyle:MissingSwitchDefault") public static void setValueSafe(ValueVector vector, int index, ValueHolder holder) { MajorType type = getMajorTypeForField(vector.getField()); @@ -1041,6 +1040,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((TinyIntVector) vector).setSafe(index, (TinyIntHolder) holder); } return; + default: + return; } case UINT1: switch (type.getMode()) { @@ -1058,6 +1059,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((UInt1Vector) vector).setSafe(index, (UInt1Holder) holder); } return; + default: + return; } case UINT2: switch (type.getMode()) { @@ -1075,6 +1078,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((UInt2Vector) vector).setSafe(index, (UInt2Holder) holder); } return; + default: + return; } case SMALLINT: switch (type.getMode()) { @@ -1092,6 +1097,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((SmallIntVector) vector).setSafe(index, (SmallIntHolder) holder); } return; + default: + return; } case INT: switch (type.getMode()) { @@ -1109,6 +1116,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((IntVector) vector).setSafe(index, (IntHolder) holder); } return; + default: + return; 
} case UINT4: switch (type.getMode()) { @@ -1126,6 +1135,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((UInt4Vector) vector).setSafe(index, (UInt4Holder) holder); } return; + default: + return; } case FLOAT4: switch (type.getMode()) { @@ -1143,6 +1154,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((Float4Vector) vector).setSafe(index, (Float4Holder) holder); } return; + default: + return; } // unsupported type DateDay case INTERVALYEAR: @@ -1161,6 +1174,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((IntervalYearVector) vector).setSafe(index, (IntervalYearHolder) holder); } return; + default: + return; } // unsupported type TimeSec case TIME: @@ -1179,6 +1194,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((TimeMilliVector) vector).setSafe(index, (TimeMilliHolder) holder); } return; + default: + return; } case BIGINT: switch (type.getMode()) { @@ -1196,6 +1213,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((BigIntVector) vector).setSafe(index, (BigIntHolder) holder); } return; + default: + return; } case UINT8: switch (type.getMode()) { @@ -1213,6 +1232,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((UInt8Vector) vector).setSafe(index, (UInt8Holder) holder); } return; + default: + return; } case FLOAT8: switch (type.getMode()) { @@ -1230,6 +1251,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((Float8Vector) vector).setSafe(index, (Float8Holder) holder); } return; + default: + return; } case DATE: switch (type.getMode()) { @@ -1247,6 +1270,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((DateMilliVector) vector).setSafe(index, (DateMilliHolder) holder); } return; + default: + return; } // unsupported type Duration // unsupported type TimeStampSec @@ -1266,6 +1291,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((TimeStampMilliVector) vector).setSafe(index, (TimeStampMilliHolder) holder); } return; + default: + return; } // unsupported type TimeStampMicro // unsupported type TimeStampNano @@ -1291,6 +1318,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((IntervalDayVector) vector).setSafe(index, (IntervalDayHolder) holder); } return; + default: + return; } case DECIMAL: switch (type.getMode()) { @@ -1308,6 +1337,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((DecimalVector) vector).setSafe(index, (DecimalHolder) holder); } return; + default: + return; } case FIXEDSIZEBINARY: switch (type.getMode()) { @@ -1325,6 +1356,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((FixedSizeBinaryVector) vector).setSafe(index, (FixedSizeBinaryHolder) holder); } return; + default: + return; } case VARBINARY: switch (type.getMode()) { @@ -1342,6 +1375,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((VarBinaryVector) vector).setSafe(index, (VarBinaryHolder) holder); } return; + default: + return; } case VARCHAR: switch (type.getMode()) { @@ -1359,6 +1394,8 @@ public static void setValueSafe(ValueVector vector, int index, ValueHolder holde ((VarCharVector) vector).setSafe(index, (VarCharHolder) holder); } return; + default: + return; } case BIT: switch (type.getMode()) { @@ -1376,9 +1413,12 @@ public static 
void setValueSafe(ValueVector vector, int index, ValueHolder holde ((BitVector) vector).setSafe(index, (BitHolder) holder); } return; + default: + return; } case GENERIC_OBJECT: ((ObjectVector) vector).setSafe(index, (ObjectHolder) holder); + return; default: throw new UnsupportedOperationException( buildErrorMessage("set value safe", getArrowMinorType(type.getMinorType()))); @@ -1581,7 +1621,6 @@ public static ValueHolder getValue(ValueVector vector, int index) { } } - @SuppressWarnings("checkstyle:MissingSwitchDefault") public static ValueHolder deNullify(ValueHolder holder) { MajorType type = getValueHolderMajorType(holder.getClass()); @@ -1590,6 +1629,7 @@ public static ValueHolder deNullify(ValueHolder holder) { switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableTinyIntHolder) holder).isSet == 1) { @@ -1601,13 +1641,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case UINT1: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableUInt1Holder) holder).isSet == 1) { @@ -1619,13 +1660,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case UINT2: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableUInt2Holder) holder).isSet == 1) { @@ -1637,13 +1679,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case SMALLINT: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableSmallIntHolder) holder).isSet == 1) { @@ -1655,13 +1698,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case INT: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableIntHolder) holder).isSet == 1) { @@ -1673,13 +1717,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case UINT4: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableUInt4Holder) holder).isSet == 1) { @@ -1691,13 +1736,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case FLOAT4: switch (type.getMode()) { case REQUIRED: + case 
REPEATED: return holder; case OPTIONAL: if (((NullableFloat4Holder) holder).isSet == 1) { @@ -1709,14 +1755,15 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } // unsupported type DateDay case INTERVALYEAR: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableIntervalYearHolder) holder).isSet == 1) { @@ -1728,14 +1775,15 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } // unsupported type TimeSec case TIME: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableTimeMilliHolder) holder).isSet == 1) { @@ -1747,13 +1795,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case BIGINT: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableBigIntHolder) holder).isSet == 1) { @@ -1765,13 +1814,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case UINT8: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableUInt8Holder) holder).isSet == 1) { @@ -1783,13 +1833,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case FLOAT8: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableFloat8Holder) holder).isSet == 1) { @@ -1801,13 +1852,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case DATE: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableDateMilliHolder) holder).isSet == 1) { @@ -1819,8 +1871,8 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } // unsupported type Duration // unsupported type TimeStampSec @@ -1828,6 +1880,7 @@ public static ValueHolder deNullify(ValueHolder holder) { switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableTimeStampMilliHolder) holder).isSet == 1) { @@ -1839,8 +1892,8 
@@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } // unsupported type TimeStampMicro // unsupported type TimeStampNano @@ -1854,6 +1907,7 @@ public static ValueHolder deNullify(ValueHolder holder) { switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableIntervalDayHolder) holder).isSet == 1) { @@ -1866,13 +1920,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case DECIMAL: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableDecimalHolder) holder).isSet == 1) { @@ -1887,13 +1942,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case FIXEDSIZEBINARY: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableFixedSizeBinaryHolder) holder).isSet == 1) { @@ -1906,13 +1962,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case VARBINARY: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableVarBinaryHolder) holder).isSet == 1) { @@ -1926,13 +1983,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case VARCHAR: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableVarCharHolder) holder).isSet == 1) { @@ -1946,13 +2004,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } case BIT: switch (type.getMode()) { case REQUIRED: + case REPEATED: return holder; case OPTIONAL: if (((NullableBitHolder) holder).isSet == 1) { @@ -1964,15 +2023,14 @@ public static ValueHolder deNullify(ValueHolder holder) { } else { throw new UnsupportedOperationException("You can not convert a null value into a non-null value!"); } - case REPEATED: - return holder; + default: + throw new AssertionError("Unsupported data mode: " + type.getMode()); } default: throw new UnsupportedOperationException(buildErrorMessage("deNullify", getArrowMinorType(type.getMinorType()))); } } - @SuppressWarnings("checkstyle:MissingSwitchDefault") public static ValueHolder nullify(ValueHolder holder) { MajorType type = getValueHolderMajorType(holder.getClass()); @@ -1987,6 +2045,7 @@ 
public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case UINT1: @@ -1999,6 +2058,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case UINT2: @@ -2011,6 +2071,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case SMALLINT: @@ -2023,6 +2084,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case INT: @@ -2035,6 +2097,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case UINT4: @@ -2047,6 +2110,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case FLOAT4: @@ -2059,6 +2123,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } // unsupported type DateDay @@ -2072,6 +2137,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } // unsupported type TimeSec @@ -2085,6 +2151,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case BIGINT: @@ -2097,6 +2164,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case UINT8: @@ -2109,6 +2177,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case FLOAT8: @@ -2121,6 +2190,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case DATE: @@ -2133,6 +2203,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } // unsupported type Duration @@ -2147,6 +2218,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can 
not convert repeated type " + type + " to nullable type!"); } // unsupported type TimeStampMicro @@ -2168,6 +2240,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case DECIMAL: @@ -2183,6 +2256,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case FIXEDSIZEBINARY: @@ -2196,6 +2270,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case VARBINARY: @@ -2210,6 +2285,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case VARCHAR: @@ -2224,6 +2300,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } case BIT: @@ -2236,6 +2313,7 @@ public static ValueHolder nullify(ValueHolder holder) { case OPTIONAL: return holder; case REPEATED: + default: throw new UnsupportedOperationException("You can not convert repeated type " + type + " to nullable type!"); } default: diff --git a/sample-data/pom.xml b/sample-data/pom.xml index 2c1c640710..83b7bddd69 100644 --- a/sample-data/pom.xml +++ b/sample-data/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-sample-data diff --git a/services/accelerator-api/pom.xml b/services/accelerator-api/pom.xml index dbb46bc595..eb4379c587 100644 --- a/services/accelerator-api/pom.xml +++ b/services/accelerator-api/pom.xml @@ -22,7 +22,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/services/accelerator/pom.xml b/services/accelerator/pom.xml index 675ddf84dd..581dc9dc10 100644 --- a/services/accelerator/pom.xml +++ b/services/accelerator/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-accelerator @@ -136,6 +136,10 @@ dremio-accelerator-api ${project.version} + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/AccelerationManagerImpl.java b/services/accelerator/src/main/java/com/dremio/service/reflection/AccelerationManagerImpl.java index da20a39bd3..dbfc19af53 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/AccelerationManagerImpl.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/AccelerationManagerImpl.java @@ -24,17 +24,23 @@ import javax.inject.Provider; import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionedDatasetId; import 
com.dremio.exec.ops.ReflectionContext; +import com.dremio.exec.planner.sql.SchemaUtilities; import com.dremio.exec.planner.sql.parser.SqlCreateReflection; import com.dremio.exec.planner.sql.parser.SqlCreateReflection.MeasureType; import com.dremio.exec.planner.sql.parser.SqlCreateReflection.NameAndMeasures; +import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.sys.accel.AccelerationDetailsPopulator; import com.dremio.exec.store.sys.accel.AccelerationManager; import com.dremio.exec.store.sys.accel.LayoutDefinition; import com.dremio.exec.store.sys.accel.LayoutDefinition.Type; import com.dremio.service.accelerator.AccelerationUtils; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.reflection.proto.DimensionGranularity; import com.dremio.service.reflection.proto.ExternalReflection; @@ -46,6 +52,7 @@ import com.dremio.service.reflection.proto.ReflectionGoalState; import com.dremio.service.reflection.proto.ReflectionMeasureField; import com.dremio.service.reflection.proto.ReflectionType; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Function; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; @@ -57,17 +64,17 @@ public class AccelerationManagerImpl implements AccelerationManager { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AccelerationManagerImpl.class); - private final Provider namespaceService; private final Provider reflectionAdministrationServiceFactory; private final Provider reflectionService; + private final Provider catalogService; public AccelerationManagerImpl( Provider reflectionService, Provider reflectionAdministrationServiceFactory, - Provider namespaceService) { + Provider catalogService) { super(); this.reflectionService = reflectionService; - this.namespaceService = namespaceService; + this.catalogService = catalogService; this.reflectionAdministrationServiceFactory = reflectionAdministrationServiceFactory; } @@ -81,18 +88,21 @@ public void dropAcceleration(List path, boolean raiseErrorIfNotFound) { } @Override - public void addLayout(List path, LayoutDefinition definition, ReflectionContext reflectionContext) { - final NamespaceKey key = new NamespaceKey(path); - final DatasetConfig dataset; - + public void addLayout(SchemaUtilities.TableWithPath tableWithPath, LayoutDefinition definition, ReflectionContext reflectionContext) { + final NamespaceKey key = new NamespaceKey(tableWithPath.getPath()); + final DatasetConfig datasetConfig; + final EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + String datasetId = null; try { - dataset = namespaceService.get().getDataset(key); - if(dataset == null) { + datasetId = tableWithPath.getTable().getDatasetConfig().getId().getId(); + datasetConfig = CatalogUtil.getDatasetConfig(catalog, datasetId); + if(datasetConfig == null) { throw UserException.validationError().message("Unable to find requested dataset %s.", key).build(logger); } } catch(Exception e) { throw UserException.validationError(e).message("Unable to find requested dataset %s.", key).build(logger); } + validateReflectionSupportForTimeTravelOnArctic(tableWithPath.getPath().get(0), datasetId, (Catalog) catalog); ReflectionGoal goal = new ReflectionGoal(); ReflectionDetails details = new ReflectionDetails(); @@ -111,7 +121,7 @@ public void 
addLayout(List path, LayoutDefinition definition, Reflection goal.setArrowCachingEnabled(definition.getArrowCachingEnabled()); goal.setState(ReflectionGoalState.ENABLED); goal.setType(definition.getType() == Type.AGGREGATE ? ReflectionType.AGGREGATION : ReflectionType.RAW); - goal.setDatasetId(dataset.getId().getId()); + goal.setDatasetId(datasetConfig.getId().getId()); reflectionAdministrationServiceFactory.get().get(reflectionContext).create(goal); } @@ -185,10 +195,9 @@ public void addExternalReflection(String name, List table, List } @Override - public void dropLayout(List path, final String layoutIdOrName, ReflectionContext reflectionContext) { - NamespaceKey key = new NamespaceKey(path); + public void dropLayout(SchemaUtilities.TableWithPath tableWithPath, final String layoutIdOrName, ReflectionContext reflectionContext) { ReflectionAdministrationService administrationReflectionService = reflectionAdministrationServiceFactory.get().get(reflectionContext); - for (ReflectionGoal rg : administrationReflectionService.getReflectionsByDatasetPath(key)) { + for (ReflectionGoal rg : administrationReflectionService.getReflectionsByDatasetId(tableWithPath.getTable().getDatasetConfig().getId().getId())) { if (rg.getId().getId().equals(layoutIdOrName) || layoutIdOrName.equals(rg.getName())) { administrationReflectionService.remove(rg); // only match first and exist. @@ -196,7 +205,7 @@ public void dropLayout(List path, final String layoutIdOrName, Reflectio } } - Optional er = StreamSupport.stream(administrationReflectionService.getExternalReflectionByDatasetPath(path).spliterator(), false) + Optional er = StreamSupport.stream(administrationReflectionService.getExternalReflectionByDatasetPath(tableWithPath.getPath()).spliterator(), false) .filter(externalReflection -> { return layoutIdOrName.equalsIgnoreCase(externalReflection.getName()) || layoutIdOrName.equals(externalReflection.getId()); @@ -211,10 +220,11 @@ public void dropLayout(List path, final String layoutIdOrName, Reflectio } @Override - public void toggleAcceleration(List path, Type type, boolean enable, ReflectionContext reflectionContext) { + public void toggleAcceleration(SchemaUtilities.TableWithPath tableWithPath, Type type, boolean enable, ReflectionContext reflectionContext) { Exception ex = null; ReflectionAdministrationService administrationReflectionService = reflectionAdministrationServiceFactory.get().get(reflectionContext); - for(ReflectionGoal g : administrationReflectionService.getReflectionsByDatasetPath(new NamespaceKey(path))) { + + for(ReflectionGoal g : administrationReflectionService.getReflectionsByDatasetId(tableWithPath.getTable().getDatasetConfig().getId().getId())) { if( (type == Type.AGGREGATE && g.getType() != ReflectionType.AGGREGATION) || (type == Type.RAW && g.getType() != ReflectionType.RAW) || @@ -247,7 +257,7 @@ public void replanlayout(String layoutId) { @Override public AccelerationDetailsPopulator newPopulator() { - return new ReflectionDetailsPopulatorImpl(namespaceService.get(), reflectionService.get()); + return new ReflectionDetailsPopulatorImpl(reflectionService.get(), catalogService.get()); } @SuppressWarnings("unchecked") @@ -258,4 +268,25 @@ public T unwrap(Class clazz) { } return null; } + + // This helper is to determine if there is a TIMESTAMP specified on an Arctic table. + // We want to disallow AT TIMESTAMP specification when creating reflections. This is because the VersionedDatasetId + // will not contain the branch information if we allow this.
So when we later go to look up the table using the saved VersionedDatasetId, + // we won't have the branch information to look up the table in Nessie and will always look up only in the default branch. + // E.g. if the current context is dev and we are creating a reflection AT TIMESTAMP T1, the table is resolved to . + // Later when we look up the table, we don't save the 'dev' branch context in it, so we will look up the table at . + private void validateReflectionSupportForTimeTravelOnArctic(String source, String datasetId, Catalog catalog) { + VersionedDatasetId versionedDatasetId = null; + try { + // first check to see if it's a VersionedDatasetId + versionedDatasetId = VersionedDatasetId.fromString(datasetId); + } catch (JsonProcessingException e) { + // Assume this is a non-versioned dataset id. + return; + } + if ((CatalogUtil.requestedPluginSupportsVersionedTables(source, catalog)) && + versionedDatasetId.getVersionContext().getType() == TableVersionType.TIMESTAMP) { + throw UserException.validationError().message("Cannot create reflection on versioned table or view with TIMESTAMP specified. Please use BRANCH, TAG or COMMIT instead.").build(logger); + } + } } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DatasetHashUtils.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DatasetHashUtils.java new file mode 100644 index 0000000000..8740b42c52 --- /dev/null +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DatasetHashUtils.java @@ -0,0 +1,292 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
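
The validateReflectionSupportForTimeTravelOnArctic helper above rejects reflection creation when a versioned dataset id carries an AT TIMESTAMP context, since the saved id would lose the branch. A self-contained sketch of that guard, where VersionedId and VersionType are simplified stand-ins for Dremio's VersionedDatasetId and TableVersionType, and the key@TYPE id encoding is invented purely for illustration:

import java.util.Optional;

public class TimeTravelGuardSketch {
  enum VersionType { BRANCH, TAG, COMMIT, TIMESTAMP }

  record VersionedId(String tableKey, VersionType versionType) {
    // Dremio parses the real id from JSON; this sketch fakes a "key@TYPE" form.
    static Optional<VersionedId> fromString(String id) {
      int at = id.lastIndexOf('@');
      if (at < 0) {
        return Optional.empty(); // plain, non-versioned dataset id
      }
      return Optional.of(new VersionedId(id.substring(0, at),
          VersionType.valueOf(id.substring(at + 1))));
    }
  }

  static void validateNoTimestamp(String datasetId) {
    VersionedId id = VersionedId.fromString(datasetId).orElse(null);
    if (id == null) {
      return; // non-versioned ids never carry an AT TIMESTAMP context
    }
    if (id.versionType() == VersionType.TIMESTAMP) {
      // A timestamp pins a moment but loses the branch, so a later lookup
      // could silently resolve against the default branch instead.
      throw new IllegalArgumentException(
          "Cannot create reflection with TIMESTAMP specified; use BRANCH, TAG or COMMIT.");
    }
  }

  public static void main(String[] args) {
    validateNoTimestamp("dev.orders@BRANCH"); // accepted
    try {
      validateNoTimestamp("dev.orders@TIMESTAMP"); // rejected
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}
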
+ */ +package com.dremio.service.reflection; + +import java.util.ArrayList; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; + +import org.apache.calcite.plan.RelOptUtil; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.RelRoot; +import org.apache.calcite.rel.core.TableScan; +import org.apache.calcite.util.Pair; + +import com.dremio.common.utils.protos.AttemptId; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUser; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.ops.QueryContext; +import com.dremio.exec.planner.RoutingShuttle; +import com.dremio.exec.planner.acceleration.ExpansionNode; +import com.dremio.exec.planner.acceleration.substitution.SubstitutionUtils; +import com.dremio.exec.planner.observer.AbstractAttemptObserver; +import com.dremio.exec.planner.sql.DremioSqlToRelConverter; +import com.dremio.exec.planner.sql.SqlConverter; +import com.dremio.exec.server.MaterializationDescriptorProvider; +import com.dremio.exec.server.SabotContext; +import com.dremio.exec.store.CatalogService; +import com.dremio.sabot.rpc.user.UserSession; +import com.dremio.service.namespace.NamespaceException; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.NamespaceNotFoundException; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.DatasetType; +import com.dremio.service.namespace.dataset.proto.ParentDataset; +import com.dremio.service.namespace.dataset.proto.ViewFieldType; +import com.dremio.service.users.SystemUser; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ListMultimap; + +import io.protostuff.LinkedBuffer; +import io.protostuff.ProtostuffIOUtil; + +public class DatasetHashUtils { + + /** + * @return true if the dataset type is PHYSICAL_* + */ + public static boolean isPhysicalDataset(DatasetType t) { + return t == DatasetType.PHYSICAL_DATASET || + t == DatasetType.PHYSICAL_DATASET_SOURCE_FILE || + t == DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER || + t == DatasetType.PHYSICAL_DATASET_HOME_FILE || + t == DatasetType.PHYSICAL_DATASET_HOME_FOLDER; + } + + /** + * Computes a hash for the input dataset by recursively looking through parent views and tables. + * + * @param dataset Dataset which we are computing a hash for + * @param catalogService + * @param ignorePds Exclude parent PDS from hash so that an additive change such as adding a column doesn't cause + * a reflection anchored on a child view to require a full refresh (when previously incremental) + * @return + * @throws NamespaceException + */ + public static Integer computeDatasetHash(DatasetConfig dataset, CatalogService catalogService, boolean ignorePds) throws NamespaceException { + Queue q = new LinkedList<>(); + q.add(dataset); + int hash = 1; + boolean isFirst = true; + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + while (!q.isEmpty()) { + dataset = q.poll(); + if (isPhysicalDataset(dataset.getType())) { + if (!ignorePds || isFirst) { + hash = 31 * hash + (dataset.getRecordSchema() == null ? 
1 : dataset.getRecordSchema().hashCode()); + } + } else { + int schemaHash = 0; + if (isFirst) { + final List types = new ArrayList<>(); + dataset.getVirtualDataset().getSqlFieldsList().forEach(type -> { + if (type.getSerializedField() != null) { + ViewFieldType newType = new ViewFieldType(); + ProtostuffIOUtil.mergeFrom(ProtostuffIOUtil.toByteArray(type, ViewFieldType.getSchema(), LinkedBuffer.allocate()), newType, ViewFieldType.getSchema()); + types.add(newType.setSerializedField(null)); + } else { + types.add(type); + } + }); + schemaHash = types.hashCode(); + } + hash = 31 * hash + dataset.getVirtualDataset().getSql().hashCode() + schemaHash; + if (dataset.getVirtualDataset().getParentsList() != null) { // select 1 has null parents list + for (ParentDataset parent : dataset.getVirtualDataset().getParentsList()) { + int size = parent.getDatasetPathList().size(); + if (!(size > 1 && parent.getDatasetPathList().get(size - 1).equalsIgnoreCase("external_query"))) { + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, new NamespaceKey(parent.getDatasetPathList())); + if (datasetConfig == null) { + throw new NamespaceNotFoundException(new NamespaceKey(parent.getDatasetPathList()), "Not found"); + } + q.add(datasetConfig); + } + } + } + } + isFirst = false; + } + return hash; + } + + /** + * check with ignorePds true and then also false, for backward compatibility + */ + static boolean hashEquals(int hash, DatasetConfig dataset, CatalogService cs) throws NamespaceException { + return + hash == computeDatasetHash(dataset, cs, true) + || + hash == computeDatasetHash(dataset, cs, false); + } + + /** + * Similar to above {@link #computeDatasetHash(DatasetConfig, CatalogService, boolean)} except can handle + * versioned parents such as views/tables from versioned sources. Prior to versioned sources, a view's parents were cached in the + * KV store on save. However, for versioned views or Sonar Views that reference versioned tables, the KV store + * no longer stores the parents. So instead, we need to get the parents for a view from the RelNode tree + * which is capable of resolving version context based on AT syntax, refs and/or default source version. + * + * @param dataset Dataset which we are computing a hash for + * @param catalogService + * @param relNode RelNode tree for dataset + * @param ignorePds Same as ignorePds in {@link #computeDatasetHash(DatasetConfig, CatalogService, boolean)} + * @return + */ + public static Integer computeDatasetHash(DatasetConfig dataset, CatalogService catalogService, + RelNode relNode, boolean ignorePds) { + ParentDatasetBuilder builder = new ParentDatasetBuilder(dataset, relNode, catalogService); + Queue> q = new LinkedList<>(); + Pair current = Pair.of(SubstitutionUtils.VersionedPath.of(dataset.getFullPathList(), null), dataset); + q.add(current); + int hash = 1; + boolean isFirst = true; + while (!q.isEmpty()) { + current = q.poll(); + dataset = current.right; + if (isPhysicalDataset(dataset.getType())) { + if (!ignorePds || isFirst) { + hash = 31 * hash + (dataset.getRecordSchema() == null ? 
1 : dataset.getRecordSchema().hashCode()); + } + } else { + int schemaHash = 0; + if (isFirst) { + final List types = new ArrayList<>(); + dataset.getVirtualDataset().getSqlFieldsList().forEach(type -> { + if (type.getSerializedField() != null) { + ViewFieldType newType = new ViewFieldType(); + ProtostuffIOUtil.mergeFrom(ProtostuffIOUtil.toByteArray(type, ViewFieldType.getSchema(), LinkedBuffer.allocate()), newType, ViewFieldType.getSchema()); + types.add(newType.setSerializedField(null)); + } else { + types.add(type); + } + }); + schemaHash = types.hashCode(); + } + hash = 31 * hash + dataset.getVirtualDataset().getSql().hashCode() + schemaHash; + // A versioned view doesn't (actually can't) store their parent datasets as part of the view metadata + for (Pair parent : builder.getParents(current.left)) { + int size = parent.left.left.size(); + if (!(size > 1 && parent.left.left.get(size - 1).equalsIgnoreCase("external_query"))) { + q.add(parent); + } + } + } + isFirst = false; + } + return hash; + } + + /** + * ParentDatasetBuilder builds a mapping of views to parent datasets. Since both the view and the parent dataset + * can be versioned tables/views, we track both the path components and table version context for each table/view. + */ + @VisibleForTesting + static class ParentDatasetBuilder extends RoutingShuttle { + private Deque expansions = new LinkedList<>(); + private ListMultimap> parents = ArrayListMultimap.create(); + private EntityExplorer catalog; + + public ParentDatasetBuilder(final DatasetConfig config, final RelNode relNode, final CatalogService catalogService) { + catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + // Initialize with query root + expansions.add(SubstitutionUtils.VersionedPath.of(config.getFullPathList(), null)); + relNode.accept(this); + } + + public List> getParents(SubstitutionUtils.VersionedPath child) { + return parents.get(child); + } + + @Override + public RelNode visit(RelNode other) { + if (other instanceof ExpansionNode) { + ExpansionNode expansionNode = (ExpansionNode) other; + SubstitutionUtils.VersionedPath scan = SubstitutionUtils.VersionedPath.of(expansionNode.getPath().getPathComponents(), expansionNode.getVersionContext()); + DremioTable view = CatalogUtil.getTable(CatalogEntityKey.newBuilder(). + keyComponents(scan.left). + tableVersionContext(scan.right).build(), catalog); + parents.put(expansions.peekLast(), Pair.of(scan, view.getDatasetConfig())); + expansions.addLast(scan); + RelNode child = visitChild(other, 0, other.getInput(0)); + expansions.removeLast(); + return child; + } + return this.visitChildren(other); + } + + @Override + public RelNode visit(TableScan tableScan) { + DremioTable table = tableScan.getTable().unwrap(DremioTable.class); + parents.put(expansions.peekLast(), Pair.of(SubstitutionUtils.VersionedPath.of(table.getPath().getPathComponents(), + table.getDataset().getVersionContext()), table.getDatasetConfig())); + return tableScan; + } + } + + /** + * TODO: REMOVE ME + * Sample code to show how to construct a QueryContext and SqlConverter so that one can turn a DatasetConfig + * into a RelNode query tree. 
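
Both computeDatasetHash overloads in the new DatasetHashUtils walk the dataset's lineage breadth-first, folding each ancestor into a running 31-based hash: physical datasets contribute their record schema, views contribute their SQL (plus field types at the root). A compact sketch of that traversal, with Dataset as a simplified stand-in for DatasetConfig:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public class DatasetHashSketch {
  record Dataset(boolean physical, String schemaOrSql, List<Dataset> parents) {}

  static int computeHash(Dataset root, boolean ignorePds) {
    Queue<Dataset> queue = new ArrayDeque<>();
    queue.add(root);
    int hash = 1;
    boolean isFirst = true;
    while (!queue.isEmpty()) {
      Dataset current = queue.poll();
      if (current.physical()) {
        // Skipping non-root physical datasets keeps an additive PDS change,
        // such as a new column, from forcing a full refresh downstream.
        if (!ignorePds || isFirst) {
          hash = 31 * hash + current.schemaOrSql().hashCode();
        }
      } else {
        hash = 31 * hash + current.schemaOrSql().hashCode();
        queue.addAll(current.parents()); // continue up the lineage
      }
      isFirst = false;
    }
    return hash;
  }

  public static void main(String[] args) {
    Dataset pds = new Dataset(true, "schema-v1", List.of());
    Dataset view = new Dataset(false, "SELECT * FROM t", List.of(pds));
    // With ignorePds=true the parent schema is excluded, so the hashes differ.
    System.out.println(computeHash(view, true) != computeHash(view, false));
  }
}
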
+ * + * @param sabotContext + * @param dataset + * @return + */ + public static RelNode expandView(SabotContext sabotContext, DatasetConfig dataset) + { + NamespaceKey anchorView = new NamespaceKey(dataset.getFullPathList()); + Map versionContextMap = ReflectionUtils.buildVersionContext(dataset.getId().getId()); + final UserSession session = ReflectionServiceImpl.systemSession(sabotContext.getOptionManager()); + for (Map.Entry versionContext : versionContextMap.entrySet()) { + session.setSessionVersionForSource(versionContext.getKey(), versionContext.getValue()); + } + RelRoot root = null; + try (QueryContext context = new QueryContext(session, sabotContext, new AttemptId().toQueryId(), + java.util.Optional.of(false), java.util.Optional.of(false))) { + SqlConverter converter = new SqlConverter( + context.getPlannerSettings(), + context.getOperatorTable(), + context, + MaterializationDescriptorProvider.EMPTY, + context.getFunctionRegistry(), + context.getSession(), + AbstractAttemptObserver.NOOP, + context.getCatalog(), + context.getSubstitutionProviderFactory(), + context.getConfig(), + context.getScanResult(), + context.getRelMetadataQuerySupplier()); + + root = DremioSqlToRelConverter.expandView(null, new CatalogUser(SystemUser.SYSTEM_USERNAME), + "select * from " + anchorView.getSchemaPath(), null, converter, + null, null); + + System.out.println(RelOptUtil.toString(root.rel)); + + } catch (Exception e) { + throw new RuntimeException(e); + } + return root.rel; + } +} diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyEntry.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyEntry.java index fb6a856a0c..d744747af1 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyEntry.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyEntry.java @@ -134,6 +134,7 @@ public DependencyType getType() { return DependencyType.DATASET; } + @Override public List getPath() { return path; } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyGraph.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyGraph.java index d65485ee34..32e6fd5b50 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyGraph.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyGraph.java @@ -56,9 +56,14 @@ public class DependencyGraph { } public synchronized void loadFromStore() { + int total = 0; + int noDependencies = 0; + int errors = 0; for (Map.Entry entry : dependenciesStore.getAll()) { + total++; final List dependencies = entry.getValue().getEntryList(); if (dependencies == null || dependencies.isEmpty()) { + noDependencies++; continue; } @@ -73,8 +78,10 @@ public DependencyEntry apply(ReflectionDependencyEntry entry) { } catch (DependencyException e) { // this should never happen as we don't allow saving cyclic dependencies in the in-memory graph logger.warn("Found a cyclic dependency while loading dependencies for {}, skipping", entry.getKey().getId(), e); + errors++; } } + logger.info("Loaded reflection dependency graph: totalReflections={},noDependencyReflections={},dependencyExceptions={}", total, noDependencies, errors); } synchronized List getPredecessors(final ReflectionId reflectionId) { diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyManager.java 
b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyManager.java index d59358989b..0910b4b7f0 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyManager.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyManager.java @@ -15,6 +15,7 @@ */ package com.dremio.service.reflection; +import static com.dremio.exec.catalog.VersionedDatasetId.isVersionedDatasetId; import static com.dremio.service.reflection.DependencyUtils.filterDatasetDependencies; import static com.dremio.service.reflection.DependencyUtils.filterReflectionDependencies; import static com.dremio.service.reflection.DependencyUtils.filterTableFunctionDependencies; @@ -29,8 +30,9 @@ import javax.annotation.Nullable; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.store.sys.accel.AccelerationManager.ExcludedReflectionsProvider; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.reflection.DependencyEntry.DatasetDependency; import com.dremio.service.reflection.DependencyEntry.ReflectionDependency; @@ -45,10 +47,12 @@ import com.dremio.service.reflection.store.DependenciesStore; import com.dremio.service.reflection.store.MaterializationStore; import com.dremio.service.reflection.store.ReflectionEntriesStore; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.FluentIterable; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; /** @@ -134,10 +138,10 @@ private boolean dontGiveUpHelper(final ReflectionId reflectionId, final Dependen return filterReflectionDependencies(dependencies).allMatch(dependency -> { return dontGiveUpHelper(dependency.getReflectionId(), dependencyResolutionContext); }) && filterDatasetDependencies(dependencies).allMatch(dependency -> { - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(dependency.getNamespaceKey()); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(dependency.getPath(), dependency.getId())); return Boolean.TRUE.equals(settings.getNeverRefresh()); }) && filterTableFunctionDependencies(dependencies).allMatch(dependency -> { - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(new NamespaceKey(dependency.getSourceName())); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(ImmutableList.of(dependency.getSourceName()), dependency.getId())); return Boolean.TRUE.equals(settings.getNeverRefresh()); }); } @@ -208,7 +212,7 @@ public Long apply(ReflectionDependency dependency) { @Override public Long apply(DatasetDependency dependency) { // first account for the dataset's refresh period - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(dependency.getNamespaceKey()); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(dependency.getPath(), dependency.getId())); final long refreshStart = Boolean.TRUE.equals(settings.getNeverRefresh()) || settings.getRefreshPeriod() == 0 ? 
0 : currentTime - settings.getRefreshPeriod(); // then account for any refresh request against the dataset @@ -225,7 +229,7 @@ public Long apply(DatasetDependency dependency) { @Nullable @Override public Long apply(TableFunctionDependency entry) { - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(new NamespaceKey(entry.getSourceName())); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(ImmutableList.of(entry.getSourceName()), entry.getId())); final long refreshStart = Boolean.TRUE.equals(settings.getNeverRefresh()) || settings.getRefreshPeriod() == 0 ? 0 : currentTime - settings.getRefreshPeriod(); return refreshStart; } @@ -283,7 +287,7 @@ public Optional getGracePeriod(final ReflectionId reflectionId, final Depe @Nullable @Override public Long apply(DatasetDependency entry) { - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(entry.getNamespaceKey()); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(entry.getPath(), entry.getId())); // for reflections that never expire, use a grace period of 1000 years from now return Boolean.TRUE.equals(settings.getNeverExpire()) ? (TimeUnit.DAYS.toMillis(365)*1000) : settings.getGracePeriod(); } @@ -293,7 +297,7 @@ public Long apply(DatasetDependency entry) { @Nullable @Override public Long apply(TableFunctionDependency entry) { - final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(new NamespaceKey(entry.getSourceName())); + final AccelerationSettings settings = dependencyResolutionContext.getReflectionSettings(createCatalogEntityKey(ImmutableList.of(entry.getSourceName()), entry.getId())); return Boolean.TRUE.equals(settings.getNeverExpire()) ? (TimeUnit.DAYS.toMillis(365)*1000) : settings.getGracePeriod(); } })) @@ -308,6 +312,31 @@ public Long apply(TableFunctionDependency entry) { return Optional.of(Ordering.natural().min(gracePeriods)); } + /** + * Creates a CatalogEntityKey from a reflection dependency that can be used to retrieve the entity's reflection settings. + * + * For a dataset dependency, the dataset may be versioned in which case we extract the version context from the datasetId. + * For an external table dependency, only the root/source name will be passed in and datasetId can be ignored. + * + * @param path + * @param datasetId + * @return catalogEntityKey + */ + private CatalogEntityKey createCatalogEntityKey(List path, String datasetId) { + final CatalogEntityKey.Builder builder = CatalogEntityKey.newBuilder().keyComponents(path); + if (isVersionedDatasetId(datasetId)) { + VersionedDatasetId versionedDatasetId = null; + try { + versionedDatasetId = VersionedDatasetId.fromString(datasetId); + } catch (JsonProcessingException e) { + throw new IllegalStateException(String.format("Could not parse VersionedDatasetId from string : %s", datasetId), e); + } + assert (path.equals(versionedDatasetId.getTableKey())); + builder.tableVersionContext(versionedDatasetId.getVersionContext()); + } + return builder.build(); + } + /** * The new materialization is only as fresh as the most stale input materialization. This method finds which of the * input materializations has the earliest expiration. 
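
The createCatalogEntityKey helper above is the bridge between dependency entries and reflection settings: the key is always built from the dependency path, and when the dataset id is versioned the parsed version context is attached so settings resolve against the right branch, tag, or commit. An illustrative sketch, with EntityKey and a path@VERSION id encoding standing in for CatalogEntityKey and VersionedDatasetId:

import java.util.List;
import java.util.Optional;

public class EntityKeySketch {
  record EntityKey(List<String> path, Optional<String> versionContext) {}

  static boolean isVersionedId(String datasetId) {
    return datasetId.contains("@"); // stand-in for VersionedDatasetId.isVersionedDatasetId
  }

  static EntityKey createKey(List<String> path, String datasetId) {
    if (!isVersionedId(datasetId)) {
      // External table dependencies pass only the source name; the id is ignored.
      return new EntityKey(path, Optional.empty());
    }
    // For versioned sources the id carries the version context; reflection
    // settings must be looked up against that exact branch/tag/commit.
    String context = datasetId.substring(datasetId.indexOf('@') + 1);
    return new EntityKey(path, Optional.of(context));
  }

  public static void main(String[] args) {
    System.out.println(createKey(List.of("nessie", "orders"), "nessie.orders@BRANCH:dev"));
    System.out.println(createKey(List.of("hive", "orders"), "plain-uuid"));
  }
}
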
The new materialization's expiration must be equal to or sooner diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContext.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContext.java index d15cc562bf..d622fd4d7a 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContext.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContext.java @@ -17,7 +17,7 @@ import java.util.Optional; -import com.dremio.service.namespace.NamespaceKey; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.reflection.proto.ReflectionId; import com.dremio.service.reflection.proto.RefreshRequest; @@ -30,7 +30,7 @@ */ public interface DependencyResolutionContext extends AutoCloseable { Optional getLastSuccessfulRefresh(ReflectionId id); - AccelerationSettings getReflectionSettings(NamespaceKey key); + AccelerationSettings getReflectionSettings(CatalogEntityKey key); RefreshRequest getRefreshRequest(String datasetId); /** diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContextFactory.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContextFactory.java index 6cbaa0c4f8..33c0b5baa3 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContextFactory.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyResolutionContextFactory.java @@ -19,8 +19,8 @@ import java.util.Map; import java.util.Optional; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.options.OptionManager; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.reflection.proto.ReflectionEntry; import com.dremio.service.reflection.proto.ReflectionId; @@ -44,7 +44,7 @@ public class DependencyResolutionContextFactory { private final ReflectionEntriesStore entriesStore; // Reflection settings by dataset. These rarely change and so we can mostly reuse them between syncs. 
- private Map settingsMap; + private Map settingsMap; private int lastSettingsHash; DependencyResolutionContextFactory(ReflectionSettings reflectionSettings, RefreshRequestsStore requestsStore, OptionManager optionManager, ReflectionEntriesStore entriesStore) { @@ -87,7 +87,7 @@ public Optional getLastSuccessfulRefresh(ReflectionId id) } } @Override - public AccelerationSettings getReflectionSettings(NamespaceKey key) { + public AccelerationSettings getReflectionSettings(CatalogEntityKey key) { return reflectionSettings.getReflectionSettings(key); } @Override @@ -135,7 +135,7 @@ public Optional getLastSuccessfulRefresh(ReflectionId id) { return entriesMap.get(id); } @Override - public AccelerationSettings getReflectionSettings(NamespaceKey key) { + public AccelerationSettings getReflectionSettings(CatalogEntityKey key) { settingsCacheRequests++; return settingsMap.computeIfAbsent(key, k -> { settingsCacheMisses++; diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyUtils.java b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyUtils.java index 3542646442..2edf5cb50d 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyUtils.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/DependencyUtils.java @@ -22,13 +22,18 @@ import java.util.UUID; import com.dremio.common.utils.PathUtils; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.store.CatalogService; import com.dremio.service.job.proto.Acceleration; import com.dremio.service.job.proto.JobInfo; import com.dremio.service.job.proto.ScanPath; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; -import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.NamespaceNotFoundException; import com.dremio.service.reflection.DependencyEntry.DatasetDependency; import com.dremio.service.reflection.DependencyEntry.ReflectionDependency; import com.dremio.service.reflection.DependencyEntry.TableFunctionDependency; @@ -123,10 +128,10 @@ public TableFunctionDependency apply(DependencyEntry entry) { * * @throws NamespaceException if can't access a dataset dependency in the Namespace */ - public static ExtractedDependencies extractDependencies(final NamespaceService namespaceService, final JobInfo jobInfo, - final RefreshDecision decision) throws NamespaceException { + public static ExtractedDependencies extractDependencies(final JobInfo jobInfo, + final RefreshDecision decision, CatalogService catalogService) throws NamespaceException { final Set plandDependencies = Sets.newHashSet(); - + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); // add all substitutions if (jobInfo.getAcceleration() != null) { final List substitutions = jobInfo.getAcceleration().getSubstitutionsList(); @@ -143,9 +148,16 @@ public static ExtractedDependencies extractDependencies(final NamespaceService n for (ScanPath scanPath : jobScanPaths) { // make sure to exclude scans from materializations if (!scanPath.getPathList().get(0).equals(ACCELERATOR_STORAGEPLUGIN_NAME)) { - final List path = scanPath.getPathList(); - final DatasetConfig config = namespaceService.getDataset(new 
NamespaceKey(path)); - plandDependencies.add(DependencyEntry.of(config.getId().getId(), path)); + TableVersionContext versionContext = null; + if (scanPath.getVersionContext() != null) { + versionContext = TableVersionContext.deserialize(scanPath.getVersionContext()); + } + DremioTable table = CatalogUtil.getTable(CatalogEntityKey.newBuilder(). + keyComponents(scanPath.getPathList()).tableVersionContext(versionContext).build(), catalog); + if (table == null) { + throw new NamespaceNotFoundException(new NamespaceKey(scanPath.getPathList()), "Dataset not found in catalog " + scanPath.getVersionContext()); + } + plandDependencies.add(DependencyEntry.of(table.getDatasetConfig().getId().getId(), scanPath.getPathList())); } } } @@ -162,9 +174,16 @@ public static ExtractedDependencies extractDependencies(final NamespaceService n final List scanPaths = decision.getScanPathsList(); if (scanPaths != null) { for (ScanPath scanPath : scanPaths) { - final List path = scanPath.getPathList(); - final DatasetConfig config = namespaceService.getDataset(new NamespaceKey(path)); - decisionDependencies.add(DependencyEntry.of(config.getId().getId(), path)); + TableVersionContext versionContext = null; + if (scanPath.getVersionContext() != null) { + versionContext = TableVersionContext.deserialize(scanPath.getVersionContext()); + } + DremioTable table = CatalogUtil.getTable(CatalogEntityKey.newBuilder(). + keyComponents(scanPath.getPathList()).tableVersionContext(versionContext).build(), catalog); + if (table == null) { + throw new NamespaceNotFoundException(new NamespaceKey(scanPath.getPathList()), "Dataset not found in catalog " + scanPath.getVersionContext()); + } + decisionDependencies.add(DependencyEntry.of(table.getDatasetConfig().getId().getId(), scanPath.getPathList())); } } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/IncrementalUpdateServiceUtils.java b/services/accelerator/src/main/java/com/dremio/service/reflection/IncrementalUpdateServiceUtils.java index ed6275fb47..22fd2ed24c 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/IncrementalUpdateServiceUtils.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/IncrementalUpdateServiceUtils.java @@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Optional; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.TableScan; @@ -29,6 +30,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.planner.RoutingShuttle; import com.dremio.exec.planner.StatelessRelShuttleImpl; import com.dremio.exec.planner.acceleration.ExpansionNode; @@ -37,6 +40,8 @@ import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.namespace.dataset.proto.RefreshMethod; +import com.dremio.service.reflection.proto.ReflectionEntry; +import com.dremio.service.reflection.proto.ReflectionId; import com.google.common.base.Preconditions; /** @@ -48,9 +53,9 @@ public class IncrementalUpdateServiceUtils { /** * compute acceleration settings from the plan */ - public static AccelerationSettings extractRefreshSettings(final RelNode normalizedPlan, ReflectionSettings reflectionSettings) { - final boolean incremental = getIncremental(normalizedPlan, reflectionSettings); - final String refreshField = !incremental ? 
null : findRefreshField(normalizedPlan, reflectionSettings); + public static AccelerationSettings extractRefreshSettings(final RelNode normalizedPlan, ReflectionSettings reflectionSettings, ReflectionService service) { + final boolean incremental = getIncremental(normalizedPlan, reflectionSettings, service); + final String refreshField = !incremental ? null : findRefreshField(normalizedPlan, reflectionSettings, service); final RefreshMethod refreshMethod = incremental ? RefreshMethod.INCREMENTAL : RefreshMethod.FULL; return new AccelerationSettings() @@ -58,14 +63,29 @@ public static AccelerationSettings extractRefreshSettings(final RelNode normaliz .setRefreshField(refreshField); } - private static String findRefreshField(RelNode plan, final ReflectionSettings reflectionSettings) { + private static String findRefreshField(RelNode plan, final ReflectionSettings reflectionSettings, ReflectionService service) { final Pointer refreshField = new Pointer<>(); plan.accept(new StatelessRelShuttleImpl() { @Override public RelNode visit(TableScan tableScan) { List tablePath = tableScan.getTable().getQualifiedName(); - final AccelerationSettings settings = reflectionSettings.getReflectionSettings(new NamespaceKey(tablePath)); - refreshField.value = settings.getRefreshField(); + NamespaceKey tableKey = new NamespaceKey(tablePath); + // If the scan is over a reflection inherit its refresh field. + // Search the ReflectionService using the ReflectionId. + if (tableKey.getRoot().equals(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME)) { + Optional entry = service.getEntry(new ReflectionId(tablePath.get(1))); + refreshField.value = entry.get().getRefreshField(); + } else { + DremioTable table = tableScan.getTable().unwrap(DremioTable.class); + final CatalogEntityKey.Builder builder = + CatalogEntityKey.newBuilder().keyComponents(table.getPath().getPathComponents()); + if (table.getDataset().getVersionContext() != null) { + builder.tableVersionContext(table.getDataset().getVersionContext()); + } + final CatalogEntityKey catalogEntityKey = builder.build(); + final AccelerationSettings settings = reflectionSettings.getReflectionSettings(catalogEntityKey); + refreshField.value = settings.getRefreshField(); + } return tableScan; } }); @@ -75,8 +95,8 @@ public RelNode visit(TableScan tableScan) { /** * Check if a plan can support incremental update */ - private static boolean getIncremental(RelNode plan, final ReflectionSettings reflectionSettings) { - IncrementalChecker checker = new IncrementalChecker(reflectionSettings); + private static boolean getIncremental(RelNode plan, final ReflectionSettings reflectionSettings, ReflectionService service) { + IncrementalChecker checker = new IncrementalChecker(reflectionSettings, service); plan.accept(checker); return checker.isIncremental(); } @@ -89,14 +109,16 @@ private static boolean getIncremental(RelNode plan, final ReflectionSettings ref */ private static class IncrementalChecker extends RoutingShuttle { private final ReflectionSettings reflectionSettings; + private final ReflectionService service; private RelNode unsupportedOperator = null; private List unsupportedAggregates = new ArrayList<>(); private boolean isIncremental = false; private int aggCount = 0; - IncrementalChecker(ReflectionSettings reflectionSettings) { + IncrementalChecker(ReflectionSettings reflectionSettings, ReflectionService service) { this.reflectionSettings = Preconditions.checkNotNull(reflectionSettings, "reflection settings required"); + this.service = 
Preconditions.checkNotNull(service,"reflection service required"); } public boolean isIncremental() { @@ -136,11 +158,27 @@ public RelNode visit(RelNode other) { @Override public RelNode visit(TableScan tableScan) { List tablePath = tableScan.getTable().getQualifiedName(); - final AccelerationSettings settings = reflectionSettings.getReflectionSettings(new NamespaceKey(tablePath)); - isIncremental = settings.getMethod() == RefreshMethod.INCREMENTAL; + NamespaceKey tableKey = new NamespaceKey(tablePath); + // If the scan is over a reflection inherit its refresh method. + // Search the ReflectionService using the ReflectionId. + if (tableKey.getRoot().equals(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME)) { + Optional entry = service.getEntry(new ReflectionId(tablePath.get(1))); + isIncremental = entry.get().getRefreshMethod() == RefreshMethod.INCREMENTAL; + } else { + DremioTable table = tableScan.getTable().unwrap(DremioTable.class); + final CatalogEntityKey.Builder builder = + CatalogEntityKey.newBuilder().keyComponents(table.getPath().getPathComponents()); + if (table.getDataset().getVersionContext() != null) { + builder.tableVersionContext(table.getDataset().getVersionContext()); + } + final CatalogEntityKey catalogEntityKey = builder.build(); + final AccelerationSettings settings = reflectionSettings.getReflectionSettings(catalogEntityKey); + isIncremental = settings.getMethod() == RefreshMethod.INCREMENTAL; + } return tableScan; } + @Override public RelNode visit(LogicalAggregate aggregate) { aggCount++; aggregate.getAggCallList().forEach(a -> { diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/MaterializationCache.java b/services/accelerator/src/main/java/com/dremio/service/reflection/MaterializationCache.java index 9d2832055f..cdbb72960a 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/MaterializationCache.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/MaterializationCache.java @@ -25,15 +25,15 @@ import org.apache.calcite.rel.core.TableScan; import com.dremio.exec.calcite.logical.ScanCrel; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.planner.acceleration.CachedMaterializationDescriptor; import com.dremio.exec.planner.acceleration.DremioMaterialization; import com.dremio.exec.planner.acceleration.MaterializationDescriptor; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.store.CatalogService; import com.dremio.service.Pointer; -import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.reflection.proto.ExternalReflection; import com.dremio.service.reflection.proto.Materialization; @@ -86,13 +86,11 @@ public interface CacheViewer { } private final CacheHelper provider; - private final NamespaceService namespaceService; private final ReflectionStatusService reflectionStatusService; private final CatalogService catalogService; - MaterializationCache(CacheHelper provider, NamespaceService namespaceService, ReflectionStatusService reflectionStatusService, CatalogService catalogService) { + MaterializationCache(CacheHelper provider, ReflectionStatusService reflectionStatusService, CatalogService catalogService) { this.provider = Preconditions.checkNotNull(provider, "materialization provider required"); - 
this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespace service required"); this.reflectionStatusService = Preconditions.checkNotNull(reflectionStatusService, "reflection status service required"); this.catalogService = Preconditions.checkNotNull(catalogService, "catalog service required"); } @@ -137,6 +135,8 @@ private Map updateCache(Map provided = provider.getValidMaterializations(); // this will hold the updated cache final Map updated = Maps.newHashMap(); + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + // cache is enabled so we want to reuse as much of the existing cache as possible. Make sure to: // remove all cached descriptors that no longer exist @@ -146,7 +146,7 @@ private Map updateCache(Map updateCache(Map updateCache(Map updated = new Pointer<>(false); materialization.getTableRel().accept(new RelShuttleImpl() { @@ -176,14 +176,14 @@ private boolean isExternalReflectionMetadataUpdated(CachedMaterializationDescrip public RelNode visit(TableScan tableScan) { if (tableScan instanceof ScanCrel) { String version = ((ScanCrel) tableScan).getTableMetadata().getVersion(); - try { - DatasetConfig dataset = namespaceService.getDataset(new NamespaceKey(tableScan.getTable().getQualifiedName())); - if (!dataset.getTag().equals(version)) { + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, new NamespaceKey(tableScan.getTable().getQualifiedName())); + if (datasetConfig == null) { + updated.value = true; + } else { + if (!datasetConfig.getTag().equals(version)) { logger.debug("Dataset {} has new data. Invalidating cache for external reflection", tableScan.getTable().getQualifiedName()); updated.value = true; } - } catch (NamespaceException e) { - updated.value = true; } } else { updated.value = true; @@ -230,20 +230,19 @@ private void updateEntry(Map cache, Mat } } - private boolean schemaChanged(MaterializationDescriptor old, Materialization materialization) { - if (namespaceService == null) { - return false; - } - try { - //TODO is this enough ? shouldn't we use the dataset hash instead ?? - final NamespaceKey matKey = new NamespaceKey(ReflectionUtils.getMaterializationPath(materialization)); - ByteString schemaString = namespaceService.getDataset(matKey).getRecordSchema(); - BatchSchema newSchema = BatchSchema.deserialize(schemaString); - BatchSchema oldSchema = ((CachedMaterializationDescriptor) old).getMaterialization().getSchema(); - return !oldSchema.equals(newSchema); - } catch (NamespaceException e) { + private boolean schemaChanged(MaterializationDescriptor old, Materialization materialization, EntityExplorer catalog) { + //TODO is this enough ? shouldn't we use the dataset hash instead ?? 
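
The schemaChanged rewrite above swaps the NamespaceException control flow for a null check: a missing catalog entry now means "treat as changed", invalidating the cached descriptor. A sketch of that decision under simplified assumptions, where a byte[] schema and a lookup function stand in for BatchSchema and EntityExplorer:

import java.util.Arrays;
import java.util.Map;
import java.util.function.Function;

public class SchemaChangeSketch {
  static boolean schemaChanged(byte[] cachedSchema, String materializationPath,
                               Function<String, byte[]> catalogLookup) {
    byte[] stored = catalogLookup.apply(materializationPath);
    if (stored == null) {
      return true; // dataset vanished from the catalog: invalidate the entry
    }
    // Otherwise compare the stored schema against the cached descriptor's schema.
    return !Arrays.equals(cachedSchema, stored);
  }

  public static void main(String[] args) {
    Map<String, byte[]> catalog = Map.of("accel.m1", new byte[] {1, 2, 3});
    byte[] cached = {1, 2, 3};
    System.out.println(schemaChanged(cached, "accel.m1", catalog::get)); // false
    System.out.println(schemaChanged(cached, "accel.m2", catalog::get)); // true
  }
}
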
+ final NamespaceKey matKey = new NamespaceKey(ReflectionUtils.getMaterializationPath(materialization)); + + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, matKey); + if (datasetConfig == null ) { return true; } + + ByteString schemaString = datasetConfig.getRecordSchema(); + BatchSchema newSchema = BatchSchema.deserialize(schemaString); + BatchSchema oldSchema = ((CachedMaterializationDescriptor) old).getMaterialization().getSchema(); + return !oldSchema.equals(newSchema); } /** diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionAdministrationService.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionAdministrationService.java index 0c57988398..750e187c35 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionAdministrationService.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionAdministrationService.java @@ -18,8 +18,8 @@ import java.util.List; import java.util.Optional; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.exec.ops.ReflectionContext; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.reflection.proto.ExternalReflection; import com.dremio.service.reflection.proto.Materialization; import com.dremio.service.reflection.proto.ReflectionGoal; @@ -31,7 +31,7 @@ public interface ReflectionAdministrationService { Iterable getAllReflections(); - Iterable getReflectionsByDatasetPath(NamespaceKey path); + Iterable getReflectionsByDatasetPath(CatalogEntityKey path); Iterable getReflectionsByDatasetId(String datasetid); diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionDetailsPopulatorImpl.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionDetailsPopulatorImpl.java index 4194c39cce..3aee0af375 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionDetailsPopulatorImpl.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionDetailsPopulatorImpl.java @@ -15,7 +15,7 @@ */ package com.dremio.service.reflection; -import static com.dremio.service.reflection.ReflectionUtils.isPhysicalDataset; +import static com.dremio.service.reflection.DatasetHashUtils.isPhysicalDataset; import java.util.ArrayList; import java.util.Collections; @@ -27,9 +27,12 @@ import org.apache.calcite.rel.RelNode; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.planner.acceleration.DremioMaterialization; import com.dremio.exec.planner.acceleration.substitution.SubstitutionInfo; import com.dremio.exec.proto.UserBitShared.QueryProfile; +import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.sys.accel.AccelerationDetailsPopulator; import com.dremio.reflection.hints.ReflectionExplanationsAndQueryDistance; import com.dremio.sabot.kernel.proto.ReflectionExplanation; @@ -49,7 +52,6 @@ import com.dremio.service.accelerator.proto.ReflectionRelationship; import com.dremio.service.accelerator.proto.SubstitutionState; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.reflection.proto.DimensionGranularity; @@ -76,8 +78,8 @@ class ReflectionDetailsPopulatorImpl implements AccelerationDetailsPopulator { 
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReflectionDetailsPopulatorImpl.class); - private final NamespaceService namespace; private final ReflectionService reflections; + private final CatalogService catalogService; private final AccelerationDetails details = new AccelerationDetails(); private final Map consideredReflections = new HashMap<>(); private List substitutionErrors = Collections.emptyList(); @@ -85,9 +87,9 @@ class ReflectionDetailsPopulatorImpl implements AccelerationDetailsPopulator { private final List matchedReflectionIds = new ArrayList<>(); private final List chosenReflectionIds = new ArrayList<>(); - ReflectionDetailsPopulatorImpl(NamespaceService namespace, ReflectionService reflections) { + ReflectionDetailsPopulatorImpl(ReflectionService reflections, CatalogService catalogService) { this.reflections = reflections; - this.namespace = namespace; + this.catalogService = catalogService; } @Override @@ -156,6 +158,7 @@ private AccelerationSettings getAccelerationSettings(DatasetConfig config) { @Override public byte[] computeAcceleration() { + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); try { if (!consideredReflections.isEmpty()) { List relationships = Lists.newArrayList(); @@ -178,7 +181,7 @@ public byte[] computeAcceleration() { refreshChainStartTime = 0; } - DatasetConfig datasetConfig = namespace.findDatasetByUUID(reflection.getDatasetId()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, reflection.getDatasetId()); if(datasetConfig == null) { continue; } @@ -213,7 +216,7 @@ public byte[] computeAcceleration() { final ExternalReflection externalReflection = externalReflectionOptional.get(); - DatasetConfig datasetConfig = namespace.findDatasetByUUID(externalReflection.getQueryDatasetId()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, externalReflection.getQueryDatasetId()); if(datasetConfig == null) { continue; } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionManager.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionManager.java index 18fabe4015..495ed69929 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionManager.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionManager.java @@ -57,13 +57,13 @@ import com.dremio.common.util.DremioEdition; import com.dremio.common.utils.PathUtils; import com.dremio.datastore.WarningTimer; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.planner.physical.PlannerSettings; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.FileSelection; -import com.dremio.exec.store.dfs.FileSystemPlugin; import com.dremio.exec.store.iceberg.model.IcebergModel; -import com.dremio.io.file.Path; import com.dremio.options.OptionManager; import com.dremio.proto.model.UpdateId; import com.dremio.service.job.CancelJobRequest; @@ -82,7 +82,6 @@ import com.dremio.service.jobs.JobStatusListener; import com.dremio.service.jobs.JobsProtoUtil; import com.dremio.service.jobs.JobsService; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.reflection.ReflectionServiceImpl.DescriptorCache; import com.dremio.service.reflection.ReflectionServiceImpl.ExpansionHelper; @@ -118,6 
+117,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Manages reflections, excluding external reflections, by observing changes to the reflection goals, datasets, materialization * jobs and executing the appropriate handling logic sequentially. @@ -142,7 +144,6 @@ public interface WakeUpCallback { static final long WAKEUP_OVERLAP_MS = 60_000; private final SabotContext sabotContext; private final JobsService jobsService; - private final NamespaceService namespaceService; private final OptionManager optionManager; private final ReflectionGoalsStore userStore; private final ReflectionEntriesStore reflectionStore; @@ -157,26 +158,22 @@ public interface WakeUpCallback { private final BufferAllocator allocator; private final ReflectionGoalChecker reflectionGoalChecker; private final RefreshStartHandler refreshStartHandler; - private final AccelerationStoragePlugin accelerationPlugin; - private volatile Path accelerationBasePath; private final CatalogService catalogService; private volatile EntryCounts lastStats = new EntryCounts(); private long lastWakeupTime; private long lastOrphanCheckTime; - private IcebergModel icebergModel; private DependencyResolutionContextFactory dependencyResolutionContextFactory; - ReflectionManager(SabotContext sabotContext, JobsService jobsService, NamespaceService namespaceService, + ReflectionManager(SabotContext sabotContext, JobsService jobsService, CatalogService catalogService, OptionManager optionManager, ReflectionGoalsStore userStore, ReflectionEntriesStore reflectionStore, ExternalReflectionStore externalReflectionStore, MaterializationStore materializationStore, DependencyManager dependencyManager, DescriptorCache descriptorCache, Set reflectionsToUpdate, WakeUpCallback wakeUpCallback, - Supplier expansionHelper, Supplier planCacheInvalidationHelper, BufferAllocator allocator, FileSystemPlugin accelerationPlugin, - Path accelerationBasePath, ReflectionGoalChecker reflectionGoalChecker, RefreshStartHandler refreshStartHandler, - CatalogService catalogService, DependencyResolutionContextFactory dependencyResolutionContextFactory) { + Supplier expansionHelper, Supplier planCacheInvalidationHelper, + BufferAllocator allocator, ReflectionGoalChecker reflectionGoalChecker, RefreshStartHandler refreshStartHandler, + DependencyResolutionContextFactory dependencyResolutionContextFactory) { this.sabotContext = Preconditions.checkNotNull(sabotContext, "sabotContext required"); this.jobsService = Preconditions.checkNotNull(jobsService, "jobsService required"); - this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespaceService required"); this.optionManager = Preconditions.checkNotNull(optionManager, "optionManager required"); this.userStore = Preconditions.checkNotNull(userStore, "reflection user store required"); this.reflectionStore = Preconditions.checkNotNull(reflectionStore, "reflection store required"); @@ -190,8 +187,6 @@ public interface WakeUpCallback { this.planCacheInvalidationHelper = Preconditions.checkNotNull(planCacheInvalidationHelper, "planCacheInvalidatorHelper required"); this.allocator = Preconditions.checkNotNull(allocator, "allocator required"); this.catalogService = Preconditions.checkNotNull(catalogService, "catalogService required"); - this.accelerationPlugin = (AccelerationStoragePlugin) Preconditions.checkNotNull(accelerationPlugin); - this.accelerationBasePath 
= Preconditions.checkNotNull(accelerationBasePath); this.reflectionGoalChecker = Preconditions.checkNotNull(reflectionGoalChecker); this.refreshStartHandler = Preconditions.checkNotNull(refreshStartHandler); this.dependencyResolutionContextFactory = Preconditions.checkNotNull(dependencyResolutionContextFactory); @@ -218,10 +213,11 @@ public void run() { } } + @WithSpan @VisibleForTesting void sync() { - long lastWakeupTime = System.currentTimeMillis(); - final long previousLastWakeupTime = lastWakeupTime - WAKEUP_OVERLAP_MS; + long currentTime = System.currentTimeMillis(); + final long lookbackTime = currentTime - WAKEUP_OVERLAP_MS; // updating the store's lastWakeupTime here. This ensures that if we're failing we don't do a denial of service attack // this assumes we properly handle exceptions for each goal/entry independently and we don't exit the loop before we // go through all entities otherwise we may "skip" handling some entities in case of failures @@ -230,17 +226,23 @@ void sync() { final long orphanThreshold = System.currentTimeMillis() - optionManager.getOption(MATERIALIZATION_ORPHAN_REFRESH) * 1000; final long deletionThreshold = System.currentTimeMillis() - deletionGracePeriod; final int numEntriesToDelete = (int) optionManager.getOption(REFLECTION_DELETION_NUM_ENTRIES); + Span.current().setAttribute("dremio.reflectionmanager.deletion_grace_period", deletionGracePeriod); + Span.current().setAttribute("dremio.reflectionmanager.num_entries_to_delete", numEntriesToDelete); + Span.current().setAttribute("dremio.reflectionmanager.current_time", currentTime); + Span.current().setAttribute("dremio.reflectionmanager.last_wakeup_time", lastWakeupTime); + handleReflectionsToUpdate(); handleDeletedDatasets(); - handleGoals(previousLastWakeupTime); + handleGoals(lookbackTime); try (DependencyResolutionContext context = dependencyResolutionContextFactory.create()) { + Span.current().setAttribute("dremio.reflectionmanager.has_acceleration_settings_changed", context.hasAccelerationSettingsChanged()); handleEntries(context); } deleteDeprecatedMaterializations(deletionThreshold, numEntriesToDelete); deprecateMaterializations(); deleteDeprecatedGoals(deletionThreshold); deleteMaterializationOrphans(orphanThreshold, deletionThreshold); - this.lastWakeupTime = lastWakeupTime; + this.lastWakeupTime = currentTime; } /** @@ -269,7 +271,7 @@ private void deleteOrphanMaterialization(Materialization materialization) { } } - + @WithSpan private void deleteMaterializationOrphans(long orphanThreshold, long depreciateDeletionThreshold) { if (orphanThreshold <= this.lastOrphanCheckTime) { @@ -298,6 +300,7 @@ private void deleteMaterializationOrphans(long orphanThreshold, long depreciateD * handle all reflections marked by the reflection service as need to update.
   * those are reflections with plans that couldn't be expanded and thus need to be set in UPDATE state
   */
+  @WithSpan
  private void handleReflectionsToUpdate() {
    final Iterator iterator = reflectionsToUpdate.iterator();
    while (iterator.hasNext()) {
@@ -321,6 +324,7 @@ private void handleReflectionsToUpdate() {
   *
   * @param deletionThreshold threshold after which deprecated reflection goals are deleted
   */
+  @WithSpan
  private void deleteDeprecatedGoals(long deletionThreshold) {
    Iterable goalsDueForDeletion = userStore.getDeletedBefore(deletionThreshold);
    for (ReflectionGoal goal : goalsDueForDeletion) {
@@ -329,6 +333,7 @@ private void deleteDeprecatedGoals(long deletionThreshold) {
    }
  }

+  @WithSpan
  private void deprecateMaterializations() {
    final long now = System.currentTimeMillis();
    Iterable materializations = materializationStore.getAllExpiredWhen(now);
@@ -347,6 +352,7 @@ private void deprecateMaterializations() {
   * @param deletionThreshold threshold time after which deprecated materialization are deleted
   * @param numEntries number of entries that should be deleted now
   */
+  @WithSpan
  private void deleteDeprecatedMaterializations(long deletionThreshold, int numEntries) {
    Iterable materializations = materializationStore.getDeletableEntriesModifiedBefore(deletionThreshold, numEntries);
    for (Materialization materialization : materializations) {
@@ -362,6 +368,7 @@ private void deleteDeprecatedMaterializations(long deletionThreshold, int numEnt
  /**
   * 2nd pass: go through the reflection store
   */
+  @WithSpan
  private void handleEntries(DependencyResolutionContext dependencyResolutionContext) {
    final long noDependencyRefreshPeriodMs = optionManager.getOption(ReflectionOptions.NO_DEPENDENCY_REFRESH_PERIOD_SECONDS) * 1000;
@@ -377,19 +384,42 @@ private void handleEntries(DependencyResolutionContext dependencyResolutionConte
      }
    }
    this.lastStats = ec;
+    Span.current().setAttribute("dremio.reflectionmanager.entries_active", ec.active);
+    Span.current().setAttribute("dremio.reflectionmanager.entries_failed", ec.failed);
+    Span.current().setAttribute("dremio.reflectionmanager.entries_unknown", ec.unknown);
+    Span.current().setAttribute("dremio.reflectionmanager.entries_refreshing", ec.refreshing);
  }

+  @WithSpan
  private void handleDeletedDatasets() {
    Iterable goals = userStore.getAllNotDeleted();
+    EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService);
+    int total = 0;
+    int errors = 0;
    for (ReflectionGoal goal : goals) {
-      handleDatasetDeletion(goal.getDatasetId(), goal);
+      try {
+        handleDatasetDeletion(goal.getDatasetId(), goal, catalog);
+      } catch (Exception exception) {
+        // Usually the source is down, but we need to catch all exceptions
+        logger.debug("Unable to handle dataset deletion for {}", getId(goal), exception);
+        errors++;
+      }
+      total++;
    }
    Iterable externalReflections = externalReflectionStore.getExternalReflections();
    for (ExternalReflection externalReflection : externalReflections) {
-      handleDatasetDeletionForExternalReflection(externalReflection);
+      try {
+        handleDatasetDeletionForExternalReflection(externalReflection, catalog);
+      } catch (Exception exception) {
+        // Usually the source is down, but we need to catch all exceptions
+        logger.debug("Unable to handle dataset deletion for {}", getId(externalReflection), exception);
+        errors++;
+      }
+      total++;
    }
-
+    Span.current().setAttribute("dremio.reflectionmanager.handle_deleted_datasets.total", total);
+    Span.current().setAttribute("dremio.reflectionmanager.handle_deleted_datasets.errors", errors);
  }

  /**
@@ -438,6 +468,7 @@
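
The @WithSpan annotations and Span.current() attributes introduced above only produce telemetry when OpenTelemetry annotation instrumentation is active (for example via an attached Java agent); otherwise @WithSpan is a no-op and Span.current() returns a non-recording span that silently drops attributes. A minimal, self-contained sketch of the same pattern; the class, method, and attribute names here are illustrative, not from this codebase:

import io.opentelemetry.api.trace.Span;
import io.opentelemetry.instrumentation.annotations.WithSpan;

class TracedSyncExample {
  @WithSpan // produces a span named "TracedSyncExample.sync" when annotation instrumentation is attached
  void sync(long entriesProcessed) {
    // Safe even with tracing disabled: the attribute is dropped on a non-recording span.
    Span.current().setAttribute("example.entries_processed", entriesProcessed);
  }
}
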
void handleEntry(ReflectionEntry entry, final long noDependencyRefreshPeriodMs, // only refresh ACTIVE reflections when they are due for refresh break; } + // fall through to refresh ACTIVE reflections that are due for refresh case REFRESH: counts.refreshing++; logger.info("Refresh due for {}", getId(entry)); @@ -499,7 +530,7 @@ private void handleRefreshingEntry(final ReflectionEntry entry, DependencyResolu } JobAttempt lastAttempt = JobsProtoUtil.getLastAttempt(job); final RefreshDoneHandler handler = new RefreshDoneHandler(entry, m, job, jobsService, - namespaceService, materializationStore, dependencyManager, expansionHelper, accelerationBasePath, allocator, + materializationStore, dependencyManager, expansionHelper, getAccelerationPlugin().getConfig().getPath(), allocator, catalogService, dependencyResolutionContext); switch (lastAttempt.getState()) { case COMPLETED: @@ -590,7 +621,7 @@ private void updateDependenciesIfPossible(final ReflectionEntry entry, final Job try { final RefreshDecision decision = refreshDoneHandler.getRefreshDecision(jobAttempt); - refreshDoneHandler.updateDependencies(entry, jobAttempt.getInfo(), decision, namespaceService, + refreshDoneHandler.updateDependencies(entry, jobAttempt.getInfo(), decision, dependencyManager); } catch (Exception | AssertionError e) { logger.warn("Couldn't retrieve any dependency for {}", getId(entry), e); @@ -607,6 +638,7 @@ private void updateDependenciesIfPossible(final ReflectionEntry entry, final Job * * @param lastWakeupTime previous wakeup time */ + @WithSpan private void handleGoals(long lastWakeupTime) { Iterable goals = userStore.getModifiedOrCreatedSince(lastWakeupTime); for (ReflectionGoal goal : goals) { @@ -618,11 +650,24 @@ private void handleGoals(long lastWakeupTime) { } } - private void handleDatasetDeletion(String datasetId, ReflectionGoal goal) { + /** + * Checks if dataset has been deleted from reflection manager's point of view. + * For example, a DROP TABLE will delete a table. But a DROP BRANCH and DROP TAG + * could also result in a deleted table. ASSIGN BRANCH and ASSIGN TAG could also + * result in a deleted table if the table is no longer present in the updated ref's commit log. + * + * Catalog returns null in the above scenarios. Catalog could also throw a UserException when + * the source is down in which case we don't delete the dataset's reflections. 
+   * @param datasetId id of the dataset the reflection goal is defined on
+   * @param goal reflection goal to check
+   * @param catalog catalog used to look the dataset up
+   */
+  private void handleDatasetDeletion(String datasetId, ReflectionGoal goal, EntityExplorer catalog) {
    // make sure the corresponding dataset was not deleted
-    if (namespaceService.findDatasetByUUID(datasetId) == null) {
+    if (catalog.getTable(datasetId) == null) {
      // dataset not found, mark goal as deleted
-      logger.debug("dataset deleted for {}", getId(goal));
+      logger.debug("dataset with id {} deleted for {}", datasetId, getId(goal));
      final ReflectionGoal goal2 = userStore.get(goal.getId());
      if (goal2 != null) {
@@ -631,20 +676,20 @@ private void handleDatasetDeletion(String datasetId, ReflectionGoal goal) {
          return;
        } catch (ConcurrentModificationException cme) {
          // someone's changed the reflection goal, we'll delete it next time the manager wakes up
-          logger.debug("concurrent modification when updating goal state to deleted for {}", getId(goal2));
+          logger.debug("concurrent modification when updating goal state to deleted for {} on dataset with id {}",
+            getId(goal2),
+            datasetId);
        }
      }
      // something wrong here
-      throw new IllegalStateException("no reflection found for " + getId(goal));
+      throw new IllegalStateException("no reflection found for " + getId(goal) + " on dataset with id " + datasetId);
    }
  }
-
-
-  private void handleDatasetDeletionForExternalReflection(ExternalReflection externalReflection) {
-    if (namespaceService.findDatasetByUUID(externalReflection.getQueryDatasetId()) == null
-      || namespaceService.findDatasetByUUID(externalReflection.getTargetDatasetId()) == null) {
+  private void handleDatasetDeletionForExternalReflection(ExternalReflection externalReflection, EntityExplorer catalog) {
+    if (catalog.getTable(externalReflection.getQueryDatasetId()) == null
+      || catalog.getTable(externalReflection.getTargetDatasetId()) == null) {
      externalReflectionStore.deleteExternalReflection(externalReflection.getId());
    }
  }
@@ -817,20 +862,6 @@ private void cancelRefreshJobIfAny(ReflectionEntry entry) {
    // when the materialization entry is deleted
  }

-  void setAccelerationBasePath(Path path) {
-    if (path.equals(accelerationBasePath)) {
-      return;
-    }
-    Iterable entries = reflectionStore.find();
-    // if there are already reflections don't update the path if the input and current path is different.
-    if (Iterables.size(entries) > 0) {
-      logger.warn("Failed to set acceleration base path as there are reflections present.
Input path {} existing path {}", - path, accelerationBasePath); - return; - } - this.accelerationBasePath = path; - } - @VisibleForTesting void handleSuccessfulJob(ReflectionEntry entry, Materialization materialization, com.dremio.service.job.JobDetails job, RefreshDoneHandler handler) { @@ -965,7 +996,7 @@ private boolean compactIfNecessary(ReflectionEntry entry, Materialization materi // start compaction job final String sql = String.format("COMPACT MATERIALIZATION \"%s\".\"%s\" AS '%s'", entry.getId().getId(), materialization.getId().getId(), newMaterialization.getId().getId()); - final JobId compactionJobId = submitRefreshJob(jobsService, namespaceService, entry, materialization, sql, + final JobId compactionJobId = submitRefreshJob(jobsService, catalogService, entry, materialization, sql, new WakeUpManagerWhenJobDone(wakeUpCallback, "compaction job done")); newMaterialization @@ -992,7 +1023,8 @@ private void compactionJobSucceeded(ReflectionEntry entry, Materialization mater final JobDetails jobDetails = ReflectionUtils.computeJobDetails(lastAttempt); final List dataPartitions = computeDataPartitions(jobInfo); final MaterializationMetrics metrics = ReflectionUtils.computeMetrics(job, jobsService, allocator, JobsProtoUtil.toStuff(job.getJobId())); - final List refreshPath = ReflectionUtils.getRefreshPath(JobsProtoUtil.toStuff(job.getJobId()), accelerationBasePath, jobsService, allocator); + final List refreshPath = ReflectionUtils.getRefreshPath(JobsProtoUtil.toStuff(job.getJobId()), + getAccelerationPlugin().getConfig().getPath(), jobsService, allocator); final boolean isIcebergRefresh = materialization.getIsIcebergDataset() != null && materialization.getIsIcebergDataset(); final String icebergBasePath = ReflectionUtils.getIcebergReflectionBasePath(refreshPath, isIcebergRefresh); final Refresh refresh = ReflectionUtils.createRefresh(materialization.getReflectionId(), refreshPath, seriesId, @@ -1057,7 +1089,7 @@ private void refreshMetadata(ReflectionEntry entry, Materialization materializat final String sql = String.format("LOAD MATERIALIZATION METADATA \"%s\".\"%s\"", materialization.getReflectionId().getId(), materialization.getId().getId()); - final JobId jobId = submitRefreshJob(jobsService, namespaceService, entry, materialization, sql, + final JobId jobId = submitRefreshJob(jobsService, catalogService, entry, materialization, sql, new WakeUpManagerWhenJobDone(wakeUpCallback, "metadata refresh job done")); entry.setState(METADATA_REFRESH) @@ -1165,15 +1197,22 @@ private Table getIcebergTable(ReflectionId reflectionId, String basePath) { final String path = PathUtils.getPathJoiner().join(ImmutableList.of( reflectionId.getId(), basePath)); + final AccelerationStoragePlugin accelerationPlugin = getAccelerationPlugin(); final FileSelection fileSelection = accelerationPlugin.getIcebergFileSelection(path); - final IcebergModel icebergModel = getIcebergModel(); + if (fileSelection == null) { + throw new IllegalStateException(String.format("Acceleration path does not exist: %s", + accelerationPlugin.resolveTablePathToValidPath(path).toString())); + } + final IcebergModel icebergModel = accelerationPlugin.getIcebergModel(); return icebergModel.getIcebergTable(icebergModel.getTableIdentifier(fileSelection.getSelectionRoot())); } - private IcebergModel getIcebergModel() { - if (icebergModel == null) { - icebergModel = accelerationPlugin.getIcebergModel(); - } - return icebergModel; + /** + * Dist path may be updated after coordinator startup so it's important + * to not cache the 
accelerator storage plugin or its dist path. + * @return + */ + private AccelerationStoragePlugin getAccelerationPlugin() { + return catalogService.getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME); } } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionOptions.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionOptions.java index 4395efc0c2..e91f0b04f8 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionOptions.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionOptions.java @@ -68,6 +68,8 @@ public interface ReflectionOptions { BooleanValidator REFRESH_AFTER_DESERIALIZATION_FAILURE = new BooleanValidator("reflection.manager.auto_refresh_failed", false); // should reflection settings and refresh cache be enabled during reflection manager syncs BooleanValidator REFLECTION_MANAGER_SYNC_CACHE = new BooleanValidator("reflection.manager.sync.cache.enabled", true); - // Enable reflection tab in NESSIE and ARCTIC source dialogs - BooleanValidator REFLECTION_ARCTIC_ENABLED = new BooleanValidator("reflection.arctic.enabled", false); + // Allow default raw reflections to be used in REFRESH REFLECTION jobs + BooleanValidator ACCELERATION_ENABLE_DEFAULT_RAW_REFRESH = new BooleanValidator("accelerator.enable_default_raw_reflection_refresh", true); + // should incrementally refreshed default raw reflections containing filters/aggs be used + BooleanValidator ENABLE_INCREMENTAL_DEFAULT_RAW_REFLECTIONS_WITH_AGGS = new BooleanValidator("reflection.manager.enable_incremental_default_raw_with_aggs", true); } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionService.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionService.java index 84e6a90981..2ca5fc94fb 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionService.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionService.java @@ -24,10 +24,10 @@ import javax.inject.Provider; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.exec.store.sys.accel.AccelerationListManager; import com.dremio.exec.store.sys.accel.AccelerationManager.ExcludedReflectionsProvider; import com.dremio.service.Service; -import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.reflection.MaterializationCache.CacheViewer; import com.dremio.service.reflection.proto.ExternalReflection; import com.dremio.service.reflection.proto.Materialization; @@ -52,6 +52,7 @@ public interface ReflectionService extends Service, ReflectionAdministrationServ ExcludedReflectionsProvider getExcludedReflectionsProvider(); + @Override Optional getLastDoneMaterialization(ReflectionId reflectionId); Materialization getLastMaterialization(ReflectionId reflectionId); @@ -76,8 +77,6 @@ public interface ReflectionService extends Service, ReflectionAdministrationServ ReflectionManager getReflectionManager(); - void updateAccelerationBasePath(); - /** * mainly useful to reduce conflicts on the implementation when we update this interface */ @@ -88,7 +87,7 @@ public Iterable getAllReflections() { } @Override - public Iterable getReflectionsByDatasetPath(NamespaceKey path) { + public Iterable getReflectionsByDatasetPath(CatalogEntityKey path) { return Collections.emptyList(); } @@ -238,9 +237,6 @@ public ReflectionManager getReflectionManager() { return null; } - @Override - public void 
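
Because getAccelerationPlugin() above re-resolves the plugin from the CatalogService on every call, callers always observe the dist path currently configured for the accelerator source, which is why the push-based updateAccelerationBasePath() hook being removed here is no longer needed. A hedged sketch of the resolve-per-use idiom; SourceRegistry, PluginStub, and the plugin name are illustrative stand-ins, not the real CatalogService API:

// Stand-ins for CatalogService and AccelerationStoragePlugin; illustrative only.
interface SourceRegistry { PluginStub getSource(String name); }
interface PluginStub { String getDistPath(); }

class PluginLookupExample {
  private final SourceRegistry registry;
  PluginLookupExample(SourceRegistry registry) { this.registry = registry; }

  // Deliberately no field caching the plugin or its path: the dist path can change
  // after startup, so every use re-resolves the source and reads its current config.
  String currentDistPath() {
    return registry.getSource("accelerator").getDistPath();
  }
}
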
updateAccelerationBasePath() { - } } /** diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionServiceImpl.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionServiceImpl.java index 02091f4539..f635aec187 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionServiceImpl.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionServiceImpl.java @@ -17,12 +17,12 @@ import static com.dremio.common.utils.SqlUtils.quotedCompound; import static com.dremio.options.OptionValue.OptionType.SYSTEM; +import static com.dremio.service.reflection.DatasetHashUtils.computeDatasetHash; import static com.dremio.service.reflection.ReflectionOptions.MATERIALIZATION_CACHE_ENABLED; import static com.dremio.service.reflection.ReflectionOptions.MATERIALIZATION_CACHE_REFRESH_DELAY_MILLIS; import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_ENABLE_SUBSTITUTION; import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_MANAGER_REFRESH_DELAY_MILLIS; import static com.dremio.service.reflection.ReflectionOptions.REFLECTION_PERIODIC_WAKEUP_ONLY; -import static com.dremio.service.reflection.ReflectionUtils.computeDatasetHash; import static com.dremio.service.reflection.ReflectionUtils.hasMissingPartitions; import static com.dremio.service.scheduler.ScheduleUtils.scheduleForRunningOnceAt; import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; @@ -61,11 +61,16 @@ import com.dremio.common.exceptions.ErrorHelper; import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.protos.AttemptId; +import com.dremio.context.RequestContext; import com.dremio.datastore.api.LegacyKVStoreProvider; import com.dremio.exec.catalog.CachingCatalog; import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DelegatingCatalog; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.planner.PlanCache; import com.dremio.exec.planner.acceleration.CachedMaterializationDescriptor; @@ -144,6 +149,7 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -210,6 +216,9 @@ interface DescriptorCache { private WakeupHandler wakeupHandler; private boolean isMasterLessEnabled; + @VisibleForTesting + boolean isReflectionServiceStarting = true; + private final CacheViewer cacheViewer = new CacheViewer() { @Override public boolean isCached(MaterializationId id) { @@ -226,6 +235,8 @@ public boolean isCached(MaterializationId id) { private final MaterializationDescriptorFactory materializationDescriptorFactory; + private final Provider requestContextProvider; + private ReflectionManager reflectionManager = null; public ReflectionServiceImpl( @@ -239,7 +250,8 @@ public ReflectionServiceImpl( ExecutorService executorService, Provider foremenWorkManagerProvider, boolean isMaster, - BufferAllocator allocator) { + BufferAllocator allocator, + Provider requestContextProvider) { this.schedulerService = Preconditions.checkNotNull(schedulerService, "scheduler service required"); this.jobsService = Preconditions.checkNotNull(jobsService, "jobs service required"); this.catalogService = 
Preconditions.checkNotNull(catalogService, "catalog service required"); @@ -252,9 +264,10 @@ public NamespaceService get() { return sabotContext.get().getNamespaceService(SYSTEM_USERNAME); } }; - this.reflectionSettings = new ReflectionSettingsImpl(namespaceService, storeProvider); + this.reflectionSettings = new ReflectionSettingsImpl(namespaceService, catalogService, storeProvider); this.isMaster = isMaster; this.allocator = allocator.newChildAllocator(getClass().getName(), 0, Long.MAX_VALUE); + this.requestContextProvider = requestContextProvider; userStore = new ReflectionGoalsStore(storeProvider); internalStore = new ReflectionEntriesStore(storeProvider); @@ -268,21 +281,22 @@ public NamespaceService get() { public QueryContext get() { final UserSession session = systemSession(getOptionManager()); return new QueryContext(session, sabotContext.get(), new AttemptId().toQueryId(), - java.util.Optional.of(false)); + // Never check metadata validity and allow promotion only after coordinator startup + java.util.Optional.of(false), java.util.Optional.of(isReflectionServiceStarting)); } }; this.expansionHelper = new Supplier() { @Override public ExpansionHelper get() { - return new ExpansionHelper(queryContext.get()); + return new ExpansionHelper(getQueryContext().get()); } }; this.planCacheInvalidationHelper = new Supplier() { @Override public PlanCacheInvalidationHelper get() { - return new PlanCacheInvalidationHelper(queryContext.get(), foremenWorkManagerProvider.get()); + return new PlanCacheInvalidationHelper(getQueryContext().get(), foremenWorkManagerProvider.get()); } }; @@ -293,6 +307,16 @@ public PlanCacheInvalidationHelper get() { DEFAULT_MATERIALIZATION_DESCRIPTOR_FACTORY); } + @VisibleForTesting + Supplier getQueryContext() { + return this.queryContext; + } + + @VisibleForTesting + Supplier getExpansionHelper() { + return this.expansionHelper; + } + public MaterializationDescriptorProvider getMaterializationDescriptor() { return materializationDescriptorProvider; } @@ -303,7 +327,7 @@ public void start() { this.isMasterLessEnabled = sabotContext.get().getDremioConfig().isMasterlessEnabled(); // populate the materialization cache - materializationCache = new MaterializationCache(cacheHelper, namespaceService.get(), reflectionStatusService.get(), catalogService.get()); + materializationCache = new MaterializationCache(cacheHelper, reflectionStatusService.get(), catalogService.get()); if (isCacheEnabled()) { // refresh the cache in-thread before any query gets planned materializationCache.refresh(); @@ -320,7 +344,7 @@ public void start() { // no automatic rePlan allowed after this point. 
Any failure to expand should cause the corresponding // materialization to be marked as failed - cacheHelper.disableReplan(); + isReflectionServiceStarting = false; // only start the managers on the master node if (isMaster) { @@ -395,7 +419,7 @@ private void masterInit() { this.reflectionManager = new ReflectionManager( sabotContext.get(), jobsService.get(), - namespaceService.get(), + catalogService.get(), getOptionManager(), userStore, internalStore, @@ -405,42 +429,26 @@ private void masterInit() { new DescriptorCacheImpl(), reflectionsToUpdate, this::wakeupManager, - expansionHelper, + getExpansionHelper(), planCacheInvalidationHelper, allocator, - accelerationPlugin, - accelerationPlugin.getConfig().getPath(), ReflectionGoalChecker.Instance, new RefreshStartHandler( - namespaceService.get(), + catalogService.get(), jobsService.get(), materializationStore, this::wakeupManager ), - catalogService.get(), new DependencyResolutionContextFactory(reflectionSettings, requestsStore, getOptionManager(), internalStore) ); - wakeupHandler = new WakeupHandler(executorService, reflectionManager); - } - - @Override - public void updateAccelerationBasePath() { - if (reflectionManager != null) { - final FileSystemPlugin accelerationPlugin = sabotContext.get().getCatalogService() - .getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME); - reflectionManager.setAccelerationBasePath(accelerationPlugin.getConfig().getPath()); - } + wakeupHandler = new WakeupHandler(executorService, reflectionManager, requestContextProvider); } public RefreshHelper getRefreshHelper() { - return new RefreshHelper() { - @Override - public NamespaceService getNamespace() { - return namespaceService.get(); - } + return new RefreshHelper() { @Override public ReflectionSettings getReflectionSettings() { @@ -452,6 +460,8 @@ public MaterializationStore getMaterializationStore() { return materializationStore; } + @Override + public CatalogService getCatalogService() { return catalogService.get(); }; }; } @@ -476,7 +486,7 @@ private boolean isCacheEnabled() { return getOptionManager().getOption(MATERIALIZATION_CACHE_ENABLED); } - private static UserSession systemSession(OptionManager options) { + static UserSession systemSession(OptionManager options) { final UserBitShared.UserCredentials credentials = UserBitShared.UserCredentials.newBuilder() .setUserName(SYSTEM_USERNAME) .build(); @@ -490,7 +500,8 @@ private static UserSession systemSession(OptionManager options) { /** * @return non expired DONE materializations that have at least one refresh */ - private Iterable getValidMaterializations() { + @VisibleForTesting + Iterable getValidMaterializations() { final long now = System.currentTimeMillis(); return Iterables.filter(materializationStore.getAllDoneWhen(now), new Predicate() { @Override @@ -540,14 +551,16 @@ public ReflectionId create(ReflectionGoal goal) { public ReflectionId createExternalReflection(String name, List dataset, List targetDataset) { ReflectionId id = new ReflectionId(UUID.randomUUID().toString()); try { - DatasetConfig datasetConfig = namespaceService.get().getDataset(new NamespaceKey(dataset)); + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, new NamespaceKey(dataset)); if (datasetConfig == null) { throw UserException .validationError() .message(String.format("Dataset %s not found", quotedCompound(dataset))) .build(logger); } - DatasetConfig targetDatasetConfig = 
namespaceService.get().getDataset(new NamespaceKey(targetDataset)); + + DatasetConfig targetDatasetConfig = CatalogUtil.getDatasetConfig(catalog, new NamespaceKey(targetDataset)); if (targetDatasetConfig == null) { throw UserException .validationError() @@ -558,18 +571,18 @@ public ReflectionId createExternalReflection(String name, List dataset, .setId(id.getId()) .setName(name) .setQueryDatasetId(datasetConfig.getId().getId()) - .setQueryDatasetHash(computeDatasetHash(datasetConfig, namespaceService.get(), true)) + .setQueryDatasetHash(computeDatasetHash(datasetConfig, catalogService.get(), true)) .setTargetDatasetId(targetDatasetConfig.getId().getId()) - .setTargetDatasetHash(computeDatasetHash(targetDatasetConfig, namespaceService.get(), true)); + .setTargetDatasetHash(computeDatasetHash(targetDatasetConfig, catalogService.get(), true)); // check that we are able to get a MaterializationDescriptor before storing it - MaterializationDescriptor descriptor = ReflectionUtils.getMaterializationDescriptor(externalReflection, namespaceService.get(), catalogService.get()); + MaterializationDescriptor descriptor = ReflectionUtils.getMaterializationDescriptor(externalReflection, catalogService.get()); if (descriptor == null) { throw UserException.validationError().message("Failed to validate external reflection " + name).build(logger); } // validate that we can convert to a materialization - try (ExpansionHelper helper = expansionHelper.get()){ + try (ExpansionHelper helper = getExpansionHelper().get()){ descriptor.getMaterializationFor(helper.getConverter()); } externalReflectionStore.addExternalReflection(externalReflection); @@ -586,12 +599,8 @@ public Optional getExternalReflectionById(String id) { @Override public Iterable getExternalReflectionByDatasetPath(List datasetPath) { - DatasetConfig datasetConfig; - try { - datasetConfig = namespaceService.get().getDataset(new NamespaceKey(datasetPath)); - } catch (NamespaceException e) { - throw UserException.validationError(e).build(logger); - } + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, new NamespaceKey(datasetPath)); if (datasetConfig == null) { throw UserException.validationError().message(String.format("Dataset %s not found", quotedCompound(datasetPath))).build(logger); } @@ -692,6 +701,7 @@ private Stream getGoalDependencies(Refle final List dependencyEntries = dependencyManager.getDependencies(goalId); return StreamSupport.stream(dependencyEntries.spliterator(), false).map(new Function() { + @Override public AccelerationListManager.DependencyInfo apply(DependencyEntry entry){ return new AccelerationListManager.DependencyInfo( goalId.getId(), @@ -714,32 +724,36 @@ public Iterable getAllReflections() { return ReflectionUtils.getAllReflections(userStore); } + @VisibleForTesting @Override - public Iterable getReflectionsByDatasetPath(NamespaceKey path) { - try { - DatasetConfig config = namespaceService.get().getDataset(path); - return getReflectionsByDatasetId(config.getId().getId()); - }catch(NamespaceException ex) { - throw Throwables.propagate(ex); + public Iterable getReflectionsByDatasetPath(CatalogEntityKey path) { + final EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + DremioTable table = CatalogUtil.getTable(path, catalog); + if (table == null) { + Throwables.propagate(new NamespaceNotFoundException(path.toNamespaceKey(), "Dataset not found in catalog")); } + return 
getReflectionsByDatasetId(table.getDatasetConfig().getId().getId()); } + @VisibleForTesting - public Iterable getReflectionGoals(final NamespaceKey path, final String reflectionName) { - try { - DatasetConfig config = namespaceService.get().getDataset(path); - return FluentIterable.from(getReflectionsByDatasetId(config.getId().getId())).filter(new Predicate() { + public Iterable getReflectionGoals(final NamespaceKey path, final String reflectionName) { + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, path); + if (datasetConfig == null) { + Throwables.propagate(new NamespaceNotFoundException(path, "Dataset not found in catalog")); + } + return FluentIterable.from(getReflectionsByDatasetId(datasetConfig.getId().getId())).filter(new Predicate() { @Override public boolean apply(ReflectionGoal input) { return reflectionName.equals(input.getName()); - }}); - }catch(NamespaceException ex) { - throw Throwables.propagate(ex); - } + } + }); } @Override + @WithSpan public Iterable getReflectionsByDatasetId(String datasetid) { return userStore.getByDatasetId(datasetid); } @@ -808,8 +822,10 @@ public Iterable getRefreshes(Materialization materialization) { } @Override + @WithSpan public List getRecommendedReflections(String datasetId) { - DatasetConfig datasetConfig = namespaceService.get().findDatasetByUUID(datasetId); + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, datasetId); if (datasetConfig == null) { throw new NotFoundException("Dataset not found"); @@ -817,7 +833,7 @@ public List getRecommendedReflections(String datasetId) { ReflectionAnalyzer analyzer = new ReflectionAnalyzer(jobsService.get(), catalogService.get(), allocator); - TableStats tableStats = analyzer.analyze(new NamespaceKey(datasetConfig.getFullPathList())); + TableStats tableStats = analyzer.analyze(datasetId); ReflectionSuggester suggester = new ReflectionSuggester(datasetConfig, tableStats); @@ -883,7 +899,8 @@ private Future wakeupManager(String reason, boolean periodic) { return CompletableFuture.completedFuture(null); } - private MaterializationDescriptor getDescriptor(Materialization materialization) throws CacheException { + @VisibleForTesting + MaterializationDescriptor getDescriptor(Materialization materialization) throws CacheException { final ReflectionGoal goal = userStore.get(materialization.getReflectionId()); if (!ReflectionGoalChecker.checkGoal(goal, materialization)) { // reflection goal changed and corresponding materialization is no longer valid @@ -951,7 +968,7 @@ public MaterializationDescriptor apply(Materialization m) { @Override public MaterializationDescriptor apply(ExternalReflection externalReflection) { try { - return ReflectionUtils.getMaterializationDescriptor(externalReflection, namespaceService.get(), catalogService.get()); + return ReflectionUtils.getMaterializationDescriptor(externalReflection, catalogService.get()); } catch (Exception e) { logger.debug("failed to get MaterializationDescriptor for external reflection {}", externalReflection.getName()); return null; @@ -988,24 +1005,36 @@ public MaterializationDescriptor apply(ReflectionId reflectionId) { .toList(); } - private boolean isDefaultReflectionEnabled(NamespaceKey path) { - try { - DatasetConfig datasetConfig = namespaceService.get().getDataset(path); - if 
(!datasetConfig.getType().equals(DatasetType.VIRTUAL_DATASET) || datasetConfig.getVirtualDataset() == null) {
-        return false;
-      }
-      return Optional.ofNullable(datasetConfig.getVirtualDataset().getDefaultReflectionEnabled()).orElse(true);
-    } catch (NamespaceException e) {
+  /**
+   * Checks if default raw reflection has been enabled/disabled on the VDS.
+   * Uses the same caching catalog from planning so table metadata should already be in cache.
+   */
+  private boolean isDefaultReflectionEnabled(CatalogEntityKey path, Catalog catalog) {
+    DremioTable table = CatalogUtil.getTable(path, catalog);
+    if (table == null) {
      logger.debug("Dataset {} not found", path);
      return false;
    }
+    DatasetConfig datasetConfig = table.getDatasetConfig();
+    if (!datasetConfig.getType().equals(DatasetType.VIRTUAL_DATASET) || datasetConfig.getVirtualDataset() == null) {
+      return false;
+    }
+    return Optional.ofNullable(datasetConfig.getVirtualDataset().getDefaultReflectionEnabled()).orElse(true);
  }

  @Override
-  public java.util.Optional getDefaultRawMaterialization(NamespaceKey path, List vdsFields) {
+  public java.util.Optional getDefaultRawMaterialization(NamespaceKey path,
+    TableVersionContext versionContext,
+    List vdsFields, Catalog catalogWithSchema) {
    if (isSubstitutionEnabled()) {
+      // Path is already canonicalized so clear any default schema from the catalog
+      final Catalog catalog = catalogWithSchema.resolveCatalog((NamespaceKey) null);
      try {
-        for (ReflectionGoal goal : getReflectionsByDatasetPath(path)) {
+        CatalogEntityKey versionedKey = CatalogEntityKey.newBuilder().keyComponents(path.getPathComponents()).tableVersionContext(versionContext).build();
+        DremioTable table = Preconditions.checkNotNull(CatalogUtil.getTable(versionedKey, catalog),
+          String.format("View %s should have been found in catalog cache", versionedKey));
+
+        for (ReflectionGoal goal : getReflectionsByDatasetId(table.getDatasetConfig().getId().getId())) {
          if (goal.getType() == ReflectionType.RAW) {
            List displayFields = goal.getDetails().getDisplayFieldList().stream().map(ReflectionField::getName).sorted().collect(Collectors.toList());
            if (displayFields.equals(vdsFields)) {
@@ -1038,12 +1067,14 @@ public java.util.Optional getDefaultRawMaterializatio
      // we know that there are default reflections available for the dataset. This way we avoid
      // hitting the namespace for every dataset in the tree, even if the dataset doesn't have any
      // reflections
-      if (!isDefaultReflectionEnabled(path)) {
+      if (!isDefaultReflectionEnabled(versionedKey, catalog)) {
        return java.util.Optional.empty();
      }

      CachedMaterializationDescriptor desc = expandedMaterializations.iterator().next();
-      if (!(desc.getMaterialization().getIncrementalUpdateSettings().isIncremental() && desc.getMaterialization().hasAgg())) {
+      if (!(desc.getMaterialization().getIncrementalUpdateSettings().isIncremental() && desc.getMaterialization().hasAgg())
+        || getOptionManager().getOption(ReflectionOptions.ENABLE_INCREMENTAL_DEFAULT_RAW_REFLECTIONS_WITH_AGGS)) {
        // Do not apply default reflections for incremental refresh if there is an agg in the query plan
+        // unless we have the support key enabled.
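
The guard above collapses to a single predicate: use the default raw materialization unless it is both incrementally refreshed and aggregated, with the support key overriding that restriction. A minimal sketch; the method and parameter names are illustrative:

// Illustrative restatement of the condition guarding default raw reflection use.
static boolean useDefaultRawReflection(boolean isIncremental, boolean hasAgg, boolean supportKeyEnabled) {
  // Incremental + aggregation is the risky combination; the support key bypasses the restriction.
  return !(isIncremental && hasAgg) || supportKeyEnabled;
}
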
return java.util.Optional.of(desc); } } @@ -1063,11 +1094,6 @@ public java.util.Optional getDefaultRawMaterializatio } private final class CacheHelperImpl implements CacheHelper { - private boolean rePlanIfNecessary = true; - - void disableReplan() { - rePlanIfNecessary = false; - } @Override public Iterable getValidMaterializations() { @@ -1082,7 +1108,7 @@ public Iterable getExternalReflections() { @Override public MaterializationDescriptor getDescriptor(ExternalReflection externalReflection) throws CacheException { try { - return ReflectionUtils.getMaterializationDescriptor(externalReflection, namespaceService.get(), catalogService.get()); + return ReflectionUtils.getMaterializationDescriptor(externalReflection, catalogService.get()); } catch (NamespaceException e) { throw new CacheException("Unable to get descriptor for " + externalReflection.getName()); } @@ -1098,6 +1124,11 @@ public CachedMaterializationDescriptor expand(Materialization materialization) t return new CachedMaterializationDescriptor(descriptor, expanded, catalogService.get()); } + /** + * Materialization expansion behaves differently depending on whether expansion is happening during coordinator + * startup. See {@link ReflectionServiceImpl#isReflectionServiceStarting}. + * On coordinator startup, we never want to trigger an inline metadata refresh because the executors aren't available yet. + */ @Override public DremioMaterialization expand(MaterializationDescriptor descriptor) { final ReflectionId rId = new ReflectionId(descriptor.getLayoutId()); @@ -1109,7 +1140,7 @@ public DremioMaterialization expand(MaterializationDescriptor descriptor) { // get a new converter for each materialization. This ensures that we // always index flattens from zero. This is a partial fix for flatten // matching. We should really do a better job in matching. - try (ExpansionHelper helper = expansionHelper.get()) { + try (ExpansionHelper helper = getExpansionHelper().get()) { return descriptor.getMaterializationFor(helper.getConverter()); } catch (DeserializationException e) { final UserException uex = ErrorHelper.findWrappedCause(e, UserException.class); @@ -1126,7 +1157,7 @@ public DremioMaterialization expand(MaterializationDescriptor descriptor) { return null; } - if (!rePlanIfNecessary) { + if (!isReflectionServiceStarting) { // replan not allowed, just rethrow the exception throw e; } @@ -1134,7 +1165,7 @@ public DremioMaterialization expand(MaterializationDescriptor descriptor) { logger.debug("failed to expand materialization descriptor {}/{}. 
Associated reflection will be scheduled for update", descriptor.getLayoutId(), descriptor.getMaterializationId(), e); } catch (MaterializationExpander.ExpansionException e) { - if (!rePlanIfNecessary) { + if (!isReflectionServiceStarting) { // replan not allowed, just rethrow the exception throw e; } @@ -1166,14 +1197,17 @@ public DremioMaterialization expand(MaterializationDescriptor descriptor) { } private void rebuildPlan(ReflectionGoal goal, ReflectionEntry entry, Materialization materialization) { - try (ExpansionHelper helper = expansionHelper.get()){ + try (ExpansionHelper helper = getExpansionHelper().get()){ SqlHandlerConfig config = new SqlHandlerConfig(helper.getContext(), helper.getConverter(), AttemptObservers.of(), null); - ReflectionPlanGenerator generator = new ReflectionPlanGenerator(config, namespaceService.get(), sabotContext.get().getConfig(), goal, + ReflectionPlanGenerator generator = new ReflectionPlanGenerator(config, catalogService.get(), sabotContext.get().getConfig(), goal, entry, materialization, reflectionSettings, materializationStore, false, Optional.ofNullable(materialization.getStripVersion()).orElse(StrippingFactory.NO_STRIP_VERSION)); generator.generateNormalizedPlan(); ByteString logicalPlanBytes = generator.getRefreshDecision().getLogicalPlan(); materialization.setLogicalPlan(logicalPlanBytes); materializationStore.save(materialization); + } catch (Exception e) { + logger.debug("Failed to rebuild plan for {}", ReflectionUtils.getId(goal), e); + throw e; } } @@ -1181,6 +1215,7 @@ public void resetCache() { materializationCache.resetCache(); } + @Override public ReflectionManager getReflectionManager() { return reflectionManager; } @@ -1217,8 +1252,16 @@ public CacheRefresher() { @Override public void run() { try { - refreshCache(); - this.planCacheSynchronizer.sync(); + if (requestContextProvider != null) { + requestContextProvider.get().run(() -> { + refreshCache(); + this.planCacheSynchronizer.sync(); + } + ); + } else { + refreshCache(); + this.planCacheSynchronizer.sync(); + } } finally { scheduleNextCacheRefresh(this); } @@ -1314,18 +1357,14 @@ public void invalidateReflectionAssociatedPlanCache(String datasetId) { } else if (config.getType() == DatasetType.VIRTUAL_DATASET && config.getVirtualDataset().getParentsList() != null) { for (ParentDataset parent : config.getVirtualDataset().getParentsList()){ try { - configQueue.add(context.getNamespaceService(SYSTEM_USERNAME).getDataset(new NamespaceKey(parent.getDatasetPathList()))); - } catch (NamespaceException ex) { - //Here means Parent doesnt exist in Catalog. But it can still exist in Source (provided by the plugin). - //Try to resolve the Parent from the Source Plugin using Catalog. - //Eg. for "history" `sys.project.history.jobs` tables. These are defined only in the Source and dont exist in Catalog. DremioTable table = catalog.getTable(new NamespaceKey(parent.getDatasetPathList())); if (table != null) { configQueue.add(table.getDatasetConfig()); } else { - //Could not find parent anywhere... Shouldnt occur in normal situations. 
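
The expand() path above amounts to a startup-sensitive failure policy: while the coordinator is starting, expansion failures are rethrown (executors are unavailable, so no replan is possible); afterwards, the failing reflection is scheduled for update instead. A simplified sketch of that control flow, where DescriptorStub and ExpansionFailure are hypothetical stand-ins rather than the ReflectionServiceImpl API:

// Illustrative only; mirrors the rethrow-during-startup vs. replan-after-startup pattern.
class ExpansionPolicyExample {
  private volatile boolean starting = true;

  Object expand(DescriptorStub descriptor) {
    try {
      return descriptor.materialize();
    } catch (ExpansionFailure e) {
      if (starting) {
        throw e; // during startup: surface the failure, no replan
      }
      scheduleForUpdate(descriptor); // after startup: mark the reflection for re-plan
      return null;
    }
  }

  void startupComplete() { starting = false; }
  private void scheduleForUpdate(DescriptorStub d) { /* enqueue for update */ }

  interface DescriptorStub { Object materialize(); }
  static class ExpansionFailure extends RuntimeException {}
}
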
logger.info("Can't find parent dataset {}", parent.getDatasetPathList()); } + } catch (Exception exception) { + logger.warn("Can't find parent dataset {}", parent.getDatasetPathList(), exception); } } } @@ -1333,4 +1372,6 @@ public void invalidateReflectionAssociatedPlanCache(String datasetId) { } } } + + } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettings.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettings.java index 3b456f1686..9d1b069e8e 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettings.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettings.java @@ -17,6 +17,7 @@ import java.util.Optional; +import com.dremio.exec.catalog.CatalogEntityKey; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; @@ -25,12 +26,25 @@ */ public interface ReflectionSettings { // only returns a AccelerationSettings if one is specifically defined for the specified key + @Deprecated Optional getStoredReflectionSettings(NamespaceKey key); + // only returns a AccelerationSettings if one is specifically defined for the specified key + Optional getStoredReflectionSettings(CatalogEntityKey key); + + @Deprecated AccelerationSettings getReflectionSettings(NamespaceKey key); + AccelerationSettings getReflectionSettings(CatalogEntityKey key); + + @Deprecated void setReflectionSettings(NamespaceKey key, AccelerationSettings settings); + void setReflectionSettings(CatalogEntityKey key, AccelerationSettings settings); + void removeSettings(NamespaceKey key); + + void removeSettings(CatalogEntityKey key); + default int getAllHash() { return 0; }; } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettingsImpl.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettingsImpl.java index de7c9c8097..6ae6d5cf57 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettingsImpl.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionSettingsImpl.java @@ -21,6 +21,10 @@ import javax.inject.Provider; import com.dremio.datastore.api.LegacyKVStoreProvider; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.store.CatalogService; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; @@ -39,20 +43,32 @@ public class ReflectionSettingsImpl implements ReflectionSettings { private final Provider namespace; private final ReflectionSettingsStore store; + private final Provider catalogServiceProvider; - public ReflectionSettingsImpl(Provider namespace, Provider storeProvider) { + public ReflectionSettingsImpl(Provider namespace, Provider catalogServiceProvider, Provider storeProvider) { this.namespace = Preconditions.checkNotNull(namespace, "namespace service required"); this.store = new ReflectionSettingsStore(storeProvider); + this.catalogServiceProvider = catalogServiceProvider; } // only returns a AccelerationSettings if one is specifically defined for the specified key @Override public Optional getStoredReflectionSettings(NamespaceKey key) { + return getStoredReflectionSettings(CatalogEntityKey.fromNamespaceKey(key)); + } + + @Override + public Optional 
getStoredReflectionSettings(CatalogEntityKey key) {
    return Optional.ofNullable(store.get(key));
  }

  @Override
  public AccelerationSettings getReflectionSettings(NamespaceKey key) {
+    return getReflectionSettings(CatalogEntityKey.fromNamespaceKey(key));
+  }
+
+  @Override
+  public AccelerationSettings getReflectionSettings(CatalogEntityKey key) {
    // first check if the settings have been set at the dataset level
    AccelerationSettings settings = store.get(key);
    if (settings != null) {
@@ -60,10 +76,10 @@ public AccelerationSettings getReflectionSettings(NamespaceKey key) {
    }

    // no settings found, try to retrieve the source's settings
-    final NamespaceKey rootKey = new NamespaceKey(key.getRoot());
-    if (!rootKey.equals(key)) {
+    final NamespaceKey rootKey = new NamespaceKey(key.getRootEntity());
+    if (!rootKey.equals(key.toNamespaceKey())) {
      try {
-        namespace.get().getSource(new NamespaceKey(key.getRoot()));
+        namespace.get().getSource(new NamespaceKey(key.getRootEntity()));
        // root parent is a source, return its settings from the store
        return getReflectionSettings(rootKey);
      } catch (NamespaceException e) {
@@ -73,11 +89,11 @@ public AccelerationSettings getReflectionSettings(NamespaceKey key) {
    // otherwise, return the default settings, they depend if the dataset is a home dataset or not
    boolean homeDataset = false;
-    try {
-      DatasetConfig config = namespace.get().getDataset(key);
+    final EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogServiceProvider.get());
+    DatasetConfig config = CatalogUtil.getDatasetConfig(catalog, key.toNamespaceKey());
+    // Check if it's a home dataset
+    if (config != null) {
      homeDataset = ReflectionUtils.isHomeDataset(config.getType());
-    } catch (NamespaceException e) {
-      // no dataset found, probably a source.
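
getReflectionSettings above resolves acceleration settings through a precedence chain: explicit settings stored for the dataset, then settings stored for the dataset's root source, then built-in defaults that differ for home datasets. A compact sketch of the same cascade; Settings, the maps, and the default values are illustrative stand-ins for the KV store and catalog lookups:

import java.util.Map;
import java.util.Optional;

class SettingsCascadeExample {
  record Settings(long refreshPeriodMs) {}

  private final Map<String, Settings> datasetSettings; // per-dataset overrides
  private final Map<String, Settings> sourceSettings;  // per-source overrides
  private static final Settings HOME_DEFAULTS = new Settings(0L);          // assumed distinct default for home datasets
  private static final Settings SYSTEM_DEFAULTS = new Settings(3_600_000L); // assumed system-wide default

  SettingsCascadeExample(Map<String, Settings> datasetSettings, Map<String, Settings> sourceSettings) {
    this.datasetSettings = datasetSettings;
    this.sourceSettings = sourceSettings;
  }

  Settings resolve(String datasetKey, String rootSource, boolean isHomeDataset) {
    return Optional.ofNullable(datasetSettings.get(datasetKey))        // 1. dataset level
        .or(() -> Optional.ofNullable(sourceSettings.get(rootSource))) // 2. root source
        .orElse(isHomeDataset ? HOME_DEFAULTS : SYSTEM_DEFAULTS);      // 3. defaults
  }
}
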
In all cases it's not a home pds :) } if (homeDataset) { @@ -95,6 +111,11 @@ public AccelerationSettings getReflectionSettings(NamespaceKey key) { @Override public void setReflectionSettings(NamespaceKey key, AccelerationSettings settings) { + setReflectionSettings(CatalogEntityKey.fromNamespaceKey(key), settings); + } + + @Override + public void setReflectionSettings(CatalogEntityKey key, AccelerationSettings settings) { // if some settings already exist just override them, otherwise remove the version as the passed settings may be // coming from the parent source AccelerationSettings previous = store.get(key); @@ -113,6 +134,11 @@ public void setReflectionSettings(NamespaceKey key, AccelerationSettings setting @Override public void removeSettings(NamespaceKey key) { + removeSettings(CatalogEntityKey.fromNamespaceKey(key)); + } + + @Override + public void removeSettings(CatalogEntityKey key) { store.delete(key); } @@ -125,5 +151,4 @@ public int getAllHash() { } return result; } - } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionStatusServiceImpl.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionStatusServiceImpl.java index 88b156533a..cb3e509a6f 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionStatusServiceImpl.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionStatusServiceImpl.java @@ -34,6 +34,7 @@ import com.dremio.datastore.api.LegacyKVStoreProvider; import com.dremio.exec.catalog.CatalogUser; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.catalog.MetadataRequestOptions; @@ -46,7 +47,6 @@ import com.dremio.service.acceleration.ReflectionDescriptionServiceRPC; import com.dremio.service.accelerator.AccelerationUtils; import com.dremio.service.namespace.NamespaceException; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.reflection.MaterializationCache.CacheViewer; import com.dremio.service.reflection.ReflectionStatus.AVAILABILITY_STATUS; @@ -83,7 +83,6 @@ public class ReflectionStatusServiceImpl implements ReflectionStatusService { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReflectionStatusServiceImpl.class); private final Provider> nodeEndpointsProvider; - private final Provider namespaceService; private final Provider catalogService; private final Provider cacheViewer; @@ -100,7 +99,6 @@ public class ReflectionStatusServiceImpl implements ReflectionStatusService { @VisibleForTesting ReflectionStatusServiceImpl( Provider> nodeEndpointsProvider, - Provider namespaceService, Provider cacheViewer, ReflectionGoalsStore goalsStore, ReflectionEntriesStore entriesStore, @@ -109,28 +107,24 @@ public class ReflectionStatusServiceImpl implements ReflectionStatusService { ReflectionValidator validator, Provider catalogService) { this.nodeEndpointsProvider = nodeEndpointsProvider; - this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespace service required"); this.cacheViewer = Preconditions.checkNotNull(cacheViewer, "cache viewer required"); this.goalsStore = Preconditions.checkNotNull(goalsStore, "goals store required"); this.entriesStore = Preconditions.checkNotNull(entriesStore, "entries store required"); this.materializationStore = Preconditions.checkNotNull(materializationStore, 
"materialization store required"); this.externalReflectionStore = Preconditions.checkNotNull(externalReflectionStore, "external reflection store required"); - this.validator = Preconditions.checkNotNull(validator, "validator required"); this.catalogService = Preconditions.checkNotNull(catalogService, "catalog service required"); } public ReflectionStatusServiceImpl( - Provider> nodeEndpointsProvider, - Provider namespaceServiceProvider, - Provider catalogService, - Provider storeProvider, - Provider cacheViewer) { + Provider> nodeEndpointsProvider, + Provider catalogService, + Provider storeProvider, + Provider cacheViewer) { Preconditions.checkNotNull(storeProvider, "kv store provider required"); Preconditions.checkNotNull(catalogService, "catalog service required"); this.nodeEndpointsProvider = nodeEndpointsProvider; this.cacheViewer = Preconditions.checkNotNull(cacheViewer, "cache viewer required"); - this.namespaceService = namespaceServiceProvider; this.catalogService = catalogService; goalsStore = new ReflectionGoalsStore(storeProvider); @@ -242,7 +236,7 @@ public ReflectionStatus getReflectionStatus(ReflectionGoal goal, if (lastMaterializationDone.isPresent()) { Materialization materialization = lastMaterializationDone.get(); lastDataFetch = materialization.getLastRefreshFromPds(); - lastRefreshDuration = materialization.getLastRefreshDurationMillis(); + lastRefreshDuration = Optional.ofNullable(materialization.getLastRefreshDurationMillis()).orElse(-1L); expiresAt = Optional.ofNullable(materialization.getExpiration()).orElse(0L); final Set activeHosts = getActiveHosts(); @@ -290,21 +284,22 @@ private Set getActiveHosts() { } private ExternalReflectionStatus.STATUS computeStatus(ExternalReflection reflection) { + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); // check if the reflection is still valid - final DatasetConfig queryDataset = namespaceService.get().findDatasetByUUID(reflection.getQueryDatasetId()); - if (queryDataset == null) { + DatasetConfig queryDatasetConfig = CatalogUtil.getDatasetConfig(catalog, reflection.getQueryDatasetId()); + if (queryDatasetConfig == null) { return ExternalReflectionStatus.STATUS.INVALID; } - final DatasetConfig targetDataset = namespaceService.get().findDatasetByUUID(reflection.getTargetDatasetId()); - if (targetDataset == null) { + DatasetConfig targetDatasetConfig = CatalogUtil.getDatasetConfig(catalog, reflection.getTargetDatasetId() ); + if (targetDatasetConfig == null) { return ExternalReflectionStatus.STATUS.INVALID; } // now check if the query and target datasets didn't change try { - if (!ReflectionUtils.hashEquals(reflection.getQueryDatasetHash(), queryDataset, namespaceService.get())) { + if (!DatasetHashUtils.hashEquals(reflection.getQueryDatasetHash(), queryDatasetConfig, catalogService.get())) { return ExternalReflectionStatus.STATUS.OUT_OF_SYNC; - } else if (!ReflectionUtils.hashEquals(reflection.getTargetDatasetHash(), targetDataset, namespaceService.get())) { + } else if (!DatasetHashUtils.hashEquals(reflection.getTargetDatasetHash(), targetDatasetConfig, catalogService.get())) { return ExternalReflectionStatus.STATUS.OUT_OF_SYNC; } } catch (NamespaceException e) { @@ -314,7 +309,7 @@ private ExternalReflectionStatus.STATUS computeStatus(ExternalReflection reflect // check that we are still able to get a MaterializationDescriptor try { - if (ReflectionUtils.getMaterializationDescriptor(reflection, namespaceService.get(), catalogService.get()) == null) { + if 
(ReflectionUtils.getMaterializationDescriptor(reflection, catalogService.get()) == null) { return ExternalReflectionStatus.STATUS.INVALID; } } catch(NamespaceException e) { @@ -344,53 +339,55 @@ public ExternalReflectionStatus getExternalReflectionStatus(ReflectionId id) { public Iterator getReflections() { final Iterable goalReflections = ReflectionUtils.getAllReflections(goalsStore); final Iterable externalReflections = externalReflectionStore.getExternalReflections(); + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService.get()); Stream reflections = StreamSupport.stream(goalReflections.spliterator(), false).map(goal -> { - try { - final DatasetConfig datasetConfig = namespaceService.get().findDatasetByUUID(goal.getDatasetId()); - if (datasetConfig == null) { - return null; - } - final Optional statusOpt = getNoThrowStatus(goal.getId()); - String combinedStatus = "UNKNOWN"; - int numFailures = 0; - if (statusOpt.isPresent()) { - combinedStatus = statusOpt.get().getCombinedStatus().toString(); - numFailures = statusOpt.get().getNumFailures(); - } - - return new AccelerationListManager.ReflectionInfo( - goal.getId().getId(), - goal.getName(), - goal.getType().toString(), - combinedStatus, - numFailures, - datasetConfig.getId().getId(), - quotedCompound(datasetConfig.getFullPathList()), - datasetConfig.getType().toString(), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getSortFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getPartitionFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDistributionFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDimensionFieldList()).stream().map(ReflectionDimensionField::getName).collect(Collectors.toList())), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getMeasureFieldList()).stream().map(ReflectionMeasureField::getName).collect(Collectors.toList())), - JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDisplayFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), - null, - goal.getArrowCachingEnabled() - ); - } catch (Exception e) { - logger.error("Unable to get ReflectionInfo for {}", getId(goal), e); + try { + final DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, goal.getDatasetId()); + if (datasetConfig == null) { + return null; } - return null; + final Optional statusOpt = getNoThrowStatus(goal.getId()); + String combinedStatus = "UNKNOWN"; + int numFailures = 0; + if (statusOpt.isPresent()) { + combinedStatus = statusOpt.get().getCombinedStatus().toString(); + numFailures = statusOpt.get().getNumFailures(); + } + + return new AccelerationListManager.ReflectionInfo( + goal.getId().getId(), + goal.getName(), + goal.getType().toString(), + combinedStatus, + numFailures, + datasetConfig.getId().getId(), + quotedCompound(datasetConfig.getFullPathList()), + datasetConfig.getType().toString(), + JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getSortFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), + JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getPartitionFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), + 
JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDistributionFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), + JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDimensionFieldList()).stream().map(ReflectionDimensionField::getName).collect(Collectors.toList())), + JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getMeasureFieldList()).stream().map(ReflectionMeasureField::getName).collect(Collectors.toList())), + JOINER.join(AccelerationUtils.selfOrEmpty(goal.getDetails().getDisplayFieldList()).stream().map(ReflectionField::getName).collect(Collectors.toList())), + null, + goal.getArrowCachingEnabled() + ); + } catch (Exception e) { + logger.debug("Unable to get ReflectionInfo for {}", getId(goal), e); + } + return null; }).filter(Objects::nonNull); Stream externalReflectionsInfo = StreamSupport.stream - (externalReflections.spliterator(), false) + (externalReflections.spliterator(), false) .map(externalReflection -> { - DatasetConfig dataset = namespaceService.get().findDatasetByUUID(externalReflection.getQueryDatasetId()); + try { + DatasetConfig dataset = CatalogUtil.getDatasetConfig(catalog, externalReflection.getQueryDatasetId()); if (dataset == null) { return null; } - DatasetConfig targetDataset = namespaceService.get().findDatasetByUUID(externalReflection.getTargetDatasetId()); + DatasetConfig targetDataset = CatalogUtil.getDatasetConfig(catalog, externalReflection.getTargetDatasetId()); if (targetDataset == null) { return null; } @@ -415,6 +412,10 @@ public Iterator getReflections() { targetDatasetPath, false ); + } catch (Exception e) { + logger.debug("Unable to get ReflectionInfo for {}", getId(externalReflection), e); + } + return null; }).filter(Objects::nonNull); return Stream.concat(reflections, externalReflectionsInfo).iterator(); } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionUtils.java b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionUtils.java index 77c4a0d086..e66ebb12b3 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionUtils.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionUtils.java @@ -20,13 +20,12 @@ import static com.dremio.service.reflection.ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME; import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.LinkedList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Optional; -import java.util.Queue; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; @@ -40,9 +39,14 @@ import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.utils.PathUtils; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionContext; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.planner.acceleration.ExternalMaterializationDescriptor; import com.dremio.exec.planner.acceleration.IncrementalUpdateSettings; import com.dremio.exec.planner.acceleration.JoinDependencyProperties; @@ -75,10 +79,8 
@@ import com.dremio.service.jobs.MultiJobStatusListener; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.DatasetType; -import com.dremio.service.namespace.dataset.proto.ParentDataset; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.namespace.dataset.proto.ViewFieldType; import com.dremio.service.reflection.proto.DataPartition; @@ -100,6 +102,7 @@ import com.dremio.service.reflection.proto.RefreshId; import com.dremio.service.reflection.store.MaterializationStore; import com.dremio.service.reflection.store.ReflectionGoalsStore; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; @@ -109,8 +112,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import io.protostuff.LinkedBuffer; -import io.protostuff.ProtostuffIOUtil; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * Helper functions for Reflection management @@ -118,68 +121,23 @@ public class ReflectionUtils { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReflectionUtils.class); - /** - * @return true if the dataset type is PHYSICAL_* - */ - public static boolean isPhysicalDataset(DatasetType t) { - return t == DatasetType.PHYSICAL_DATASET || - t == DatasetType.PHYSICAL_DATASET_SOURCE_FILE || - t == DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER || - t == DatasetType.PHYSICAL_DATASET_HOME_FILE || - t == DatasetType.PHYSICAL_DATASET_HOME_FOLDER; - } - public static boolean isHomeDataset(DatasetType t) { return t == DatasetType.PHYSICAL_DATASET_HOME_FILE || t == DatasetType.PHYSICAL_DATASET_HOME_FOLDER; } - public static Integer computeDatasetHash(DatasetConfig dataset, NamespaceService namespaceService, boolean ignorePds) throws NamespaceException { - Queue q = new LinkedList<>(); - q.add(dataset); - int hash = 1; - boolean isFirst = true; - while (!q.isEmpty()) { - dataset = q.poll(); - if (isPhysicalDataset(dataset.getType())) { - if (!ignorePds || isFirst) { - hash = 31 * hash + (dataset.getRecordSchema() == null ? 
1 : dataset.getRecordSchema().hashCode()); - } - } else { - int schemaHash = 0; - if (isFirst) { - final List types = new ArrayList<>(); - dataset.getVirtualDataset().getSqlFieldsList().forEach(type -> { - if (type.getSerializedField() != null) { - ViewFieldType newType = new ViewFieldType(); - ProtostuffIOUtil.mergeFrom(ProtostuffIOUtil.toByteArray(type, ViewFieldType.getSchema(), LinkedBuffer.allocate()), newType, ViewFieldType.getSchema()); - types.add(newType.setSerializedField(null)); - } else { - types.add(type); - } - }); - schemaHash = types.hashCode(); - } - hash = 31 * hash + dataset.getVirtualDataset().getSql().hashCode() + schemaHash; - for (ParentDataset parent : dataset.getVirtualDataset().getParentsList()) { - int size = parent.getDatasetPathList().size(); - if( !(size > 1 && parent.getDatasetPathList().get(size-1).equalsIgnoreCase("external_query"))) { - q.add(namespaceService.getDataset(new NamespaceKey(parent.getDatasetPathList()))); - } - } - } - isFirst = false; - } - return hash; - } + @WithSpan + public static JobId submitRefreshJob(JobsService jobsService, CatalogService catalogService, ReflectionEntry entry, + Materialization materialization, String sql, JobStatusListener jobStatusListener) { - public static JobId submitRefreshJob(JobsService jobsService, NamespaceService namespaceService, ReflectionEntry entry, - Materialization materialization, String sql, JobStatusListener jobStatusListener) { final SqlQuery query = SqlQuery.newBuilder() .setSql(sql) .addAllContext(Collections.emptyList()) .setUsername(SYSTEM_USERNAME) .build(); - NamespaceKey datasetPathList = new NamespaceKey(namespaceService.findDatasetByUUID(entry.getDatasetId()).getFullPathList()); + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + DatasetConfig config = CatalogUtil.getDatasetConfig(catalog, entry.getDatasetId()); + + NamespaceKey datasetPathList = new NamespaceKey(config.getFullPathList()); JobProtobuf.MaterializationSummary materializationSummary = JobProtobuf.MaterializationSummary.newBuilder() .setDatasetId(entry.getDatasetId()) .setReflectionId(entry.getId().getId()) @@ -203,6 +161,11 @@ public static JobId submitRefreshJob(JobsService jobsService, NamespaceService n new MultiJobStatusListener(submittedListener, jobStatusListener)) .getJobId(); submittedListener.await(); + Span.current().setAttribute("dremio.reflectionmanager.jobId", jobId.getId()); + Span.current().setAttribute("dremio.reflectionmanager.reflection", getId(entry)); + Span.current().setAttribute("dremio.reflectionmanager.materialization", getId(materialization)); + Span.current().setAttribute("dremio.reflectionmanager.datasetId", entry.getDatasetId()); + Span.current().setAttribute("dremio.reflectionmanager.sql", sql); return jobId; } @@ -225,6 +188,13 @@ public static String getId(ReflectionId reflectionId) { return String.format("reflection %s", reflectionId.getId()); } + /** + * computes a log-friendly external reflection id + */ + public static String getId(ExternalReflection externalReflection) { + return String.format("external reflection %s[%s]", externalReflection.getId(), externalReflection.getName()); + } + /** * computes a log-friendly reflection id */ @@ -301,43 +271,34 @@ static boolean hasMissingPartitions(List partitions, Set return !hosts.containsAll(partitionNames); } - /** - * check with ignorePds true and then also false, for backward compatibility - */ - static boolean hashEquals(int hash, DatasetConfig dataset, NamespaceService ns) throws NamespaceException { - 
return - hash == computeDatasetHash(dataset, ns, true) - || - hash == computeDatasetHash(dataset, ns, false); - } - public static MaterializationDescriptor getMaterializationDescriptor(final ExternalReflection externalReflection, - final NamespaceService namespaceService, final CatalogService catalogService) throws NamespaceException { - DatasetConfig queryDataset = namespaceService.findDatasetByUUID(externalReflection.getQueryDatasetId()); - DatasetConfig targetDataset = namespaceService.findDatasetByUUID(externalReflection.getTargetDatasetId()); + final CatalogService catalogService) throws NamespaceException { + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + DatasetConfig queryDatasetConfig = CatalogUtil.getDatasetConfig(catalog, externalReflection.getQueryDatasetId()); + DatasetConfig targetDatasetConfig = CatalogUtil.getDatasetConfig(catalog, externalReflection.getTargetDatasetId()); - if (queryDataset == null) { + if (queryDatasetConfig == null) { logger.debug("Dataset {} not found", externalReflection.getQueryDatasetId()); return null; } - if (targetDataset == null) { + if (targetDatasetConfig == null) { logger.debug("Dataset {} not found", externalReflection.getQueryDatasetId()); return null; } - if (!hashEquals(externalReflection.getQueryDatasetHash(), queryDataset, namespaceService)) { + if (!DatasetHashUtils.hashEquals(externalReflection.getQueryDatasetHash(), queryDatasetConfig, catalogService)) { logger.debug("Reflection {} excluded because query dataset {} is out of sync", externalReflection.getName(), - PathUtils.constructFullPath(queryDataset.getFullPathList()) + PathUtils.constructFullPath(queryDatasetConfig.getFullPathList()) ); return null; } - if (!hashEquals(externalReflection.getTargetDatasetHash(), targetDataset, namespaceService)) { + if (!DatasetHashUtils.hashEquals(externalReflection.getTargetDatasetHash(), targetDatasetConfig, catalogService)) { logger.debug("Reflection {} excluded because target dataset {} is out of sync", externalReflection.getName(), - PathUtils.constructFullPath(targetDataset.getFullPathList()) + PathUtils.constructFullPath(targetDatasetConfig.getFullPathList()) ); return null; } @@ -357,8 +318,8 @@ public static MaterializationDescriptor getMaterializationDescriptor(final Exter ), externalReflection.getId(), Optional.ofNullable(externalReflection.getTag()).orElse("0"), - queryDataset.getFullPathList(), - targetDataset.getFullPathList(), + queryDatasetConfig.getFullPathList(), + targetDatasetConfig.getFullPathList(), catalogService ); } @@ -614,8 +575,8 @@ public static List getRefreshPath(final JobId jobId, final Path accelera // relative path to the acceleration base path final String path = PathUtils.relativePath( - Path.of(Path.getContainerSpecificRelativePath(Path.of(text.toString()))), - Path.of(Path.getContainerSpecificRelativePath(accelerationBasePath))); + Path.of(Path.getContainerSpecificRelativePath(Path.of(text.toString()))), + Path.of(Path.getContainerSpecificRelativePath(accelerationBasePath))); // extract first 2 components of the path "."" List components = PathUtils.toPathComponents(path); @@ -674,9 +635,13 @@ public static MaterializationMetrics computeMetrics(com.dremio.service.job.JobDe } final int numFiles = fileSizes.size(); - // alternative is to implement QuickSelect to compute the median in linear time - Collections.sort(fileSizes); - final long medianFileSize = fileSizes.get(numFiles / 2); + long medianFileSize = 0; + //prevent an IndexOutOfBoundsException if numFiles is 0 and 
we are trying to get the 0th file + if (numFiles > 0) { + // alternative is to implement QuickSelect to compute the median in linear time + Collections.sort(fileSizes); + medianFileSize = fileSizes.get(numFiles / 2); + } return new MaterializationMetrics() .setFootprint(footprint) @@ -727,4 +692,35 @@ private static boolean areBothListsEqualWithOrder(List list1, List list2) { return list1.equals(list2); } } + + public static VersionedDatasetId getVersionDatasetId(String datasetId) { + if (!VersionedDatasetId.isVersioned(datasetId)) { + return null; + } + VersionedDatasetId versionedDatasetId; + try { + versionedDatasetId = VersionedDatasetId.fromString(datasetId); + } catch (JsonProcessingException e) { + throw new IllegalStateException(e); + } + return versionedDatasetId; + } + + public static Map buildVersionContext(String datasetId) { + Map sourceMappings = new HashMap<>(); + VersionedDatasetId versionedDatasetId = getVersionDatasetId(datasetId); + if (versionedDatasetId == null) { + return sourceMappings; + } + String source = versionedDatasetId.getTableKey().get(0); + if (versionedDatasetId.getVersionContext().getType() == TableVersionType.BRANCH) { + sourceMappings.put(source, VersionContext.ofBranch(versionedDatasetId.getVersionContext().getValue().toString())); + } else if (versionedDatasetId.getVersionContext().getType() == TableVersionType.TAG) { + sourceMappings.put(source, VersionContext.ofTag(versionedDatasetId.getVersionContext().getValue().toString())); + } else if (versionedDatasetId.getVersionContext().getType() == TableVersionType.COMMIT_HASH_ONLY) { + sourceMappings.put(source, VersionContext.ofBareCommit(versionedDatasetId.getVersionContext().getValue().toString())); + } + return sourceMappings; + } + } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/analysis/ReflectionAnalyzer.java b/services/accelerator/src/main/java/com/dremio/service/reflection/analysis/ReflectionAnalyzer.java index 4858ca3fa2..de864c6009 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/analysis/ReflectionAnalyzer.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/analysis/ReflectionAnalyzer.java @@ -35,6 +35,7 @@ import com.dremio.exec.catalog.CatalogUser; import com.dremio.exec.catalog.DremioTable; import com.dremio.exec.catalog.MetadataRequestOptions; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.planner.types.JavaTypeFactoryImpl; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.SchemaConfig; @@ -50,6 +51,7 @@ import com.dremio.service.jobs.JobsService; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.users.SystemUser; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; @@ -60,8 +62,10 @@ import com.google.common.collect.Multimap; /** - * Analyzes acceleration and generates statistics. - */ + * Analyzes a dataset by synchronously running a SQL statement to collect column statistics + * and total row count. Since stats are collected as a preview job which considers only + * the first 10K rows (leaf level limit), the stats can be grossly inaccurate.
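+ * <p>For intuition only (the column names here are hypothetical, and the exact aggregate list is assembled per field by getStatColumnsPerField): for a dataset with columns (city, fare), the submitted preview query would be shaped roughly like + * SELECT COUNT(*), NDV("city"), MIN("fare"), MAX("fare") FROM "src"."tbl"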
+ * */ public class ReflectionAnalyzer { private static final Logger logger = LoggerFactory.getLogger(ReflectionAnalyzer.class); private static final NamespaceKey NONE_PATH = new NamespaceKey(ImmutableList.of("__none")); @@ -109,8 +113,12 @@ public ReflectionAnalyzer(final JobsService jobsService, final CatalogService ca this.bufferAllocator = Preconditions.checkNotNull(allocator, "Buffer allocator is required"); } - public TableStats analyze(final NamespaceKey path) { - final RelDataType rowType = getRowType(path); + public TableStats analyze(final String datasetId) { + final DremioTable table = catalogService.getCatalog(MetadataRequestOptions.of( + SchemaConfig.newBuilder(CatalogUser.from(SystemUser.SYSTEM_USERNAME)).build())) + .getTable(datasetId); + Preconditions.checkNotNull(table, "Unknown datasetId %s", datasetId); + final RelDataType rowType = table.getRowType(JavaTypeFactoryImpl.INSTANCE); final List fields = FluentIterable.from(rowType.getFieldList()) .filter(new Predicate() { @@ -139,7 +147,17 @@ public Iterable apply(@Nullable final RelDataTypeField fie } }); - final String pathString = path.getSchemaPath(); + String pathString = table.getPath().getSchemaPath(); + // Append version context to dataset path if versioned + if (VersionedDatasetId.isVersionedDatasetId(datasetId)) { + final VersionedDatasetId versionedDatasetId; + try { + versionedDatasetId = VersionedDatasetId.fromString(datasetId); + } catch (JsonProcessingException e) { + throw new IllegalStateException(String.format("Unable to parse versionedDatasetId %s", datasetId), e); + } + pathString += " at " + versionedDatasetId.getVersionContext().toSql(); + } final String selection = Joiner.on(", ").join( FluentIterable.from(statColumns) @@ -187,14 +205,6 @@ public ColumnStats apply(@Nullable final RelDataTypeField input) { } } - public RelDataType getRowType(final NamespaceKey path) { - DremioTable table = catalogService.getCatalog(MetadataRequestOptions.of( - SchemaConfig.newBuilder(CatalogUser.from(SystemUser.SYSTEM_USERNAME)).build())) - .getTable(path); - Preconditions.checkNotNull(table, "Unknown dataset %s", path); - return table.getRowType(JavaTypeFactoryImpl.INSTANCE); - } - protected Iterable getStatColumnsPerField(final RelDataTypeField field) { final RelDataTypeFamily family = field.getType().getFamily(); Collection dims = DIMENSIONS.get(family); @@ -322,7 +332,7 @@ public TableStats setCount(Long count) { /** * javadoc */ - static class RField { + public static class RField { private String name; private String typeFamily; @@ -348,7 +358,7 @@ public RField setTypeFamily(String typeFamily) { /** * javadoc */ - static class ColumnStats { + public static class ColumnStats { static final Long DEFAULT_CARDINALITY = -1L; static final Long DEFAULT_COUNT = -1L; static final Double DEFAULT_AVERAGE_LENGTH = -1.0d; diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/compact/CompactRefreshHandler.java b/services/accelerator/src/main/java/com/dremio/service/reflection/compact/CompactRefreshHandler.java index 18d8719e1a..c3e1a496f7 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/compact/CompactRefreshHandler.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/compact/CompactRefreshHandler.java @@ -78,6 +78,7 @@ public class CompactRefreshHandler implements SqlToPlanHandler { private final WriterOptionManager writerOptionManager; private String textPlan; + private Rel drel; public CompactRefreshHandler() { this.writerOptionManager = 
WriterOptionManager.Instance; @@ -158,7 +159,7 @@ public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode final PlanNormalizer planNormalizer = new PlanNormalizer(config); final RelNode initial = getPlan(config, tableSchemaPath, planNormalizer); - final Rel drel = PrelTransformer.convertToDrelMaintainingNames(config, initial); + drel = PrelTransformer.convertToDrelMaintainingNames(config, initial); final List fields = drel.getRowType().getFieldNames(); final long ringCount = config.getContext().getOptions().getOption(PlannerSettings.RING_COUNT); final Rel writerDrel = new WriterRel( @@ -199,6 +200,11 @@ public String getTextPlan() { return textPlan; } + @Override + public Rel getLogicalPlan() { + return drel; + } + private RelNode getPlan(SqlHandlerConfig sqlHandlerConfig, List refreshTablePath, PlanNormalizer planNormalizer) { SqlSelect select = new SqlSelect( SqlParserPos.ZERO, diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/load/LoadMaterializationHandler.java b/services/accelerator/src/main/java/com/dremio/service/reflection/load/LoadMaterializationHandler.java index e47bbd6147..5423d8e580 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/load/LoadMaterializationHandler.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/load/LoadMaterializationHandler.java @@ -22,7 +22,7 @@ import java.util.Optional; import org.apache.calcite.sql.SqlNode; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.exec.catalog.Catalog; import com.dremio.exec.catalog.CatalogUser; diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/materialization/AccelerationStoragePlugin.java b/services/accelerator/src/main/java/com/dremio/service/reflection/materialization/AccelerationStoragePlugin.java index 5d468efba0..910d091352 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/materialization/AccelerationStoragePlugin.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/materialization/AccelerationStoragePlugin.java @@ -143,7 +143,13 @@ public void start() throws IOException { @Override public FileSystem createFS(String userName, OperatorContext operatorContext, boolean metadata) throws IOException { - return new AccelerationFileSystem(super.createFS(userName, operatorContext, metadata)); + FileSystem fs = new AccelerationFileSystem(super.createFS(userName, operatorContext, metadata)); + if (fs.isPdfs()) { + // Logging to help with debugging DX-54664 + IllegalStateException exception = new IllegalStateException("AccelerationStoragePlugin does not support PDFS. 
User: " + userName); + logger.error(exception.getMessage(), exception); + } + return fs; } @Override diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanGenerator.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanGenerator.java index 11ea70814a..2834e068e5 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanGenerator.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanGenerator.java @@ -16,22 +16,33 @@ package com.dremio.service.reflection.refresh; import org.apache.calcite.rel.RelNode; +import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlSelect; +import org.apache.calcite.sql.SqlUnresolvedFunction; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.RelConversionException; import org.apache.calcite.tools.ValidationException; +import org.apache.calcite.util.TimestampString; import com.dremio.common.config.SabotConfig; +import com.dremio.exec.catalog.CatalogUtil; +import com.dremio.exec.catalog.EntityExplorer; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.catalog.VersionedDatasetId; import com.dremio.exec.planner.sql.SqlExceptionHelper; import com.dremio.exec.planner.sql.handlers.ConvertedRelNode; import com.dremio.exec.planner.sql.handlers.PrelTransformer; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; +import com.dremio.exec.planner.sql.parser.SqlVersionedTableCollectionCall; +import com.dremio.exec.planner.sql.parser.SqlVersionedTableMacroCall; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.tablefunctions.TableMacroNames; import com.dremio.exec.work.foreman.ForemanSetupException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.reflection.ReflectionSettings; import com.dremio.service.reflection.ReflectionUtils; @@ -51,7 +62,7 @@ public class ReflectionPlanGenerator { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReflectionPlanGenerator.class); - private final NamespaceService namespaceService; + private final CatalogService catalogService; private final SabotConfig config; private final SqlHandlerConfig sqlHandlerConfig; private final ReflectionGoal goal; @@ -65,18 +76,18 @@ public class ReflectionPlanGenerator { private RefreshDecision refreshDecision; public ReflectionPlanGenerator( - SqlHandlerConfig sqlHandlerConfig, - NamespaceService namespaceService, - SabotConfig config, - ReflectionGoal goal, - ReflectionEntry entry, - Materialization materialization, - ReflectionSettings reflectionSettings, - MaterializationStore materializationStore, - boolean forceFullUpdate, - int stripVersion - ) { - this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespace service required"); + SqlHandlerConfig sqlHandlerConfig, + CatalogService catalogService, + SabotConfig config, + ReflectionGoal goal, + ReflectionEntry entry, + Materialization materialization, + ReflectionSettings reflectionSettings, + MaterializationStore materializationStore, + boolean forceFullUpdate, + int stripVersion + ) { + this.catalogService = 
Preconditions.checkNotNull(catalogService, "Catalog service required"); this.config = Preconditions.checkNotNull(config, "sabot config required"); this.sqlHandlerConfig = Preconditions.checkNotNull(sqlHandlerConfig, "SqlHandlerConfig required."); this.entry = entry; @@ -99,7 +110,7 @@ public RelNode generateNormalizedPlan() { goal, entry, materialization, - namespaceService, + catalogService, config, reflectionSettings, materializationStore, @@ -108,18 +119,41 @@ public RelNode generateNormalizedPlan() { ); // retrieve reflection's dataset - final DatasetConfig dataset = namespaceService.findDatasetByUUID(goal.getDatasetId()); - if (dataset == null) { + final EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, goal.getDatasetId()); + if (datasetConfig == null) { throw new IllegalStateException(String.format("Dataset %s not found for %s", goal.getDatasetId(), ReflectionUtils.getId(goal))); } // generate dataset's plan and viewFieldTypes - final NamespaceKey path = new NamespaceKey(dataset.getFullPathList()); + final NamespaceKey path = new NamespaceKey(datasetConfig.getFullPathList()); + final SqlNode from; + final VersionedDatasetId versionedDatasetId = ReflectionUtils.getVersionDatasetId(goal.getDatasetId()); + if (versionedDatasetId != null) { + // For reflections on versioned datasets, call UDF to resolve to the correct dataset version + final TableVersionType tableVersionType = versionedDatasetId.getVersionContext().getType(); + SqlNode versionSpecifier = SqlLiteral.createCharString(versionedDatasetId.getVersionContext().getValue().toString(), SqlParserPos.ZERO); + if (tableVersionType == TableVersionType.TIMESTAMP) { + versionSpecifier = SqlLiteral.createTimestamp(TimestampString.fromMillisSinceEpoch( + Long.valueOf(versionedDatasetId.getVersionContext().getValue().toString())), 0, SqlParserPos.ZERO); + } + from = new SqlVersionedTableCollectionCall(SqlParserPos.ZERO, + new SqlVersionedTableMacroCall( + new SqlUnresolvedFunction(new SqlIdentifier(TableMacroNames.TIME_TRAVEL, SqlParserPos.ZERO), null, null, null, null, + SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION), + new SqlNode[]{SqlLiteral.createCharString(path.getSchemaPath(), SqlParserPos.ZERO)}, + tableVersionType, + versionSpecifier, + null, SqlParserPos.ZERO) + ); + } else { + from = new SqlIdentifier(path.getPathComponents(), SqlParserPos.ZERO); + } SqlSelect select = new SqlSelect( SqlParserPos.ZERO, new SqlNodeList(SqlParserPos.ZERO), new SqlNodeList(ImmutableList.of(SqlIdentifier.star(SqlParserPos.ZERO)), SqlParserPos.ZERO), - new SqlIdentifier(path.getPathComponents(), SqlParserPos.ZERO), + from, null, null, null, diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanNormalizer.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanNormalizer.java index 15b6dbec7d..e3ff54862b 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanNormalizer.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/ReflectionPlanNormalizer.java @@ -24,7 +24,9 @@ import com.dremio.common.config.SabotConfig; import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.planner.acceleration.ExpansionNode; import 
com.dremio.exec.planner.acceleration.IncrementalUpdateUtils; import com.dremio.exec.planner.acceleration.IncrementalUpdateUtils.MaterializationShuttle; @@ -34,14 +36,15 @@ import com.dremio.exec.planner.sql.handlers.RelTransformer; import com.dremio.exec.planner.sql.handlers.SqlHandlerConfig; import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.store.CatalogService; import com.dremio.options.OptionManager; import com.dremio.proto.model.UpdateId; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.reflection.IncrementalUpdateServiceUtils; import com.dremio.service.reflection.ReflectionOptions; +import com.dremio.service.reflection.ReflectionService; import com.dremio.service.reflection.ReflectionSettings; import com.dremio.service.reflection.ReflectionUtils; import com.dremio.service.reflection.proto.Materialization; @@ -59,7 +62,7 @@ class ReflectionPlanNormalizer implements RelTransformer { private final ReflectionGoal goal; private final ReflectionEntry entry; private final Materialization materialization; - private final NamespaceService namespace; + private final CatalogService catalogService; private final SabotConfig config; private final ReflectionSettings reflectionSettings; private final MaterializationStore materializationStore; @@ -70,21 +73,21 @@ class ReflectionPlanNormalizer implements RelTransformer { private RefreshDecision refreshDecision; public ReflectionPlanNormalizer( - SqlHandlerConfig sqlHandlerConfig, - ReflectionGoal goal, - ReflectionEntry entry, - Materialization materialization, - NamespaceService namespace, - SabotConfig config, - ReflectionSettings reflectionSettings, - MaterializationStore materializationStore, - boolean forceFullUpdate, - int stripVersion) { + SqlHandlerConfig sqlHandlerConfig, + ReflectionGoal goal, + ReflectionEntry entry, + Materialization materialization, + CatalogService catalogService, + SabotConfig config, + ReflectionSettings reflectionSettings, + MaterializationStore materializationStore, + boolean forceFullUpdate, + int stripVersion) { this.sqlHandlerConfig = sqlHandlerConfig; this.goal = goal; this.entry = entry; this.materialization = materialization; - this.namespace = namespace; + this.catalogService = catalogService; this.config = config; this.reflectionSettings = reflectionSettings; this.materializationStore = materializationStore; @@ -122,11 +125,12 @@ public RelNode transform(RelNode relNode) { } final RelNode datasetPlan = removeUpdateColumn(relNode); - final DatasetConfig dataset = namespace.findDatasetByUUID(goal.getDatasetId()); - if (dataset == null) { + EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, goal.getDatasetId()); + if (datasetConfig == null) { throw new IllegalStateException(String.format("Dataset %s not found for %s", goal.getDatasetId(), ReflectionUtils.getId(goal))); } - final ReflectionExpander expander = new ReflectionExpander(datasetPlan, dataset); + final ReflectionExpander expander = new ReflectionExpander(datasetPlan, datasetConfig); final RelNode plan = expander.expand(goal); // we serialize the plan before normalization so we can recreate later. 
@@ -138,7 +142,9 @@ public RelNode transform(RelNode relNode) { // if we detect that the plan is in fact incrementally updateable after stripping and normalizing, we want to strip again with isIncremental flag set to true // to get the proper stripping - if (IncrementalUpdateServiceUtils.extractRefreshSettings(strippedPlan, reflectionSettings).getMethod() == RefreshMethod.INCREMENTAL) { + ReflectionService service = sqlHandlerConfig.getContext().getAccelerationManager().unwrap(ReflectionService.class); + + if (IncrementalUpdateServiceUtils.extractRefreshSettings(strippedPlan, reflectionSettings, service).getMethod() == RefreshMethod.INCREMENTAL) { strippedPlan = factory.strip(plan, mapReflectionType(goal.getType()), true, stripVersion).getNormalized(); } @@ -155,7 +161,7 @@ public RelNode transform(RelNode relNode) { entry, materialization, reflectionSettings, - namespace, + catalogService, materializationStore, plan, strippedPlan, @@ -163,7 +169,8 @@ public RelNode transform(RelNode relNode) { serializerFactory, strictRefresh, forceFullUpdate, - sqlHandlerConfig.getContext().getFunctionRegistry()); + sqlHandlerConfig.getContext().getFunctionRegistry(), + service); if (isIncremental(refreshDecision)) { strippedPlan = strippedPlan.accept(getIncremental(refreshDecision)); diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDecisionMaker.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDecisionMaker.java index 936a4258cf..83b580c2a5 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDecisionMaker.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDecisionMaker.java @@ -20,20 +20,23 @@ import org.apache.calcite.rel.RelNode; import com.dremio.common.exceptions.UserException; +import com.dremio.exec.catalog.CatalogUtil; import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.EntityExplorer; import com.dremio.exec.expr.fn.FunctionImplementationRegistry; import com.dremio.exec.planner.acceleration.PlanHasher; import com.dremio.exec.planner.serialization.LogicalPlanSerializer; import com.dremio.exec.planner.serialization.RelSerializerFactory; +import com.dremio.exec.store.CatalogService; import com.dremio.proto.model.UpdateId; import com.dremio.service.job.proto.ScanPath; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.AccelerationSettings; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.RefreshMethod; +import com.dremio.service.reflection.DatasetHashUtils; import com.dremio.service.reflection.IncrementalUpdateServiceUtils; +import com.dremio.service.reflection.ReflectionService; import com.dremio.service.reflection.ReflectionSettings; -import com.dremio.service.reflection.ReflectionUtils; import com.dremio.service.reflection.proto.Materialization; import com.dremio.service.reflection.proto.ReflectionEntry; import com.dremio.service.reflection.proto.Refresh; @@ -56,25 +59,26 @@ class RefreshDecisionMaker { * @return The refresh decisions made */ static RefreshDecision getRefreshDecision( - ReflectionEntry entry, - Materialization materialization, - ReflectionSettings reflectionSettings, - NamespaceService namespace, - MaterializationStore materializationStore, - RelNode plan, - RelNode strippedPlan, - Iterable requestedTables, - RelSerializerFactory serializerFactory, - boolean strictRefresh, - 
boolean forceFullUpdate, - FunctionImplementationRegistry functionImplementationRegistry) { + ReflectionEntry entry, + Materialization materialization, + ReflectionSettings reflectionSettings, + CatalogService catalogService, + MaterializationStore materializationStore, + RelNode plan, + RelNode strippedPlan, + Iterable requestedTables, + RelSerializerFactory serializerFactory, + boolean strictRefresh, + boolean forceFullUpdate, + FunctionImplementationRegistry functionImplementationRegistry, + ReflectionService service) { final long newSeriesId = System.currentTimeMillis(); final RefreshDecision decision = new RefreshDecision(); // We load settings here to determine what type of update we need to do (full or incremental) - final AccelerationSettings settings = IncrementalUpdateServiceUtils.extractRefreshSettings(strippedPlan, reflectionSettings); + final AccelerationSettings settings = IncrementalUpdateServiceUtils.extractRefreshSettings(strippedPlan, reflectionSettings, service); decision.setAccelerationSettings(settings); @@ -85,13 +89,17 @@ static RefreshDecision getRefreshDecision( @Override public boolean apply(DremioTable table) { final DatasetConfig dataset = table.getDatasetConfig(); - return dataset != null && ReflectionUtils.isPhysicalDataset(dataset.getType()); + return dataset != null && DatasetHashUtils.isPhysicalDataset(dataset.getType()); } }).transform(new Function() { @Override public ScanPath apply(DremioTable table) { final List datasetPath = table.getPath().getPathComponents(); - return new ScanPath().setPathList(datasetPath); + ScanPath path = new ScanPath().setPathList(datasetPath); + if (table.getDataset().getVersionContext() != null) { + path.setVersionContext(table.getDataset().getVersionContext().serialize()); + } + return path; + } }).toList(); decision.setScanPathsList(scanPathsList); @@ -113,20 +121,21 @@ public ScanPath apply(DremioTable table) { final Integer entryDatasetHash; final Integer decisionDatasetHash; + final EntityExplorer catalog = CatalogUtil.getSystemCatalogForReflections(catalogService); try { - final DatasetConfig dataset = namespace.findDatasetByUUID(entry.getDatasetId()); + DatasetConfig datasetConfig = CatalogUtil.getDatasetConfig(catalog, entry.getDatasetId()); if (!strictRefresh) { if (entry.getShallowDatasetHash() == null && refresh != null) { - decisionDatasetHash = ReflectionUtils.computeDatasetHash(dataset, namespace, false); - decision.setDatasetHash(ReflectionUtils.computeDatasetHash(dataset, namespace, true)); + decisionDatasetHash = DatasetHashUtils.computeDatasetHash(datasetConfig, catalogService, strippedPlan, false); + decision.setDatasetHash(DatasetHashUtils.computeDatasetHash(datasetConfig, catalogService, strippedPlan, true)); entryDatasetHash = entry.getDatasetHash(); } else { - decisionDatasetHash = ReflectionUtils.computeDatasetHash(dataset, namespace, true); + decisionDatasetHash = DatasetHashUtils.computeDatasetHash(datasetConfig, catalogService, strippedPlan, true); decision.setDatasetHash(decisionDatasetHash); entryDatasetHash = entry.getShallowDatasetHash(); } } else { - decisionDatasetHash = ReflectionUtils.computeDatasetHash(dataset, namespace, false); + decisionDatasetHash = DatasetHashUtils.computeDatasetHash(datasetConfig, catalogService, strippedPlan, false); decision.setDatasetHash(decisionDatasetHash); entryDatasetHash = entry.getDatasetHash(); } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDoneHandler.java
b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDoneHandler.java index 8ca2e8529c..9cc2cf7bee 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDoneHandler.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshDoneHandler.java @@ -27,6 +27,8 @@ import com.dremio.common.exceptions.UserException; import com.dremio.common.utils.PathUtils; +import com.dremio.common.utils.protos.AttemptId; +import com.dremio.common.utils.protos.AttemptIdUtils; import com.dremio.exec.planner.acceleration.MaterializationExpander; import com.dremio.exec.planner.acceleration.StrippingFactory; import com.dremio.exec.planner.acceleration.UpdateIdWrapper; @@ -47,7 +49,6 @@ import com.dremio.service.jobs.JobsService; import com.dremio.service.jobs.JoinAnalyzer; import com.dremio.service.namespace.NamespaceException; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.reflection.DependencyGraph.DependencyException; import com.dremio.service.reflection.DependencyManager; @@ -81,7 +82,6 @@ public class RefreshDoneHandler { protected static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RefreshDoneHandler.class); private final DependencyManager dependencyManager; - private final NamespaceService namespaceService; private final MaterializationStore materializationStore; private final Supplier expansionHelper; private final Path accelerationBasePath; @@ -99,7 +99,6 @@ public RefreshDoneHandler( Materialization materialization, com.dremio.service.job.JobDetails job, JobsService jobsService, - NamespaceService namespaceService, MaterializationStore materializationStore, DependencyManager dependencyManager, Supplier expansionHelper, @@ -111,7 +110,6 @@ public RefreshDoneHandler( this.materialization = Preconditions.checkNotNull(materialization, "materialization required"); this.job = Preconditions.checkNotNull(job, "jobDetails required"); this.jobsService = Preconditions.checkNotNull(jobsService, "jobsService required"); - this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespace service required"); this.dependencyManager = Preconditions.checkNotNull(dependencyManager, "dependencies required"); this.materializationStore = materializationStore; this.expansionHelper = Preconditions.checkNotNull(expansionHelper, "expansion helper required"); @@ -138,14 +136,16 @@ public RefreshDecision handle() throws NamespaceException, DependencyException { final ByteString planBytes = Preconditions.checkNotNull(decision.getLogicalPlan(), "refresh jobInfo has no logical plan"); - updateDependencies(reflection, lastAttempt.getInfo(), decision, namespaceService, dependencyManager); + updateDependencies(reflection, lastAttempt.getInfo(), decision, dependencyManager); failIfNotEnoughRefreshesAvailable(decision); final JobDetails details = ReflectionUtils.computeJobDetails(lastAttempt); - final boolean dataWritten = Optional.ofNullable(details.getOutputRecords()).orElse(0L) > 0; - if (dataWritten) { - createAndSaveRefresh(details, decision); + boolean dataWritten = Optional.ofNullable(details.getOutputRecords()).orElse(0L) > 0; + + boolean isEmptyReflection = getIsEmptyReflection(decision.getInitialRefresh().booleanValue(), dataWritten, materialization); + if (dataWritten || isEmptyReflection) { + createAndSaveRefresh(details, decision, lastAttempt); } else { logger.debug("materialization {} didn't write 
any data, we won't create a refresh entry", getId(materialization)); } @@ -185,7 +185,7 @@ public RefreshDecision handle() throws NamespaceException, DependencyException { .setLogicalPlanStrippedHash(decision.getLogicalPlanStrippedHash()) .setStripVersion(StrippingFactory.LATEST_STRIP_VERSION) .setSeriesId(decision.getSeriesId()) - .setSeriesOrdinal(dataWritten ? decision.getSeriesOrdinal() : decision.getSeriesOrdinal() - 1) + .setSeriesOrdinal(dataWritten ? decision.getSeriesOrdinal() : Math.max(decision.getSeriesOrdinal() - 1, 0)) .setJoinAnalysis(computeJoinAnalysis()) .setPartitionList(getDataPartitions()); } @@ -195,6 +195,21 @@ public RefreshDecision handle() throws NamespaceException, DependencyException { return decision; } + /** Determines whether the current reflection has 0 rows and is an Iceberg reflection. + * Only the initial refresh of an Iceberg materialization is allowed to be empty. + * Incremental refreshes need no special handling, as the previous refresh in the series can still be used. + * Non-Iceberg materializations are not handled, due to a possible path discrepancy. + * @param initialRefresh whether this is the initial refresh or a later refresh + * @param dataWritten whether any data was written while saving the reflection + * @param materialization the current materialization + * @return true if this is an empty initial refresh of an Iceberg reflection + */ + public static boolean getIsEmptyReflection(boolean initialRefresh, boolean dataWritten, Materialization materialization) { + boolean allowEmptyRefresh = initialRefresh && materialization.getIsIcebergDataset() != null + && materialization.getIsIcebergDataset(); + return !dataWritten && allowEmptyRefresh; + } public RefreshDecision getRefreshDecision(final JobAttempt jobAttempt) { if(jobAttempt.getExtraInfoList() == null || jobAttempt.getExtraInfoList().isEmpty()) { throw new IllegalStateException("No refresh decision found in refresh job."); @@ -231,8 +246,8 @@ private void failIfNotEnoughRefreshesAvailable(final RefreshDecision decision) { } public void updateDependencies(final ReflectionEntry entry, final JobInfo info, final RefreshDecision decision, - final NamespaceService namespaceService, final DependencyManager dependencyManager) throws NamespaceException, DependencyException { - final ExtractedDependencies dependencies = DependencyUtils.extractDependencies(namespaceService, info, decision); + final DependencyManager dependencyManager) throws NamespaceException, DependencyException { - final ExtractedDependencies dependencies = DependencyUtils.extractDependencies(namespaceService, info, decision); + final ExtractedDependencies dependencies = DependencyUtils.extractDependencies(info, decision, catalogService); if (decision.getInitialRefresh()) { if (dependencies.isEmpty()) { throw UserException.reflectionError() @@ -250,13 +265,14 @@ public void updateDependencies(final ReflectionEntry entry, final JobInfo info, dependencyManager.updateDontGiveUp(entry, dependencyResolutionContext); } - private void createAndSaveRefresh(final JobDetails details, final RefreshDecision decision) { + private void createAndSaveRefresh(final JobDetails details, final RefreshDecision decision, final JobAttempt lastAttempt) { final JobId jobId = JobsProtoUtil.toStuff(job.getJobId()); final boolean isFull = decision.getAccelerationSettings().getMethod() == RefreshMethod.FULL; final UpdateId updateId = isFull ?
new UpdateId() : getUpdateId(jobId, jobsService, allocator); final MaterializationMetrics metrics = ReflectionUtils.computeMetrics(job, jobsService, allocator, jobId); final List dataPartitions = ReflectionUtils.computeDataPartitions(JobsProtoUtil.getLastAttempt(job).getInfo()); - final List refreshPath = ReflectionUtils.getRefreshPath(jobId, accelerationBasePath, jobsService, allocator); + final AttemptId attemptId = AttemptIdUtils.fromString(lastAttempt.getAttemptId()); + final List refreshPath = RefreshHandler.getRefreshPath(materialization.getReflectionId(), materialization, decision, attemptId); final boolean isIcebergRefresh = materialization.getIsIcebergDataset() != null && materialization.getIsIcebergDataset(); final String icebergBasePath = ReflectionUtils.getIcebergReflectionBasePath(refreshPath, isIcebergRefresh); Preconditions.checkArgument(!isIcebergRefresh || decision.getInitialRefresh() || icebergBasePath.equals(materialization.getBasePath())); @@ -268,7 +284,6 @@ private void createAndSaveRefresh(final JobDetails details, final RefreshDecisio logger.debug("Refresh written to {} for {}", PathUtils.constructFullPath(refreshPath), ReflectionUtils.getId(materialization)); } - private List getDataPartitions() { return ImmutableList.copyOf(materializationStore.getRefreshes(materialization) .transformAndConcat(new Function>() { diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHandler.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHandler.java index afd53ef4fc..f8d7a698f5 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHandler.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHandler.java @@ -28,7 +28,7 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.util.Pair; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import com.dremio.common.config.SabotConfig; import com.dremio.common.exceptions.UserException; @@ -55,18 +55,19 @@ import com.dremio.exec.planner.sql.handlers.query.SqlToPlanHandler; import com.dremio.exec.planner.sql.parser.SqlRefreshReflection; import com.dremio.exec.proto.UserBitShared; +import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.IcebergTableProps; import com.dremio.exec.store.iceberg.model.IcebergCommandType; import com.dremio.exec.store.sys.accel.AccelerationManager.ExcludedReflectionsProvider; import com.dremio.resource.common.ReflectionRoutingManager; import com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.namespace.space.proto.FolderConfig; import com.dremio.service.namespace.space.proto.SpaceConfig; import com.dremio.service.reflection.ReflectionGoalChecker; +import com.dremio.service.reflection.ReflectionOptions; import com.dremio.service.reflection.ReflectionService; import com.dremio.service.reflection.ReflectionServiceImpl; import com.dremio.service.reflection.ReflectionSettings; @@ -93,12 +94,14 @@ public class RefreshHandler implements SqlToPlanHandler { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RefreshHandler.class); + private
static final int MATERIALIZATION_ID_REFRESH_PATH_OFFSET = 2; public static final String DECISION_NAME = RefreshDecision.class.getName(); public static final Serializer ABSTRACT_SERIALIZER = ProtostuffSerializer.of(RefreshDecision.getSchema()); private final WriterOptionManager writerOptionManager; private String textPlan; + private Rel drel; public RefreshHandler() { this.writerOptionManager = WriterOptionManager.Instance; @@ -151,40 +154,35 @@ public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode } final RefreshHelper helper = ((ReflectionServiceImpl) service).getRefreshHelper(); - final NamespaceService namespace = helper.getNamespace(); final ReflectionSettings reflectionSettings = helper.getReflectionSettings(); final MaterializationStore materializationStore = helper.getMaterializationStore(); + final CatalogService catalogService = helper.getCatalogService(); - // Disable default raw reflections during plan generation for a refresh - config.getConverter().getSubstitutionProvider().disableDefaultRawReflection(); RefreshDecision[] refreshDecisions = new RefreshDecision[1]; + final RelNode initial = determineMaterializationPlan( config, goal, entry, materialization, service.getExcludedReflectionsProvider(), - namespace, + catalogService, config.getContext().getConfig(), reflectionSettings, materializationStore, refreshDecisions); - config.getConverter().getSubstitutionProvider().resetDefaultRawReflection(); + if (!config.getContext().getOptions().getOption(ReflectionOptions.ACCELERATION_ENABLE_DEFAULT_RAW_REFRESH)) { + config.getConverter().getSubstitutionProvider().resetDefaultRawReflection(); + } - final Rel drel = PrelTransformer.convertToDrelMaintainingNames(config, initial); + drel = PrelTransformer.convertToDrelMaintainingNames(config, initial); // Append the attempt number to the table path final UserBitShared.QueryId queryId = config.getContext().getQueryId(); final AttemptId attemptId = AttemptId.of(queryId); - final boolean isIcebergIncrementalRefresh = isIcebergInsertRefresh(materialization, refreshDecisions[0]); - final String materializationPath = isIcebergIncrementalRefresh ?
- materialization.getBasePath() : materialization.getId().getId() + "_" + attemptId.getAttemptNum(); - final String materializationId = materializationPath.split("_")[0]; - final List tablePath = ImmutableList.of( - ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME, - reflectionId.getId(), - materializationPath); + final List tablePath = getRefreshPath(reflectionId, materialization, refreshDecisions[0], attemptId); + final String materializationId = tablePath.get(MATERIALIZATION_ID_REFRESH_PATH_OFFSET).split("_")[0]; List primaryKey = getPrimaryKeyFromMaterializationPlan(initial); if(!CollectionUtils.isEmpty(primaryKey)) { @@ -247,10 +245,12 @@ public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode boolean inheritanceEnabled = config.getContext().getOptions().getOption("planner.reflection_routing_inheritance_enabled").getBoolVal(); if (reflectionRoutingManager.getIsQueue()) { String queueId = datasetConfig.getQueueId(); + final String queueName; if (queueId == null && inheritanceEnabled) { - queueId = getInheritedReflectionRouting(true, datasetConfig, config); + queueName = getInheritedReflectionRouting(true, datasetConfig, config); + } else { + queueName = reflectionRoutingManager.getQueueNameById(queueId); } - final String queueName = reflectionRoutingManager.getQueueNameById(queueId); if (queueName != null && reflectionRoutingManager.checkQueueExists(queueName)) { config.getContext().getSession().setRoutingQueue(queueName); } else if (queueName != null) { @@ -276,6 +276,28 @@ public PhysicalPlan getPlan(SqlHandlerConfig config, String sql, SqlNode sqlNode } } + /** + * Returns the expected refresh path for the current refresh of a reflection. + * @param reflectionId - the ID of the reflection we are finding the path for + * @param materialization - materialization for the reflection + * @param refreshDecision - refresh decision for the reflection + * @param attemptId - current attempt ID + * @return The refresh path represented as a list of strings + */ + public static List getRefreshPath(final ReflectionId reflectionId, final Materialization materialization, + final RefreshDecision refreshDecision, final AttemptId attemptId) { + final boolean isIcebergIncrementalRefresh = isIcebergInsertRefresh(materialization, refreshDecision); + final String materializationPath = isIcebergIncrementalRefresh ? + materialization.getBasePath() : materialization.getId().getId() + "_" + attemptId.getAttemptNum(); + // If you change the position of materializationPath in the return value of this function, + // make sure you update MATERIALIZATION_ID_REFRESH_PATH_OFFSET accordingly. + return ImmutableList.of( + ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME, + reflectionId.getId(), + materializationPath); + } + private List getPrimaryKeyFromMaterializationPlan(RelNode node) { SimpleReflectionFinderVisitor visitor = new SimpleReflectionFinderVisitor(); node.accept(visitor); @@ -297,12 +319,13 @@ private List getPrimaryKeyFromMaterializationPlan(RelNode node) { return null; } - private String getInheritedReflectionRouting(boolean isQueue, DatasetConfig datasetConfig, SqlHandlerConfig config) { + private String getInheritedReflectionRouting(boolean isQueue, DatasetConfig datasetConfig, SqlHandlerConfig config) throws Exception { ImmutableList pathList = ImmutableList.copyOf(datasetConfig.getFullPathList()); // We want to try to inherit the routing queue from the folder or space level.
// The last entry in the path list will be the name of the current dataset, // so we remove it since it isn't a space or folder. pathList = pathList.subList(0, pathList.size() - 1); + ReflectionRoutingManager reflectionRoutingManager = config.getContext().getReflectionRoutingManager(); while (!pathList.isEmpty()) { if (pathList.size() == 1) { try { @@ -310,11 +333,14 @@ private String getInheritedReflectionRouting(boolean isQueue, DatasetConfig data if (isQueue) { String inheritedQueueId = spaceConfig.getQueueId(); if (inheritedQueueId != null) { - return inheritedQueueId; + final String queueName = reflectionRoutingManager.getQueueNameById(inheritedQueueId); + if (queueName != null && reflectionRoutingManager.checkQueueExists(queueName)) { + return queueName; + } } } else { String inheritedEngineName = spaceConfig.getEngineName(); - if (inheritedEngineName != null) { + if (inheritedEngineName != null && reflectionRoutingManager.checkEngineExists(inheritedEngineName)) { return inheritedEngineName; } } @@ -328,11 +354,14 @@ private String getInheritedReflectionRouting(boolean isQueue, DatasetConfig data if (isQueue) { String inheritedQueueId = folderConfig.getQueueId(); if (inheritedQueueId != null) { - return inheritedQueueId; + final String queueName = reflectionRoutingManager.getQueueNameById(inheritedQueueId); + if (queueName != null && reflectionRoutingManager.checkQueueExists(queueName)) { + return queueName; + } } } else { String inheritedEngineName = folderConfig.getEngineName(); - if (inheritedEngineName != null) { + if (inheritedEngineName != null && reflectionRoutingManager.checkEngineExists(inheritedEngineName)) { return inheritedEngineName; } } @@ -360,7 +389,7 @@ private IcebergTableProps getIcebergTableProps(Materialization materialization, return icebergTableProps; } - private boolean isIcebergInsertRefresh(Materialization materialization, RefreshDecision refreshDecision) { + private static boolean isIcebergInsertRefresh(Materialization materialization, RefreshDecision refreshDecision) { return materialization.getIsIcebergDataset() && !refreshDecision.getInitialRefresh() && materialization.getBasePath() != null && @@ -368,24 +397,24 @@ private boolean isIcebergInsertRefresh(Materialization materialization, RefreshD } private RelNode determineMaterializationPlan( - final SqlHandlerConfig sqlHandlerConfig, - ReflectionGoal goal, - ReflectionEntry entry, - Materialization materialization, - ExcludedReflectionsProvider exclusionsProvider, - NamespaceService namespace, - SabotConfig config, - ReflectionSettings reflectionSettings, - MaterializationStore materializationStore, - RefreshDecision[] refreshDecisions) { - - final ReflectionPlanGenerator planGenerator = new ReflectionPlanGenerator(sqlHandlerConfig, namespace, - config, goal, entry, materialization, + final SqlHandlerConfig sqlHandlerConfig, + ReflectionGoal goal, + ReflectionEntry entry, + Materialization materialization, + ExcludedReflectionsProvider exclusionsProvider, + CatalogService catalogService, + SabotConfig config, + ReflectionSettings reflectionSettings, + MaterializationStore materializationStore, + RefreshDecision[] refreshDecisions) { + + // Disable default raw reflections for saving the materialization plan. + // The materialization plan should not include other reflections, otherwise it will fail to match into queries. 
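+    // A rough sketch of the sequence this arranges (method names as used in this file): + //   substitutionProvider.disableDefaultRawReflection();              // done just below + //   RelNode normalizedPlan = planGenerator.generateNormalizedPlan(); // plan sees only the raw dataset + //   substitutionProvider.resetDefaultRawReflection();                // later, in getPlan() or in the support-key branch below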
+ sqlHandlerConfig.getConverter().getSubstitutionProvider().disableDefaultRawReflection();
+ final ReflectionPlanGenerator planGenerator = new ReflectionPlanGenerator(sqlHandlerConfig,
+ catalogService, config, goal, entry, materialization,
reflectionSettings, materializationStore, getForceFullRefresh(materialization), StrippingFactory.LATEST_STRIP_VERSION);
- final RelNode normalizedPlan = planGenerator.generateNormalizedPlan();
-
- // avoid accelerating this CTAS with the materialization itself
// we set exclusions before we get to the logical phase (since toRel() is triggered in SqlToRelConverter, prior to planning).
final List<String> exclusions = ImmutableList.<String>builder()
@@ -394,6 +423,8 @@ private RelNode determineMaterializationPlan(
.build();
sqlHandlerConfig.getConverter().getSession().getSubstitutionSettings().setExclusions(exclusions);
+ final RelNode normalizedPlan = planGenerator.generateNormalizedPlan();
+
RefreshDecision decision = planGenerator.getRefreshDecision();
refreshDecisions[0] = decision;
@@ -411,6 +442,16 @@ private RelNode determineMaterializationPlan(
logger.trace(RelOptUtil.toString(normalizedPlan));
}
+ // If the support key is enabled, allow the REFRESH REFLECTION job to be accelerated by default raw reflections
+ // by generating a second normalized plan with DRRs enabled.
+ if (sqlHandlerConfig.getContext().getOptions().getOption(ReflectionOptions.ACCELERATION_ENABLE_DEFAULT_RAW_REFRESH)) {
+ sqlHandlerConfig.getConverter().getSubstitutionProvider().resetDefaultRawReflection();
+ final ReflectionPlanGenerator acceleratedPlanGenerator = new ReflectionPlanGenerator(sqlHandlerConfig,
+ catalogService, config, goal, entry, materialization,
+ reflectionSettings, materializationStore, getForceFullRefresh(materialization), StrippingFactory.LATEST_STRIP_VERSION);
+ return acceleratedPlanGenerator.generateNormalizedPlan();
+ }
+
return normalizedPlan;
}
@@ -424,4 +465,9 @@ public String getTextPlan() {
return textPlan;
}
+ @Override
+ public Rel getLogicalPlan() {
+ return drel;
+ }
+
}
diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHelper.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHelper.java
index fa689e8cf8..5bd3795437 100644
--- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHelper.java
+++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshHelper.java
@@ -15,7 +15,7 @@
*/
package com.dremio.service.reflection.refresh;
-import com.dremio.service.namespace.NamespaceService;
+import com.dremio.exec.store.CatalogService;
import com.dremio.service.reflection.ReflectionSettings;
import com.dremio.service.reflection.store.MaterializationStore;
@@ -23,7 +23,7 @@
* A package-private interface that allows sharing of ReflectionService assets for the RefreshHandler.
*/ public interface RefreshHelper { - NamespaceService getNamespace(); ReflectionSettings getReflectionSettings(); MaterializationStore getMaterializationStore(); + CatalogService getCatalogService(); } diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshStartHandler.java b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshStartHandler.java index 9217685da8..7c1c4431d3 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshStartHandler.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/refresh/RefreshStartHandler.java @@ -19,10 +19,10 @@ import com.dremio.exec.ExecConstants; import com.dremio.exec.planner.physical.PlannerSettings; +import com.dremio.exec.store.CatalogService; import com.dremio.options.OptionManager; import com.dremio.service.job.proto.JobId; import com.dremio.service.jobs.JobsService; -import com.dremio.service.namespace.NamespaceService; import com.dremio.service.reflection.ReflectionManager.WakeUpCallback; import com.dremio.service.reflection.ReflectionUtils; import com.dremio.service.reflection.WakeUpManagerWhenJobDone; @@ -42,15 +42,15 @@ public class RefreshStartHandler { protected static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RefreshStartHandler.class); - private final NamespaceService namespaceService; + private final CatalogService catalogService; private final JobsService jobsService; private final MaterializationStore materializationStore; private final WakeUpCallback wakeUpCallback; - public RefreshStartHandler(NamespaceService namespaceService, + public RefreshStartHandler(CatalogService catalogService, JobsService jobsService, MaterializationStore materializationStore, WakeUpCallback wakeUpCallback) { - this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespace service required"); + this.catalogService = Preconditions.checkNotNull(catalogService, "Catalog service required"); this.jobsService = Preconditions.checkNotNull(jobsService, "jobs service required"); this.materializationStore = Preconditions.checkNotNull(materializationStore, "materialization store required"); this.wakeUpCallback = Preconditions.checkNotNull(wakeUpCallback, "wakeup callback required"); @@ -79,7 +79,7 @@ public JobId startJob(ReflectionEntry entry, long jobSubmissionTime, OptionManag final String sql = String.format("REFRESH REFLECTION '%s' AS '%s'", reflectionId.getId(), materialization.getId().getId()); - final JobId jobId = ReflectionUtils.submitRefreshJob(jobsService, namespaceService, entry, materialization, sql, + final JobId jobId = ReflectionUtils.submitRefreshJob(jobsService, catalogService, entry, materialization, sql, new WakeUpManagerWhenJobDone(wakeUpCallback, "materialization job done")); logger.debug("Submitted REFRESH REFLECTION job {} for {}", jobId.getId(), ReflectionUtils.getId(entry, materialization)); diff --git a/services/accelerator/src/main/java/com/dremio/service/reflection/store/ReflectionSettingsStore.java b/services/accelerator/src/main/java/com/dremio/service/reflection/store/ReflectionSettingsStore.java index 81d79a26b2..50251e8a40 100644 --- a/services/accelerator/src/main/java/com/dremio/service/reflection/store/ReflectionSettingsStore.java +++ b/services/accelerator/src/main/java/com/dremio/service/reflection/store/ReflectionSettingsStore.java @@ -25,7 +25,7 @@ import com.dremio.datastore.api.LegacyKVStoreProvider; import com.dremio.datastore.api.LegacyStoreBuildingFactory; 
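// Illustrative round-trip for the NamespaceKey to CatalogEntityKey migration below (a sketch;
// the settingsStore variable is hypothetical, while CatalogEntityKey.fromNamespaceKey appears
// elsewhere in this diff):
//   CatalogEntityKey key = CatalogEntityKey.fromNamespaceKey(new NamespaceKey(ImmutableList.of("mySpace", "myTable")));
//   settingsStore.save(key, accelerationSettings);   // persisted under key.toString()
//   AccelerationSettings roundTripped = settingsStore.get(key);
// The wrapped string format stays readable as long as toString() and the String constructor round-trip.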
import com.dremio.datastore.format.Format;
-import com.dremio.service.namespace.NamespaceKey;
+import com.dremio.exec.catalog.CatalogEntityKey;
import com.dremio.service.namespace.dataset.proto.AccelerationSettings;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
@@ -37,27 +37,27 @@ public class ReflectionSettingsStore {
private static final String TABLE_NAME = "reflection_settings";
- private final Supplier<LegacyKVStore<NamespaceKey, AccelerationSettings>> store;
+ private final Supplier<LegacyKVStore<CatalogEntityKey, AccelerationSettings>> store;
public ReflectionSettingsStore(final Provider<LegacyKVStoreProvider> provider) {
Preconditions.checkNotNull(provider, "kvstore provider required");
- store = Suppliers.memoize(new Supplier<LegacyKVStore<NamespaceKey, AccelerationSettings>>() {
+ store = Suppliers.memoize(new Supplier<LegacyKVStore<CatalogEntityKey, AccelerationSettings>>() {
@Override
- public LegacyKVStore<NamespaceKey, AccelerationSettings> get() {
+ public LegacyKVStore<CatalogEntityKey, AccelerationSettings> get() {
return provider.get().getStore(StoreCreator.class);
}
});
}
- public AccelerationSettings get(NamespaceKey key) {
+ public AccelerationSettings get(CatalogEntityKey key) {
return store.get().get(key);
}
- public void save(NamespaceKey key, AccelerationSettings settings) {
+ public void save(CatalogEntityKey key, AccelerationSettings settings) {
store.get().put(key, settings);
}
- public void delete(NamespaceKey key) {
+ public void delete(CatalogEntityKey key) {
store.get().delete(key);
}
@@ -86,19 +86,19 @@ public void setTag(AccelerationSettings value, String tag) {
/**
* {@link ReflectionSettingsStore} creator
*/
- public static final class StoreCreator implements LegacyKVStoreCreationFunction<NamespaceKey, AccelerationSettings> {
+ public static final class StoreCreator implements LegacyKVStoreCreationFunction<CatalogEntityKey, AccelerationSettings> {
@Override
- public LegacyKVStore<NamespaceKey, AccelerationSettings> build(LegacyStoreBuildingFactory factory) {
- return factory.<NamespaceKey, AccelerationSettings>newStore()
+ public LegacyKVStore<CatalogEntityKey, AccelerationSettings> build(LegacyStoreBuildingFactory factory) {
+ return factory.<CatalogEntityKey, AccelerationSettings>newStore()
.name(TABLE_NAME)
- .keyFormat(Format.wrapped(NamespaceKey.class, NamespaceKey::toString, NamespaceKey::new, Format.ofString()))
+ .keyFormat(Format.wrapped(CatalogEntityKey.class, CatalogEntityKey::toString, CatalogEntityKey::new, Format.ofString()))
.valueFormat(Format.ofProtostuff(AccelerationSettings.class))
.versionExtractor(AccelerationSettingsVersionExtractor.class)
.build();
}
}
- public Iterable<Map.Entry<NamespaceKey, AccelerationSettings>> getAll() {
+ public Iterable<Map.Entry<CatalogEntityKey, AccelerationSettings>> getAll() {
return store.get().find();
}
}
diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestDatasetHashUtils.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestDatasetHashUtils.java
new file mode 100644
index 0000000000..dc35f90149
--- /dev/null
+++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestDatasetHashUtils.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright (C) 2017-2019 Dremio Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package com.dremio.service.reflection; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.rel.RelShuttle; +import org.apache.calcite.util.Pair; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + +import com.dremio.exec.calcite.logical.ScanCrel; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; +import com.dremio.exec.planner.acceleration.ExpansionNode; +import com.dremio.exec.planner.acceleration.substitution.SubstitutionUtils; +import com.dremio.exec.store.CatalogService; +import com.dremio.exec.store.TableMetadata; +import com.dremio.service.namespace.NamespaceKey; +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.dataset.proto.DatasetType; +import com.dremio.service.namespace.dataset.proto.ParentDataset; +import com.dremio.service.namespace.dataset.proto.VirtualDataset; +import com.google.common.collect.ImmutableList; + +import io.protostuff.ByteString; + +public class TestDatasetHashUtils { + + private CatalogService catalogService; + private Catalog catalog; + + private TableMetadata t1TableMetadata; + private ScanCrel t1Node; + private DatasetConfig t1Config; + private DremioTable t1Table; + + private DatasetConfig v1Config; + private ExpansionNode v1Node; + private DremioTable v1Table; + + private DatasetConfig v2Config; + private ExpansionNode v2Node; + private DremioTable v2Table; + + @Before + public void setup() { + catalogService = Mockito.mock(CatalogService.class); + catalog = Mockito.mock(Catalog.class); + when(catalogService.getCatalog(Mockito.any())).thenReturn(catalog); + + // Create datasetconfig and relnode for table t1 + t1Config = new DatasetConfig() + .setType(DatasetType.PHYSICAL_DATASET) + .setRecordSchema(ByteString.bytesDefaultValue("recordSchema")) + .setFullPathList(ImmutableList.of("t1")); + t1Table = mock(DremioTable.class); + when(t1Table.getPath()).thenReturn(new NamespaceKey(ImmutableList.of("t1"))); + t1TableMetadata = mock(TableMetadata.class); + when(t1Table.getDataset()).thenReturn(t1TableMetadata); + when(t1Table.getDatasetConfig()).thenReturn(t1Config); + t1Node = mock(ScanCrel.class); + when(t1Node.accept(any(RelShuttle.class))).thenCallRealMethod(); + RelOptTable t1RelOptTable = mock(RelOptTable.class); + when(t1RelOptTable.unwrap(DremioTable.class)).thenReturn(t1Table); + when(t1Node.getTable()).thenReturn(t1RelOptTable); + when(catalog.getTable(new NamespaceKey(ImmutableList.of("t1")))).thenReturn(t1Table); + + // Create datasetconfig and relnode for view v1 + v1Config = new DatasetConfig() + .setType(DatasetType.VIRTUAL_DATASET) + .setFullPathList(ImmutableList.of("v1")); + VirtualDataset virtualDataset = new VirtualDataset() + .setSqlFieldsList(ImmutableList.of()) + .setSql("select * from t1"); + v1Config.setVirtualDataset(virtualDataset); + v1Table = mock(DremioTable.class); + when(v1Table.getDatasetConfig()).thenReturn(v1Config); + v1Node = mock(ExpansionNode.class); + when(v1Node.getPath()).thenReturn(new NamespaceKey(ImmutableList.of("v1"))); + when(v1Node.accept(any(RelShuttle.class))).thenCallRealMethod(); + ParentDataset parentDataset = new 
ParentDataset()
+ .setDatasetPathList(ImmutableList.of("t1"));
+ virtualDataset.setParentsList(ImmutableList.of(parentDataset));
+ when(catalog.getTable(new NamespaceKey("v1"))).thenReturn(v1Table);
+ when(v1Node.getInput(0)).thenReturn(t1Node);
+
+ // Create datasetconfig and relnode for view v2
+ v2Config = new DatasetConfig()
+ .setType(DatasetType.VIRTUAL_DATASET)
+ .setFullPathList(ImmutableList.of("v2"));
+ VirtualDataset virtualDataset2 = new VirtualDataset()
+ .setSqlFieldsList(ImmutableList.of())
+ .setSql("select * from v1");
+ v2Config.setVirtualDataset(virtualDataset2);
+ v2Table = mock(DremioTable.class);
+ when(v2Table.getDatasetConfig()).thenReturn(v2Config);
+ v2Node = mock(ExpansionNode.class);
+ when(v2Node.getPath()).thenReturn(new NamespaceKey(ImmutableList.of("v2")));
+ when(v2Node.accept(any(RelShuttle.class))).thenCallRealMethod();
+ ParentDataset parentDataset2 = new ParentDataset()
+ .setDatasetPathList(ImmutableList.of("v1"));
+ virtualDataset2.setParentsList(ImmutableList.of(parentDataset2));
+ when(catalog.getTable(new NamespaceKey("v2"))).thenReturn(v2Table);
+ when(v2Node.getInput(0)).thenReturn(v1Node);
+ }
+
+ /**
+ * Validate that {@link DatasetHashUtils.ParentDatasetBuilder} builds a mapping of views to parent datasets
+ */
+ @Test
+ public void testParentDatasetBuilderForNonVersionedDatasets() {
+ DatasetHashUtils.ParentDatasetBuilder builder = new DatasetHashUtils.ParentDatasetBuilder(v2Config, v1Node, catalogService);
+ List<Pair<SubstitutionUtils.VersionedPath, DatasetConfig>> parents = builder.getParents(SubstitutionUtils.VersionedPath.of(v2Config.getFullPathList(), null));
+ assertEquals(1, parents.size());
+ assertEquals(SubstitutionUtils.VersionedPath.of(ImmutableList.of("v1"), null), parents.get(0).left);
+ assertEquals(v1Config, parents.get(0).right);
+
+ parents = builder.getParents(SubstitutionUtils.VersionedPath.of(ImmutableList.of("v1"), null));
+ assertEquals(1, parents.size());
+ assertEquals(SubstitutionUtils.VersionedPath.of(ImmutableList.of("t1"), null), parents.get(0).left);
+ assertEquals(t1Config, parents.get(0).right);
+ }
+
+ /**
+ * Validate that {@link DatasetHashUtils.ParentDatasetBuilder} builds a mapping of views to parent datasets
+ */
+ @Test
+ public void testParentDatasetBuilderForVersionedDatasets() {
+ when(t1TableMetadata.getVersionContext()).thenReturn(new TableVersionContext(TableVersionType.BRANCH, "etl"));
+ when(v1Node.getVersionContext()).thenReturn(new TableVersionContext(TableVersionType.BRANCH, "etl"));
+ when(catalog.getTableSnapshot(new NamespaceKey(ImmutableList.of("v1")), new TableVersionContext(TableVersionType.BRANCH, "etl"))).thenReturn(v1Table);
+ when(catalog.getTableSnapshot(new NamespaceKey(ImmutableList.of("t1")), new TableVersionContext(TableVersionType.BRANCH, "etl"))).thenReturn(t1Table);
+
+
+ DatasetHashUtils.ParentDatasetBuilder builder = new DatasetHashUtils.ParentDatasetBuilder(v2Config, v1Node, catalogService);
+ List<Pair<SubstitutionUtils.VersionedPath, DatasetConfig>> parents = builder.getParents(SubstitutionUtils.VersionedPath.of(v2Config.getFullPathList(), null));
+ assertEquals(1, parents.size());
+ assertEquals(SubstitutionUtils.VersionedPath.of(ImmutableList.of("v1"), new TableVersionContext(TableVersionType.BRANCH, "etl")), parents.get(0).left);
+ assertEquals(v1Config, parents.get(0).right);
+
+ parents = builder.getParents(SubstitutionUtils.VersionedPath.of(ImmutableList.of("v1"), new TableVersionContext(TableVersionType.BRANCH, "etl")));
+ assertEquals(1, parents.size());
+ assertEquals(SubstitutionUtils.VersionedPath.of(ImmutableList.of("t1"), new TableVersionContext(TableVersionType.BRANCH, "etl")), parents.get(0).left);
+ assertEquals(t1Config, parents.get(0).right);
+ }
+
+
+
+ /**
+ * Validate that computeDatasetHash using the relNode tree produces the same hash as using the dataset config's
+ * parent dataset.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testViewHashForNonVersionedDatasets() throws Exception {
+
+ assertEquals(DatasetHashUtils.computeDatasetHash(v1Config, catalogService, false),
+ DatasetHashUtils.computeDatasetHash(v1Config, catalogService, t1Node, false));
+
+ assertEquals(DatasetHashUtils.computeDatasetHash(v1Config, catalogService, true),
+ DatasetHashUtils.computeDatasetHash(v1Config, catalogService, t1Node, true));
+ }
+
+ /**
+ * Validate that computeDatasetHash using the relNode tree produces the same hash as using the dataset config's
+ * parent dataset.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testNestedViewHashForNonVersionedDatasets() throws Exception {
+
+ assertEquals(DatasetHashUtils.computeDatasetHash(v2Config, catalogService, false),
+ DatasetHashUtils.computeDatasetHash(v2Config, catalogService, v1Node, false));
+
+ assertEquals(DatasetHashUtils.computeDatasetHash(v2Config, catalogService, true),
+ DatasetHashUtils.computeDatasetHash(v2Config, catalogService, v1Node, true));
+ }
+
+ /**
+ * Validate that computeDatasetHash using the relNode tree produces the same hash as using the dataset config's
+ * parent dataset.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testTableHashForNonVersionedDatasets() throws Exception {
+ assertEquals(DatasetHashUtils.computeDatasetHash(t1Config, catalogService, false),
+ DatasetHashUtils.computeDatasetHash(t1Config, catalogService, t1Node, false));
+
+ assertEquals(DatasetHashUtils.computeDatasetHash(t1Config, catalogService, true),
+ DatasetHashUtils.computeDatasetHash(t1Config, catalogService, t1Node, true));
+ }
+
+ /**
+ * Validate that hashes are different for the same table from different branches
+ */
+ @Test
+ public void testTableHashForVersionedDatasets() {
+ // Update t1 to be branch main
+ when(t1TableMetadata.getVersionContext()).thenReturn(new TableVersionContext(TableVersionType.BRANCH, "main"));
+ when(catalog.getTableSnapshot(new NamespaceKey(ImmutableList.of("t1")), new TableVersionContext(TableVersionType.BRANCH, "main"))).thenReturn(t1Table);
+
+ // Create datasetconfig and relnode for table t1 at branch dev
+ DatasetConfig t1ConfigAtDev = new DatasetConfig()
+ .setType(DatasetType.PHYSICAL_DATASET)
+ .setRecordSchema(ByteString.bytesDefaultValue("recordSchema_dev"));
+ DremioTable t1TableAtDev = mock(DremioTable.class);
+ when(t1TableAtDev.getPath()).thenReturn(new NamespaceKey(ImmutableList.of("t1")));
+ TableMetadata t1TableMetadataAtDev = mock(TableMetadata.class);
+ when(t1TableAtDev.getDataset()).thenReturn(t1TableMetadataAtDev);
+ when(t1TableAtDev.getDatasetConfig()).thenReturn(t1ConfigAtDev);
+ ScanCrel t1NodeAtDev = mock(ScanCrel.class);
+ when(t1NodeAtDev.accept(any(RelShuttle.class))).thenCallRealMethod();
+ RelOptTable t1RelOptTableAtDev = mock(RelOptTable.class);
+ when(t1RelOptTableAtDev.unwrap(DremioTable.class)).thenReturn(t1TableAtDev);
+ when(t1NodeAtDev.getTable()).thenReturn(t1RelOptTableAtDev);
+ when(catalog.getTableSnapshot(new NamespaceKey(ImmutableList.of("t1")), new TableVersionContext(TableVersionType.BRANCH, "dev"))).thenReturn(t1TableAtDev);
+
+ assertNotEquals(DatasetHashUtils.computeDatasetHash(t1Config, catalogService, t1Node, false),
+ DatasetHashUtils.computeDatasetHash(t1ConfigAtDev, catalogService, t1NodeAtDev, false));
+
+ assertNotEquals(DatasetHashUtils.computeDatasetHash(t1Config, catalogService, t1Node, true),
+ DatasetHashUtils.computeDatasetHash(t1ConfigAtDev, catalogService, t1NodeAtDev, true));
+ }
+
+}
diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestMaterializationCache.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestMaterializationCache.java
index 21d81248e7..30195f5891 100644
--- a/services/accelerator/src/test/java/com/dremio/service/reflection/TestMaterializationCache.java
+++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestMaterializationCache.java
@@ -21,6 +21,7 @@ import static org.mockito.Mockito.when;
import java.util.Arrays;
+import java.util.Collections;
import org.junit.Rule;
import org.junit.Test;
@@ -36,8 +37,9 @@ import com.dremio.service.namespace.NamespaceService;
import com.dremio.service.reflection.proto.Materialization;
import com.dremio.service.reflection.proto.MaterializationId;
+import com.dremio.test.DremioTest;
-public class TestMaterializationCache {
+public class TestMaterializationCache extends DremioTest {
@Rule public MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
@@ -49,11 +51,12 @@ public class TestMaterializationCache {
/**
* Test in case materialization expansion (deserialization) takes too long time
* {@code MaterializationCache.update(Materialization m)} does not race with
- * {@code MaterializationCache.refresh()} and falls into infinite loop.
+ * {@code MaterializationCache.refresh()} and fall into an infinite loop.
+ * (the test will time out in that case)
*/
- @Test (timeout = 10000)
+ @Test
public void testMaterializationCacheUpdate() throws Exception {
- MaterializationCache materializationCache = spy(new MaterializationCache(provider, namespaceService, reflectionStatusService, catalogService));
+ MaterializationCache materializationCache = spy(new MaterializationCache(provider, reflectionStatusService, catalogService));
Materialization m1 = new Materialization();
Materialization m2 = new Materialization();
CachedMaterializationDescriptor descriptor = mock(CachedMaterializationDescriptor.class);
@@ -65,39 +68,27 @@ public void testMaterializationCacheUpdate() throws Exception {
// For materializationCache.refresh()
when(provider.expand(m1)).thenReturn(descriptor);
when(provider.getValidMaterializations()).thenReturn(Arrays.asList(m1));
- when(provider.getExternalReflections()).thenReturn(Arrays.asList());
+ when(provider.getExternalReflections()).thenReturn(Collections.emptyList());
// For materializationCache.update(m2);
when(provider.expand(m2)).thenAnswer(new Answer<CachedMaterializationDescriptor>() {
@Override
public CachedMaterializationDescriptor answer(InvocationOnMock invocation) throws InterruptedException {
// Simulate MaterializationCache.update(Materialization m) takes long time during expansion
- Thread.sleep(3000);
+ // and during this time the cache entry has been refreshed. Before DX-54194's fix this would
+ // cause MaterializationCache.update(Materialization m) to run into an infinite loop.
+ materializationCache.resetCache();
+ materializationCache.refresh();
+ // The sleep here is to avoid exhausting CPU time in case an infinite loop happens.
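// Rough interleaving this Answer simulates (descriptive summary, not code from the change):
//   1. update(m2) starts and snapshots the current cache state
//   2. expand(m2) lands here: resetCache() + refresh() swap in a fresh cache state
//   3. update(m2) then finds its snapshot stale; before the DX-54194 fix it retried forever,
//      with the fix it converges and the assertion at the end of the test passes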
+ try { + Thread.sleep(100); + } catch (InterruptedException e) { + } return descriptor; } }); - Runnable refreshTask = new Runnable() { - @Override - public void run() { - while(true) { - materializationCache.resetCache(); - materializationCache.refresh(); - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - return; - } - } - } - }; - - Thread refreshThread = new Thread(refreshTask); - refreshThread.start(); - materializationCache.update(m2); assertThat(materializationCache.get(mId2)).isEqualTo(descriptor); - - refreshThread.interrupt(); } } diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionManager.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionManager.java index 64baeadf38..fd0eec5ab4 100644 --- a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionManager.java +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionManager.java @@ -53,6 +53,13 @@ import org.mockito.MockedStatic; import org.mockito.Mockito; +import com.dremio.exec.catalog.CachingCatalog; +import com.dremio.exec.catalog.Catalog; +import com.dremio.exec.catalog.CatalogEntityKey; +import com.dremio.exec.catalog.DremioTable; +import com.dremio.exec.catalog.MetadataRequestOptions; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.catalog.TableVersionType; import com.dremio.exec.server.SabotContext; import com.dremio.exec.store.CatalogService; import com.dremio.exec.store.dfs.FileSelection; @@ -73,6 +80,7 @@ import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.RefreshMethod; import com.dremio.service.reflection.materialization.AccelerationStoragePlugin; +import com.dremio.service.reflection.materialization.AccelerationStoragePluginConfig; import com.dremio.service.reflection.proto.Materialization; import com.dremio.service.reflection.proto.MaterializationId; import com.dremio.service.reflection.proto.MaterializationState; @@ -351,6 +359,10 @@ public void testSyncDoesNotUpdateReflectionWhenOnlyBoostIsToggle(){ DatasetConfig datasetConfig = new DatasetConfig(); + final Catalog catalog = mock(Catalog.class); + final DremioTable dremioTable = mock(DremioTable.class); + when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + when(catalog.getTable(dataSetId)).thenReturn(dremioTable); Subject subject = new Subject(); when(subject.contextFactory.create()).thenReturn(subject.dependencyResolutionContext); @@ -363,7 +375,7 @@ public void testSyncDoesNotUpdateReflectionWhenOnlyBoostIsToggle(){ when(subject.materializationStore.getAllExpiredWhen(anyLong())).thenReturn(emptyList()); when(subject.materializationStore.getDeletableEntriesModifiedBefore(anyLong(), anyInt())).thenReturn(emptyList()); - when(subject.namespaceService.findDatasetByUUID(dataSetId)).thenReturn(datasetConfig); + when(subject.catalogService.getCatalog(any(MetadataRequestOptions.class))).thenReturn(catalog); when(subject.materializationStore.getAllMaterializations()).thenReturn(emptyList()); when(subject.optionManager.getOption(ReflectionOptions.NO_DEPENDENCY_REFRESH_PERIOD_SECONDS)).thenReturn(5555L); @@ -422,7 +434,10 @@ public void testSyncDoesUpdateReflectionWhenChanged(){ .setArrowCachingEnabled(false); DatasetConfig datasetConfig = new DatasetConfig(); - + final Catalog catalog = mock(Catalog.class); + final DremioTable dremioTable = mock(DremioTable.class); + 
when(dremioTable.getDatasetConfig()).thenReturn(datasetConfig); + when(catalog.getTable(dataSetId)).thenReturn(dremioTable); Subject subject = new Subject(); when(subject.contextFactory.create()).thenReturn(subject.dependencyResolutionContext); @@ -437,7 +452,6 @@ public void testSyncDoesUpdateReflectionWhenChanged(){ when(subject.materializationStore.getAllDone(reflectionId)).thenReturn(singletonList(materialization)); when(subject.materializationStore.getAllMaterializations()).thenReturn(emptyList()); - when(subject.namespaceService.findDatasetByUUID(dataSetId)).thenReturn(datasetConfig); when(subject.optionManager.getOption(ReflectionOptions.NO_DEPENDENCY_REFRESH_PERIOD_SECONDS)).thenReturn(5555L); @@ -459,6 +473,7 @@ public void testSyncDoesUpdateReflectionWhenChanged(){ when(subject.userStore.getAllNotDeleted()).thenReturn(singletonList(reflectionGoal)); when(subject.userStore.getDeletedBefore(anyLong())).thenReturn(emptyList()); when(subject.userStore.getModifiedOrCreatedSince(anyLong())).thenReturn(singletonList(reflectionGoal)); + when(subject.catalogService.getCatalog(any(MetadataRequestOptions.class))).thenReturn(catalog); subject.reflectionManager.sync(); @@ -524,7 +539,7 @@ public void testSyncDoesPurgeOrphans() { when(subject.materializationStore.getRefreshesExclusivelyOwnedBy(materialization1)).thenReturn(Mockito.mock(Collection.class)); when(subject.materializationStore.getRefreshesExclusivelyOwnedBy(materialization2)).thenReturn(Mockito.mock(Collection.class)); when(subject.materializationStore.getRefreshesExclusivelyOwnedBy(materialization3)).thenReturn(Mockito.mock(Collection.class)); - + when(subject.contextFactory.create()).thenReturn(subject.dependencyResolutionContext); assertEquals(MaterializationState.DONE, materialization1.getState()); assertEquals(MaterializationState.DEPRECATED, materialization2.getState()); @@ -586,6 +601,10 @@ public void testIcebergIncrementalRefreshJobFailedAfterCommit() throws Exception when(snapshot.snapshotId()).thenReturn(newSnapshotId); when(icebergTable.manageSnapshots()).thenReturn(manageSnapshots); when(manageSnapshots.rollbackTo(anyLong())).thenReturn(manageSnapshots); + AccelerationStoragePluginConfig pluginConfig = Mockito.mock(AccelerationStoragePluginConfig.class); + when(pluginConfig.getPath()).thenReturn(Path.of(".")); + when(subject.accelerationPlugin.getConfig()).thenReturn(pluginConfig); + when(subject.catalogService.getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME)).thenReturn(subject.accelerationPlugin); // Test subject.reflectionManager.handleEntry(entry, 0, new ReflectionManager.EntryCounts(), subject.dependencyResolutionContext); @@ -670,6 +689,10 @@ public void testIcebergIncrementalRefreshJobFailedBeforeCommit() throws Exceptio when(snapshot.snapshotId()).thenReturn(newSnapshotId); when(icebergTable.manageSnapshots()).thenReturn(manageSnapshots); when(manageSnapshots.rollbackTo(anyLong())).thenReturn(manageSnapshots); + AccelerationStoragePluginConfig pluginConfig = Mockito.mock(AccelerationStoragePluginConfig.class); + when(pluginConfig.getPath()).thenReturn(Path.of(".")); + when(subject.accelerationPlugin.getConfig()).thenReturn(pluginConfig); + when(subject.catalogService.getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME)).thenReturn(subject.accelerationPlugin); // Test subject.reflectionManager.handleEntry(entry, 0, new ReflectionManager.EntryCounts(), @@ -716,6 +739,10 @@ public void testStartRefreshForIcebergReflection() throws Exception { 
when(icebergModel.getIcebergTable(any())).thenReturn(icebergTable); when(icebergTable.currentSnapshot()).thenReturn(snapshot); when(snapshot.snapshotId()).thenReturn(snapshotId); + AccelerationStoragePluginConfig pluginConfig = Mockito.mock(AccelerationStoragePluginConfig.class); + when(pluginConfig.getPath()).thenReturn(Path.of(".")); + when(subject.accelerationPlugin.getConfig()).thenReturn(pluginConfig); + when(subject.catalogService.getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME)).thenReturn(subject.accelerationPlugin); // Test subject.reflectionManager.handleEntry(reflectionEntry, 0, new ReflectionManager.EntryCounts(), @@ -803,7 +830,7 @@ public void testRefreshDoneHandlerException() { ReflectionServiceImpl.PlanCacheInvalidationHelper helper = Mockito.mock(ReflectionServiceImpl.PlanCacheInvalidationHelper.class); RefreshDoneHandler handler = new RefreshDoneHandler(entry, m, job, subject.jobsService, - subject.namespaceService, subject.materializationStore, subject.dependencyManager, subject.expansionHelper, + subject.materializationStore, subject.dependencyManager, subject.expansionHelper, Path.of("."), subject.allocator, subject.catalogService, subject.dependencyResolutionContext); when(subject.planCacheInvalidationHelper.get()).thenReturn(helper); @@ -824,7 +851,7 @@ public void testCachedReflectionSettings() { DependencyResolutionContextFactory factory = new DependencyResolutionContextFactory(settings, requestsStore, subject.optionManager, subject.reflectionStore); DependencyResolutionContext context = factory.create(); - final NamespaceKey key = new NamespaceKey(ImmutableList.of("root", "path")); + final CatalogEntityKey key = CatalogEntityKey.fromNamespaceKey(new NamespaceKey(ImmutableList.of("root", "path"))); final AccelerationSettings testSettings = new AccelerationSettings() .setMethod(RefreshMethod.INCREMENTAL); when(settings.getReflectionSettings(key)).thenReturn(testSettings); @@ -847,6 +874,61 @@ public void testCachedReflectionSettings() { } + /** + * Verifies reflection settings cache on two different datasets with the same path but different version context + */ + @Test + public void testCachedVersionedReflectionSettings() { + + Subject subject = new Subject(); + when(subject.optionManager.getOption(ReflectionOptions.REFLECTION_MANAGER_SYNC_CACHE)).thenReturn(true); + ReflectionSettings settings = Mockito.mock(ReflectionSettings.class); + RefreshRequestsStore requestsStore = Mockito.mock(RefreshRequestsStore.class); + DependencyResolutionContextFactory factory = new DependencyResolutionContextFactory(settings, requestsStore, + subject.optionManager, subject.reflectionStore); + DependencyResolutionContext context = factory.create(); + + final CatalogEntityKey keyAtProd = CatalogEntityKey.newBuilder().keyComponents(ImmutableList.of("root", "path")) + .tableVersionContext(new TableVersionContext(TableVersionType.BRANCH, "prod")).build(); + final AccelerationSettings testSettingsAtProd = new AccelerationSettings() + .setMethod(RefreshMethod.INCREMENTAL).setRefreshPeriod(11111L); + when(settings.getReflectionSettings(keyAtProd)).thenReturn(testSettingsAtProd); + + final CatalogEntityKey keyAtStaging = CatalogEntityKey.newBuilder().keyComponents(ImmutableList.of("root", "path")) + .tableVersionContext(new TableVersionContext(TableVersionType.BRANCH, "staging")).build(); + final AccelerationSettings testSettingsAtStaging = new AccelerationSettings() + .setMethod(RefreshMethod.FULL).setRefreshPeriod(222222L); + 
when(settings.getReflectionSettings(keyAtStaging)).thenReturn(testSettingsAtStaging); + + // Test + AccelerationSettings accelerationSettings = context.getReflectionSettings(keyAtProd); + assertEquals(testSettingsAtProd.getMethod(), accelerationSettings.getMethod()); + assertEquals(testSettingsAtProd.getRefreshPeriod(), accelerationSettings.getRefreshPeriod()); + verify(settings, times(1)).getReflectionSettings(keyAtProd); + accelerationSettings = context.getReflectionSettings(keyAtProd); + assertEquals(testSettingsAtProd.getMethod(), accelerationSettings.getMethod()); + assertEquals(testSettingsAtProd.getRefreshPeriod(), accelerationSettings.getRefreshPeriod()); + verify(settings, times(1)).getReflectionSettings(keyAtProd); // Retrieved from cache + assertTrue(context.hasAccelerationSettingsChanged()); + + accelerationSettings = context.getReflectionSettings(keyAtStaging); + assertEquals(testSettingsAtStaging.getMethod(), accelerationSettings.getMethod()); + assertEquals(testSettingsAtStaging.getRefreshPeriod(), accelerationSettings.getRefreshPeriod()); + verify(settings, times(1)).getReflectionSettings(keyAtStaging); + accelerationSettings = context.getReflectionSettings(keyAtStaging); + assertEquals(testSettingsAtStaging.getMethod(), accelerationSettings.getMethod()); + assertEquals(testSettingsAtStaging.getRefreshPeriod(), accelerationSettings.getRefreshPeriod()); + verify(settings, times(1)).getReflectionSettings(keyAtStaging); // Retrieved from cache + + // Verify hasAccelerationSettingsChanged + context = factory.create(); + assertFalse(context.hasAccelerationSettingsChanged()); + when(settings.getAllHash()).thenReturn(123); + context = factory.create(); + assertTrue(context.hasAccelerationSettingsChanged()); + + } + @Test public void testSyncWithDependencyResolutionContext() { @@ -860,7 +942,7 @@ public void testSyncWithDependencyResolutionContext() { ReflectionManager reflectionManager = new ReflectionManager( subject.sabotContext, subject.jobsService, - subject.namespaceService, + subject.catalogService, subject.optionManager, subject.userStore, subject.reflectionStore, @@ -873,13 +955,10 @@ public void testSyncWithDependencyResolutionContext() { subject.expansionHelper, subject.planCacheInvalidationHelper, subject.allocator, - subject.accelerationPlugin, - Path.of("."), subject.reflectionGoalChecker, subject.refreshStartHandler, - subject.catalogService, factory - ); + ); ReflectionId reflectionId = new ReflectionId("r_id"); @@ -927,7 +1006,8 @@ public void testRefreshDoneHandlerLastRefreshDuration() throws DependencyGraph.D .setId(materializationId) .setReflectionId(reflectionId) .setInitRefreshSubmit(System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(1)) - .setState(MaterializationState.RUNNING); + .setState(MaterializationState.RUNNING) + .setIsIcebergDataset(false); ReflectionEntry entry = new ReflectionEntry() .setId(reflectionId) @@ -960,6 +1040,7 @@ public void testRefreshDoneHandlerLastRefreshDuration() throws DependencyGraph.D .setInfo(jobInfo) .addExtraInfo(extraInfo) .setStats(JobProtobuf.JobStats.newBuilder().setOutputRecords(4L).build()) + .setAttemptId("1be6174f-8e89-9244-643a-565b81142700") .build(); JobDetails job = JobDetails.newBuilder() @@ -970,7 +1051,7 @@ public void testRefreshDoneHandlerLastRefreshDuration() throws DependencyGraph.D Subject subject = new Subject(); RefreshDoneHandler handler = new RefreshDoneHandler(entry, m, job, subject.jobsService, - subject.namespaceService, subject.materializationStore, subject.dependencyManager, 
subject.expansionHelper, + subject.materializationStore, subject.dependencyManager, subject.expansionHelper, Path.of("."), subject.allocator, subject.catalogService, subject.dependencyResolutionContext); com.dremio.service.reflection.proto.JobDetails jobd = new com.dremio.service.reflection.proto.JobDetails(); @@ -1008,6 +1089,48 @@ public void testRefreshDoneHandlerLastRefreshDuration() throws DependencyGraph.D assertEquals(m.getLastRefreshFinished() - m.getInitRefreshSubmit(), m.getLastRefreshDurationMillis().longValue()); } + + /** + * Verifies that the same caching catalog is used across all table lookups when handling deleted datasets + */ + @Test + public void testHandleDeletedDatasetsUsesCachingCatalog(){ + final String datasetId = "d1"; + ReflectionGoal reflectionGoal = new ReflectionGoal() + .setId(new ReflectionId("r_id")) + .setDatasetId(datasetId) + .setType(ReflectionType.EXTERNAL) + .setState(ReflectionGoalState.DISABLED); + + ReflectionGoal reflectionGoal2 = new ReflectionGoal() + .setId(new ReflectionId("r_id2")) + .setDatasetId(datasetId) + .setType(ReflectionType.EXTERNAL) + .setState(ReflectionGoalState.DISABLED); + + final Catalog catalog = mock(Catalog.class); + CachingCatalog cachingCatalog = new CachingCatalog(catalog); + final DremioTable dremioTable = mock(DremioTable.class); + when(dremioTable.getPath()).thenReturn(new NamespaceKey("Nessie.Table")); + when(catalog.getTable(datasetId)).thenReturn(dremioTable); + + Subject subject = new Subject(); + when(subject.contextFactory.create()).thenReturn(subject.dependencyResolutionContext); + when(subject.externalReflectionStore.getExternalReflections()).thenReturn(emptyList()); + when(subject.optionManager.getOption(ReflectionOptions.NO_DEPENDENCY_REFRESH_PERIOD_SECONDS)).thenReturn(5555L); + when(subject.reflectionStore.find()).thenReturn(emptyList()); + + when(subject.userStore.getAllNotDeleted()).thenReturn(ImmutableList.of(reflectionGoal, reflectionGoal2)); + when(subject.userStore.getDeletedBefore(anyLong())).thenReturn(emptyList()); + when(subject.userStore.getModifiedOrCreatedSince(anyLong())).thenReturn(singletonList(reflectionGoal)); + when(subject.catalogService.getCatalog(any(MetadataRequestOptions.class))).thenReturn(cachingCatalog); + + subject.reflectionManager.sync(); + + // ASSERT + // Both reflections built on d1 but only 1 catalog getTable call was made. 
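// Illustrative expectation (sketch): CachingCatalog memoizes table lookups, roughly
//   cachingCatalog.getTable("d1");   // first call delegates to the wrapped catalog
//   cachingCatalog.getTable("d1");   // repeat call is served from the cache
// hence exactly one delegate invocation is expected on the underlying mock: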
+ verify(catalog, times(1)).getTable(datasetId); + } } class Subject { @@ -1033,26 +1156,23 @@ class Subject { @VisibleForTesting DependencyResolutionContextFactory contextFactory = Mockito.mock(DependencyResolutionContextFactory.class); @VisibleForTesting DependencyResolutionContext dependencyResolutionContext = Mockito.mock(DependencyResolutionContext.class); @VisibleForTesting ReflectionManager reflectionManager = new ReflectionManager( - sabotContext, - jobsService, - namespaceService, - optionManager, - userStore, - reflectionStore, - externalReflectionStore, - materializationStore, - dependencyManager, - descriptorCache, - reflectionsToUpdate, - wakeUpCallback, - expansionHelper, - planCacheInvalidationHelper, - allocator, - accelerationPlugin, - Path.of("."), //TODO maybe we want to use JIMFS here, - reflectionGoalChecker, - refreshStartHandler, - catalogService, - contextFactory - ); + sabotContext, + jobsService, + catalogService, + optionManager, + userStore, + reflectionStore, + externalReflectionStore, + materializationStore, + dependencyManager, + descriptorCache, + reflectionsToUpdate, + wakeUpCallback, + expansionHelper, + planCacheInvalidationHelper, + allocator, + reflectionGoalChecker, + refreshStartHandler, + contextFactory + ); } diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionService.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionService.java new file mode 100644 index 0000000000..909b5d6975 --- /dev/null +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionService.java @@ -0,0 +1,269 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.dremio.service.reflection;
+
+import static com.dremio.sabot.rpc.user.UserSession.MAX_METADATA_COUNT;
+import static com.dremio.service.reflection.ReflectionOptions.AUTO_REBUILD_PLAN;
+import static com.dremio.service.reflection.ReflectionOptions.MATERIALIZATION_CACHE_ENABLED;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.concurrent.ExecutorService;
+
+import javax.inject.Provider;
+
+import org.apache.arrow.memory.BufferAllocator;
+import org.apache.calcite.adapter.java.JavaTypeFactory;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import com.dremio.common.config.SabotConfig;
+import com.dremio.config.DremioConfig;
+import com.dremio.datastore.api.LegacyIndexedStore;
+import com.dremio.datastore.api.LegacyKVStore;
+import com.dremio.datastore.api.LegacyKVStoreProvider;
+import com.dremio.exec.catalog.Catalog;
+import com.dremio.exec.ops.QueryContext;
+import com.dremio.exec.planner.acceleration.MaterializationDescriptor;
+import com.dremio.exec.planner.serialization.DeserializationException;
+import com.dremio.exec.planner.sql.SqlConverter;
+import com.dremio.exec.server.SabotContext;
+import com.dremio.exec.store.CatalogService;
+import com.dremio.exec.work.protector.ForemenWorkManager;
+import com.dremio.options.OptionManager;
+import com.dremio.service.DirectProvider;
+import com.dremio.service.jobs.JobsService;
+import com.dremio.service.reflection.proto.ExternalReflection;
+import com.dremio.service.reflection.proto.Materialization;
+import com.dremio.service.reflection.proto.MaterializationId;
+import com.dremio.service.reflection.proto.ReflectionDetails;
+import com.dremio.service.reflection.proto.ReflectionEntry;
+import com.dremio.service.reflection.proto.ReflectionGoal;
+import com.dremio.service.reflection.proto.ReflectionId;
+import com.dremio.service.reflection.proto.ReflectionType;
+import com.dremio.service.reflection.store.MaterializationStore;
+import com.dremio.service.reflection.store.ReflectionEntriesStore;
+import com.dremio.service.reflection.store.ReflectionGoalsStore;
+import com.dremio.service.scheduler.SchedulerService;
+import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableList;
+
+@RunWith(MockitoJUnitRunner.Silent.class)
+public class TestReflectionService {
+ @Mock
+ private SabotContext sabotContext;
+
+ @Mock
+ private JobsService jobsService;
+
+ @Mock
+ private CatalogService catalogService;
+
+ @Mock
+ private BufferAllocator allocator;
+
+ @Mock
+ private SabotConfig config;
+
+ @Mock
+ private OptionManager optionManager;
+
+ @Mock
+ private DremioConfig dremioConfig;
+
+ @Mock
+ private LegacyKVStoreProvider kvStoreProvider;
+
+ @Mock
+ private LegacyIndexedStore<ReflectionId, ReflectionGoal> reflectionGoalStore;
+
+ @Mock
+ private LegacyKVStore<ReflectionId, ReflectionEntry> reflectionEntryStore;
+
+ @Mock
+ private LegacyIndexedStore<MaterializationId, Materialization> materializationStore;
+
+ private ReflectionServiceImpl service;
+
+ private MaterializationDescriptor descriptor;
+
+ private boolean isCoordinatorStarting;
+
+ private Materialization materialization;
+ private ReflectionEntry entry;
+ private ReflectionGoal goal;
+ private ReflectionServiceImpl.ExpansionHelper expansionHelper;
+
+
+ @Before
+ public void setup() {
+ when(sabotContext.getDremioConfig()).thenReturn(dremioConfig);
+
when(sabotContext.getOptionManager()).thenReturn(optionManager); + when(optionManager.getOption(MATERIALIZATION_CACHE_ENABLED)).thenReturn(true); + when(optionManager.getOption(AUTO_REBUILD_PLAN)).thenReturn(true); + when(optionManager.getOption(MAX_METADATA_COUNT.getOptionName())).thenReturn(MAX_METADATA_COUNT.getDefault()); + + service = new TestableReflectionServiceImpl(config, + DirectProvider.wrap(kvStoreProvider), + DirectProvider.wrap(Mockito.mock(SchedulerService.class)), + DirectProvider.wrap(jobsService), + DirectProvider.wrap(catalogService), + DirectProvider.wrap(sabotContext), + DirectProvider.wrap(Mockito.mock(ReflectionStatusService.class)), + Mockito.mock(ExecutorService.class), + DirectProvider.wrap(Mockito.mock(ForemenWorkManager.class)), + false, + allocator); + + // Test KV store objects + ReflectionId rId = new ReflectionId("r1"); + MaterializationId mId = new MaterializationId("m1"); + materialization = new Materialization() + .setId(mId) + .setReflectionId(rId) + .setReflectionGoalVersion("v1"); + entry = new ReflectionEntry(); + entry.setId(rId); + ReflectionDetails details = new ReflectionDetails(); + details.setDisplayFieldList(ImmutableList.of()); + details.setDistributionFieldList(ImmutableList.of()); + details.setPartitionFieldList(ImmutableList.of()); + details.setSortFieldList(ImmutableList.of()); + goal = new ReflectionGoal() + .setId(rId) + .setTag(materialization.getReflectionGoalVersion()) + .setType(ReflectionType.RAW) + .setDetails(details); + + when(kvStoreProvider.getStore(ReflectionGoalsStore.StoreCreator.class)).thenReturn(reflectionGoalStore); + when(reflectionGoalStore.get(rId)).thenReturn(goal); + when(kvStoreProvider.getStore(ReflectionEntriesStore.StoreCreator.class)).thenReturn(reflectionEntryStore); + when(reflectionEntryStore.get(rId)).thenReturn(entry); + when(kvStoreProvider.getStore(MaterializationStore.MaterializationStoreCreator.class)).thenReturn(materializationStore); + when(materializationStore.get(mId)).thenReturn(materialization); + + // Setup Materialization expansion helper + expansionHelper = Mockito.mock(ReflectionServiceImpl.ExpansionHelper.class); + SqlConverter converter = Mockito.mock(SqlConverter.class); + when(converter.getCatalog()).thenReturn(Mockito.mock(Catalog.class)); + when(converter.getTypeFactory()).thenReturn(Mockito.mock(JavaTypeFactory.class)); + when(converter.getSerializerFactory()).thenThrow(new DeserializationException(new RuntimeException("Boom!"))); + when(expansionHelper.getConverter()).thenReturn(converter); + + // Test descriptor + descriptor = new MaterializationDescriptor(ReflectionUtils.toReflectionInfo(goal), mId.getId(), "tag", + 0, null, ImmutableList.of(), 0.0, 0, ImmutableList.of(), null, + null, 0L, 0, catalogService); + + } + + /** + * Verifies that reflection service won't trigger an inline metadata refresh on startup + */ + @Test + public void testNoInlineMetadataRefreshOnStart() { + isCoordinatorStarting = true; + service.start(); + verify(expansionHelper, times(2)).close(); + isCoordinatorStarting = false; + service.refreshCache(); + verify(expansionHelper, times(4)).close(); + } + + /** + * In order to make ReflectionServiceImpl testable, we need to override methods instead of using a Mockito spy. + * The issue with Mockito spy (which uses the decorator design pattern) is that ReflectionServiceImpl inner classes + * can't see the spy. 
+ */
+ private class TestableReflectionServiceImpl extends ReflectionServiceImpl {
+
+ public TestableReflectionServiceImpl(
+ SabotConfig config,
+ Provider<LegacyKVStoreProvider> storeProvider,
+ Provider<SchedulerService> schedulerService,
+ Provider<JobsService> jobsService,
+ Provider<CatalogService> catalogService,
+ Provider<SabotContext> sabotContext,
+ Provider<ReflectionStatusService> reflectionStatusService,
+ ExecutorService executorService,
+ Provider<ForemenWorkManager> foremenWorkManagerProvider,
+ boolean isMaster,
+ BufferAllocator allocator) {
+ super(
+ config,
+ storeProvider,
+ schedulerService,
+ jobsService,
+ catalogService,
+ sabotContext,
+ reflectionStatusService,
+ executorService,
+ foremenWorkManagerProvider,
+ isMaster,
+ allocator,
+ null);
+ }
+
+ @Override
+ Supplier<QueryContext> getQueryContext() {
+ return new Supplier<QueryContext>() {
+ @Override
+ public QueryContext get() {
+ /**
+ * Validates that we set the {@link com.dremio.exec.catalog.MetadataRequestOptions#neverPromote} flag
+ */
+ Assert.assertEquals(isCoordinatorStarting, isReflectionServiceStarting);
+ QueryContext context = Mockito.mock(QueryContext.class);
+ return context;
+ }
+ };
+ }
+
+ @Override
+ Supplier<ExpansionHelper> getExpansionHelper() {
+ return new Supplier<ExpansionHelper>() {
+ @Override
+ public ExpansionHelper get() {
+ when(expansionHelper.getContext()).thenReturn(getQueryContext().get());
+ return expansionHelper;
+ }
+ };
+ }
+
+ @Override
+ Iterable<Materialization> getValidMaterializations() {
+ return ImmutableList.of(materialization);
+ }
+
+ @Override
+ public Iterable<ExternalReflection> getAllExternalReflections() {
+ return ImmutableList.of();
+ }
+
+ @Override
+ MaterializationDescriptor getDescriptor(Materialization materialization) throws MaterializationCache.CacheException {
+ return TestReflectionService.this.descriptor;
+ }
+
+
+ }
+}
diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusService.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusService.java
index 57078df911..48184a7165 100644
--- a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusService.java
+++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusService.java
@@ -104,7 +104,6 @@ public TestReflectionStatusService(
statusService = new ReflectionStatusServiceImpl(
sabotContext::getExecutors,
- DirectProvider.wrap(namespaceService),
DirectProvider.wrap(new ConstantCacheViewer(isMaterializationCached)),
goalsStore,
entriesStore,
@@ -251,7 +250,6 @@ public void testGetExternalReflectionStatus() throws Exception {
ReflectionStatusServiceImpl reflectionStatusService = new ReflectionStatusServiceImpl(
sabotContext::getExecutors,
- DirectProvider.wrap(namespaceService),
DirectProvider.wrap(new ConstantCacheViewer(false)),
goalsStore,
entriesStore,
@@ -261,20 +259,25 @@
DirectProvider.wrap(catalogService)
);
+ final Catalog catalog = mock(Catalog.class);
+ when(catalogService.getCatalog(any(MetadataRequestOptions.class))).thenReturn(catalog);
// mock query dataset
DatasetConfig queryDatasetConfig = new DatasetConfig();
queryDatasetConfig.setType(DatasetType.PHYSICAL_DATASET);
- Integer queryHash = ReflectionUtils.computeDatasetHash(queryDatasetConfig, namespaceService, false);
+ Integer queryHash = DatasetHashUtils.computeDatasetHash(queryDatasetConfig, catalogService, false);
String queryDatasetId = UUID.randomUUID().toString();
- when(namespaceService.findDatasetByUUID(queryDatasetId)).thenReturn(queryDatasetConfig);
+ final DremioTable queryTable = mock(DremioTable.class);
+
when(queryTable.getDatasetConfig()).thenReturn(queryDatasetConfig); + when(catalog.getTable(queryDatasetId)).thenReturn(queryTable); // mock target dataset DatasetConfig targetDatasetConfig = new DatasetConfig(); targetDatasetConfig.setType(DatasetType.PHYSICAL_DATASET); - Integer targetHash = ReflectionUtils.computeDatasetHash(targetDatasetConfig, namespaceService, false); + Integer targetHash = DatasetHashUtils.computeDatasetHash(targetDatasetConfig, catalogService, false); String targetDatasetId = UUID.randomUUID().toString(); - when(namespaceService.findDatasetByUUID(targetDatasetId)).thenReturn(targetDatasetConfig); - + final DremioTable targetTable = mock(DremioTable.class); + when(targetTable.getDatasetConfig()).thenReturn(targetDatasetConfig); + when(catalog.getTable(targetDatasetId)).thenReturn(targetTable); // mock external reflection ReflectionId reflectionId = new ReflectionId(UUID.randomUUID().toString()); @@ -287,7 +290,6 @@ public void testGetExternalReflectionStatus() throws Exception { externalReflection.setTargetDatasetHash(targetHash + 1); when(externalReflectionStore.get(reflectionId.getId())).thenReturn(externalReflection); - // since the hashes don't match, should return OUT_OF_SYNC ExternalReflectionStatus externalReflectionStatus = reflectionStatusService.getExternalReflectionStatus(reflectionId); assertEquals(externalReflectionStatus.getConfigStatus(), ExternalReflectionStatus.STATUS.OUT_OF_SYNC); diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusServiceListing.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusServiceListing.java index 47f6650087..b5bcdb5e45 100644 --- a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusServiceListing.java +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionStatusServiceListing.java @@ -114,7 +114,6 @@ public void setup() { statusService = new ReflectionStatusServiceImpl( sabotContext::getExecutors, - DirectProvider.wrap(namespaceService), DirectProvider.wrap(new TestReflectionStatusService.ConstantCacheViewer(true)), goalsStore, entriesStore, diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionUtils.java b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionUtils.java index dddf3d4a63..3f639879e5 100644 --- a/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionUtils.java +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/TestReflectionUtils.java @@ -15,12 +15,24 @@ */ package com.dremio.service.reflection; +import static com.dremio.service.reflection.ReflectionUtils.computeMetrics; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import java.util.Arrays; import java.util.Collections; import org.junit.Assert; import org.junit.Test; - +import org.mockito.MockedStatic; +import org.mockito.Mockito; + +import com.dremio.service.job.proto.JobAttempt; +import com.dremio.service.job.proto.JobInfo; +import com.dremio.service.jobs.JobDataClientUtils; +import com.dremio.service.jobs.JobDataFragment; +import com.dremio.service.jobs.JobsProtoUtil; +import com.dremio.service.reflection.proto.MaterializationMetrics; import com.dremio.service.reflection.proto.PartitionDistributionStrategy; import com.dremio.service.reflection.proto.ReflectionDetails; import com.dremio.service.reflection.proto.ReflectionField; @@ -105,4 +117,24 @@ public void 
testAreReflectionDetailsEqual() { detail2.setSortFieldList(Arrays.asList(new ReflectionField("test"))); Assert.assertFalse(ReflectionUtils.areReflectionDetailsEqual(detail1, detail2)); } + @Test + public void testComputeMetricsEmptyReturnedRecordsCount() { + final JobDataFragment jobDataFragment = mock(JobDataFragment.class); + when(jobDataFragment.getReturnedRowCount()).thenReturn(0); + final JobInfo info = new JobInfo(); + final JobAttempt jobAttempt = new JobAttempt(); + jobAttempt.setInfo(info); + try (final MockedStatic jobDataClientUtilsMockedStatic = Mockito.mockStatic(JobDataClientUtils.class)) { + jobDataClientUtilsMockedStatic.when(() -> JobDataClientUtils.getJobData(null,null,null,0,1000)) + .thenReturn(jobDataFragment); + try(final MockedStatic jobsProtoUtilMockedStatic = Mockito.mockStatic(JobsProtoUtil.class)) { + jobsProtoUtilMockedStatic.when(() -> JobsProtoUtil.getLastAttempt(null)) + .thenReturn(jobAttempt); + final MaterializationMetrics materializationMetrics = computeMetrics(null, null, null, null); + Assert.assertEquals((Integer) 0, materializationMetrics.getNumFiles()); //then + jobDataClientUtilsMockedStatic.verify( + () -> JobDataClientUtils.getJobData(null, null, null, 0, 1000)); + } + } + } } diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshDoneHandler.java b/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshDoneHandler.java new file mode 100644 index 0000000000..eb488b402b --- /dev/null +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshDoneHandler.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.service.reflection.refresh; + + +import org.junit.Assert; +import org.junit.Test; + +import com.dremio.service.reflection.proto.Materialization; + +public class TestRefreshDoneHandler { + @Test + public void testGetIsEmptyReflection() { + + final Materialization materialization = new Materialization(); + //test with new materialization + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, true, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(true, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(true, true, materialization)); + + //test with null isIcebergDataset + materialization.setIsIcebergDataset(null); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, true, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(true, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(true, true, materialization)); + + //test with an Iceberg dataset + materialization.setIsIcebergDataset(true); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(false, true, materialization)); + //this is the only combination that returns true; all other combinations return false + Assert.assertTrue(RefreshDoneHandler.getIsEmptyReflection(true, false, materialization)); + Assert.assertFalse(RefreshDoneHandler.getIsEmptyReflection(true, true, materialization)); + } +} diff --git a/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshHandler.java b/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshHandler.java new file mode 100644 index 0000000000..017f2e7050 --- /dev/null +++ b/services/accelerator/src/test/java/com/dremio/service/reflection/refresh/TestRefreshHandler.java @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.service.reflection.refresh; + + +import static com.dremio.service.reflection.ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.junit.Assert; +import org.junit.Test; + +import com.dremio.common.utils.protos.AttemptId; +import com.dremio.service.reflection.proto.Materialization; +import com.dremio.service.reflection.proto.MaterializationId; +import com.dremio.service.reflection.proto.ReflectionId; +import com.dremio.service.reflection.proto.RefreshDecision; +import com.google.common.collect.ImmutableList; + +public class TestRefreshHandler { + + @Test + public void testGetRefreshPath() { + final Materialization materialization = new Materialization(); + final String reflectionIdStr = "reflectionID"; + final ReflectionId reflectionId = new ReflectionId(reflectionIdStr); + materialization.setReflectionId(reflectionId); + final String materializationIdStr = "materializationID"; + final MaterializationId materializationId = new MaterializationId(materializationIdStr); + materialization.setId(materializationId); + + final RefreshDecision decision = new RefreshDecision(); + final int attemptIdInt = 5; + final AttemptId attemptId = mock(AttemptId.class); + when(attemptId.getAttemptNum()).thenReturn(attemptIdInt); + + //test with an Iceberg dataset, not the initial refresh + materialization.setIsIcebergDataset(true); + decision.setInitialRefresh(false); + final String materializationBasePathStr = "materializationBasePathStr"; + materialization.setBasePath(materializationBasePathStr); + //note that this is the only case where "_" + attemptIdInt is not appended to the path + Assert.assertEquals(ImmutableList.of(ACCELERATOR_STORAGEPLUGIN_NAME, + reflectionIdStr, + materializationBasePathStr), + RefreshHandler.getRefreshPath(reflectionId, materialization, decision, attemptId)); + + //test with an Iceberg dataset, initial refresh + decision.setInitialRefresh(true); + Assert.assertEquals(ImmutableList.of(ACCELERATOR_STORAGEPLUGIN_NAME, + reflectionIdStr, + materializationIdStr + "_" + attemptIdInt), + RefreshHandler.getRefreshPath(reflectionId, materialization, decision, attemptId)); + + //test with a non-Iceberg dataset + materialization.setIsIcebergDataset(false); + decision.setInitialRefresh(false); + Assert.assertEquals(ImmutableList.of(ACCELERATOR_STORAGEPLUGIN_NAME, + reflectionIdStr, + materializationIdStr + "_" + attemptIdInt), + RefreshHandler.getRefreshPath(reflectionId, materialization, decision, attemptId)); + } +} diff --git a/services/arrow-flight-common/pom.xml b/services/arrow-flight-common/pom.xml index d72bdad5ca..11ae33af15 100644 --- a/services/arrow-flight-common/pom.xml +++ b/services/arrow-flight-common/pom.xml @@ -21,7 +21,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/services/arrow-flight/pom.xml b/services/arrow-flight/pom.xml index 55c2f43aba..ec955264d2 100644 --- a/services/arrow-flight/pom.xml +++ b/services/arrow-flight/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.services diff --git a/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightProducer.java b/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightProducer.java index ab9a2f7d17..6885f88f9c 100644 --- a/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightProducer.java +++
b/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightProducer.java @@ -38,6 +38,7 @@ import static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery; import java.util.Collections; +import java.util.concurrent.Callable; import javax.inject.Provider; @@ -75,7 +76,6 @@ import com.dremio.service.flight.impl.FlightPreparedStatement; import com.dremio.service.flight.impl.FlightWorkManager; import com.dremio.service.flight.impl.FlightWorkManager.RunQueryResponseHandlerFactory; -import com.dremio.service.grpc.HeaderKeys; import com.dremio.service.usersessions.UserSessionService; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; @@ -93,26 +93,31 @@ public class DremioFlightProducer implements FlightSqlProducer { private final Location location; private final DremioFlightSessionsManager sessionsManager; private final BufferAllocator allocator; + private final Provider<FlightRequestContextDecorator> requestContextDecorator; public DremioFlightProducer(Location location, DremioFlightSessionsManager sessionsManager, Provider workerProvider, Provider optionManagerProvider, - BufferAllocator allocator, + BufferAllocator allocator, Provider<FlightRequestContextDecorator> requestContextDecorator, RunQueryResponseHandlerFactory runQueryResponseHandlerFactory) { this.location = location; this.sessionsManager = sessionsManager; this.allocator = allocator; + this.requestContextDecorator = requestContextDecorator; flightWorkManager = new FlightWorkManager(workerProvider, optionManagerProvider, runQueryResponseHandlerFactory); } @Override public void getStream(CallContext callContext, Ticket ticket, ServerStreamListener serverStreamListener) { - if (isFlightSqlTicket(ticket)) { - FlightSqlProducer.super.getStream(callContext, ticket, serverStreamListener); - return; - } + runWithRequestContext(callContext, () -> { + if (isFlightSqlTicket(ticket)) { + FlightSqlProducer.super.getStream(callContext, ticket, serverStreamListener); + return null; + } - getStreamLegacy(callContext, ticket, serverStreamListener); + getStreamLegacy(callContext, ticket, serverStreamListener); + return null; + }); } private void getStreamLegacy(CallContext callContext, Ticket ticket, ServerStreamListener serverStreamListener) { @@ -156,11 +161,13 @@ public void getStreamPreparedStatement(CommandPreparedStatementQuery commandPrep @Override public SchemaResult getSchema(CallContext context, FlightDescriptor descriptor) { - if (isFlightSqlCommand(descriptor)) { - return FlightSqlProducer.super.getSchema(context, descriptor); - } + return runWithRequestContext(context, () -> { + if (isFlightSqlCommand(descriptor)) { + return FlightSqlProducer.super.getSchema(context, descriptor); + } - return getSchemaLegacy(context, descriptor); + return getSchemaLegacy(context, descriptor); + }); } private SchemaResult getSchemaLegacy(CallContext context, FlightDescriptor descriptor) { @@ -171,22 +178,27 @@ @Override public void listFlights(CallContext callContext, Criteria criteria, StreamListener<FlightInfo> streamListener) { - throw CallStatus.UNIMPLEMENTED.withDescription("listFlights is not implemented.").toRuntimeException(); + runWithRequestContext(callContext, () -> { + throw CallStatus.UNIMPLEMENTED.withDescription("listFlights is not implemented.").toRuntimeException(); + }); } @Override public FlightInfo getFlightInfo(CallContext callContext, FlightDescriptor flightDescriptor) { - if (isFlightSqlCommand(flightDescriptor)) { - return
FlightSqlProducer.super.getFlightInfo(callContext, flightDescriptor); - } + return runWithRequestContext(callContext, () -> { + if (isFlightSqlCommand(flightDescriptor)) { + return FlightSqlProducer.super.getFlightInfo(callContext, flightDescriptor); + } - return getFlightInfoLegacy(callContext, flightDescriptor); + return getFlightInfoLegacy(callContext, flightDescriptor); + }); } private FlightInfo getFlightInfoLegacy(CallContext callContext, FlightDescriptor flightDescriptor) { final UserSession session = getUserSessionData(callContext).getSession(); - final FlightPreparedStatement flightPreparedStatement = flightWorkManager + final FlightPreparedStatement flightPreparedStatement = + flightWorkManager .createPreparedStatement(flightDescriptor, callContext::isCancelled, session); return flightPreparedStatement.getFlightInfoLegacy(location, flightDescriptor); @@ -213,26 +225,41 @@ public FlightInfo getFlightInfoPreparedStatement( @Override public Runnable acceptPut(CallContext callContext, FlightStream flightStream, StreamListener<PutResult> streamListener) { - if (isFlightSqlCommand(flightStream.getDescriptor())) { - return FlightSqlProducer.super.acceptPut(callContext, flightStream, streamListener); - } + return runWithRequestContext(callContext, () -> { + if (isFlightSqlCommand(flightStream.getDescriptor())) { + return FlightSqlProducer.super.acceptPut(callContext, flightStream, streamListener); + } - throw CallStatus.UNIMPLEMENTED.withDescription("acceptPut is not implemented.").toRuntimeException(); + throw CallStatus.UNIMPLEMENTED.withDescription("acceptPut is not implemented.").toRuntimeException(); + }); } @Override public void doAction(CallContext callContext, Action action, StreamListener<Result> streamListener) { - if (isFlightSqlAction(action)) { - FlightSqlProducer.super.doAction(callContext, action, streamListener); - return; - } + runWithRequestContext(callContext, () -> { + if (isFlightSqlAction(action)) { + FlightSqlProducer.super.doAction(callContext, action, streamListener); + return null; + } - throw CallStatus.UNIMPLEMENTED.withDescription("doAction is not implemented.").toRuntimeException(); + throw CallStatus.UNIMPLEMENTED.withDescription("doAction is not implemented.").toRuntimeException(); + }); } @Override public void listActions(CallContext callContext, StreamListener<ActionType> streamListener) { - throw CallStatus.UNIMPLEMENTED.withDescription("listActions is not implemented.").toRuntimeException(); + runWithRequestContext(callContext, () -> { + FlightSqlProducer.super.listActions(callContext, streamListener); + return null; + }); + } + + @Override + public void doExchange(CallContext callContext, FlightStream reader, ServerStreamListener writer) { + runWithRequestContext(callContext, () -> { + FlightSqlProducer.super.doExchange(callContext, reader, writer); + return null; + }); } @Override @@ -567,23 +594,36 @@ public void close() throws Exception { } + /// Helper method to execute Flight requests with the correct RequestContext based on the supplied CallContext. + /// This should be called for FlightProducer interface methods (not FlightSqlProducer interface methods, which + /// are just routed through FlightProducer methods): getFlightInfo(), getSchema(), getStream(), listFlights(), + /// acceptPut(), doAction(), listActions(), and doExchange().
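+ ///
+ /// A minimal usage sketch (illustrative only; someFlightWork() is a hypothetical stand-in for a
+ /// handler body, not a method of this class):
+ ///
+ ///   return runWithRequestContext(callContext, () -> {
+ ///     return someFlightWork(callContext); // executes under the decorated RequestContext
+ ///   });
+ ///
+ /// Handlers with a void signature instead return null from the callable, as getStream() does above.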
+ private <V> V runWithRequestContext(CallContext context, Callable<V> callable) { + try { + return requestContextDecorator.get().apply(RequestContext.current(), context).call(callable); + } catch (Exception ex) { + // Flight request handlers cannot throw any checked exceptions. So propagate RuntimeExceptions and convert + // checked exceptions to FlightRuntimeExceptions. Most exceptions thrown from above should really be + // FlightRuntimeExceptions already though. + if (ex instanceof RuntimeException) { + throw (RuntimeException) ex; + } else { + throw CallStatus.UNKNOWN.withCause(ex).toRuntimeException(); + } + } + } + private void runPreparedStatement(CallContext callContext, ServerStreamListener serverStreamListener, UserProtos.PreparedStatementHandle preparedStatementHandle) { final UserSessionService.UserSessionData sessionData = getUserSessionData(callContext); final ChangeTrackingUserSession userSession = new ChangeTrackingUserSession(sessionData.getSession()); - final CallHeaders incomingHeaders = retrieveHeadersFromCallContext(callContext); - String projectId = incomingHeaders.get(HeaderKeys.PROJECT_ID_HEADER_KEY.name()); - String orgId = incomingHeaders.get(HeaderKeys.ORG_ID_HEADER_KEY.name()); - - getRequestContext(projectId, orgId).run(() -> - flightWorkManager.runPreparedStatement(preparedStatementHandle, serverStreamListener, allocator, userSession, () -> { - if (userSession.isUpdated()) { - sessionsManager.updateSession(sessionData); - } + flightWorkManager.runPreparedStatement(preparedStatementHandle, serverStreamListener, allocator, userSession, () -> { + if (userSession.isUpdated()) { + sessionsManager.updateSession(sessionData); } - ) + } ); } diff --git a/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightService.java b/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightService.java index 1961694789..5e6b40f3e8 100644 --- a/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightService.java +++ b/services/arrow-flight/src/main/java/com/dremio/service/flight/DremioFlightService.java @@ -84,6 +84,7 @@ public class DremioFlightService implements Service { private final Provider optionManagerProvider; private final Provider userSessionServiceProvider; private final Provider authProvider; + private final Provider<FlightRequestContextDecorator> requestContextDecoratorProvider; private final Provider credentialsServiceProvider; private final RunQueryResponseHandlerFactory runQueryResponseHandlerFactory; @@ -100,10 +101,12 @@ public DremioFlightService(Provider configProvider, Provider optionManagerProvider, Provider userSessionServiceProvider, Provider authProvider, + Provider<FlightRequestContextDecorator> requestContextDecoratorProvider, Provider credentialsServiceProvider) { this(configProvider, bufferAllocator, userWorkerProvider, sabotContextProvider, tokenManagerProvider, optionManagerProvider, userSessionServiceProvider, - authProvider, credentialsServiceProvider, RunQueryResponseHandlerFactory.DEFAULT); + authProvider, requestContextDecoratorProvider, credentialsServiceProvider, + RunQueryResponseHandlerFactory.DEFAULT); } @VisibleForTesting @@ -115,6 +118,7 @@ public DremioFlightService(Provider configProvider, Provider optionManagerProvider, Provider userSessionServiceProvider, Provider authProvider, + Provider<FlightRequestContextDecorator> requestContextDecoratorProvider, Provider credentialsServiceProvider, RunQueryResponseHandlerFactory runQueryResponseHandlerFactory ) { @@ -127,6 +131,7 @@ public DremioFlightService(Provider configProvider, this.runQueryResponseHandlerFactory =
runQueryResponseHandlerFactory; this.userSessionServiceProvider = userSessionServiceProvider; this.authProvider = authProvider; + this.requestContextDecoratorProvider = requestContextDecoratorProvider; this.credentialsServiceProvider = credentialsServiceProvider; } @@ -161,7 +166,7 @@ public void start() throws Exception { .maxConnectionAgeGrace(Integer.getInteger("dremio.services.arrow-flight.max-connection-age-grace", 0)) .maxConnectionAge(Integer.getInteger("dremio.services.arrow-flight.max-connection-age", 0)) .producer(new DremioFlightProducer(location, dremioFlightSessionsManager, userWorkerProvider, - optionManagerProvider, allocator, runQueryResponseHandlerFactory)); + optionManagerProvider, allocator, requestContextDecoratorProvider, runQueryResponseHandlerFactory)); builder.middleware(FLIGHT_CLIENT_PROPERTIES_MIDDLEWARE_KEY, new ServerCookieMiddleware.Factory()); diff --git a/services/arrow-flight/src/main/java/com/dremio/service/flight/FlightRequestContextDecorator.java b/services/arrow-flight/src/main/java/com/dremio/service/flight/FlightRequestContextDecorator.java new file mode 100644 index 0000000000..3707900648 --- /dev/null +++ b/services/arrow-flight/src/main/java/com/dremio/service/flight/FlightRequestContextDecorator.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dremio.service.flight; + +import java.util.function.BiFunction; + +import org.apache.arrow.flight.FlightProducer; + +import com.dremio.context.RequestContext; + +/** + * Interface to decorate a RequestContext based on input from a Flight CallContext. 
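+ *
+ * <p>A minimal sketch of a custom decorator (illustrative only; inspecting
+ * {@code flightContext.peerIdentity()} is an assumption about how a caller might use the
+ * CallContext, not a requirement of this interface):
+ * <pre>{@code
+ * FlightRequestContextDecorator decorator = (requestContext, flightContext) -> {
+ *   String peer = flightContext.peerIdentity(); // e.g. look at the authenticated peer
+ *   return requestContext;                      // derive and attach per-request state here
+ * };
+ * }</pre>
+ * The DEFAULT instance below is a pass-through that returns the supplied RequestContext unchanged.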
+ */ +@FunctionalInterface +public interface FlightRequestContextDecorator + extends BiFunction<RequestContext, FlightProducer.CallContext, RequestContext> { + + FlightRequestContextDecorator DEFAULT = (requestContext, flightContext) -> requestContext; + + @Override + RequestContext apply(RequestContext requestContext, FlightProducer.CallContext flightContext); +} diff --git a/services/arrow-flight/src/main/java/com/dremio/service/flight/impl/RunQueryResponseHandler.java b/services/arrow-flight/src/main/java/com/dremio/service/flight/impl/RunQueryResponseHandler.java index f06a1ac879..ae745b0b5d 100644 --- a/services/arrow-flight/src/main/java/com/dremio/service/flight/impl/RunQueryResponseHandler.java +++ b/services/arrow-flight/src/main/java/com/dremio/service/flight/impl/RunQueryResponseHandler.java @@ -349,6 +349,7 @@ public static class BackpressureHandlingResponseHandler extends RunQueryResponse this.optionManager = workerProvider.get().getSystemOptions(); } + @Override @VisibleForTesting WaitResult clientIsReadyForData() { return runQueryBackpressureStrategy.waitForListener( diff --git a/services/arrow-flight/src/main/java/org/apache/arrow/flight/DremioFlightServer.java b/services/arrow-flight/src/main/java/org/apache/arrow/flight/DremioFlightServer.java index 9a0899b38c..1d48c0ccac 100644 --- a/services/arrow-flight/src/main/java/org/apache/arrow/flight/DremioFlightServer.java +++ b/services/arrow-flight/src/main/java/org/apache/arrow/flight/DremioFlightServer.java @@ -123,6 +123,7 @@ public boolean awaitTermination(final long timeout, final TimeUnit unit) throws } /** Shutdown the server, waits for up to 6 seconds for successful shutdown before returning. */ + @Override public void close() throws InterruptedException { shutdown(); final boolean terminated = awaitTermination(3000, TimeUnit.MILLISECONDS); diff --git a/services/arrow-flight/src/test/java/com/dremio/service/flight/AbstractTestFlightSqlServerCatalogMethods.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/AbstractTestFlightSqlServerCatalogMethods.java index 32b9a5b248..f797affd8b 100644 --- a/services/arrow-flight/src/test/java/com/dremio/service/flight/AbstractTestFlightSqlServerCatalogMethods.java +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/AbstractTestFlightSqlServerCatalogMethods.java @@ -448,4 +448,35 @@ public void testGetCatalogsCancelingAfterStreamIsRetrieved() throws Exception { stream.getRoot().clear(); } } + + private FlightStream getSqlInfoFlightStream() { + final FlightInfo flightInfo = flightSqlClient.getSqlInfo(new FlightSql.SqlInfo[] {}, getCallOptions()); + return flightSqlClient.getStream(flightInfo.getEndpoints().get(0).getTicket(), + getCallOptions()); + } + + @Test + public void testGetSqlInfoClosingAfterStreamIsRetrieved() throws Exception { + final FlightStream stream = getSqlInfoFlightStream(); + drainStream(stream); + + stream.close(); + } + + @Test + public void testGetSqlInfoCancelingBeforeStreamIsRetrieved() throws Exception { + try (final FlightStream stream = getSqlInfoFlightStream()) { + stream.cancel("Metadata retrieval canceled", new Exception("Testing query data retrieval cancellation.")); + } + } + + @Test + public void testGetSqlInfoCancelingAfterStreamIsRetrieved() throws Exception { + try (final FlightStream stream = getSqlInfoFlightStream()) { + drainStream(stream); + + stream.cancel("Metadata retrieval canceled", new Exception("Testing query data retrieval cancellation.")); + stream.getRoot().clear(); + } + } } diff --git
a/services/arrow-flight/src/test/java/com/dremio/service/flight/BaseFlightQueryTest.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/BaseFlightQueryTest.java index 1fe33f7515..5c35a19439 100644 --- a/services/arrow-flight/src/test/java/com/dremio/service/flight/BaseFlightQueryTest.java +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/BaseFlightQueryTest.java @@ -178,6 +178,7 @@ public static void createFlightService(RunQueryResponseHandlerFactory runQueryRe getBindingProvider().provider(OptionManager.class), userSessionServiceProvider, () -> new DremioFlightAuthProviderImpl(Providers.of(dremioConfig), getBindingProvider().provider(UserService.class), getBindingProvider().provider(TokenManager.class)), + Providers.of(FlightRequestContextDecorator.DEFAULT), getBindingProvider().provider(CredentialsService.class), runQueryResponseHandlerFactory ); @@ -302,6 +303,7 @@ public Optional isValid(byte[] bytes) { } } )), + Providers.of(FlightRequestContextDecorator.DEFAULT), getBindingProvider().provider(CredentialsService.class), runQueryResponseHandlerFactory) { diff --git a/services/arrow-flight/src/test/java/com/dremio/service/flight/TestFlightWithRequestContext.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/TestFlightWithRequestContext.java new file mode 100644 index 0000000000..636271fd37 --- /dev/null +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/TestFlightWithRequestContext.java @@ -0,0 +1,131 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.flight; + +import static org.junit.Assert.assertEquals; + +import java.util.concurrent.Callable; + +import org.apache.arrow.flight.FlightProducer; +import org.junit.Before; +import org.junit.Test; + +import com.dremio.context.RequestContext; +import com.google.inject.util.Providers; + +/** + * Test that Flight RPC handlers are utilizing the request context. + */ +public class TestFlightWithRequestContext { + + private static class DummyFlightRequestContextDecorator implements FlightRequestContextDecorator { + + private int callCount = 0; + + @Override + public RequestContext apply(RequestContext requestContext, FlightProducer.CallContext flightContext) { + ++callCount; + return requestContext; + } + } + + private DummyFlightRequestContextDecorator decorator; + + // Note: FlightProducer interface is used to intentionally limit testing to Flight (not FlightSql) RPC calls.
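+ // The tests below share one verification pattern: invoke a single FlightProducer RPC with
+ // null arguments, swallow any exception the handler throws, and assert that the decorator
+ // above was applied exactly once.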
+ private FlightProducer producer; + + @Before + public void setup() { + decorator = new DummyFlightRequestContextDecorator(); + producer = new DremioFlightProducer( + null, null, null, null, null, + Providers.of(decorator), null); + } + + @Test + public void testGetStream() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.getStream(null, null, null); + return null; + }); + } + + @Test + public void testListFlights() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.listFlights(null, null, null); + return null; + }); + } + + @Test + public void testGetFlightInfo() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.getFlightInfo(null, null); + return null; + }); + } + + @Test + public void testGetSchema() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.getSchema(null, null); + return null; + }); + } + + @Test + public void testAcceptPut() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.acceptPut(null, null, null); + return null; + }); + } + + @Test + public void testDoExchange() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.doExchange(null, null, null); + return null; + }); + } + + @Test + public void testDoAction() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.doAction(null, null, null); + return null; + }); + } + + @Test + public void testListActions() { + ignoreExceptionsAndValidateCallCount(() -> { + producer.listActions(null, null); + return null; + }); + } + + private void ignoreExceptionsAndValidateCallCount(Callable<Void> rpcHandlerBody) { + try { + rpcHandlerBody.call(); + } catch (Exception ex) { + // Suppress exceptions thrown from the RPC handler, since the point of this test + // is just to verify that the request context decorator was applied. + } + assertEquals(1, decorator.callCount); + } +} diff --git a/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBackpressureHandlingResponseHandler.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBackpressureHandlingResponseHandler.java index 5aed296736..09d0df7d63 100644 --- a/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBackpressureHandlingResponseHandler.java +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBackpressureHandlingResponseHandler.java @@ -42,12 +42,14 @@ */ public class TestBackpressureHandlingResponseHandler extends BaseTestRunQueryResponseHandler { + @Override protected BackpressureHandlingResponseHandler createHandler() { return new BackpressureHandlingResponseHandler( getExternalId(), getUserSession(), getWorkerProvider(), getListener(), getAllocator(), () -> {}); } + @Override @Before public void setUp() { super.setUp(); diff --git a/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBasicResponseHandler.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBasicResponseHandler.java index 6120663574..52b347e33e 100644 --- a/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBasicResponseHandler.java +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/impl/TestBasicResponseHandler.java @@ -30,11 +30,13 @@ */ public class TestBasicResponseHandler extends BaseTestRunQueryResponseHandler { + @Override protected RunQueryResponseHandler createHandler() { return new BasicResponseHandler(getExternalId(), getUserSession(), getWorkerProvider(), getListener(), getAllocator(), () -> {}); } + @Override @Before public void setUp() { super.setUp(); diff --git
a/services/arrow-flight/src/test/java/com/dremio/service/flight/protector/TestCancellableUserResponseHandler.java b/services/arrow-flight/src/test/java/com/dremio/service/flight/protector/TestCancellableUserResponseHandler.java index ba24735d9c..4d0ff7100e 100644 --- a/services/arrow-flight/src/test/java/com/dremio/service/flight/protector/TestCancellableUserResponseHandler.java +++ b/services/arrow-flight/src/test/java/com/dremio/service/flight/protector/TestCancellableUserResponseHandler.java @@ -72,7 +72,7 @@ public void testSuccessfulResultPropagation() { // Act UserResult userResult = new UserResult(expected, queryId, UserBitShared.QueryResult.QueryState.COMPLETED, resultProfile, null, null, - false); + false, false, false); cancellableUserResponseHandler.completed(userResult); // Assert @@ -98,7 +98,7 @@ public void testExceptionalPropagation() { UserException userException = UserException.parseError(expected).buildSilently(); UserResult userResult = new UserResult(null, queryId, UserBitShared.QueryResult.QueryState.FAILED, resultProfile, userException, null, - false); + false, false, false); cancellableUserResponseHandler.completed( userResult); diff --git a/services/autocomplete/pom.xml b/services/autocomplete/pom.xml index afbfaffa99..62b2680d11 100644 --- a/services/autocomplete/pom.xml +++ b/services/autocomplete/pom.xml @@ -22,18 +22,13 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 dremio-services-autocomplete Services - Autocomplete - - 8 - 8 - - com.dremio diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Request.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Request.java new file mode 100644 index 0000000000..31db520687 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Request.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.service.autocomplete; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Autocomplete v2 API Request + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class AutocompleteV2Request { + private final String prefix; + private final AutocompleteV2RequestType type; + private final List> catalogEntityKeys; + private final List queryContext; + private final String refType; + private final String refValue; + + @JsonCreator + public AutocompleteV2Request( + @JsonProperty("prefix") String prefix, + @JsonProperty("type") AutocompleteV2RequestType type, + @JsonProperty("catalogEntityKeys") List> catalogEntityKeys, + @JsonProperty("queryContext") List queryContext, + @JsonProperty("refType") String refType, + @JsonProperty("refValue") String refValue) { + this.prefix = prefix; + this.type = type; + this.catalogEntityKeys = catalogEntityKeys; + this.queryContext = queryContext; + this.refType = refType; + this.refValue = refValue; + } + + /** + * Get prefix + */ + public String getPrefix() { + return this.prefix; + } + + /** + * Get type + */ + public AutocompleteV2RequestType getType() { + return this.type; + } + + /** + * Get namespace keys + */ + public List> getCatalogEntityKeys() { + return this.catalogEntityKeys; + } + + /** + * Get query context + */ + public List getQueryContext() { + return this.queryContext; + } + + /** + * Get ref type + */ + public String getRefType() { + return this.refType; + } + + /** + * Get ref value + */ + public String getRefValue() { + return this.refValue; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2RequestType.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2RequestType.java new file mode 100644 index 0000000000..1f452ccabe --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2RequestType.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.autocomplete; + +import java.util.Arrays; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +public enum AutocompleteV2RequestType { + COLUMN("column"), + CONTAINER("container"), + REFERENCE("reference"); + + private String type; + + private AutocompleteV2RequestType(String type) { + this.type = type; + } + + @JsonCreator + public static AutocompleteV2RequestType fromString(String type) { + for (AutocompleteV2RequestType requestType : values()) { + if (requestType.type.equalsIgnoreCase(type)) { + return requestType; + } + } + throw new IllegalArgumentException(String.format("Unknown enum type %s. 
Allowed values are %s.", type, Arrays.toString(values()))); + } + + @JsonValue + public String getType() { + return type; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Response.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Response.java new file mode 100644 index 0000000000..2a2678d958 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/AutocompleteV2Response.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.autocomplete; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXTERNAL_PROPERTY, property = "entityType", visible = true) +@JsonSubTypes({ + @JsonSubTypes.Type(value = ColumnSuggestions.class, name = "columnSuggestions"), + @JsonSubTypes.Type(value = ContainerSuggestions.class, name = "containerSuggestions"), + @JsonSubTypes.Type(value = ReferenceSuggestions.class, name = "referenceSuggestions") +}) +public interface AutocompleteV2Response { + String getSuggestionsType(); + Integer getCount(); + Integer getMaxCount(); +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ColumnSuggestions.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ColumnSuggestions.java new file mode 100644 index 0000000000..1d2c839819 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ColumnSuggestions.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.service.autocomplete; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public final class ColumnSuggestions implements AutocompleteV2Response { + private final String suggestionsType; + private final Integer count; + private final Integer maxCount; + private final List columns; + + @JsonCreator + public ColumnSuggestions( + @JsonProperty("type") String suggestionsType, + @JsonProperty("count") Integer count, + @JsonProperty("maxCount") Integer maxCount, + @JsonProperty("suggestions") List columns) { + this.suggestionsType = suggestionsType; + this.count = count; + this.maxCount = maxCount; + this.columns = columns; + } + + @Override + public String getSuggestionsType() { + return suggestionsType; + } + + @Override + public Integer getCount() { + return count; + } + + @Override + public Integer getMaxCount() { + return maxCount; + } + + public List getColumns() { + return columns; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ContainerSuggestions.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ContainerSuggestions.java new file mode 100644 index 0000000000..b6a0df52bd --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ContainerSuggestions.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.service.autocomplete; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public final class ContainerSuggestions implements AutocompleteV2Response { + private final String suggestionsType; + private final Integer count; + private final Integer maxCount; + private final List containers; + + @JsonCreator + public ContainerSuggestions( + @JsonProperty("type") String suggestionsType, + @JsonProperty("count") Integer count, + @JsonProperty("maxCount") Integer maxCount, + @JsonProperty("suggestions") List containers) { + this.suggestionsType = suggestionsType; + this.count = count; + this.maxCount = maxCount; + this.containers = containers; + } + + @Override + public String getSuggestionsType() { + return suggestionsType; + } + + @Override + public Integer getCount() { + return count; + } + + @Override + public Integer getMaxCount() { + return maxCount; + } + + public List getContainers() { + return containers; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ReferenceSuggestions.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ReferenceSuggestions.java new file mode 100644 index 0000000000..fa35c5b634 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/ReferenceSuggestions.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.service.autocomplete; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public final class ReferenceSuggestions implements AutocompleteV2Response { + private final String suggestionsType; + private final Integer count; + private final Integer maxCount; + private final List branches; + + @JsonCreator + public ReferenceSuggestions( + @JsonProperty("type") String suggestionsType, + @JsonProperty("count") Integer count, + @JsonProperty("maxCount") Integer maxCount, + @JsonProperty("suggestions") List branches) { + this.suggestionsType = suggestionsType; + this.count = count; + this.maxCount = maxCount; + this.branches = branches; + } + + @Override + public String getSuggestionsType() { + return suggestionsType; + } + + @Override + public Integer getCount() { + return count; + } + + @Override + public Integer getMaxCount() { + return maxCount; + } + + public List getBranches() { + return branches; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntity.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntity.java new file mode 100644 index 0000000000..c64fe70854 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntity.java @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.autocomplete; + +import java.util.Objects; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public final class SuggestionEntity { + private final String name; + private final String type; + + @JsonCreator + public SuggestionEntity( + @JsonProperty("name") String name, + @JsonProperty("type") String type) { + this.name = name; + this.type = type; + } + + public String getName() { + return name; + } + + public String getType() { + return type; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SuggestionEntity that = (SuggestionEntity) o; + return Objects.equals(name, that.name) && Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(name, type); + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntityType.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntityType.java new file mode 100644 index 0000000000..9f67ff13f5 --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionEntityType.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.autocomplete; + +import java.util.Arrays; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * Make sure these types are not conflict with ArrowType which is used for Dataset Field Types + * https://arrow.apache.org/docs/java/reference/org/apache/arrow/vector/types/pojo/ArrowType.html + */ +public enum SuggestionEntityType { + // Container types + FOLDER("folder"), + HOME("home"), + SOURCE("source"), + SPACE("space"), + FUNCTION("function"), + + FILE("file"), + + // Dataset types + VIRTUAL("virtual"), + PROMOTED("promoted"), + DIRECT("direct"), + + // Arctic types + BRANCH("branch"), + COMMIT("commit"), + TAG("tag"); + + private String type; + + private SuggestionEntityType(String type) { + this.type = type; + } + + @JsonCreator + public static SuggestionEntityType fromString(String type) { + for (SuggestionEntityType entityType : values()) { + if (entityType.type.equalsIgnoreCase(type)) { + return entityType; + } + } + throw new IllegalArgumentException(String.format("Unknown enum type %s. Allowed values are %s.", type, Arrays.toString(values()))); + } + + @JsonValue + public String getType() { + return type; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionsType.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionsType.java new file mode 100644 index 0000000000..ae3edcc5ba --- /dev/null +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/SuggestionsType.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.autocomplete; + +import java.util.Arrays; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SuggestionsType { + REFERENCE("reference"), + COLUMN("column"), + CONTAINER("container"); + + + private String type; + + private SuggestionsType(String type) { + this.type = type; + } + + @JsonCreator + public static SuggestionsType fromString(String type) { + for (SuggestionsType suggestionsType : values()) { + if (suggestionsType.type.equalsIgnoreCase(type)) { + return suggestionsType; + } + } + throw new IllegalArgumentException(String.format("Unknown enum type %s. 
Allowed values are %s.", type, Arrays.toString(values()))); + } + + @JsonValue + public String getType() { + return type; + } +} diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/parsing/SqlNodeParser.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/parsing/SqlNodeParser.java index 87eb97d4c4..b56381c2ff 100644 --- a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/parsing/SqlNodeParser.java +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/parsing/SqlNodeParser.java @@ -18,7 +18,6 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParseException; -import com.dremio.common.exceptions.UserException; import com.dremio.exec.planner.sql.SqlExceptionHelper; import com.dremio.service.autocomplete.tokens.DremioToken; import com.dremio.service.autocomplete.tokens.SqlQueryUntokenizer; @@ -36,11 +35,11 @@ public SqlNode parse(String sql) { try { return parseWithException(sql); } catch (SqlParseException parseException) { - UserException.Builder builder = SqlExceptionHelper.parseError( + throw SqlExceptionHelper + .parseError( sql, - parseException); - builder.message(SqlExceptionHelper.QUERY_PARSING_ERROR); - throw builder.build(logger); + parseException) + .build(logger); } } @@ -53,11 +52,11 @@ public SqlNode parse(ImmutableList tokens) { try { return parseWithException(tokens); } catch (SqlParseException parseException) { - UserException.Builder builder = SqlExceptionHelper.parseError( - SqlQueryUntokenizer.untokenize(tokens), - parseException); - builder.message(SqlExceptionHelper.QUERY_PARSING_ERROR); - throw builder.build(logger); + throw SqlExceptionHelper + .parseError( + SqlQueryUntokenizer.untokenize(tokens), + parseException) + .build(logger); } } } diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/CatalogPath.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/CatalogPath.java index 9433362fb8..204d84300c 100644 --- a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/CatalogPath.java +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/CatalogPath.java @@ -55,6 +55,7 @@ public static CatalogPath parse(ImmutableList tokens) { switch (token.getKind()) { case IDENTIFIER: identifiersBuilder.add(token.getImage()); + // fall through default: tokensBuilder.add(token); break; diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FieldList.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FieldList.java index 002bf7b323..02ad0922cf 100644 --- a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FieldList.java +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FieldList.java @@ -48,6 +48,7 @@ public FieldList( this.tableReference = tableReference; } + @Override public ImmutableList getTokens() { return tokens; } diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FromClause.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FromClause.java index b2072dc70f..dfbc4d73ae 100644 --- a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FromClause.java +++ 
b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/FromClause.java @@ -94,6 +94,7 @@ private FromClause( this.joinConditions = joinConditions; } + @Override public ImmutableList getTokens() { return tokens; } diff --git a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/OptimizeStatement.java b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/OptimizeStatement.java index 5028e38eab..9aa308e66e 100644 --- a/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/OptimizeStatement.java +++ b/services/autocomplete/src/main/java/com/dremio/service/autocomplete/statements/grammar/OptimizeStatement.java @@ -15,20 +15,42 @@ */ package com.dremio.service.autocomplete.statements.grammar; +import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.FOR; +import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.LPAREN; import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.OPTIMIZE; +import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.PARTITIONS; import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.REWRITE; import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.TABLE; +import static com.dremio.exec.planner.sql.parser.impl.ParserImplConstants.USING; import com.dremio.service.autocomplete.tokens.DremioToken; import com.dremio.service.autocomplete.tokens.TokenBuffer; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; + +/** + * optimize: + * OPTIMIZE TABLE tableReference + * [ REWRITE DATA ] + * [ USING BIN_PACK ] + * [ FOR PARTITIONS ] + * [ (option = value [, option = value ]) ] + */ public final class OptimizeStatement extends Statement { + private static final ImmutableSet BREAK_KEYWORDS = ImmutableSet.builder() + .add(REWRITE) + .add(USING) + .add(FOR) + .add(LPAREN) + .build(); + private OptimizeStatement( ImmutableList tokens, - TableReference table) { - super(tokens, asListIgnoringNulls(table)); + TableReference table, + Expression condition) { + super(tokens, asListIgnoringNulls(table, condition)); } public static Statement parse(TokenBuffer tokenBuffer) { @@ -36,8 +58,9 @@ public static Statement parse(TokenBuffer tokenBuffer) { ImmutableList tokens = tokenBuffer.toList(); TableReference tableReference = parseTable(tokenBuffer); + Expression condition = parseCondition(tokenBuffer, tableReference); return new OptimizeStatement(tokens, - tableReference); + tableReference, condition); } private static TableReference parseTable(TokenBuffer tokenBuffer) { @@ -51,9 +74,24 @@ private static TableReference parseTable(TokenBuffer tokenBuffer) { return null; } - ImmutableList setTokens = tokenBuffer.readUntilKind(REWRITE); - tokenBuffer.read(); + ImmutableList setTokens = tokenBuffer.readUntilKinds(BREAK_KEYWORDS); return TableReference.parse(new TokenBuffer(setTokens)); } + + private static Expression parseCondition(TokenBuffer tokenBuffer, TableReference tableReference) { + if (tokenBuffer.isEmpty()) { + return null; + } + + tokenBuffer.readAndCheckKind(FOR); + tokenBuffer.readAndCheckKind(PARTITIONS); + if (tokenBuffer.isEmpty()) { + return null; + } + + ImmutableList tokens = tokenBuffer.readUntilKind(LPAREN); + + return Expression.parse(tokens, ImmutableList.of(tableReference)); + } } diff --git a/services/autocomplete/src/main/resources/functions.yaml 
diff --git a/services/autocomplete/src/main/resources/functions.yaml b/services/autocomplete/src/main/resources/functions.yaml
deleted file mode 100644
index f13ae48ba0..0000000000
--- a/services/autocomplete/src/main/resources/functions.yaml
+++ /dev/null
@@ -1,31657 +0,0 @@
-#
-# Copyright (C) 2017-2019 Dremio Corporation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
----
-ABS:
-  description: "Computes the absolute value of a numeric expression.\n"
-  dremioVersion: "1.0+"
-  functionCategories:
-  - "MATH"
-  name: "ABS"
-  signatures:
-  -
-    description: "Returns a numeric or NULL if the <numeric_expression> is NULL. (Same return type as the input)"
-    parameters:
-    -
-      description: "BINARY, DECIMAL, DOUBLE, NUMERIC, INTEGER"
-      kind: "REGULAR"
-      name: "numeric_expression"
-      type: "NUMERIC"
-    returnType: "NUMERIC"
-    sampleCodes:
-    -
-      call: "SELECT ABS(0.0)"
-      result: "0.0"
-    -
-      call: "SELECT ABS(-2)"
-      result: "2"
-    -
-      call: "SELECT ABS(NULL)"
-      result: ""
[... ~31,600 further deleted lines elided: the rest of the file repeated this schema (description, dremioVersion, functionCategories, name, signatures with parameters/returnType/sampleCodes) for every operator and function surfaced to autocomplete. Entries visible in this excerpt before it cuts off mid-file: !=, $HISTOGRAM, $HISTOGRAM_FIRST_VALUE, $HISTOGRAM_LAST_VALUE, $HISTOGRAM_MAX, $HISTOGRAM_MIN, $SUM0, $SUM0_V2, &&, -, <, <=, <>, =, ==, >, >=, ABS, ACOS, ADD, ADD_DAYS, ADD_HOURS, ADD_MICROSECONDS, ADD_MINUTES, ADD_MONTHS, ADD_QUARTERS, ADD_SECONDS, ADD_WEEKS, ADD_YEARS, AES_DECRYPT, AES_ENCRYPT, ALTERNATE, ALTERNATE3, AND, APPROX_COUNT_DISTINCT, ASCII, ASIN, the ASSERT_* family (BIGINT, BIT, DATE, DECIMAL, FIXEDSIZEBINARY, FLOAT4, FLOAT8, INT, INTERVALDAY, INTERVALYEAR, LIST, STRUCT, TIME, TIMESTAMP, VARBINARY, VARCHAR), ATAN, ATAN2, AVG, BASE64, BIN, BINARY, BINARY_STRING, BITWISE_AND, BITWISE_NOT, BITWISE_OR, BITWISE_XOR, BIT_AND, BIT_LENGTH, BIT_OR, BOOLEANAND, and BOOLEANOR (truncated mid-entry). Most entries carried bare type signatures with empty description/sampleCodes fields; a handful (ABS, ACOS, APPROX_COUNT_DISTINCT, ASCII, ASIN, ATAN, AVG, BINARY_STRING, BIT_LENGTH) were documented with descriptions and sample calls, e.g. "SELECT ACOS(0)" -> "1.5707963267948966" and "SELECT BIT_LENGTH('DREMIO')" -> "48".]
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -BOOL_AND: - description: "Computes the BOOLEAN AND of two BOOLEAN expressions. Returns TRUE if both expressions evaluate to TRUE. Returns FALSE if one or both expression(s) evaluate(s) to FALSE." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - name: "BOOL_AND" - signatures: - - - description: "Returns the logical AND of two boolean expressions." - parameters: - - - description: "Boolean input expression." - kind: "REGULAR" - name: "bool_expression1" - type: "BOOLEAN" - - - description: "Boolean input expression." - kind: "REGULAR" - name: "bool_expression2" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT BOOLEANAND(TRUE, FALSE)" - result: "FALSE" -BOOL_OR: - description: "Computes the BOOLEAN OR of two BOOLEAN expressions. Returns TRUE if one or both expressions evaluate to TRUE. Returns FALSE if both expressions evaluate to FALSE." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - name: "BOOL_OR" - signatures: - - - description: "Returns the logical OR of two BOOLEAN expressions." - parameters: - - - description: "Boolean input expression." - kind: "REGULAR" - name: "bool_expression1" - type: "BOOLEAN" - - - description: "Boolean input expression." - kind: "REGULAR" - name: "bool_expression2" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT BOOLEANOR(TRUE, FALSE)" - result: "TRUE" -BROUND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "BROUND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -BTRIM: - description: "Trims leading and trailing characters from a STRING.\n" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "BTRIM" - signatures: - - - description: "Returns the trimmed text" - parameters: - - - description: "The character string expression to be trimmed" - kind: "REGULAR" - name: "expression_to_trim" - type: "CHARACTERS" - - - description: "Leading and trailing characters to trim from the input expression. If this parameter is not specified, then the spaces will be trimmed from the input expression." 
- kind: "OPTIONAL" - name: "trim_text" - type: "CHARACTERS" - returnType: "STRING" - sampleCodes: - - - call: "SELECT BTRIM(' dremio ')" - result: "dremio" - - - call: "SELECT BTRIM('~/~/~/dremio~', '~')" - result: "/~/~/dremio" - - - call: "SELECT BTRIM('---dremio-', '-')" - result: "dremio" - - - call: "SELECT BTRIM('STRINGvalue', 'STRING')" - result: "value" -BYTESUBSTRING: - description: "" - dremioVersion: "" - functionCategories: [] - name: "BYTESUBSTRING" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -BYTE_SUBSTR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "BYTE_SUBSTR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CARDINALITY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CARDINALITY" - signatures: [] -CAST: - description: "Converts a value of one data type to another data type. This function behaves similarly to the TO_ (i.e. TO_TIMESTAMP) functions.\n" - dremioVersion: "1.0+" - functionCategories: - - "CONVERSION" - name: "CAST" - signatures: - - - description: "Returns the input expression as the type specified for the data_type parameter" - parameters: - - - description: "The expression that you want to convert" - kind: "REGULAR" - name: "expression" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "SELECT CAST(3.14150 AS INTEGER)" - result: "3" - - - call: "SELECT CAST(.167 AS INTEGER)" - result: "2" - - - call: "SELECT CAST('2021-04-03' AS DATE)" - result: "2021-04-03" -CASTBIGINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTBIGINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CASTBIT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTBIT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -CASTBOOLEAN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTBOOLEAN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -CASTDATE: - description: 
"" - dremioVersion: "" - functionCategories: [] - name: "CASTDATE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -CASTDECIMAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTDECIMAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" -CASTDECIMALNULLONOVERFLOW: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTDECIMALNULLONOVERFLOW" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" -CASTFLOAT4: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTFLOAT4" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" -CASTFLOAT8: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTFLOAT8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - 
sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CASTINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CASTINTERVALDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTINTERVALDAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTINTERVALYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTINTERVALYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTNULLABLEBIGINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTNULLABLEBIGINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CASTNULLABLEINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTNULLABLEINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CASTNULLABLEINTERVALDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTNULLABLEINTERVALDAY" - signatures: - - - description: "" - parameters: - - - description: "" - 
format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTNULLABLEINTERVALYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTNULLABLEINTERVALYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTTIME: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTTIME" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" -CASTTIMESTAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTTIMESTAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CASTTOUNION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTTOUNION" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTUNION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTUNION" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CASTVARBINARY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTVARBINARY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - 
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CASTVARCHAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CASTVARCHAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CBRT: - description: "Computes the cube root of a numeric expression\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "CBRT" - signatures: - - - description: "Returns the cubic root of the input expression" - parameters: - - - description: "The number for which you want to compute the cube root." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT CBRT(8)" - result: "2.0" - - - call: "SELECT CBRT(120)" - result: "4.932424148660941" - - - call: "SELECT CBRT(99.5)" - result: "4.633839922986558" -CEIL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CEIL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CEILING: - description: "Returns the nearest equal or larger value of the input expression. Can also be called using CEIL()." 
- dremioVersion: "" - functionCategories: [] - name: "CEILING" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CHARACTER_LENGTH: - description: "Returns the character length of the input expression.\n" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CHARACTER_LENGTH" - signatures: - - - description: "Returns the number of characters of the input expression" - parameters: - - - description: "The expression to determine character length for" - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "INT" - sampleCodes: - - - call: "SELECT CHAR_LENGTH('get the char length')" - result: "19" - - - call: "SELECT CHAR_LENGTH('DREMIO')" - result: "6" -CHARSUBSTRING: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CHARSUBSTRING" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CHAR_LENGTH: - description: "Returns the character length of the input expression.\n" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CHAR_LENGTH" - signatures: - - - description: "Returns the number of characters of the input expression" - parameters: - - - description: "The expression to determine character length for" - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "INT" - sampleCodes: - - - call: "SELECT CHAR_LENGTH('get the char length')" - result: "19" - - - call: "SELECT CHAR_LENGTH('DREMIO')" - result: "6" -CHR: - description: "Converts a Unicode code pointNUMERICo the character that matches the input Unicode character. If an invalid code point is specified, an empty STRING is returned." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CHR" - signatures: - - - description: "Returns the Unicode character." - parameters: - - - description: "Unicode code point to convert to character." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT CHR(72)" - result: "H" - - - call: "SELECT CHR(33)" - result: "" -CLASSIFIER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CLASSIFIER" - signatures: - - - description: "" - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -COALESCE: - description: "Evaluates the arguments in order and returns the value of the first expression that does not contain `NULL`." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - name: "COALESCE" - signatures: - - - description: "" - parameters: - - - description: "A combination of symbols and operators that the database evaluates to obtain a single data value. Expressions can be a single constant, variable, column, or scalar function." 
- kind: "VARARG" - name: "expressions" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "SELECT COALESCE(address1, address2, city, state, zipCode)\nFROM customers\n" - result: "123 Main Street\n" -COLLECT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COLLECT" - signatures: [] -COMPARETYPE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COMPARETYPE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -COMPARE_TO_NULLS_HIGH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COMPARE_TO_NULLS_HIGH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -COMPARE_TO_NULLS_LOW: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COMPARE_TO_NULLS_LOW" - signatures: - 
- - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CONCAT: - description: "Concatenates two or more strings.\n" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CONCAT" - signatures: - - - description: "Returns the concatenated STRING. If one of the inputs is null, then the concatenation returns null." 
- parameters: - - - description: "First STRING expression" - kind: "REGULAR" - name: "first" - type: "STRING" - - - description: "Second STRING expression" - kind: "OPTIONAL" - name: "second" - type: "STRING" - - - description: "Variable amount of string expressions" - kind: "VARARG" - name: "varadicStrings" - type: "STRING" - returnType: "STRING" - sampleCodes: - - - call: "SELECT CONCAT('CON', 'CAT')" - result: "CONCAT" - - - call: "SELECT CONCAT('con', 'cat', NULL)" - result: "" -CONCATOPERATOR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONCATOPERATOR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CONCAT_WS: - description: "Concatenate with separator. Returns a string resulting from the joining of two or more string values in an end-to-end manner. Uses the first argument as the separator between each string." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CONCAT_WS" - signatures: - - - description: "Concatenate with separator. Returns a string resulting from the joining of two or more string values in an end-to-end manner. Uses the first argument as the separator between each string." - parameters: - - - description: "The separator to use when concatenating the strings " - kind: "REGULAR" - name: "separator" - type: "CHARACTERS" - - - description: "The first string to concatenate." - kind: "REGULAR" - name: "first" - type: "CHARACTERS" - - - description: "The second string to concatenate." - kind: "REGULAR" - name: "second" - type: "CHARACTERS" - - - description: "A variable number of strings to concatenate." - kind: "VARARG" - name: "varadicString" - type: "CHARACTERS" - returnType: "STRING" - sampleCodes: - - - call: "SELECT CONCAT_WS('-', 'cat', 'dog', 'bird')\n" - result: "cat-dog-bird\n" -CONTAINS: - description: "Returns TRUE if the first expression contains the second expression." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "CONTAINS" - signatures: - - - description: "Returns TRUE if we can find 'needle' in the 'haystack'." - parameters: - - - description: "The expression to search in." - kind: "REGULAR" - name: "haystack" - type: "STRING" - - - description: "The expression to search for." 
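CONCAT_WS above differs from CONCAT in that its first argument is a separator placed between each of the remaining strings. A short sketch under the same assumptions (arbitrary literals):

SELECT CONCAT_WS(', ', 'red', 'green', 'blue')  -- separator goes between values, not after the last
-- expected result: red, green, blue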
- kind: "REGULAR" - name: "needle" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT CONTAINS('dremio sql lakehouse', 'lake')" - result: "TRUE" -CONVERT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT" - signatures: [] -CONVERT_FROM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROM" - signatures: - - - description: "" - parameters: [] - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMBIGINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMBIGINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMBIGINT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMBIGINT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMBIGINT_HADOOPV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMBIGINT_HADOOPV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMBOOLEAN_BYTE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMBOOLEAN_BYTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMDATE_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMDATE_EPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMDATE_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMDATE_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMDOUBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMDOUBLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMDOUBLE_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMDOUBLE_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMFLOAT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMFLOAT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMFLOAT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMFLOAT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" 
- name: "" - type: "BYTES" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMINT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMINT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMINT_HADOOPV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMINT_HADOOPV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMJSON: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMJSON" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIMESTAMP_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIMESTAMP_EPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIMESTAMP_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIMESTAMP_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIMESTAMP_IMPALA: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIMESTAMP_IMPALA" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIMESTAMP_IMPALA_LOCALTIMEZONE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIMESTAMP_IMPALA_LOCALTIMEZONE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIME_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIME_EPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMTIME_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMTIME_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" -CONVERT_FROMUTF8: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_FROMUTF8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - 
returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CONVERT_REPLACEUTF8: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_REPLACEUTF8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CONVERT_TIMEZONE: - description: "Convert TIMESTAMP to the specified timezone\n" - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - - "CONVERSION" - name: "CONVERT_TIMEZONE" - signatures: - - - description: "Convert a date/time object (in UTC) to a specified timezone." - parameters: - - - description: "The timezone to convert to." - kind: "REGULAR" - name: "destinationTimezone" - type: "STRING" - - - description: "The date, timestamp, or string in ISO 8601 format to convert" - kind: "REGULAR" - name: "date" - type: "DATEANDTIME" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT CONVERT_TIMEZONE('America/Los_Angeles', 'America/New_York', '2021-04-01 15:27:32')" - result: "2021-04-01 18:27:32.000" - - - call: "SELECT CONVERT_TIMEZONE('America/Los_Angeles', 'America/New_York', TIMESTAMP '2021-04-01 15:27:32');" - result: "2021-04-01 18:27:32.000" - - - call: "SELECT CONVERT_TIMEZONE('PST', 'EST', '2021-04-01 15:27:32')" - result: "2021-04-01 18:27:32.000" - - - call: "SELECT CONVERT_TIMEZONE('America/Los_Angeles', 'America/New_York', '2021-04-01')" - result: "2021-04-01 03:00:00.000" - - - call: "SELECT CONVERT_TIMEZONE('America/Los_Angeles', 'America/New_York', DATE '2021-04-01')" - result: "2021-04-01 03:00:00.000" - - - call: "SELECT CONVERT_TIMEZONE('EDT', '2021-04-01 15:27:32')" - result: "2021-04-01 11:27:32.000" - - - call: "SELECT CONVERT_TIMEZONE('PST', '+02:00', '2021-04-01 15:27:32')" - result: "2021-04-02 01:27:32.000" -CONVERT_TO: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TO" - signatures: - - - description: "" - parameters: [] - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOBIGINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOBIGINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOBIGINT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOBIGINT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOBIGINT_HADOOPV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOBIGINT_HADOOPV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOBOOLEAN_BYTE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOBOOLEAN_BYTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TODATE_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TODATE_EPOCH" - signatures: - - - 
description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TODATE_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TODATE_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TODOUBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TODOUBLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TODOUBLE_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TODOUBLE_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOEXTENDEDJSON: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOEXTENDEDJSON" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOFLOAT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOFLOAT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOFLOAT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOFLOAT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOINT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOINT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOINT_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOINT_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOINT_HADOOPV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOINT_HADOOPV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOJSON: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOJSON" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOSIMPLEJSON: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOSIMPLEJSON" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" 
-CONVERT_TOTIMESTAMP_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOTIMESTAMP_EPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOTIMESTAMP_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOTIMESTAMP_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOTIME_EPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOTIME_EPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOTIME_EPOCH_BE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOTIME_EPOCH_BE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CONVERT_TOUTF8: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CONVERT_TOUTF8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -CORR: - description: "Calculates the coefficient of correlation of the values expression1 and expression2. The function name must be enclosed in double quotes (\\\"CORR\\\"). \n" - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - name: "CORR" - signatures: - - - description: "Returns the coefficient of correlation of the two input expressions." - parameters: - - - description: "The first expression." - kind: "REGULAR" - name: "expression1" - type: "NUMERIC" - - - description: "The second expression."
- kind: "REGULAR" - name: "expression2" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "SELECT weight_class, CORR(list_price, min_price)\n FROM product_information\n GROUP BY weight_class\n" - result: "WEIGHT_CLASS CORR(LIST_PRICE,MIN_PRICE) ------------ --------------------------\n 1 .99914795\n 2 .999022941\n 3 .998484472\n 4 .999359909\n 5 .999536087\n" -CORRELATION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CORRELATION" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - 
kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -COS: - description: "Computes the cosine of a value in radians\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "COS" - signatures: - - - description: "Returns a value expressed as a floating point number (Float8)" - parameters: - - - description: "The number in radians." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT COS(0)" - result: "1.0" - - - call: "SELECT COS(1.0)" - result: "0.5403023058681398" - - - call: "SELECT COS(-1)" - result: "0.5403023058681398" -COSH: - description: "Computes the hyperbolic cosine of a value in radians.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "COSH" - signatures: - - - description: "Returns a value expressed as a floating point number." - parameters: - - - description: "The number in radians." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT COSH(0)" - result: "1.0" - - - call: "SELECT COSH(1.0)" - result: "1.543080634815244" - - - call: "SELECT COSH(-1)" - result: "1.543080634815244" -COT: - description: "Computes the cotangent of a value in radians.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "COT" - signatures: - - - description: "Returns a value expressed as a floating point number (Float8)" - parameters: - - - description: "The number in radians." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT COS(0)" - result: "1.0" - - - call: "SELECT COS(1.0)" - result: "0.5403023058681398" - - - call: "SELECT COS(-1)" - result: "0.5403023058681398" -COUNT: - description: "Returns the total number of records for the specified expression." 
- dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "COUNT" - signatures: - - - description: "Returns the total number of records in the dataset." - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "SELECT COUNT(*) FROM EMP" - result: "93456745" - snippetOverride: "COUNT(*)" - - - description: "Returns the total number of records for the specified expression." - parameters: - - - description: "Expression to evaluate number of records for." - kind: "REGULAR" - name: "expression" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "SELECT COUNT(column_name) FROM my_table" - result: "89" - snippetOverride: "COUNT(${1|ALL,DISTINCT|} ${2:value})" -COVARIANCE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COVARIANCE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" 
- name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -COVAR_POP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COVAR_POP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -COVAR_SAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "COVAR_SAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CRC32: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CRC32" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -CUME_DIST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CUME_DIST" - signatures: - - - description: "" - parameters: [] - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -CURRENT_CATALOG: - description: "" - dremioVersion: "" - functionCategories: [] - name: "CURRENT_CATALOG" - signatures: - - - description: "" - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -CURRENT_DATE: - description: "Returns the 
-CURRENT_DATE:
-  description: "Returns the current DATE of the system."
-  dremioVersion: "1.0+"
-  functionCategories: ["DATETIME", "CONTEXT"]
-  name: "CURRENT_DATE"
-  signatures:
-  -
-    description: "Returns the current DATE of the system."
-    parameters: []
-    returnType: "DATE"
-    sampleCodes:
-    - {call: "SELECT CURRENT_DATE()", result: "2021-07-02"}
-    - {call: "SELECT CURRENT_DATE", result: "2021-07-02"}
-CURRENT_DATE_UTC:
-  description: "Returns the current DATE of the system in the UTC time zone."
-  dremioVersion: "1.0+"
-  functionCategories: ["CONTEXT", "DATETIME"]
-  name: "CURRENT_DATE_UTC"
-  signatures:
-  -
-    description: "Returns the current DATE of the system in the UTC time zone."
-    parameters: []
-    returnType: "DATE"
-    sampleCodes:
-    - {call: "SELECT CURRENT_DATE_UTC()", result: "2021-07-02"}
-    - {call: "SELECT CURRENT_DATE_UTC", result: "2021-07-02"}
-CURRENT_PATH:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "CURRENT_PATH"
-  signatures:
-  - {description: "", parameters: [], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-CURRENT_ROLE:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "CURRENT_ROLE"
-  signatures:
-  - {description: "", parameters: [], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-CURRENT_SCHEMA:
-  description: "Returns the path/schema in use by the current session."
-  dremioVersion: "1.0+"
-  functionCategories: ["CONTEXT"]
-  name: "CURRENT_SCHEMA"
-  signatures:
-  -
-    description: "Returns the path/schema in use by the current session."
-    parameters: []
-    returnType: "CHARACTERS"
-    sampleCodes:
-    - {call: "SELECT CURRENT_SCHEMA", result: "Samples.\"samples.dremio.com\""}
-CURRENT_TIME:
-  description: "Returns the current TIME for the system.\n"
-  dremioVersion: "1.0+"
-  functionCategories: ["CONTEXT", "DATETIME"]
-  name: "CURRENT_TIME"
-  signatures:
-  -
-    description: "Returns the current TIME for the system."
-    parameters: []
-    returnType: "TIME"
-    sampleCodes:
-    - {call: "SELECT CURRENT_TIME()", result: "06:04:31"}
-    - {call: "SELECT CURRENT_TIME", result: "06:04:31"}
-CURRENT_TIMESTAMP:
-  description: "Returns the current TIMESTAMP for the system.\n"
-  dremioVersion: "1.0+"
-  functionCategories: ["CONTEXT", "DATETIME"]
-  name: "CURRENT_TIMESTAMP"
-  signatures:
-  -
-    description: "Returns the current TIMESTAMP for the system."
-    parameters: []
-    returnType: "TIMESTAMP"
-    sampleCodes:
-    - {call: "SELECT CURRENT_TIMESTAMP()", result: "2021-06-24 06:11:51.567"}
-    - {call: "SELECT CURRENT_TIMESTAMP", result: "2021-06-24 06:11:51.567"}
-CURRENT_TIMESTAMP_UTC:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "CURRENT_TIMESTAMP_UTC"
-  signatures:
-  - {description: "", parameters: [], returnType: "TIMESTAMP", sampleCodes: [{call: "", result: ""}]}
-CURRENT_TIME_UTC:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "CURRENT_TIME_UTC"
-  signatures:
-  - {description: "", parameters: [], returnType: "TIME", sampleCodes: [{call: "", result: ""}]}
-CURRENT_USER:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "CURRENT_USER"
-  signatures:
-  - {description: "", parameters: [], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-DATEDIFF:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "DATEDIFF"
-  signatures:
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "TIMESTAMP"}, {description: "", format: "", kind: "REGULAR", name: "", type: "TIMESTAMP"}], returnType: "INT", sampleCodes: [{call: "", result: ""}]}
-DATETYPE:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "DATETYPE"
-  signatures:
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "STRING"}], returnType: "DATE", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}], returnType: "DATE", sampleCodes: [{call: "", result: ""}]}
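Note: DATEDIFF is a stub typed (TIMESTAMP, TIMESTAMP) -> INT. Assuming it returns the
whole-day difference between its arguments (the usual reading of this name, unverified):

    SELECT DATEDIFF(TIMESTAMP '2021-06-24 00:00:00', TIMESTAMP '2021-06-20 00:00:00')  -- 4, assumed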
- kind: "REGULAR" - name: "nrDays" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "SELECT DATE_add(to_DATE('2021-04-01'), 2)" - result: "2021-04-03" - - - description: "Add or subtract days (whole or fractional) to TIMESTAMP value encoded as a STRING" - parameters: - - - description: "String-formatted TIMESTAMP, of the form 'YYYY-MM-DD HH24:MI:SS'" - kind: "REGULAR" - name: "givenTimestamp" - type: "STRING" - - - description: "Floating point (fractional) number of days to be added (or subtracted) from the givenDate. To subtract days, pass a negative number." - kind: "REGULAR" - name: "nrDays" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT DATE_add('2021-04-01 01:00:00.000', 2.5)" - result: "2021-04-03 13:00:00.000" - - - description: "Add or subtract days (whole or fractional) to TIMESTAMP value" - parameters: - - - description: "Date value to which to add days (either a database column in DATE format, or literal value explicitly converted to DATE)" - kind: "REGULAR" - name: "givenTimestamp" - type: "TIMESTAMP" - - - description: "Floating point (fractional) number of days to be added (or subtracted) from the givenDate. To subtract days, pass a negative number." - kind: "REGULAR" - name: "nrDays" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT DATE_add(to_TIMESTAMP('2021-04-01 01:00:00.000'), 2.5)" - result: "2021-04-03 13:00:00.000" - - - description: "Add or subtract anNUMERICerval to/from a DATE value encoded as a STRING" - parameters: - - - description: "String-formatted DATE, of the form 'YYYY-MM-DD'" - kind: "REGULAR" - name: "givenDate" - type: "STRING" - - - description: "Single-quoted STRING representing a TIMENUMERICerval. Expressed as a number, followed by one of the following STRINGs - 'year', 'quarter', 'month', 'week', 'day', 'hour', 'minute', 'second'" - kind: "REGULAR" - name: "TIMEInterval" - type: "STRING" - returnType: "DATE" - sampleCodes: - - - call: "SELECT DATE_add('2021-04-01', '2 days')" - result: "2021-04-03" - - - description: "Add or subtract anNUMERICerval to/from a DATE value encoded as a DATE" - parameters: - - - description: "String-formatted DATE, of the form 'YYYY-MM-DD'" - kind: "REGULAR" - name: "givenDate" - type: "DATE" - - - description: "Single-quoted STRING representing a TIMENUMERICerval. Expressed as a number, followed by one of the following STRINGs - 'year', 'quarter', 'month', 'week', 'day', 'hour', 'minute', 'second'" - kind: "REGULAR" - name: "TIMEInterval" - type: "STRING" - returnType: "DATE" - sampleCodes: - - - call: "SELECT DATE_add(to_DATE('2021-04-01'), '2 days')" - result: "2021-04-03" - - - description: "Add or subtract anNUMERICerval to/from a TIMESTAMP value encoded as a STRING" - parameters: - - - description: "String-formatted TIMESTAMP, of the form 'YYYY-MM-DD HH24:MI:SS'" - kind: "REGULAR" - name: "givenTimestamp" - type: "STRING" - - - description: "Single-quoted STRING representing a TIMENUMERICerval. 
-DATE_DIFF:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "DATE_DIFF"
-  signatures:
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "DATE"}, {description: "", format: "", kind: "REGULAR", name: "", type: "INT"}], returnType: "DATE", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "DATE"}, {description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}], returnType: "DATE", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "TIME"}, {description: "", format: "", kind: "REGULAR", name: "", type: "INT"}], returnType: "TIME", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "TIME"}, {description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}], returnType: "TIME", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "TIMESTAMP"}, {description: "", format: "", kind: "REGULAR", name: "", type: "INT"}], returnType: "TIMESTAMP", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "TIMESTAMP"}, {description: "", format: "", kind: "REGULAR", name: "", type: "BIGINT"}], returnType: "TIMESTAMP", sampleCodes: [{call: "", result: ""}]}
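Note: DATE_DIFF's stub signatures mirror the whole-day DATE_ADD forms, which suggests it
subtracts a number of days rather than diffing two dates; a hedged sketch:

    SELECT DATE_DIFF(DATE '2021-04-03', 2)  -- 2021-04-01, assuming day subtraction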
- format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_PART: - description: "Return subfields such as year or hour from date or timestamp values." - dremioVersion: "" - functionCategories: [] - name: "DATE_PART" - signatures: [] -DATE_SUB: - description: "Subtracts the number of days from the specified DATE or TIMESTAMP.\n" - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - name: "DATE_SUB" - signatures: - - - description: "Returns the calculated DATE value." - parameters: - - - description: "The DATE or TIMESTAMP to subtract days from. This value must be a literal in the DATE or TIMESTAMP format." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "TIMESTAMP" - - - description: "Number of days to subtract from DATE_expression" - kind: "REGULAR" - name: "days" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "SELECT DATE_SUB('2021-06-05', 5)" - result: "2021-05-31" - - - call: "SELECT DATE_SUB('2021-06-24 06:11:51.567', 4)" - result: "2021-06-20" - - - call: "SELECT DATE_SUB('2021-05-28', 10)" - result: "2021-05-18" - - - description: "Returns the calculated DATE value." - parameters: - - - description: "The DATE or TIMESTAMP to subtract days from. This value must be a literal in the DATE or TIMESTAMP format." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "DATE" - - - description: "Number of days to subtract from DATE_expression" - kind: "REGULAR" - name: "days" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "SELECT DATE_SUB('2021-06-05', 5)" - result: "2021-05-31" - - - call: "SELECT DATE_SUB('2021-06-24 06:11:51.567', 4)" - result: "2021-06-20" - - - call: "SELECT DATE_SUB('2021-05-28', 10)" - result: "2021-05-18" -DATE_TRUNC: - description: "Truncates the DATE or TIMESTAMP to the indicated precision.\n" - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - name: "DATE_TRUNC" - signatures: - - - description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP." - parameters: - - - description: "The TIME unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'." - kind: "REGULAR" - name: "TIME_unit" - type: "STRING" - - - description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "BYTES" - returnType: "ANY" - sampleCodes: [] - - - description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP." - parameters: - - - description: "The TIME unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'." - kind: "REGULAR" - name: "TIME_unit" - type: "BOOLEAN" - - - description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "BYTES" - returnType: "ANY" - sampleCodes: [] - - - description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP." - parameters: - - - description: "The TIME unit that the DATE or TIMESTAMP needs to be truncated at. 
-DATE_TRUNC:
-  description: "Truncates the DATE or TIMESTAMP to the indicated precision.\n"
-  dremioVersion: "1.0+"
-  functionCategories: ["DATETIME"]
-  name: "DATE_TRUNC"
-  signatures:
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "STRING"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BOOLEAN"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BYTES"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BOOLEAN"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BYTES"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BYTES"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "NUMERIC"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BYTES"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "STRING"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The time unit that the DATE or TIMESTAMP needs to be truncated at. This must be the literal value of 'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', or 'SECOND'.", kind: "REGULAR", name: "TIME_unit", type: "NUMERIC"}
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
-  -
-    description: "Returns the truncated DATE. Will return a TIMESTAMP if the DATE_TIMESTAMP_expression is cast as a TIMESTAMP."
-    parameters:
-    - {description: "The DATE or TIMESTAMP to truncate. This value must be a literal in the DATE or TIMESTAMP format.", kind: "REGULAR", name: "DATE_TIMESTAMP_expression", type: "BYTES"}
-    returnType: "ANY"
-    sampleCodes: []
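Note: every DATE_TRUNC signature above has an empty sampleCodes list. An illustration of
the described behavior:

    SELECT DATE_TRUNC('MONTH', DATE '2021-04-15')  -- 2021-04-01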
- kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "BYTES" - returnType: "ANY" - sampleCodes: [] -DATE_TRUNC_CENTURY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_CENTURY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_DAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_DAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_DECADE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_DECADE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_HOUR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_HOUR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_MILLENNIUM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_MILLENNIUM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_MINUTE: - description: "" - dremioVersion: "" - functionCategories: [] - 
name: "DATE_TRUNC_MINUTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_MONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_MONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_QUARTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_QUARTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_SECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_SECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_WEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_WEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DATE_TRUNC_YEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DATE_TRUNC_YEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: 
"" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DAYOFMONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DAYOFMONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -DAYOFWEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DAYOFWEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -DAYOFYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DAYOFYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -DEGREES: - description: "Converts radians to degrees.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "DEGREES" - signatures: - - - description: "Returns the value in degrees." - parameters: - - - description: "The number in radians." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT DEGREES(PI())" - result: "180.0" - - - call: "SELECT DEGREES(0)" - result: "0.0" - - - call: "SELECT DEGREES(1)" - result: "57.29577951308232" -DENSE_RANK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DENSE_RANK" - signatures: - - - description: "" - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -DIV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DIV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -DIVIDE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "DIVIDE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -E: - description: "Returns Euler's number, a constant approximately equal to 2.718281828459045.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "E" - signatures: - - - description: "Returns e = 2.718281828459045." 
-E:
-  description: "Returns Euler's number, a constant approximately equal to 2.718281828459045.\n"
-  dremioVersion: "1.0+"
-  functionCategories: ["MATH"]
-  name: "E"
-  signatures:
-  -
-    description: "Returns e = 2.718281828459045."
-    parameters: []
-    returnType: "NUMERIC"
-    sampleCodes:
-    - {call: "SELECT E()", result: "2.718281828459045"}
-ELEMENT:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "ELEMENT"
-  signatures: []
-ELT:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "ELT"
-  signatures:
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "INT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "OPTIONAL", name: "", type: "CHARACTERS"}], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "INT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "OPTIONAL", name: "", type: "CHARACTERS"}], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "INT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "OPTIONAL", name: "", type: "CHARACTERS"}], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "INT"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "OPTIONAL", name: "", type: "CHARACTERS"}], returnType: "CHARACTERS", sampleCodes: [{call: "", result: ""}]}
-ENDS_WITH:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "ENDS_WITH"
-  signatures:
-  - {description: "", parameters: [{description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}, {description: "", format: "", kind: "REGULAR", name: "", type: "CHARACTERS"}], returnType: "BOOLEAN", sampleCodes: [{call: "", result: ""}]}
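Note: ELT and ENDS_WITH are stubs. Assuming MySQL-style ELT (return the N-th string
argument) and a plain suffix test for ENDS_WITH:

    SELECT ELT(2, 'a', 'b', 'c')       -- 'b', assumed semantics
    SELECT ENDS_WITH('dremio', 'mio')  -- true, assumed semantics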
- - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -EQUAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EQUAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - 
format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -EVERY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EVERY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXP: - description: "Calculates Euler's number, e, raised to the power of the specified value.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "EXP" - signatures: - - - description: "Returns the value in degrees." - parameters: - - - description: "The exponent value to raise e to." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT EXP(1)" - result: "2.718281828459045" - - - call: "SELECT EXP(10.0)" - result: "22026.465794806718" -EXTRACT: - description: "Extracts the specified date or time part from the date or timestamp." 
- dremioVersion: "" - functionCategories: [] - name: "EXTRACT" - signatures: [] -EXTRACTCENTURY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTCENTURY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTDAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTDECADE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTDECADE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTDOW: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTDOW" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTDOY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTDOY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTEPOCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTEPOCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTHOUR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTHOUR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTMILLENNIUM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTMILLENNIUM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - 
- - call: "" - result: "" -EXTRACTMINUTE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTMINUTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTMONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTMONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTQUARTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTQUARTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTSECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTSECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTWEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTWEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -EXTRACTYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "EXTRACTYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -FACTORIAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FACTORIAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -FIRST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FIRST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" 
- kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -FIRST_MATCHING_MAP_ENTRY_FOR_KEY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FIRST_MATCHING_MAP_ENTRY_FOR_KEY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - 
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -FIRST_VALUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FIRST_VALUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -FLATTEN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FLATTEN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -FLOOR: - description: "Returns the value from the specified expression rounded to the nearest equal or smaller number.\n" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "FLOOR" - signatures: - - - description: "Returns the value from the specified expression rounded to the nearest equal or smaller number." - parameters: - - - description: "The number for which you want to compute the floor of." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "SELECT FLOOR(0)" - result: "0" - - - call: "SELECT FLOOR(45.76)" - result: "45" - - - call: "SELECT FLOOR(-1.3)" - result: "-2" -FROM_HEX: - description: "Returns a BINARY value for the given hexadecimal STRING" - dremioVersion: "1.0+" - functionCategories: - - "CONVERSION" - - "CHARACTER" - - "BINARY" - name: "FROM_HEX" - signatures: - - - description: "Returns a BINARY value for the given hexadecimal STRING." 
- parameters: - - - description: "A hexadecimal string" - kind: "REGULAR" - name: "input" - type: "CHARACTERS" - returnType: "BYTES" - sampleCodes: - - - call: "select from_hex('3fd98a3c')" - result: "P9mKPA==" -FROM_UTC_TIMESTAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FROM_UTC_TIMESTAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -FUSION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "FUSION" - signatures: [] -GEO_BEYOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GEO_BEYOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -GEO_DISTANCE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GEO_DISTANCE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -GEO_NEARBY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GEO_NEARBY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -GREATER_THAN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GREATER_THAN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - 
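A note on the FROM_HEX sample above: the BYTES result is displayed in base64, and the four bytes 0x3F 0xD9 0x8A 0x3C do encode to "P9mKPA==", so the sample call and its result are mutually consistent.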
kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -GREATER_THAN_OR_EQUAL_TO: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GREATER_THAN_OR_EQUAL_TO" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: 
"" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -GREATEST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GREATEST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -GROUPING: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GROUPING" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -GROUPING_ID: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GROUPING_ID" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - 
returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -GROUP_ID: - description: "" - dremioVersion: "" - functionCategories: [] - name: "GROUP_ID" - signatures: - - - description: "" - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -HASH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -HASH32: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASH32" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -HASH32ASDOUBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASH32ASDOUBLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -HASH64: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASH64" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -HASH64ASDOUBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASH64ASDOUBLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -HASHMD5: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASHMD5" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -HASHSHA1: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASHSHA1" - signatures: 
- - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -HASHSHA256: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASHSHA256" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -HASHSHA512: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HASHSHA512" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -HEX: - name: "HEX" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "STRING" - returnType: "CHARACTERS" - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "INT" - returnType: "CHARACTERS" - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "BIGINT" - returnType: "CHARACTERS" -HLL: - description: "Uses HyperLogLog to return an approximation of the distinct cardinality of the input." 
- dremioVersion: "" - functionCategories: [] - name: "HLL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -HLL_DECODE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HLL_DECODE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -HLL_MERGE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HLL_MERGE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -HLL_V2: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HLL_V2" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -HOP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HOP" - signatures: [] -HOP_END: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HOP_END" - signatures: [] -HOP_START: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HOP_START" - signatures: [] -HOUR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "HOUR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -ICEBERGDISTRIBUTEBYPARTITION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ICEBERGDISTRIBUTEBYPARTITION" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -ILIKE: - description: "Determines if a string matches a pattern similar to the LIKE function. Unlike the LIKE function, the matching is case insensitive." - dremioVersion: "1.0+" - functionCategories: - - "BINARY" - - "CHARACTER" - name: "ILIKE" - signatures: - - - description: "Returns true if the subject matches the pattern with the optional escape. False otherwise." 
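HLL above is described as a HyperLogLog aggregate that returns a BYTES sketch; HLL_MERGE (BYTES -> BYTES) and HLL_DECODE (BYTES -> BIGINT) have empty descriptions but, going by their signatures, merge sketches and turn a sketch into its distinct-count estimate. A sketch of the usual flow, with invented table and column names:

    SELECT event_date, HLL(user_id) AS sketch  -- ANY -> BYTES
    FROM events GROUP BY event_date

    SELECT HLL_DECODE(HLL_MERGE(sketch))       -- BYTES -> BYTES -> BIGINT estimate
    FROM daily_sketches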
- parameters: - - - description: "The subject to match." - kind: "REGULAR" - name: "subject" - type: "CHARACTERS" - - - description: "The pattern to match." - kind: "REGULAR" - name: "pattern" - type: "CHARACTERS" - - - description: "Character(s) inserted in front of a wildcard character to indicate that the wildcard should be interpreted as a regular character and not as a wildcard." - kind: "OPTIONAL" - name: "escape" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT ILIKE('Dremio', 'dremio')" - result: "true" - - - call: "SELECT ILIKE('dremio', 'warehouse')" - result: "false" -IMAXDIR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IMAXDIR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -IMINDIR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IMINDIR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -INITCAP: - description: "Returns the input STRING with the first letter of each word in uppercase and the subsequent letters in lowercase." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "INITCAP" - signatures: - - - description: "Returns the STRING with the first letter of each word in uppercase and the subsequent letters in lowercase." - parameters: - - - description: "Input STRING."
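The ILIKE signature above includes an OPTIONAL escape parameter that neither sample call exercises. A three-argument call consistent with that signature (the strings are an illustrative assumption):

    SELECT ILIKE('100% sure', '100!% sure', '!')  -- '!' escapes '%', so it matches literally: true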
- kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT INITCAP('a guide to data lakehouses')" - result: "A Guide To Data Lakehouses" - - - call: "SELECT INITCAP('a guide to data lakeHouses')" - result: "A Guide To Data Lakehouses" -INSTR: - name: "INSTR" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - returnType: "INT" -INTERVAL_DAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "INTERVAL_DAY" - signatures: [] -INTERVAL_YEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "INTERVAL_YEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -IN_FENNEL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IN_FENNEL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: 
"BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -IS DISTINCT FROM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS DISTINCT FROM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS FALSE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS FALSE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" 
- sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS NOT DISTINCT FROM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS NOT DISTINCT FROM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS NOT FALSE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS NOT FALSE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: 
- - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS NOT NULL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS NOT NULL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS NOT TRUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS NOT TRUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS NULL: - description: "Determines if an expression is NULL. Returns true if the expression is NULL, and false otherwise. The ISNULL function is an alias for this operator." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - - "BOOLEAN" - name: "IS NULL" - signatures: - - - description: "Returns true if the expression is NULL, and false otherwise." - parameters: - - - description: "Expression of any type to evaluate."
- kind: "REGULAR" - name: "expression" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "select ISNULL('dremio')" - result: "false" - - - call: "select 'dremio' IS NULL" - result: "false" -IS TRUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS TRUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISDATE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ISDATE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISFALSE: - description: "Returns TRUE if the input expression is FALSE." - dremioVersion: "1.0+" - functionCategories: - - "BOOLEAN" - name: "ISFALSE" - signatures: - - - description: "Returns TRUE if the input expression is FALSE." - parameters: - - - description: "Input expression." 
- kind: "REGULAR" - name: "expression" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT FALSE(column_name)" - result: "TRUE" -ISNOTFALSE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ISNOTFALSE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISNOTNULL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ISNOTNULL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISNOTTRUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ISNOTTRUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISNULL: - description: "Determines if an expression is NULL. Returns true if is NULL, and false otherwise. Alias for the operator IS NULL." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - - "BOOLEAN" - name: "ISNULL" - signatures: - - - description: "Returns true if is NULL, and false otherwise." - parameters: - - - description: "Expression of any type to evaluate." 
- kind: "REGULAR" - name: "expression" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "select ISNULL('dremio')" - result: "false" - - - call: "select 'dremio' IS NULL" - result: "false" -ISNUMERIC: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ISNUMERIC" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ISTRUE: - description: "Returns TRUE if the input expression evaluates to TRUE." - dremioVersion: "1.0+" - functionCategories: - - "BOOLEAN" - name: "ISTRUE" - signatures: - - - description: "Returns TRUE if the input expression evaluates to TRUE." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT ISTRUE(1)" - result: "TRUE" - - - description: "Returns TRUE if the input expression evaluates to TRUE." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT ISTRUE(FALSE)" - result: "FALSE" - - - description: "Returns TRUE if the input expression evaluates to TRUE." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT ISTRUE(0)" - result: "FALSE" -IS_BIGINT: - description: "Returns TRUE if the input expression is an big NUMERIC value." - dremioVersion: "1.0+" - functionCategories: - - "DATETYPE" - name: "IS_BIGINT" - signatures: - - - description: "Returns TRUE if the input expression is an big NUMERIC." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT IS_BIGINT(column_name)" - result: "TRUE" -IS_BIT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_BIT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_DATE: - description: "Returns TRUE if the input expression can be cast to a DATE." - dremioVersion: "1.0+" - functionCategories: - - "DATETYPE" - name: "IS_DATE" - signatures: - - - description: "Returns TRUE if the input expression can be cast to a DATE." - parameters: - - - description: "Input expression." 
- kind: "REGULAR" - name: "input" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT IS_DATE('2021-04-22')" - result: "TRUE" -IS_DECIMAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_DECIMAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_DISTINCT_FROM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_DISTINCT_FROM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_FIXEDSIZEBINARY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_FIXEDSIZEBINARY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_FLOAT4: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_FLOAT4" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_FLOAT8: 
- description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_FLOAT8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_INT: - description: "Returns TRUE if the input expression is an INT value." - dremioVersion: "1.0+" - functionCategories: - - "DATETYPE" - name: "IS_INT" - signatures: - - - description: "Returns TRUE if the input expression is an INT." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT IS_INT(column_name)" - result: "TRUE" -IS_INTERVALDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_INTERVALDAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_INTERVALYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_INTERVALYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_LIST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_LIST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_NOT_DISTINCT_FROM: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_NOT_DISTINCT_FROM" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: 
"BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_STRUCT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_STRUCT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_SUBSTR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_SUBSTR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_TIME: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_TIME" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_TIMESTAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_TIMESTAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_UTF8: - description: "Returns whether an expression is valid UTF-8" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "IS_UTF8" - signatures: - - - description: "Returns whether an expression is valid UTF-8" - parameters: - - - description: "an expression" - kind: "REGULAR" - name: "in" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "select is_utf8('hello')" - result: "true" -IS_VARBINARY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "IS_VARBINARY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -IS_VARCHAR: - description: "Returns TRUE if the input expression is a CHARACTER value." - dremioVersion: "1.0+" - functionCategories: - - "DATETYPE" - name: "IS_VARCHAR" - signatures: - - - description: "Returns TRUE if the input expression is a CHARACTER value." - parameters: - - - description: "Input expression." 
- kind: "REGULAR" - name: "expression" - type: "BYTES" - returnType: "BOOLEAN" - sampleCodes: - - - call: "SELECT IS_CHARACTER(column_name)" - result: "TRUE" -ITEMS_SKETCH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -ITEMS_SKETCH_MERGE_BOOLEAN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH_MERGE_BOOLEAN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -ITEMS_SKETCH_MERGE_DOUBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH_MERGE_DOUBLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -ITEMS_SKETCH_MERGE_LONG: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH_MERGE_LONG" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - 
result: "" -ITEMS_SKETCH_MERGE_NUMBER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH_MERGE_NUMBER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -ITEMS_SKETCH_MERGE_VARCHAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ITEMS_SKETCH_MERGE_VARCHAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -KVGEN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "KVGEN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -LAG: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LAG" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" 
- parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - 
sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - 
description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIMESTAMP" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATE" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -LAST: - description: "" - dremioVersion: "" - functionCategories: [] - 
name: "LAST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -LAST_DAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LAST_DAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -LAST_MATCHING_MAP_ENTRY_FOR_KEY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LAST_MATCHING_MAP_ENTRY_FOR_KEY" - 
signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -LAST_QUERY_ID: - description: "Returns the ID for the most recently executed query in the current session." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "LAST_QUERY_ID" - signatures: - - - description: "Returns the ID for the most recently executed query in the current session." - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT LAST_QUERY_ID()" - result: "1f1ae232-55c0-9df3-caa9-2c52deecf300" -LAST_VALUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LAST_VALUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: 
"TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -LCASE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LCASE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -LEAD: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LEAD" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - 
kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - 
type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIMESTAMP" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - 
type: "CHARACTERS" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "DATE" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -LEAST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LEAST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - 
sampleCodes: - - - call: "" - result: "" -LEFT: - description: "Returns the left-most substring. The function name must be escaped with quotes (\\\"LEFT\\\")." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "LEFT" - signatures: - - - description: "Left-most substring." - parameters: - - - description: "String input parameter" - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "Number of characters on the left to return." - kind: "REGULAR" - name: "length" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT \\\"LEFT\\\"('Dremio - SQL Engine', -12)" - result: "Dremio" - - - call: "SELECT \\\"LEFT\\\"('Dremio - SQL Engine', 6)" - result: "Dremio" -LENGTH: - description: "Returns the length of an input STRING. If the character encoding isn't specified, UTF8 is assumed." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "LENGTH" - signatures: - - - description: "Returns the length of the specified expression." - parameters: - - - description: "String expression to determine the length of." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT LENGTH('DREMIO')" - result: "6" -LENGTHUTF8: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LENGTHUTF8" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -LESS_THAN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LESS_THAN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - -
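# LENGTH above counts characters rather than bytes (with UTF8 assumed when no encoding is
# given), so for multi-byte input the character count can be smaller than the encoded
# size. Illustrative call, not from the generated data:
#   SELECT LENGTH('déjà')  -- 4 characters, though the UTF8 encoding takes 6 bytes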
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -LESS_THAN_OR_EQUAL_TO: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LESS_THAN_OR_EQUAL_TO" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -LEVENSHTEIN: - name: 
"LEVENSHTEIN" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - returnType: "INT" -LIKE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LIKE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -LISTAGG: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LISTAGG" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "STRING" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -LISTAGG_MERGE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LISTAGG_MERGE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -LN: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LN" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -LOCALTIME: - description: "Returns the current TIME for the system." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "LOCALTIME" - signatures: - - - description: "Returns the current TIME for the system." - parameters: [] - returnType: "TIME" - sampleCodes: - - - call: "SELECT LOCALTIME()" - result: "05:07:01" -LOCALTIMESTAMP: - description: "Returns the current TIMESTAMP for the system." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "LOCALTIMESTAMP" - signatures: - - - description: "Returns the current TIMESTAMP for the system." - parameters: [] - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT LOCALTIMESTAMP()" - result: "2021-06-29 05:17:44.703" -LOCAL_LISTAGG: - description: "" - dremioVersion: "" - functionCategories: [] - name: "LOCAL_LISTAGG" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "STRING" - returnType: "LIST" - sampleCodes: - - - call: "" - result: "" -LOCATE: - description: "Searches for the first occurrence of the first argument in the second argument and if found, returns the position the of the first argument in the second argument. The first character in a STRING is position 1. Returns 0 if the substring isn't found in the expression." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "LOCATE" - signatures: - - - description: "Returns the position of the first occurrence of the subSTRING in the expression if found." 
- parameters: - - - description: "The string to search for." - kind: "REGULAR" - name: "needle" - type: "CHARACTERS" - - - description: "The input expression to search in." - kind: "REGULAR" - name: "haystack" - type: "CHARACTERS" - - - description: "Position to start the search from." - kind: "OPTIONAL" - name: "start" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT LOCATE('an','banana', 3)" - result: "4" - - - call: "SELECT LOCATE('no','banana')" - result: "0" - - - call: "SELECT LOCATE('an','banana')" - result: "2" -LOG: - description: "Returns the logarithm of the numeric input expression. If no base is specified, the natural log (ln) will be calculated." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "LOG" - signatures: - - - description: "Returns the logarithm of the numeric input expression as a DOUBLE." - parameters: - - - description: "The base to use." - kind: "OPTIONAL" - name: "base_expression" - type: "NUMERIC" - - - description: "The value for which you want to calculate the log." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "SELECT LOG(20.5, 1.5)" - result: "0.1342410830900514" -LOG10: - description: "Returns the log base 10 of the numeric input expression." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "LOG10" - signatures: - - - description: "Returns the log base 10 of the numeric input expression." - parameters: - - - description: "The value for which you want to calculate the log." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "SELECT LOG10(20.5)" - result: "1.3117538610557542" -LOWER: - description: "Returns the input expression with all the characters converted to lowercase." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "LOWER" - signatures: - - - description: "Returns the input expression with all the characters converted to lowercase." - parameters: - - - description: "String to convert to lowercase." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT LOWER('A GUIDE to data Lakehouses')" - result: "a guide to data lakehouses" -LPAD: - description: "Left pads a STRING with spaces or specified characters to reach the number of characters specified as a parameter." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "LPAD" - signatures: - - - description: "Returns the padded STRING." - parameters: - - - description: "The expression to pad." - kind: "REGULAR" - name: "base_expression" - type: "CHARACTERS" - - - description: "The number of characters to return." - kind: "REGULAR" - name: "length" - type: "NUMERIC" - - - description: "Characters to pad the base_expression with." - kind: "OPTIONAL" - name: "pad_expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT LPAD('parameter', 11)" - result: " parameter" - - - call: "SELECT LPAD('engineering', 6)" - result: "engine" - - - call: "SELECT LPAD('parameter', 11, '-')" - result: "--parameter" -LSHIFT: - description: "Shifts the bits of the numeric expression to the left." - dremioVersion: "1.0+" - functionCategories: - - "BITWISE" - name: "LSHIFT" - signatures: - - - description: "Returns the result of shifting the input by the specified bits." - parameters: - - - description: "Integer to shift."
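# The LOG sample above can be checked by hand: with the optional base given first,
# LOG(20.5, 1.5) is log base 20.5 of 1.5 = ln(1.5) / ln(20.5) = 0.405465 / 3.020425
# = 0.134241..., matching the documented result and confirming the argument order.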
- kind: "REGULAR" - name: "value" - type: "NUMERIC" - - - description: "The number of bits to shift by." - kind: "REGULAR" - name: "numBitsToShift" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT LSHIFT(1, 120)" - result: "16777216" - - - call: "SELECT LSHIFT(16, 1)" - result: "32" -LTRIM: - description: "Removes leading spaces or characters from a STRING." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "LTRIM" - signatures: - - - description: "Returns the trimmed STRING (no leading spaces)." - parameters: - - - description: "The expression to be trimmed." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT LTRIM(' dremio')" - result: "dremio" - - - description: "Returns the trimmed STRING." - parameters: - - - description: "The expression to be trimmed." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "Leading characters to trim." - kind: "REGULAR" - name: "trim_expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT LTRIM('pancake', 'pan')" - result: "cake" -MAPPIFY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MAPPIFY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -MAP_KEYS: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MAP_KEYS" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -MAP_VALUES: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MAP_VALUES" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -MASK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - 
call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MASK_FIRST_N: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_FIRST_N" - signatures: - - - 
description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - 
result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MASK_HASH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_HASH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" 
- format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MASK_INTERNAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_INTERNAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: 
"" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: 
"ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: 
"REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -MASK_LAST_N: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_LAST_N" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: 
"" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MASK_SHOW_FIRST_N: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_SHOW_FIRST_N" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - 
returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MASK_SHOW_LAST_N: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MASK_SHOW_LAST_N" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BYTES" - returnType: "BYTES" 
- sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - 
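The MASK* entries above are empty stubs whose signatures mirror Hive's data-masking UDFs, so the sketch below assumes Hive-style semantics (upper-case letters become 'X', lower-case 'x', digits 'n'); the spec itself confirms none of this:

    SELECT MASK('Abc-123')                 -- 'Xxx-nnn'
    SELECT MASK_FIRST_N('Abc-123', 2)      -- 'Xxc-123' (first 2 characters masked)
    SELECT MASK_SHOW_LAST_N('Abc-123', 3)  -- 'Xxx-123' (last 3 characters left visible)

MASK_INTERNAL, with its extra CHARACTERS and INT control parameters, looks like the shared implementation behind the user-facing variants, but that is an inference from the signatures only.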
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "VARARG" - name: "" - type: "ANY" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -'MATCH_NUMBER ': - description: "" - dremioVersion: "" - functionCategories: [] - name: "MATCH_NUMBER " - signatures: - - - description: "" - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -MAX: - description: "Returns the maximum value among the non-NULL input expressions." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "MAX" - signatures: - - - description: "Maximum value of the input expression." - parameters: - - - description: "The expression to take the max of across all rows." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT MAX(column_name) FROM my_table" - result: "Max of the values in the specified column." -MAXDIR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MAXDIR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -MAX_V2: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MAX_V2" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -MD5: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MD5" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - 
returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -MEDIAN: - description: "Computes the median based on a continuous distribution of the column value" - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - name: "MEDIAN" - signatures: - - - parameters: - - - description: "The fraction/percentile value to compute. The value for this must be a numeric literal in the range of 0 to 1 inclusive and represents a percentage." - kind: "REGULAR" - name: "fraction" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: [] -MIN: - description: "Returns the minimum value among the non-NULL input expressions." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "MIN" - signatures: - - - description: "Minimum value of the input expressions." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "SELECT MIN(column_name)" - result: "Min of the values in the specified column." 
-MIN:
-  description: "Returns the minimum value among the non-NULL input expressions."
-  dremioVersion: "1.0+"
-  functionCategories:
-  - "AGGREGATE"
-  - "WINDOW"
-  name: "MIN"
-  signatures:
-  -
-    description: "Minimum value of the input expression."
-    parameters:
-    -
-      description: "Input expression."
-      kind: "REGULAR"
-      name: "expression"
-      type: "ANY"
-    returnType: "ANY"
-    sampleCodes:
-    -
-      call: "SELECT MIN(column_name) FROM my_table"
-      result: "Min of the values in the specified column."
-MINDIR:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "MINDIR"
-  signatures:
-  - {parameters: [{kind: "REGULAR", type: "CHARACTERS"}, {kind: "REGULAR", type: "CHARACTERS"}], returnType: "CHARACTERS"}
-MINUTE:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "MINUTE"
-  signatures:
-  - {parameters: [{kind: "REGULAR", type: "DATEANDTIME"}], returnType: "BIGINT"}
-  - {parameters: [{kind: "REGULAR", type: "DATE"}], returnType: "DATE"}
-  - {parameters: [{kind: "REGULAR", type: "TIMESTAMP"}], returnType: "TIMESTAMP"}
-MIN_V2:
-  description: ""
-  dremioVersion: ""
-  functionCategories: []
-  name: "MIN_V2"
-  signatures:
-  - {parameters: [{kind: "REGULAR", type: "BOOLEAN"}], returnType: "ANY"}
-  - {parameters: [{kind: "REGULAR", type: "STRING"}], returnType: "ANY"}
-  - {parameters: [{kind: "REGULAR", type: "DECIMAL"}], returnType: "ANY"}
-  - {parameters: [{kind: "REGULAR", type: "INT"}], returnType: "ANY"}
-  - {parameters: [{kind: "REGULAR", type: "BIGINT"}], returnType: "ANY"}
-MOD:
-  description: "Returns the remainder of the first input expression divided by the second input expression."
-  dremioVersion: "1.0+"
-  functionCategories:
-  - "MATH"
-  name: "MOD"
-  signatures:
-  -
-    description: "Returns the remainder of the first input expression divided by the second input expression."
-    parameters:
-    -
-      description: "The number being divided."
-      kind: "REGULAR"
-      name: "dividend"
-      type: "NUMERIC"
-    -
-      description: "The number to divide by."
-      kind: "REGULAR"
-      name: "divisor"
-      type: "NUMERIC"
-    returnType: "NUMERIC"
-    sampleCodes:
-    -
-      call: "SELECT MOD(50, 7)"
-      result: "1"
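The MOD sample does not pin down the sign of the result for negative inputs. Most SQL engines return a result with the sign of the dividend, but the spec is silent, so the second line below is an assumption:

    SELECT MOD(50, 7)   -- 1
    SELECT MOD(-50, 7)  -- -1, assuming the result takes the dividend's sign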
- kind: "REGULAR" - name: "divisor" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT MOD(50, 7)" - result: "1" -MODULO: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MODULO" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -MONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -MONTHS_BETWEEN: - name: "MONTHS_BETWEEN" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "DATE" - - - kind: "REGULAR" - name: "parameter name" - type: "DATE" - returnType: "DOUBLE" - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "TIMESTAMP" - - - kind: "REGULAR" - name: "parameter name" - type: "TIMESTAMP" - returnType: "DOUBLE" -MULTIPLY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "MULTIPLY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - 
description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -NDV: - description: "Returns an approximate distinct value number, similar to `COUNT(DISTINCT col)`. NDV can return results faster than using the combination of COUNT and DISTINCT while using a constant amount of memory, resulting in less memory usage for columns with high cardinality." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "NDV" - signatures: - - - description: "Returns an estimate of the total number of records for the specified column." - parameters: - - - description: "Enter an expression to evaluate the number of records. This value can either be a column name or *." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT NDV(column_name)" - result: "163" - - - description: "Returns an estimate of total number of records for the specified expression." - parameters: - - - description: "Enter an expression to evaluate the number of records. This value can either be a column name or *." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - - - description: "Optional argument that maps to a precision used by the HyperLogLog (HLL) algorithm based on the mapping formula: `precision = scale +8`. Enter an NUMERIC in the range from 1 to 10." - kind: "REGULAR" - name: "scale" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT NDV(column_name, 1)" - result: "162" -NEGATIVE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NEGATIVE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -NEWPARTITIONNUMBER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NEWPARTITIONNUMBER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" 
- format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -NEWPARTITIONVALUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NEWPARTITIONVALUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -NEXT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NEXT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - 
format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -NEXT_DAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NEXT_DAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -NONNULLSTATCOUNT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NONNULLSTATCOUNT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -NOT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NOT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -NOT_EQUAL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NOT_EQUAL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: 
"REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -NOW: - description: "Returns the current TIMESTAMP (DATE and TIME) in UTC TIMEzone." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "NOW" - signatures: - - - description: "Returns the current TIMESTAMP." - parameters: [] - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT NOW()" - result: "2021-07-02 04:55:55.267" -NTH_VALUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NTH_VALUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: 
"REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -NTILE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NTILE" - signatures: [] -NULLIF: - description: "Compares two expressions. If the values in each expression are equal, returns `NULL` and, if they are not equal, returns the value of the first expression." - dremioVersion: "1.0+" - functionCategories: - - "CONDITIONAL" - name: "NULLIF" - signatures: - - - description: "" - parameters: - - - description: "The expressions can be any data type, however all the expressions contained in the argument must be of the same type." - kind: "REGULAR" - name: "expression" - type: "ANY" - - - description: "The expressions can be any data type, however all the expressions contained in the argument must be of the same type." 
- kind: "REGULAR" - name: "expression2" - type: "ANY" - returnType: "ANY" - sampleCodes: - - - call: "SELECT NULLIF(user_id, customer_id)\n" - result: "user_id\n" -NVL: - description: "" - dremioVersion: "" - functionCategories: [] - name: "NVL" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - 
- - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - 
- description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -OCTET_LENGTH: - description: "Returns the length of the STRING in bytes." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "OCTET_LENGTH" - signatures: - - - description: "The length of the STRING in bytes." - parameters: - - - description: "The STRING for which the length is returned." - kind: "REGULAR" - name: "input" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT OCTET_LENGTH('abc')" - result: "3" -OR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "OR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -ORNOSHORTCIRCUIT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ORNOSHORTCIRCUIT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -OVERLAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "OVERLAY" - signatures: [] -PARTITIONBITCOUNTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "PARTITIONBITCOUNTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -PERCENTILE_CONT: - description: "Computes a percentile based on a continuous distribution of the column value" - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - name: "PERCENTILE_CONT" - signatures: - - - description: "Returns a value corresponding to the specified fraction in the ordering,NUMERICerpolatring between adjacent input items if needed" - parameters: - - - description: "The fraction/percentile value to compute. The value for this must be a numeric literal in the range of 0 to 1 inclusive and represents a percentage." - kind: "REGULAR" - name: "fraction" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: [] -PERCENTILE_DISC: - description: "Computes a specific percentile for sorted values in a column" - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - name: "PERCENTILE_DISC" - signatures: - - - description: "Returns the first input value whose position in the ordering equals or exceeds the specified fraction" - parameters: - - - description: "The fraction/percentile value to compute. 
The value for this must be a numeric literal in the range of 0 to 1 inclusive and represents a percentage." - kind: "REGULAR" - name: "fraction" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: [] -PERCENT_RANK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "PERCENT_RANK" - signatures: - - - description: "" - parameters: [] - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -PI: - description: "Returns the value of pi, which is approximately 3.14159265." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "PI" - signatures: - - - description: "Returns the value of pi, which is approximately 3.14159265." - parameters: [] - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT PI()" - result: "3.141592653589793" -PMOD: - description: "" - dremioVersion: "" - functionCategories: [] - name: "PMOD" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -POSITION: - description: "Returns the position of the first occurrence of a substring within another string" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "POSITION" - signatures: - - - description: "Returns the position of the 'needle' in the 'haystack'." - parameters: - - - description: "The string to search for." - kind: "REGULAR" - name: "needle" - type: "STRING" - - - description: "The string to search in." 
- kind: "REGULAR" - name: "haystack" - type: "STRING" - returnType: "NUMERIC" - sampleCodes: - - - call: "select position('an' in 'banana')" - result: "2" - - - call: "select position('no' in 'banana')" - result: "0" -POSITIVE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "POSITIVE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -POW: - description: "" - dremioVersion: "" - functionCategories: [] - name: "POW" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -POWER: - description: "Returns the result of raising the input value to the specified power." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "POWER" - signatures: - - - description: "Returns the result of raising the numeric_expression to the specified power." - parameters: - - - description: "The input expression." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "The power to raise the numeric_expression to." 
- kind: "REGULAR" - name: "power" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT POWER(5, 2)" - result: "25.0" - - - call: "SELECT POWER(0.1, 2)" - result: "0.010000000000000002" - - - call: "SELECT POWER(-2, 2)" - result: "4.0" - - - call: "SELECT POWER(10, -2)" - result: "0.01" -PREV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "PREV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -QUARTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "QUARTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - 
returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -QUERY_USER: - description: "Returns the username of the user that is currently logged in to the system." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "QUERY_USER" - signatures: - - - description: "The username of the user that is currently logged in to the system." - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT QUERY_USER()" - result: "testuser@dremio.com" -QUOTE: - name: "QUOTE" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - returnType: "CHARACTERS" -RADIANS: - description: "Convert an angle in degrees to radians" - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "RADIANS" - signatures: - - - description: "Converts the value in degrees to radians" - parameters: - - - description: "The angle in degrees" - kind: "REGULAR" - name: "x" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "select radians(45)" - result: "0.7853981633974483" -RAND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "RAND" - signatures: - - - description: "" - parameters: [] - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -RANDOM: - description: "Each call returns a random generated number between 0 and 1 for each row." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "RANDOM" - signatures: - - - description: "Returns a randomly generated number between 0 and 1." - parameters: - - - description: "Seed value." 
- kind: "OPTIONAL" - name: "seed" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT RANDOM()" - result: "0.24943567857336457" - - - call: "SELECT RANDOM(4000)" - result: "0.18633151868393985" -RAND_INTEGER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "RAND_INTEGER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "NUMERIC" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -RANK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "RANK" - signatures: - - - description: "" - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -REGEXP_EXTRACT: - name: "REGEXP_EXTRACT" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - - - kind: "REGULAR" - name: "parameter name" - type: "INT" - returnType: "CHARACTERS" -REGEXP_LIKE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "REGEXP_LIKE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -REGEXP_MATCHES: - description: "" - dremioVersion: "" - functionCategories: [] - name: "REGEXP_MATCHES" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -REGEXP_REPLACE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "REGEXP_REPLACE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -REGR_SXX: - description: "" - dremioVersion: "" - functionCategories: [] - name: "REGR_SXX" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -REGR_SYY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "REGR_SYY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -REPEAT: - description: "Builds a string by repeating the input for the specified number of times" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "REPEAT" - signatures: - - - description: "Returns the constructed string." - parameters: - - - description: "The input string from which the output string is built." 
- kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "The number of TIMEs the input expression should be repeated." - kind: "REGULAR" - name: "nTimes" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT REPEAT('abc', 3)" - result: "abcabcabc" -REPEATSTR: - description: "Repeats the given STRING n TIMEs." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "REPEATSTR" - signatures: - - - description: "sample signature description" - parameters: - - - description: "String/characters to repeat." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "Number of TIMEs the STRING should be repeated." - kind: "REGULAR" - name: "nTimes" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT REPEATSTR('a ', 5)" - result: "a a a a a" -REPLACE: - description: "Removes all occurrences of a specified subSTRING and replaces them with another STRING." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "REPLACE" - signatures: - - - description: "Returns the string after the replacements have been made." - parameters: - - - description: "String expression in which to do the replacements." - kind: "REGULAR" - name: "string_expression" - type: "CHARACTERS" - - - description: "The pattern you want replace in the string_expression." - kind: "REGULAR" - name: "pattern" - type: "CHARACTERS" - - - description: "The string to replace the occurrences of the pattern with." - kind: "REGULAR" - name: "replacement" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT REPLACE('THE CATATONIC CAT', 'CAT', 'DOG')" - result: "sample return 1" -REVERSE: - description: "Reverses the order of characters in a STRING." - dremioVersion: "1.0+" - functionCategories: - - "BINARY" - - "CHARACTER" - name: "REVERSE" - signatures: - - - description: "Returns the reversed string (preserves the original case of the string)." - parameters: - - - description: "The string to reverse." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT REVERSE('Hello, world!');" - result: "!dlrow ,olleH" -RIGHT: - description: "Returns the right-most substring. The function name must be enclosed in double quotes (\"RIGHT\")." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "RIGHT" - signatures: - - - description: "Right-most substring." - parameters: - - - description: "String input parameter." - kind: "REGULAR" - name: "STRING" - type: "CHARACTERS" - - - description: "Number of characters on the right to return." - kind: "REGULAR" - name: "length" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT \"RIGHT\"('Dremio - SQL Engine', 6)" - result: "Engine" -ROUND: - description: "Returns the rounded value for the inputted value. If no scale is specified, the closest whole number is returned." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "ROUND" - signatures: - - - description: "Returns the closest rounded value based on the specified scale." - parameters: - - - description: "Numeric value to round." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "The decimal place to round." 
- kind: "REGULAR" - name: "scale" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT ROUND(-24.35, -1)" - result: "-24.4" - - - call: "SELECT ROUND(24.35, 1)" - result: "24.4" -ROW_NUMBER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "ROW_NUMBER" - signatures: - - - description: "" - parameters: [] - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -RPAD: - description: "Right pads a STRING with spaces or specified characters to reach the number of chracters specified as a parameter." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "RPAD" - signatures: - - - description: "Returns the padded STRING." - parameters: - - - description: "The expression to pad." - kind: "REGULAR" - name: "base_expression" - type: "CHARACTERS" - - - description: "The number of characters to return." - kind: "REGULAR" - name: "length" - type: "NUMERIC" - - - description: "Characters to pad the base_expression with." - kind: "OPTIONAL" - name: "pad_expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "select RPAD('dremio', 9, '!')" - result: "dremio!!!" - - - call: "select RPAD('base_', 9, 'expression')" - result: "base_expr" - - - description: "Returns the padded STRING." - parameters: - - - description: "The expression to pad." - kind: "REGULAR" - name: "base_expression" - type: "CHARACTERS" - - - description: "The number of characters to return." - kind: "REGULAR" - name: "length" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "select RPAD('dremio', 9)" - result: "dremio " -RSHIFT: - description: "Shifts the bits of the numeric expression to he right." - dremioVersion: "1.0+" - functionCategories: - - "BITWISE" - name: "RSHIFT" - signatures: - - - description: "Returns the result of shifting the input by the specified bits." - parameters: - - - description: "Integer to shift." - kind: "REGULAR" - name: "expression1" - type: "NUMERIC" - - - description: "The number of bits to shift by." - kind: "REGULAR" - name: "expression2" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "select RSHIFT(1,1)" - result: "0" - - - description: "Returns the result of shifting the input by the specified bits." - parameters: - - - description: "Integer to shift." - kind: "REGULAR" - name: "expression1" - type: "NUMERIC" - - - description: "The number of bits to shift by." - kind: "REGULAR" - name: "expression2" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "sample call 1" - result: "sample return 1" -RTRIM: - description: "Removes trailing spaces or characters from a STRING." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "RTRIM" - signatures: - - - description: "Returns the trimmed STRING." - parameters: - - - description: "The expression to be trimmed." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "Trailing characters to trim." - kind: "OPTIONAL" - name: "trim_expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT RTRIM('pancake', 'cake')" - result: "pan" - - - description: "Returns the trimmed STRING (no trailing spaces)." - parameters: - - - description: "The expression to be trimmed." 
- kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT RTRIM('dremio ')" - result: "dremio" -SAME: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SAME" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -SAMPLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SAMPLE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -SECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -SESSION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SESSION" - signatures: [] -SESSION_END: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SESSION_END" - signatures: [] 
-SESSION_START: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SESSION_START" - signatures: [] -SESSION_USER: - description: "Returns the username of the user that created the current session." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "SESSION_USER" - signatures: - - - description: "Returns the username of the user that created the current session." - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT SESSION_USER" - result: "testuser@dremio.com" -SHA: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SHA" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SHA1: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SHA1" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SHA256: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SHA256" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - 
type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SHA512: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SHA512" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SIGN: - description: "Returns the sign of the input expression." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "SIGN" - signatures: - - - description: "Returns 1, 0, -1 based on the sign of the input expression." - parameters: - - - description: "Input expression." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT SIGN(10.3)" - result: "1" -SIMILAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SIMILAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -SIMILAR_TO: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SIMILAR_TO" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -SIN: - description: "Computes the sine of a value." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "SIN" - signatures: - - - description: "Returns a value expressed as a NUMERICing point number." - parameters: - - - description: "The number in radians." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT SIN(360)" - result: "0.9589157234143065" -SINGLE_VALUE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SINGLE_VALUE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - 
name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -SINH: - description: "Computes the hyperbolic sine of the input expression." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "SINH" - signatures: - - - description: "Returns the hyperbolic sine of the input expression." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT SINH(1)" - result: "1.1752011936438014" -SIZE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SIZE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -SOUNDEX: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SOUNDEX" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SPACE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SPACE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SPLIT_PART: - description: "Splits a given STRING at a specified character and returns the requested part." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "SPLIT_PART" - signatures: - - - description: "Returns the requested part of the STRING." - parameters: - - - description: "Input expression." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "String representing the delimiter to split the input expression by." - kind: "REGULAR" - name: "delimiter" - type: "CHARACTERS" - - - description: "Requested part of the split. Must be an NUMERIC greater than zero." - kind: "REGULAR" - name: "part_number" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT SPLIT_PART('127.0.0.1', '.', 1)" - result: "127" -SQRT: - description: "Returns the square root of the non-negative numeric expression." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "SQRT" - signatures: - - - description: "Returns the square root of the input expression." - parameters: - - - description: "Numeric expression to calculate the square root for." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT SQRT(25.25)" - result: "5.024937810560445" - - - call: "SELECT SQRT(-25.25)" - result: "NaN" -STARTS_WITH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STARTS_WITH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -STATCOUNT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STATCOUNT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -STATEMENT_TIMESTAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STATEMENT_TIMESTAMP" - signatures: - - - description: "" - parameters: [] - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -STDDEV: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STDDEV" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -STDDEV_POP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STDDEV_POP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -STDDEV_SAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STDDEV_SAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -STRING_BINARY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "STRING_BINARY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -STRPOS: - description: "Searches for the first occurence of the subSTRING in the given expression and returns the position of where the subSTRING begins. Searching BINARY values is also supported." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "STRPOS" - signatures: - - - description: "Returns the position of the first occurrence of the subSTRING in the expression." - parameters: - - - description: "The expression to search." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "The subSTRING to search the expression for." 
- kind: "REGULAR" - name: "subSTRING" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT STRPOS('dremio cloud service', 'cloud')" - result: "8" - - - call: "SELECT STRPOS(1001111, 00)" - result: "2" - - - call: "SELECT STRPOS('dremio cloud service', 'sql')" - result: "0" -SUBSTR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUBSTR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SUBSTR2: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUBSTR2" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SUBSTRING: - description: "Returns the portion of the STRING from the specified base expression starting at the specified chracters." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "SUBSTRING" - signatures: - - - description: "Returns the substring." - parameters: - - - description: "Base expression to extract substring from." - kind: "REGULAR" - name: "STRING_expression" - type: "CHARACTERS" - - - description: "The offset from which the substring starts." - kind: "REGULAR" - name: "offset" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT SUBSTRING('dremio user 1 2 3', 12)" - result: "1 2 3" - - - description: "Returns the subSTRING." - parameters: - - - description: "Base expression to extract subSTRING from." - kind: "REGULAR" - name: "STRING_expression" - type: "CHARACTERS" - - - description: "The offset from which the subSTRING starts." - kind: "REGULAR" - name: "offset" - type: "NUMERIC" - - - description: "The length limit of the subSTRING." - kind: "OPTIONAL" - name: "length" - type: "NUMERIC" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT SUBSTRING('base expression', 6, 4)" - result: "expr" - - - description: "Returns the substring." - parameters: - - - description: "Base expression to extract substring from." - kind: "REGULAR" - name: "STRING_expression" - type: "CHARACTERS" - - - description: "The regex pattern to match for the start of the substring." 
- kind: "REGULAR" - name: "pattern" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT SUBSTRING('dremio user 123', '[0-9]+')" - result: "123" -SUBSTRING2: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUBSTRING2" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SUBSTRING_INDEX: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUBSTRING_INDEX" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -SUBTRACT: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUBTRACT" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIME" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIME" - sampleCodes: - - - call: "" - 
result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -SUM: - description: "Returns the sum of non-NULL input expressions." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "SUM" - signatures: - - - description: "Returns the sum of non-NULL input expressions." - parameters: - - - description: "Numeric input expression." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT SUM(column_name) FROM my_table" - result: "Sum of the values in the specified column." -SUM_V2: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SUM_V2" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -SYSTEM_USER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "SYSTEM_USER" - signatures: - - - description: "" - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -TAN: - description: "Computes the tangent of a value in radians." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "TAN" - signatures: - - - description: "Returns a value expressed as a NUMERICing point number." - parameters: - - - description: "The number in radians." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TAN(180.8)" - result: "-6.259341891872157" -TANH: - description: "Computes the hyperbolic tangent of the input expression." - dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "TANH" - signatures: - - - description: "Returns the hyperbolic tangent of the input expression." - parameters: - - - description: "Input expression to calculate tanh for." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TANH(1.5);" - result: "0.9051482536448664" -TDIGEST: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TDIGEST" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -TDIGEST_MERGE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TDIGEST_MERGE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "ANY" - sampleCodes: - - - call: "" - result: "" -TIMEOFDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMEOFDAY" - signatures: - - - description: "" - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADD: - description: "Add (or subtract) a numeric interval of TIME from a DATE/TIMESTAMP value or column\n" - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - name: "TIMESTAMPADD" - signatures: [] -TIMESTAMPADDDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDDAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - 
format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDHOUR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDHOUR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDMICROSECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDMICROSECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - 
- - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDMINUTE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDMINUTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDMONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDMONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - 
name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDQUARTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDQUARTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDSECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDSECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - 
returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDWEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDWEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPADDYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPADDYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: 
"REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFF: - description: "Return the amount of TIME between two DATE or TIMESTAMP values\n" - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - name: "TIMESTAMPDIFF" - signatures: - - - description: "Return the amount of TIME between two date time objects by subtracting first from second (rounded down)" - parameters: - - - kind: "REGULAR" - name: "first" - type: "DATEANDTIME" - - - kind: "REGULAR" - name: "second" - type: "DATEANDTIME" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TIMESTAMPDIFF(month, DATE '2021-02-01', DATE '2021-05-01');" - result: "3" - - - call: "SELECT TIMESTAMPDIFF(day, TIMESTAMP '2003-02-01 11:43:22', TIMESTAMP '2005-04-09 12:05:55');" - result: "798" -TIMESTAMPDIFFDAY: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFDAY" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFHOUR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFHOUR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFMICROSECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFMICROSECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFMINUTE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFMINUTE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFMONTH: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFMONTH" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" 
- kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFQUARTER: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFQUARTER" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFSECOND: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFSECOND" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFWEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFWEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPDIFFYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPDIFFYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -TIMESTAMPTYPE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMESTAMPTYPE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TIMETYPE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TIMETYPE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" -TOASCII: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TOASCII" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - 
kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -TO_CHAR: - description: "Converts the input expression to a character/STRING using the specified format." - dremioVersion: "1.0+" - functionCategories: - - "CONVERSION" - name: "TO_CHAR" - signatures: - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "TIME" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(CAST('01:02:03' AS TIME) , 'HH:MI');" - result: "01:02" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "DATE" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(CAST('2021-02-11' AS DATE) , 'yyyy.mm.dd');" - result: "2021.02.11" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(10, '#')" - result: "10" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(7.5, '#.#')" - result: "7.5" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(10, '#')" - result: "10" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "NUMERIC" - - - description: "Format to use for the conversion." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(7.5, '#.#')" - result: "7.5" - - - description: "Returns the input expression as a STRING." - parameters: - - - description: "Expression to convert to a STRING." - kind: "REGULAR" - name: "expression" - type: "TIMESTAMP" - - - description: "Format to use for the conversion." 
- kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TO_CHAR(CAST('2013-04-05 01:02:03' AS TIMESTAMP) , 'mm/dd/yyyy, hh:mi');" - result: "04/05/2013, 01:02" -TO_DATE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TO_DATE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" -TO_HEX: - description: "Returns a hexadecimal STRING for the given BINARY value." - dremioVersion: "1.0+" - functionCategories: - - "CONVERSION" - - "CHARACTER" - - "BINARY" - name: "TO_HEX" - signatures: - - - description: "Returns a hexadecimal STRING for the given BINARY value." - parameters: - - - description: "A BINARY value" - kind: "REGULAR" - name: "in" - type: "BYTES" - returnType: "STRING" - sampleCodes: - - - call: "select to_hex(BINARY_STRING('hello'))" - result: "68656C6C6F" -TO_NUMBER: - description: "Converts a STRINGNUMERICo a number (NUMERIC) in the specified format." - dremioVersion: "1.0+" - functionCategories: - - "CONVERSION" - name: "TO_NUMBER" - signatures: - - - description: "Returns the number with the specified format." - parameters: - - - description: "String to convert to a number." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "Format for number conversion." 
- kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TO_NUMBER('12374.0023', '#####.###')" - result: "12374.002" - - - call: "SELECT TO_NUMBER('12374', '#####')" - result: "12374.0" -TO_TIME: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TO_TIME" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "TIME" - sampleCodes: - - - call: "" - result: "" -TO_TIMESTAMP: - description: "Converts the input expressions to the corresponding TIMESTAMP." - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - - "CONVERSION" - name: "TO_TIMESTAMP" - signatures: - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "The Unix format of the TIMESTAMP." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP(52 * 365.25 * 86400)" - result: "2022-01-01 00:00:00.000" - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "The STRING from which to extract the TIMESTAMP." - kind: "REGULAR" - name: "STRING_expression" - type: "CHARACTERS" - - - description: "String to specify format of the TIMESTAMP." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - - - description: "sample parameter description" - kind: "OPTIONAL" - name: "replaceWithNullHolder" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP('2021-07-31 01:02:03', 'YYYY-MM-DD HH:MI:SS', 'FILL_IN')" - result: "2021-07-31 01:02:03.000" - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "The Unix format of the TIMESTAMP." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP(1640131200)" - result: "2021-12-22 00:00:00.000" - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "The Unix format of the TIMESTAMP." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP(1640131200)" - result: "2021-12-22 00:00:00.000" - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "The Unix format of the TIMESTAMP." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP(52 * 365.25 * 86400)" - result: "2022-01-01 00:00:00.000" - - - description: "The value is returned in the TIMESTAMP format." - parameters: - - - description: "String from which to extract the TIMESTAMP from." - kind: "REGULAR" - name: "STRING_expression" - type: "CHARACTERS" - - - description: "String to specify format of the TIMESTAMP." - kind: "REGULAR" - name: "format" - type: "CHARACTERS" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TO_TIMESTAMP('2021-07-31 01:02:03', 'YYYY-MM-DD HH:MI:SS')" - result: "2021-07-31 01:02:03.000" -TO_UTC_TIMESTAMP: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TO_UTC_TIMESTAMP" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -TRANSACTION_TIMESTAMP: - description: "Returns the TIMESTAMP in UTC of the current transaction." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "TRANSACTION_TIMESTAMP" - signatures: - - - description: "Returns the TIMESTAMP of the current transaction." - parameters: [] - returnType: "TIMESTAMP" - sampleCodes: - - - call: "SELECT TRANSACTION_TIMESTAMP()" - result: "2021-07-13 06:52:10.694" -TRANSLATE: - description: "Translates the base expression from the source characters/expression to the target characters/expression." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "TRANSLATE" - signatures: - - - description: "Returns the translated STRING." - parameters: - - - description: "The STRING to translate." - kind: "REGULAR" - name: "base_expression" - type: "CHARACTERS" - - - description: "A STRING with all the characters in the base expression that need translating. Each character in this STRING will be replaced with the corresponding character from the target_characters expression or ommitted from the STRING if target_characters expression has less characters than the source_characters." - kind: "REGULAR" - name: "source_characters" - type: "CHARACTERS" - - - description: "A STRING containing all the characters to replace the original characters with." 
- kind: "REGULAR" - name: "target_characters" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TRANSLATE('*a*bX*dYZ*','XYZ*','cef');" - result: "abcdef" -TRANSLATE3: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TRANSLATE3" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BYTES" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -TRIM: - description: "Removes leading, trailing, or both spaces or characters from a STRING" - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - name: "TRIM" - signatures: - - - description: "Returns the trimmed STRING" - parameters: - - - description: "The expression to be trimmed" - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - - - description: "The characters to trim" - kind: "OPTIONAL" - name: "trim_expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT TRIM(' pancake ')" - result: "pancake" - - - call: "SELECT TRIM(leading 'pan' from 'pancake')" - result: "cake" - - - call: "SELECT TRIM(trailing 'cake' from 'pancake')" - result: "pan" - - - call: "SELECT TRIM(both 'pan' from 'pancake pan')" - result: "cake" - - - call: "SELECT TRIM('pan' from 'pancake pan')" - result: "cake" -TRUNC: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TRUNC" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "FLOAT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DECIMAL" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "DECIMAL" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "OPTIONAL" - name: "" - type: "INT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -TRUNCATE: - description: "Rounds the input expression down the nearest of equal NUMERIC depending on the specified number of places before or after the decimal point." 
- dremioVersion: "1.0+" - functionCategories: - - "MATH" - name: "TRUNCATE" - signatures: - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(987.65)" - result: "987" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(987.87)" - result: "987" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(2021)" - result: "2021" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "sample parameter description" - kind: "OPTIONAL" - name: "scale_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(89.2283211, 2)" - result: "89.22" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "sample parameter description" - kind: "OPTIONAL" - name: "scale_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(2021, -1)" - result: "2020" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(9.7)" - result: "9" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "The decimal place to round to." - kind: "OPTIONAL" - name: "scale_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(78.9823, 2)" - result: "78.98" - - - description: "Returns the input expression rounded to the nearest NUMERIC depending on the specified decimal place." - parameters: - - - description: "The numeric expression to truncate." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - - - description: "The decimal place to round to." 
- kind: "OPTIONAL" - name: "scale_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT TRUNCATE(987.65, 1)" - result: "987.6" -TUMBLE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TUMBLE" - signatures: [] -TUMBLE_END: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TUMBLE_END" - signatures: [] -TUMBLE_START: - description: "" - dremioVersion: "" - functionCategories: [] - name: "TUMBLE_START" - signatures: [] -TYPEOF: - description: "Reports the type (in STRING format) of the input expression." - dremioVersion: "1.0+" - functionCategories: - - "DATETYPE" - name: "TYPEOF" - signatures: - - - description: "Reports the type (in STRING format) of the input expression." - parameters: - - - description: "sample parameter description" - kind: "REGULAR" - name: "input" - type: "BOOLEAN" - returnType: "STRING" - sampleCodes: - - - call: "SELECT TYPEOF(TRUE)" - result: "BIT" - - - call: "SELECT TYPEOF(100)" - result: "INT" - - - call: "SELECT TYPEOF(98.76)" - result: "DECIMAL" - - - call: "SELECT TYPEOF('2021-09-14')" - result: "CHARACTER" - - - description: "Reports the type (in STRING format) of the input expression." - parameters: - - - description: "sample parameter description" - kind: "REGULAR" - name: "input" - type: "BYTES" - returnType: "STRING" - sampleCodes: [] - - - description: "Reports the type (in STRING format) of the input expression." - parameters: - - - description: "sample parameter description" - kind: "REGULAR" - name: "input" - type: "NUMERIC" - returnType: "STRING" - sampleCodes: [] - - - description: "Reports the type (in STRING format) of the input expression." - parameters: - - - description: "sample parameter description" - kind: "REGULAR" - name: "input" - type: "STRING" - returnType: "STRING" - sampleCodes: [] - - - description: "Reports the type (in STRING format) of the input expression." 
- parameters: [] - returnType: "STRING" - sampleCodes: [] -U-: - description: "" - dremioVersion: "" - functionCategories: [] - name: "U-" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -UCASE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "UCASE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -UDFDEGREES: - description: "" - dremioVersion: "" - functionCategories: [] - name: "UDFDEGREES" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -UNBASE64: - description: "" - dremioVersion: "" - functionCategories: [] - name: "UNBASE64" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BYTES" - sampleCodes: - - - call: "" - result: "" -UNHEX: - name: "UNHEX" - signatures: - - - parameters: - - - kind: "REGULAR" - name: "parameter name" - type: "CHARACTERS" - returnType: "BYTES" -UNIX_TIMESTAMP: - description: "Returns the Unix TIMESTAMP for the TIMESTAMP parameter." - dremioVersion: "1.0+" - functionCategories: - - "DATETIME" - - "CONTEXT" - name: "UNIX_TIMESTAMP" - signatures: - - - description: "Calling the function without specifying parameters returns the current TIMESTAMP in Unix format." - parameters: [] - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT UNIX_TIMESTAMP()" - result: "1624928208" - - - description: "Returns the Unix TIMESTAMP." - parameters: - - - description: "The TIMESTAMP to convert to Unix TIMESTAMP. The expected format is 'YYYY-MM-DD HH:MM:SS' where HH can be a value 1-24." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT UNIX_TIMESTAMP('2021-12-22 13:44:11')" - result: "1640180651" - - - description: "Returns the Unix TIMESTAMP." - parameters: - - - description: "The TIMESTAMP to convert to Unix TIMESTAMP." - kind: "REGULAR" - name: "DATE_TIMESTAMP_expression" - type: "CHARACTERS" - - - description: "Specify the format of the TIME, DATE, or TIMESTAMP parameter. For example, 'YY-MM-DD' or 'HH:MM:SS'." - kind: "OPTIONAL" - name: "format" - type: "CHARACTERS" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT UNIX_TIMESTAMP('21-12-22', 'YY-MM-DD')" - result: "1640131200" -UPPER: - description: "Returns the input expression with all the characters converted to uppercase." - dremioVersion: "1.0+" - functionCategories: - - "CHARACTER" - - "BINARY" - name: "UPPER" - signatures: - - - description: "Returns the input expression with all the characters converted to uppercase." 
- parameters: - - - description: "String to convert to uppercase." - kind: "REGULAR" - name: "expression" - type: "CHARACTERS" - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT UPPER('a guide to data lakehouses')" - result: "A GUIDE TO DATA LAKEHOUSES" -USER: - description: "Returns the user that is currently loggedNUMERICo the system." - dremioVersion: "1.0+" - functionCategories: - - "CONTEXT" - name: "USER" - signatures: - - - description: "Returns the user that is currently logged in to the system." - parameters: [] - returnType: "CHARACTERS" - sampleCodes: - - - call: "SELECT USER()" - result: "testuser@dremio.com" - - - call: "SELECT USER" - result: "testuser@dremio.com" -VARCHAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "VARCHAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "STRING" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "FLOAT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DOUBLE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BIGINT" - returnType: "CHARACTERS" - sampleCodes: - - - call: "" - result: "" -VARIANCE: - description: "" - dremioVersion: "" - functionCategories: [] - name: "VARIANCE" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "NUMERIC" - returnType: "DOUBLE" - sampleCodes: - - - call: "" - result: "" -VAR_POP: - description: "Returns the population variance of non-NULL records." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "VAR_POP" - signatures: - - - description: "Returns the population variance of the records." 
- parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_POP(pop) FROM \"zips.json\"" - result: "1.5167869917122573E8" - - - description: "Returns the population variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_POP(fare_amount) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "181.26187022731304" - - - description: "Returns the population variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_POP(fare_amount) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "181.26187022731304" - - - description: "Returns the population variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_POP(pop) FROM \"zips.json\"" - result: "1.5167869917122573E8" -VAR_SAMP: - description: "Returns the sample variance of non-NULL records." - dremioVersion: "1.0+" - functionCategories: - - "AGGREGATE" - - "WINDOW" - name: "VAR_SAMP" - signatures: - - - description: "Returns the sample variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_SAMP(passenger_count) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "1.868747683518558" - - - description: "Returns the sample variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_SAMP(tip_amount) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "5.106086065187043" - - - description: "Returns the sample variance of the records." - parameters: - - - description: "The set of records to calculate variance for." - kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_SAMP(tip_amount) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "5.106086065187043" - - - description: "Returns the sample variance of the records." - parameters: - - - description: "The set of records to calculate variance for." 
- kind: "REGULAR" - name: "numeric_expression" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT VAR_SAMP(passenger_count) FROM Samples.\"samples.dremio.com\".\"NYC-taxi-trips\"" - result: "1.868747683518558" -WEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "WEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -WEEKOFYEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "WEEKOFYEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -XOR: - description: "Returns the bitwise XOR of two NUMERICs." - dremioVersion: "1.0+" - functionCategories: - - "BITWISE" - name: "XOR" - signatures: - - - description: "The bitwise XOR value of the two input values." - parameters: - - - description: "The NUMERIC input." - kind: "REGULAR" - name: "expression1" - type: "NUMERIC" - - - description: "The NUMERIC input." - kind: "REGULAR" - name: "expression2" - type: "NUMERIC" - returnType: "NUMERIC" - sampleCodes: - - - call: "SELECT XOR(1, 1)" - result: "0" -YEAR: - description: "" - dremioVersion: "" - functionCategories: [] - name: "YEAR" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATEANDTIME" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -YEARWEEK: - description: "" - dremioVersion: "" - functionCategories: [] - name: "YEARWEEK" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "DATE" - returnType: "DATE" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "TIMESTAMP" - returnType: "TIMESTAMP" - sampleCodes: - - - call: "" - result: "" -^: - description: "" - dremioVersion: "" - functionCategories: [] - name: "^" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: 
"REGULAR" - name: "" - type: "INT" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "INT" - returnType: "INT" - sampleCodes: - - - call: "" - result: "" -__THROWEXCEPTION: - description: "" - dremioVersion: "" - functionCategories: [] - name: "__THROWEXCEPTION" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "CHARACTERS" - returnType: "BIGINT" - sampleCodes: - - - call: "" - result: "" -'||': - description: "" - dremioVersion: "" - functionCategories: [] - name: "||" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "BOOLEAN" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" -'ARRAY_CONTAINS': - description: "" - dremioVersion: "" - functionCategories: [] - name: "ARRAY_CONTAINS" - signatures: - - - description: "" - parameters: - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "LIST" - - - description: "" - format: "" - kind: "REGULAR" - name: "" - type: "ANY" - returnType: "BOOLEAN" - sampleCodes: - - - call: "" - result: "" - diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/AutocompleteEngineTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/AutocompleteEngineTests.java index 70a53e98d6..adea8024c9 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/AutocompleteEngineTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/AutocompleteEngineTests.java @@ -31,7 +31,6 @@ import com.dremio.service.autocomplete.completions.Completions; import com.dremio.service.autocomplete.functions.FunctionContext; import com.dremio.service.autocomplete.functions.ParameterResolverTests; -import com.dremio.test.GoldenFileTestBuilder.MultiLineString; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; @@ -39,16 +38,16 @@ * Tests for the autocomplete engine. 
*/ public abstract class AutocompleteEngineTests { - protected CompletionsForBaselines executeTestWithRootContext(MultiLineString query) { + protected CompletionsForBaselines executeTestWithRootContext(String query) { return executeTest(query, ImmutableList.of()); } - protected CompletionsForBaselines executeTestWithFolderContext(MultiLineString query) { + protected CompletionsForBaselines executeTestWithFolderContext(String query) { return executeTest(query, ImmutableList.of("space", "folder")); } - protected CompletionsForBaselines executeTest(MultiLineString query, ImmutableList<String> context) { - StringAndPos stringAndPos = SqlParserUtil.findPos(query.toString()); + protected CompletionsForBaselines executeTest(String query, ImmutableList<String> context) { + StringAndPos stringAndPos = SqlParserUtil.findPos(query); Completions completions = generateCompletions( context, stringAndPos.sql, diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DeleteStatementCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DeleteStatementCompletionTests.java index 6c622a203a..15d4a8f922 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DeleteStatementCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DeleteStatementCompletionTests.java @@ -22,15 +22,15 @@ public final class DeleteStatementCompletionTests extends AutocompleteEngineTests { @Test public void tests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add("DELETE", GoldenFileTestBuilder.MultiLineString.create("DELETE ^")) - .add("DELETE + PARTIAL FROM", GoldenFileTestBuilder.MultiLineString.create("DELETE FR^")) - .add("DELETE + FROM", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM ^")) - .add("DELETE + FROM + TABLE", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM EMP ^")) - .add("DELETE + FROM + TABLE + AS", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM EMP AS ^")) - .add("DELETE + FROM + TABLE + AS + ALIAS", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM EMP AS e ^")) - .add("DELETE + FROM + TABLE + WHERE", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM EMP WHERE ^")) - .add("DELETE + FROM + TABLE + WHERE + CONDITION", GoldenFileTestBuilder.MultiLineString.create("DELETE FROM EMP WHERE EMP.NAME = 'Brandon' ^")) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) + .add("DELETE", "DELETE ^") + .add("DELETE + PARTIAL FROM", "DELETE FR^") + .add("DELETE + FROM", "DELETE FROM ^") + .add("DELETE + FROM + TABLE", "DELETE FROM EMP ^") + .add("DELETE + FROM + TABLE + AS", "DELETE FROM EMP AS ^") + .add("DELETE + FROM + TABLE + AS + ALIAS", "DELETE FROM EMP AS e ^") + .add("DELETE + FROM + TABLE + WHERE", "DELETE FROM EMP WHERE ^") + .add("DELETE + FROM + TABLE + WHERE + CONDITION", "DELETE FROM EMP WHERE EMP.NAME = 'Brandon' ^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DropStatementCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DropStatementCompletionTests.java index 438210922d..10f44215af 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DropStatementCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/DropStatementCompletionTests.java @@ -22,21 +22,14 @@ public final class DropStatementCompletionTests extends AutocompleteEngineTests { @Test public void
tests() { - new GoldenFileTestBuilder<>(this::executeTestWithRootContext) - .add("Empty name", - GoldenFileTestBuilder.MultiLineString.create("DROP TABLE ^")) - .add("Works with completed path", - GoldenFileTestBuilder.MultiLineString.create("DROP TABLE \"space\".\"folder\".EMP ^")) - .add("Works with folder name", - GoldenFileTestBuilder.MultiLineString.create("DROP TABLE \"space\".^")) - .add("Works with folder with spaces", - GoldenFileTestBuilder.MultiLineString.create("DROP TABLE \"space with a space in the name\".^")) - .add("Works with IF EXISTS", - GoldenFileTestBuilder.MultiLineString.create("DROP TABLE IF EXISTS \"space\".^")) - .add("Works with VIEW", - GoldenFileTestBuilder.MultiLineString.create("DROP VIEW \"space\".^")) - .add("Works with VDS", - GoldenFileTestBuilder.MultiLineString.create("DROP VDS \"space\".^")) + new GoldenFileTestBuilder<>(this::executeTestWithRootContext, GoldenFileTestBuilder.MultiLineString::create) + .add("Empty name", "DROP TABLE ^") + .add("Works with completed path", "DROP TABLE \"space\".\"folder\".EMP ^") + .add("Works with folder name", "DROP TABLE \"space\".^") + .add("Works with folder with spaces", "DROP TABLE \"space with a space in the name\".^") + .add("Works with IF EXISTS", "DROP TABLE IF EXISTS \"space\".^") + .add("Works with VIEW", "DROP VIEW \"space\".^") + .add("Works with VDS", "DROP VDS \"space\".^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/FunctionCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/FunctionCompletionTests.java index f7af7a2be8..41a1800d51 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/FunctionCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/FunctionCompletionTests.java @@ -26,70 +26,53 @@ public final class FunctionCompletionTests extends AutocompleteEngineTests { @Test public void commaSeparated() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "SIMPLE", - GoldenFileTestBuilder.MultiLineString.create("SELECT ABS(^ FROM EMP")) - .add( - "FUNCTION No Source", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE('hello', ^")) - .add( - "FUNCTION ONE ARG", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME ^ FROM EMP")) - .add( - "FUNCTION ONE ARG + COMMA ", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME, ^ FROM EMP")) - .add( - "FUNCTION ONE ARG + COMMA + ONE ARG", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME, 'world'^ FROM EMP")) - .add( - "FUNCTION LAST ARG + COMPLETE FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME, EMP.ENAME, ^) FROM EMP")) - .add( - "FUNCTION MIDDLE ARG + COMPLETE FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME, ^, EMP.ENAME) FROM EMP")) - .add( - "COMPLEX ARG", - GoldenFileTestBuilder.MultiLineString.create("SELECT REPLACE(EMP.ENAME + EMP.ENAME, ^ FROM EMP")) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) + .add("SIMPLE", "SELECT ABS(^ FROM EMP") + .add("FUNCTION No Source", "SELECT REPLACE('hello', ^") + .add("FUNCTION ONE ARG", "SELECT REPLACE(EMP.ENAME ^ FROM EMP") + .add("FUNCTION ONE ARG + COMMA ", "SELECT REPLACE(EMP.ENAME, ^ FROM EMP") + .add("FUNCTION ONE ARG + COMMA + ONE ARG", + "SELECT REPLACE(EMP.ENAME, 'world'^ FROM EMP") + .add("FUNCTION LAST ARG + COMPLETE FUNCTION", + "SELECT REPLACE(EMP.ENAME, 
EMP.ENAME, ^) FROM EMP") + .add("FUNCTION MIDDLE ARG + COMPLETE FUNCTION", + "SELECT REPLACE(EMP.ENAME, ^, EMP.ENAME) FROM EMP") + .add("COMPLEX ARG", "SELECT REPLACE(EMP.ENAME + EMP.ENAME, ^ FROM EMP") .runTests(); } @Test public void substring() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) .add( - "SUBSTRING PREFIX", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRI^")) + "SUBSTRING PREFIX", "SELECT SUBSTRI^") .add( - "SUBSTRING SIMPLE", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(^ FROM EMP")) + "SUBSTRING SIMPLE", "SELECT SUBSTRING(^ FROM EMP") .add( - "SUBSTRING STRING PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME ^ FROM EMP")) + "SUBSTRING STRING PARAMETER", "SELECT SUBSTRING(EMP.ENAME ^ FROM EMP") .add( - "SUBSTRING STRING PARAMETER FROM", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME FROM ^ FROM EMP")) + "SUBSTRING STRING PARAMETER FROM", "SELECT SUBSTRING(EMP.ENAME FROM ^ FROM EMP") .add( "SUBSTRING STRING PARAMETER FROM INTEGER PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME FROM 2 ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME FROM 2 ^ FROM EMP") .add( "SUBSTRING STRING PARAMETER FROM INTEGER PARAMETER FOR", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME FROM 2 FOR ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME FROM 2 FOR ^ FROM EMP") .add( "SUBSTRING COMPLETE FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME FROM 2 FOR 3 ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME FROM 2 FOR 3 ^ FROM EMP") .add( "SUBSTRING STRING PARAMETER FROM WITH COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME, ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME, ^ FROM EMP") .add( "SUBSTRING STRING PARAMETER FROM INTEGER PARAMETER WITH COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME , 2 ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME , 2 ^ FROM EMP") .add( "SUBSTRING STRING PARAMETER FROM INTEGER PARAMETER FOR WITH COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME , 2 , ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME , 2 , ^ FROM EMP") .add( "SUBSTRING COMPLETE FUNCTION WITH COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT SUBSTRING(EMP.ENAME , 2 , 3 ^ FROM EMP")) + "SELECT SUBSTRING(EMP.ENAME , 2 , 3 ^ FROM EMP") .runTests(); } @@ -99,70 +82,61 @@ public void aggregate() { "COLLECT", "MAX", "MIN", "STDDEV", "STDDEV_POP", "STDDEV_SAMP", "SUM", "VAR_POP", "VAR_SAMP", "LISTAGG"); ImmutableList commaSeparated = ImmutableList.of("COVAR_POP", "COVAR_SAMP", "REGR_SXX", "REGR_SYY", "APPROX_COUNT_DISTINCT"); - GoldenFileTestBuilder testBuilder = new GoldenFileTestBuilder<>(this::executeTestWithFolderContext); + GoldenFileTestBuilder testBuilder = + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, + GoldenFileTestBuilder.MultiLineString::create); for (String function : distinctOrAll) { testBuilder .add(function + " AGGREGATE PARTIAL NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function.substring(0, function.length() - 1) + "^")) + "SELECT " + function.substring(0, function.length() - 1) + "^") .add(function + " AGGREGATE ONLY NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "( ^ FROM EMP")) + "SELECT " + function + "( ^ FROM EMP") .add(function + " AGGREGATE WITH ALL", 
- GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "( ALL ^ FROM EMP" )) + "SELECT " + function + "( ALL ^ FROM EMP" ) .add(function + " AGGREGATE WITH DISTINCT", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "( DISTINCT ^ FROM EMP")) + "SELECT " + function + "( DISTINCT ^ FROM EMP") .add(function + " AGGREGATE WITH DISTINCT VALUE", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "( DISTINCT EMP.ENAME ^ FROM EMP")); + "SELECT " + function + "( DISTINCT EMP.ENAME ^ FROM EMP"); } for (String function : commaSeparated) { testBuilder .add("COMMA SEPARATED PARTIAL NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function.substring(0, function.length() - 1) + "^")) + "SELECT " + function.substring(0, function.length() - 1) + "^") .add("COMMA SEPARATED NAME WITH ONE PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "(EMP.ENAME ^ FROM EMP")) + "SELECT " + function + "(EMP.ENAME ^ FROM EMP") .add("COMMA SEPARATED NAME WITH ONE PARAMETER COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "(EMP.ENAME, ^ FROM EMP")) + "SELECT " + function + "(EMP.ENAME, ^ FROM EMP") .add("COMMA SEPARATED WITH SECOND PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT " + function + "(^ , EMP.ENAME) FROM EMP")); + "SELECT " + function + "(^ , EMP.ENAME) FROM EMP"); } testBuilder - .add("COUNT PARTIAL NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUN^")) - .add("COUNT NAME ONLY", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUNT(^ FROM EMP")) - .add("COMPLETE COUNT WITH STAR", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUNT( * ^ FROM EMP")) - .add("COUNT WITH ALL ONE PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUNT( ALL EMP.ENAME ^ FROM EMP")) + .add("COUNT PARTIAL NAME", "SELECT COUN^") + .add("COUNT NAME ONLY", "SELECT COUNT(^ FROM EMP") + .add("COMPLETE COUNT WITH STAR", "SELECT COUNT( * ^ FROM EMP") + .add("COUNT WITH ALL ONE PARAMETER", "SELECT COUNT( ALL EMP.ENAME ^ FROM EMP") .add("COUNT WITH DISTINCT ONE PARAMETER COMMA", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUNT( DISTINCT EMP.ENAME, ^ FROM EMP")) - .add("MODE NAME ONLY", - GoldenFileTestBuilder.MultiLineString.create("SELECT MODE(^ FROM EMP")) - .add("MODE WITH ONE PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT MODE(EMP.ENAME ^ FROM EMP")) + "SELECT COUNT( DISTINCT EMP.ENAME, ^ FROM EMP") + .add("MODE NAME ONLY", "SELECT MODE(^ FROM EMP") + .add("MODE WITH ONE PARAMETER", "SELECT MODE(EMP.ENAME ^ FROM EMP") .runTests(); } @Test public void snippetScenarios() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) .add( - "SUBSTRING FIRST", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE SUBSTRING(^ FROM fromIndex FOR forLength)")) + "SUBSTRING FIRST", "SELECT * FROM EMP WHERE SUBSTRING(^ FROM fromIndex FOR forLength)") .add( - "SUBSTRING MIDDLE", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE SUBSTRING(string FROM ^ FOR forLength)")) + "SUBSTRING MIDDLE", "SELECT * FROM EMP WHERE SUBSTRING(string FROM ^ FOR forLength)") .add( - "SUBSTRING END", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE SUBSTRING(string FROM fromIndex FOR ^)")) + "SUBSTRING END", "SELECT * FROM EMP WHERE SUBSTRING(string FROM fromIndex FOR ^)") .add( - "Multiple Argument Function 1", - 
GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE BITWISE_AND(^, secondParameter)")) + "Multiple Argument Function 1", "SELECT * FROM EMP WHERE BITWISE_AND(^, secondParameter)") .add( - "Multiple Argument Function 2", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE BITWISE_AND(firstParameter, ^)")) + "Multiple Argument Function 2", "SELECT * FROM EMP WHERE BITWISE_AND(firstParameter, ^)") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/GeneralAutocompleteEngineTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/GeneralAutocompleteEngineTests.java index 30f92e16c5..4f4e3a3352 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/GeneralAutocompleteEngineTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/GeneralAutocompleteEngineTests.java @@ -22,79 +22,47 @@ public final class GeneralAutocompleteEngineTests extends AutocompleteEngineTests { @Test public void prefixFiltering() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "PREFIX FILTERING FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT AB^")) - .add( - "PREFIX FILTERING PARAMETER", - GoldenFileTestBuilder.MultiLineString.create("SELECT ABS(EMP.DEPT^ FROM EMP")) - .add( - "PREFIX FILTERING CATALOG ENTRIES", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM dep^")) - .add( - "PREFIX FILTERING CATALOG ENTRIES WITH DOUBLE QUOTES", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"dep^\"")) - .add( - "PREFIX FILTERING COLUMNS", - GoldenFileTestBuilder.MultiLineString.create("SELECT E^ FROM EMP")) - .add( - "PREFIX FILTERING KEYWORDS", - GoldenFileTestBuilder.MultiLineString.create("S^")) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) + .add("PREFIX FILTERING FUNCTION", "SELECT AB^") + .add("PREFIX FILTERING PARAMETER", "SELECT ABS(EMP.DEPT^ FROM EMP") + .add("PREFIX FILTERING CATALOG ENTRIES", "SELECT * FROM dep^") + .add("PREFIX FILTERING CATALOG ENTRIES WITH DOUBLE QUOTES", "SELECT * FROM \"dep^\"") + .add("PREFIX FILTERING COLUMNS", "SELECT E^ FROM EMP") + .add("PREFIX FILTERING KEYWORDS", "S^") .runTests(); } @Test public void keywordOrFunction() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "BINARY FUNCTIONS SHOULD SURFACE AS KEYWORDS", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE EMP.EMPNO ^")) - .add( - "NO DUPLICATES FOR SYSTEM FUNCTIONS", - GoldenFileTestBuilder.MultiLineString.create("SELECT COUN^ FROM EMP")) - .add( - "LEFT AS KEYWORD", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP LEF^")) - .add( - "LEFT AS FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT LEF^")) - .add( - "ABS IS ONLY EVER A FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT AB^")) - .add( - "MIN AS FUNCTION", - GoldenFileTestBuilder.MultiLineString.create("SELECT MI^")) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) + .add("BINARY FUNCTIONS SHOULD SURFACE AS KEYWORDS", "SELECT * FROM EMP WHERE EMP.EMPNO ^") + .add("NO DUPLICATES FOR SYSTEM FUNCTIONS", "SELECT COUN^ FROM EMP") + .add("LEFT AS KEYWORD", "SELECT * FROM EMP LEF^") + .add("LEFT AS FUNCTION", "SELECT LEF^") + .add("ABS IS ONLY EVER A FUNCTION", "SELECT AB^") + .add("MIN AS FUNCTION", "SELECT MI^") .add( "MIN AS 
KEYWORD", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO BY DAY, ENAME)\n" + - "MEASURES(EMPNO (COUNT, MI^")) + "MEASURES(EMPNO (COUNT, MI^") .runTests(); } @Test public void sqlStateMachine() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "INSIDE OF COMMENT BLOCK", - GoldenFileTestBuilder.MultiLineString.create("--SELECT ^")) - .add( - "OUTSIDE OF COMMENT BLOCK", - GoldenFileTestBuilder.MultiLineString.create("/*SELECT */ SELECT * ^")) - .add( - "INSIDE DOUBLE QUOTES", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"^\"")) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) + .add("INSIDE OF COMMENT BLOCK", "--SELECT ^") + .add("OUTSIDE OF COMMENT BLOCK", "/*SELECT */ SELECT * ^") + .add("INSIDE DOUBLE QUOTES", "SELECT * FROM \"^\"") .runTests(); } @Test public void tests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "EMPTY STRING", - GoldenFileTestBuilder.MultiLineString.create("^")) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) + .add("EMPTY STRING", "^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/OptimizeStatementCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/OptimizeStatementCompletionTests.java index 62411c6764..d979e83574 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/OptimizeStatementCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/OptimizeStatementCompletionTests.java @@ -22,14 +22,17 @@ public class OptimizeStatementCompletionTests extends AutocompleteEngineTests { @Test public void tests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add("OPTIMIZE", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE ^")) - .add("OPTIMIZE + PARTIAL TABLE", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TA^")) - .add("OPTIMIZE + TABLE", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TABLE ^")) - .add("OPTIMIZE + TABLE + TABLE NAME", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TABLE EMP ^")) - .add("OPTIMIZE + TABLE + TABLE NAME + PAREN", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TABLE EMP (^")) - .add("OPTIMIZE + TABLE + TABLE NAME + OPTION", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TABLE EMP (MIN_INPUT_FILES=5 ^")) - .add("OPTIMIZE + TABLE + TABLE NAME + MULTIPLE OPTIONS", GoldenFileTestBuilder.MultiLineString.create("OPTIMIZE TABLE EMP (MIN_INPUT_FILES=5 , ^")) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) + .add("OPTIMIZE", "OPTIMIZE ^") + .add("OPTIMIZE + PARTIAL TABLE", "OPTIMIZE TA^") + .add("OPTIMIZE + TABLE", "OPTIMIZE TABLE ^") + .add("OPTIMIZE + TABLE + TABLE NAME", "OPTIMIZE TABLE EMP ^") + .add("OPTIMIZE + TABLE + TABLE NAME + FOR PARTITIONS", "OPTIMIZE TABLE EMP FOR PARTITIONS ^") + .add("OPTIMIZE + TABLE + TABLE NAME + FOR PARTITIONS + BOOLEAN EXPRESSION", "OPTIMIZE TABLE EMP FOR PARTITIONS NAME != 'Brandon' ^") + .add("OPTIMIZE + TABLE + TABLE NAME + REWRITE", "OPTIMIZE TABLE EMP REWRITE ^") + .add("OPTIMIZE + TABLE + TABLE NAME + PAREN", "OPTIMIZE TABLE EMP (^") + .add("OPTIMIZE + TABLE + TABLE NAME + OPTION", "OPTIMIZE TABLE EMP (MIN_INPUT_FILES=5 ^") + .add("OPTIMIZE + TABLE + TABLE 
NAME + MULTIPLE OPTIONS", "OPTIMIZE TABLE EMP (MIN_INPUT_FILES=5 , ^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/ReflectionCreateCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/ReflectionCreateCompletionTests.java index e5488c7e84..abdae8078a 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/ReflectionCreateCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/ReflectionCreateCompletionTests.java @@ -22,103 +22,103 @@ public final class ReflectionCreateCompletionTests extends AutocompleteEngineTests { @Test public void raw() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( "USING + DISPLAY + OPEN", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + "USING \n" + - "DISPLAY(^")) + "DISPLAY(^") .add( "USING + DISPLAY + DISPLAY FIELDS", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + "USING \n" + - "DISPLAY(EMPNO, ^")) + "DISPLAY(EMPNO, ^") .add( "DISTRIBUTE", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + "USING \n" + "DISPLAY(EMPNO, ENAME) \n" + - "DISTRIBUTE BY(^")) + "DISTRIBUTE BY(^") .add( "PARTITION", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + "USING \n" + "DISPLAY(EMPNO, ENAME) \n" + - "PARTITION BY(^")) + "PARTITION BY(^") .add( "LOCALSORT", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE RAW REFLECTION myReflection\n" + "USING \n" + "DISPLAY(EMPNO, ENAME) \n" + - "LOCALSORT BY(^")) + "LOCALSORT BY(^") .runTests(); } @Test public void aggregateReflectionCreateTests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( "DIMENSIONS FIELD", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(^)\n" + - "MEASURES(EMPNO)")) + "MEASURES(EMPNO)") .add( "DIMENSIONS FIELD BY DAY", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(^ BY DAY)\n" + - "MEASURES(EMPNO)")) + "MEASURES(EMPNO)") .add( "DIMENSIONS MIXED", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO BY DAY, ^)\n" + - "MEASURES(EMPNO)")) + "MEASURES(EMPNO)") .add( "MEASURES FIELD", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO)\n" + - "MEASURES(^)")) + 
"MEASURES(^)") .add( "MEASURES WITH ANNOTATIONS", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO)\n" + - "MEASURES(^ (COUNT, MIN, MAX, SUM, APPROXIMATE COUNT DISTINCT))")) + "MEASURES(^ (COUNT, MIN, MAX, SUM, APPROXIMATE COUNT DISTINCT))") .add( "MEASURES WITH ANNOTATIONS2", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO)\n" + - "MEASURES(EMPNO (COUNT, MIN, MAX, ^, APPROXIMATE COUNT DISTINCT))")) + "MEASURES(EMPNO (COUNT, MIN, MAX, ^, APPROXIMATE COUNT DISTINCT))") .add( "MEASURES MIXED", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO)\n" + - "MEASURES(EMPNO (COUNT, MIN, MAX, SUM, APPROXIMATE COUNT DISTINCT), ^)")) + "MEASURES(EMPNO (COUNT, MIN, MAX, SUM, APPROXIMATE COUNT DISTINCT), ^)") .add( "EVERYTHING", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + + "ALTER TABLE EMP CREATE AGGREGATE REFLECTION myReflection\n" + "USING \n"+ "DIMENSIONS(EMPNO BY DAY, ENAME)\n" + "MEASURES(EMPNO (COUNT, MIN, MAX, SUM, APPROXIMATE COUNT DISTINCT), ENAME)\n" + "DISTRIBUTE BY(EMPNO, ENAME)\n" + "PARTITION BY(^, ENAME)\n" + "LOCALSORT BY(EMPNO, ENAME)\n" + - "ARROW CACHE true")) + "ARROW CACHE true") .runTests(); } @Test public void externalReflectionCreateTests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( "USING", - GoldenFileTestBuilder.MultiLineString.create("ALTER TABLE EMP CREATE EXTERNAL REFLECTION myReflection\n" + - "USING ^")) + "ALTER TABLE EMP CREATE EXTERNAL REFLECTION myReflection\n" + + "USING ^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SelectStatementCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SelectStatementCompletionTests.java index 825d3534fd..2403ad9fda 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SelectStatementCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SelectStatementCompletionTests.java @@ -22,184 +22,141 @@ public final class SelectStatementCompletionTests extends AutocompleteEngineTests { @Test public void columnCompletionScenarios() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( "EMPTY SELECT", - GoldenFileTestBuilder.MultiLineString.create("SELECT ^ FROM EMP")) + "SELECT ^ FROM EMP") .add( "SINGLE SELECT ITEM", - GoldenFileTestBuilder.MultiLineString.create("SELECT EMP.ENAME ^ FROM EMP")) + "SELECT EMP.ENAME ^ FROM EMP") .add( "SINGLE SELECT ITEM AWAITING SECOND SELECT ITEM", - GoldenFileTestBuilder.MultiLineString.create("SELECT EMP.ENAME, ^ FROM EMP")) + "SELECT EMP.ENAME, ^ FROM EMP") .add( "MIDDLE OF SELECTS", - GoldenFileTestBuilder.MultiLineString.create("SELECT EMP.ENAME, ^ , EMP.JOB FROM EMP")) + "SELECT EMP.ENAME, ^ , EMP.JOB FROM EMP") .add( "ESCAPES IN PATH", - 
GoldenFileTestBuilder.MultiLineString.create("SELECT \"EMP\".\"ENAME\", ^ FROM EMP")) + "SELECT \"EMP\".\"ENAME\", ^ FROM EMP") .add( "SELECT *", - GoldenFileTestBuilder.MultiLineString.create("SELECT * ^ FROM EMP")) + "SELECT * ^ FROM EMP") .runTests(); } @Test public void joinScenarios() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) - .add( - "JOIN AWAITING SECOND TABLE", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP JOIN ^")) - .add( - "JOIN AWAITING ON KEYWORD", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP JOIN DEPT ^")) - .add( - "JOIN AWAITING EXPRESSION", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP JOIN DEPT ON ^")) - .add( - "JOIN IN MIDDLE OF EXPRESSION", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP JOIN DEPT ON EMP.DEPTNO = ^")) - .add( - "FROM CLAUSE WITH JOINS", - GoldenFileTestBuilder.MultiLineString.create("SELECT ^ FROM EMP JOIN DEPT ON EMP.DEPTNO = DEPT.DEPTNO")) - .add( - "MULTIPLE JOINS", - GoldenFileTestBuilder.MultiLineString.create("" + - "SELECT * FROM EMP " + - "JOIN DEPT ON EMP.DEPTNO = ^ " + - "JOIN SALGRADE ON SALGRADE.GRADE = DEPT.DEPTNO")) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) + .add("JOIN AWAITING SECOND TABLE", "SELECT * FROM EMP JOIN ^") + .add("JOIN AWAITING ON KEYWORD", "SELECT * FROM EMP JOIN DEPT ^") + .add("JOIN AWAITING EXPRESSION", "SELECT * FROM EMP JOIN DEPT ON ^") + .add("JOIN IN MIDDLE OF EXPRESSION", "SELECT * FROM EMP JOIN DEPT ON EMP.DEPTNO = ^") + .add("FROM CLAUSE WITH JOINS", "SELECT ^ FROM EMP JOIN DEPT ON EMP.DEPTNO = DEPT.DEPTNO") + .add("MULTIPLE JOINS", + "SELECT * FROM EMP \n" + + "JOIN DEPT ON EMP.DEPTNO = ^ \n" + + "JOIN SALGRADE ON SALGRADE.GRADE = DEPT.DEPTNO") .runTests(); } @Test public void pathAliasing() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( - "BASIC", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP ^")) + "BASIC", "SELECT * FROM EMP ^") .add( - "ALIAS ", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AS ^")) + "ALIAS ", "SELECT * FROM EMP AS ^") .add( - "ALIAS with no as", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP e^")) + "ALIAS with no as", "SELECT * FROM EMP e^") .runTests(); } @Test public void pathCompletion() { - new GoldenFileTestBuilder<>(this::executeTestWithRootContext) - .add( - "EMPTY FROM", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM ^")) - .add( - "BASIC COMPLETION", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".^")) - .add( - "COMPLETION WITH A SPACE IN NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space with a space in the name\".^")) - .add( - "PATH WITH MANY CHILDREN", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".^")) - .add( - "PATH WITH NO CHILDREN", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".\"file\".^")) - .add( - "INVALID PATH", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"path\".\"that\".\"does\".\"not\".\"exist\".^")) - .add( - "MULTIPLE TABLES", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".\"physical dataset\", \"space\".\"folder\".^")) - .add( - "JOIN empty path", - GoldenFileTestBuilder.MultiLineString.create("SELECT 
* FROM \"space\".\"folder\".\"physical dataset\" JOIN ^")) - .add( - "JOIN mid path", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".\"physical dataset\" JOIN \"space\".\"folder\".^")) - .add( - "APPLY empty path", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".\"physical dataset\" APPLY ^")) - .add( - "APPLY mid path", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"space\".\"folder\".\"physical dataset\" APPLY \"space\".\"folder\".^")) - .add( - "Path with special character incorrect.", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM @^")) - .add( - "Path with special character correct.", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"@^")) + new GoldenFileTestBuilder<>(this::executeTestWithRootContext, GoldenFileTestBuilder.MultiLineString::create) + .add("EMPTY FROM", "SELECT * FROM ^") + .add("BASIC COMPLETION", "SELECT * FROM \"space\".^") + .add("COMPLETION WITH A SPACE IN NAME", "SELECT * FROM \"space with a space in the name\".^") + .add("PATH WITH MANY CHILDREN", "SELECT * FROM \"space\".\"folder\".^") + .add("PATH WITH NO CHILDREN", "SELECT * FROM \"space\".\"folder\".\"file\".^") + .add("INVALID PATH", "SELECT * FROM \"path\".\"that\".\"does\".\"not\".\"exist\".^") + .add("MULTIPLE TABLES", "SELECT * FROM \"space\".\"folder\".\"physical dataset\", \"space\".\"folder\".^") + .add("JOIN empty path", "SELECT * FROM \"space\".\"folder\".\"physical dataset\" JOIN ^") + .add("JOIN mid path", "SELECT * FROM \"space\".\"folder\".\"physical dataset\" JOIN \"space\".\"folder\".^") + .add("APPLY empty path", "SELECT * FROM \"space\".\"folder\".\"physical dataset\" APPLY ^") + .add("APPLY mid path", "SELECT * FROM \"space\".\"folder\".\"physical dataset\" APPLY \"space\".\"folder\".^") + .add("Path with special character incorrect.", "SELECT * FROM @^") + .add("Path with special character correct.", "SELECT * FROM \"@^") .runTests(); } @Test public void subqueryScenarios() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( - "START OF SUBQUERY", - GoldenFileTestBuilder.MultiLineString.create("SELECT (^")) + "START OF SUBQUERY", "SELECT (^") .add( - "START OF SUBQUERY WITHOUT SOURCE", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM (^")) - .add("COLUMN in INNER query", GoldenFileTestBuilder.MultiLineString.create( + "START OF SUBQUERY WITHOUT SOURCE", "SELECT * FROM (^") + .add("COLUMN in INNER query", "SELECT DEPTNO, (\n" + " SELECT MAX(EMP.SAL), ^ \n" + " FROM EMP\n" + ")\n" + - "FROM DEPT\n")) - .add("COLUMN in OUTER query", GoldenFileTestBuilder.MultiLineString.create( + "FROM DEPT\n") + .add("COLUMN in OUTER query", "SELECT DEPTNO, ^, (\n" + " SELECT MAX(EMP.SAL) \n" + " FROM EMP\n" + ")\n" + - "FROM DEPT\n")) - .add("CATALOGENTRY in INNER query", GoldenFileTestBuilder.MultiLineString.create( + "FROM DEPT\n") + .add("CATALOGENTRY in INNER query", "SELECT DEPTNO, (\n" + " SELECT * \n" + " FROM ^" + ")\n" + - "FROM DEPT\n")) - .add("CATALOGENTRY in OUTER query", GoldenFileTestBuilder.MultiLineString.create( + "FROM DEPT\n") + .add("CATALOGENTRY in OUTER query", "SELECT (\n" + " SELECT * \n" + " FROM EMP\n" + ")\n" + - "FROM ^\n")) + "FROM ^\n") .runTests(); } @Test public void nessie() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + GoldenFileTestBuilder.create(this::executeTestWithFolderContext) .add( 
"JUST FINISHED TABLE NAME", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP ^")) + "SELECT * FROM EMP ^") .add( "JUST FINISHED TABLE NAME WITH AT", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT ^")) + "SELECT * FROM EMP AT ^") .add( "JUST FINISHED TABLE NAME WITH AT", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT B^")) + "SELECT * FROM EMP AT B^") .add( "JUST FINISHED TABLE NAME WITH AT", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT C^")) + "SELECT * FROM EMP AT C^") .add( "JUST FINISHED TABLE NAME WITH AT", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT T^")) + "SELECT * FROM EMP AT T^") .add( "BRANCH", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT BRANCH ^")) + "SELECT * FROM EMP AT BRANCH ^") .add( "COMMIT", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT COMMIT ^")) + "SELECT * FROM EMP AT COMMIT ^") .add( "TAG", - GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP AT TAG ^")) + "SELECT * FROM EMP AT TAG ^") .add( "Column With Reference", - GoldenFileTestBuilder.MultiLineString.create("SELECT ^ FROM EMP AT BRANCH \"Branch A\"")) + "SELECT ^ FROM EMP AT BRANCH \"Branch A\"") .add( "Set branch", - GoldenFileTestBuilder.MultiLineString.create("USE BRANCH branchA; SELECT * FROM ^")) + "USE BRANCH branchA; SELECT * FROM ^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SqlQueryTokenizerTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SqlQueryTokenizerTests.java index 5080869e7f..49629c3afd 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SqlQueryTokenizerTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/SqlQueryTokenizerTests.java @@ -29,7 +29,7 @@ public final class SqlQueryTokenizerTests { @Test public void tests() { - new GoldenFileTestBuilder<>(SqlQueryTokenizerTests::executeTest) + GoldenFileTestBuilder.create(SqlQueryTokenizerTests::executeTest) .add( "BASIC QUERY", GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP WHERE age < 10 ORDER by age LIMIT 10 OFFSET 10 FETCH FIRST 10 ONLY")) @@ -59,7 +59,7 @@ public void tests() { @Test public void testSingleQuoteIsTreatedAsStringLiteral() { - new GoldenFileTestBuilder<>(SqlQueryTokenizerTests::executeTest) + GoldenFileTestBuilder.create(SqlQueryTokenizerTests::executeTest) .add( "CLOSED SINGLE QUOTE", GoldenFileTestBuilder.MultiLineString.create("SELECT '123' FROM EMP")) @@ -77,7 +77,7 @@ public void testSingleQuoteIsTreatedAsStringLiteral() { @Test public void testDoubleQuoteIsTreatedAsIdentifier() { - new GoldenFileTestBuilder<>(SqlQueryTokenizerTests::executeTest) + GoldenFileTestBuilder.create(SqlQueryTokenizerTests::executeTest) .add( "CLOSED DOUBLE QUOTE", GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM \"EMP\"")) diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/UpdateStatementCompletionTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/UpdateStatementCompletionTests.java index 3df812fc76..1c1b8c9d71 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/UpdateStatementCompletionTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/UpdateStatementCompletionTests.java @@ -22,34 +22,34 @@ public final class UpdateStatementCompletionTests extends AutocompleteEngineTests { @Test public 
void tests() { - new GoldenFileTestBuilder<>(this::executeTestWithFolderContext) + new GoldenFileTestBuilder<>(this::executeTestWithFolderContext, GoldenFileTestBuilder.MultiLineString::create) .add( "UPDATE", - GoldenFileTestBuilder.MultiLineString.create("UPDATE ^")) + "UPDATE ^") .add( "UPDATE + TABLE", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP ^")) + "UPDATE EMP ^") .add( "UPDATE + TABLE + SET", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET ^")) + "UPDATE EMP SET ^") .add( "UPDATE + TABLE + SET + PARTIAL ASSIGN", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = ^")) + "UPDATE EMP SET NAME = ^") .add( "UPDATE + TABLE + SET + ASSIGN", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = 'Brandon' ^")) + "UPDATE EMP SET NAME = 'Brandon' ^") .add( "UPDATE + TABLE + SET + PARTIAL ASSIGN LIST", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = 'Brandon', ^")) + "UPDATE EMP SET NAME = 'Brandon', ^") .add( "UPDATE + TABLE + SET + ASSIGN LIST", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = 'Brandon', AGE = 27 ^")) + "UPDATE EMP SET NAME = 'Brandon', AGE = 27 ^") .add( "UPDATE + TABLE + SET + ASSIGN LIST + WHERE", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = 'Brandon', AGE = 27 WHERE ^")) + "UPDATE EMP SET NAME = 'Brandon', AGE = 27 WHERE ^") .add( "UPDATE + TABLE + SET + ASSIGN LIST + WHERE + BOOLEAN EXPRESSION", - GoldenFileTestBuilder.MultiLineString.create("UPDATE EMP SET NAME = 'Brandon', AGE = 27 WHERE NAME != 'Brandon' ^")) + "UPDATE EMP SET NAME = 'Brandon', AGE = 27 WHERE NAME != 'Brandon' ^") .runTests(); } } diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockCatalog.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockCatalog.java index 50dad135e6..c90590de8c 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockCatalog.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockCatalog.java @@ -79,6 +79,11 @@ public DremioTable getTableForQuery(NamespaceKey key) { return getTable(key); } + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + throw new UnsupportedOperationException(); + } + @Override public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { throw new UnsupportedOperationException(); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockMetadataCatalog.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockMetadataCatalog.java index 4ee3af808a..07ee65f4b3 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockMetadataCatalog.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/catalog/mock/MockMetadataCatalog.java @@ -79,11 +79,11 @@ public MockMetadataCatalog(CatalogData data) { @Override public DremioTable getTable(NamespaceKey key) { - return getTableSnapshot(key, TableVersionContext.LATEST_VERSION); + return getTableSnapshotForQuery(key, TableVersionContext.LATEST_VERSION); } @Override - public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { NodeMetadata schemas; switch (context.getType()) { case BRANCH: @@ -113,6 
+113,11 @@ public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext contex return resolve(key, fullPath, schemas); } + @Override + public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { + return getTableSnapshotForQuery(key, context); + } + private DremioTable resolve(NamespaceKey key, List<String> path, NodeMetadata metadata) { if (metadata == null) { throw new RuntimeException(); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/columns/ColumnResolverTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/columns/ColumnResolverTests.java index 08a233caa2..c2ba20a342 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/columns/ColumnResolverTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/columns/ColumnResolverTests.java @@ -37,7 +37,7 @@ public final class ColumnResolverTests { @Test public void tests() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithFolderContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithFolderContext) .add( "SIMPLE FROM CLAUSE", "SELECT ^ FROM EMP") @@ -52,7 +52,7 @@ public void tests() { @Test public void aliasing() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithFolderContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithFolderContext) .add( "NO ALIAS", "SELECT ^ FROM EMP") @@ -88,7 +88,7 @@ public void aliasing() { @Test public void aliasingWithHomeContext() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithHomeContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithHomeContext) .add( "NO ALIAS", "SELECT ^ FROM \"space\".\"folder\".EMP") @@ -109,7 +109,7 @@ public void aliasingWithHomeContext() { @Test public void subquery() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithFolderContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithFolderContext) .add( "CURSOR IN MIDDLE QUERY", "SELECT DEPTNO, (SELECT ^ , (SELECT * FROM SALGRADE) FROM EMP) FROM DEPT") @@ -130,7 +130,7 @@ public void subquery() { @Test public void nessie() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithFolderContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithFolderContext) .add( "BRANCH", "SELECT ^ FROM EMP AT BRANCH \"Branch A\"") @@ -154,7 +154,7 @@ public void nessie() { @Test public void nessieWithHomeContext() { - new GoldenFileTestBuilder<>(ColumnResolverTests::executeTestWithHomeContext) + GoldenFileTestBuilder.create(ColumnResolverTests::executeTestWithHomeContext) .add( "BRANCH", "SELECT ^ FROM \"space\".\"folder\".EMP AT BRANCH \"Branch A\"") diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/FunctionParserTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/FunctionParserTests.java index 41f2669b81..a3e06456f1 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/FunctionParserTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/FunctionParserTests.java @@ -31,7 +31,7 @@ public final class FunctionParserTests { @Test public void simple() { - new GoldenFileTestBuilder<>(FunctionParserTests::executeTest) + GoldenFileTestBuilder.create(FunctionParserTests::executeTest) .add("ONLY NAME", "myFunction") .add("NAME AND OPEN PARENS", "myFunction(") .add("SINGLE PARAMETER",
"myFunction(1") @@ -49,7 +49,7 @@ public void simple() { @Test public void substring() { - new GoldenFileTestBuilder<>(FunctionParserTests::executeTest) + GoldenFileTestBuilder.create(FunctionParserTests::executeTest) .add("ONLY NAME", "SUBSTRING") .add("NAME AND OPEN PARENS", "SUBSTRING(") .add("SINGLE STRING PARAMETER", "SUBSTRING('hello'") @@ -79,7 +79,7 @@ public void aggregate() { "REGR_SYY", "APPROX_COUNT_DISTINCT"); ImmutableList conditioned = ImmutableList.of("EVERY", "SOME"); ImmutableList multisetFunctions = ImmutableList.of("FUSION", "INTERSECTION"); - GoldenFileTestBuilder testBuilder = new GoldenFileTestBuilder<>(FunctionParserTests::executeTest); + GoldenFileTestBuilder testBuilder = GoldenFileTestBuilder.create(FunctionParserTests::executeTest); for (String function : distinctOrAll) { testBuilder diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgBooleanFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgBooleanFunction.java index 98c09d6e51..8462fdab01 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgBooleanFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgBooleanFunction.java @@ -47,6 +47,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(1); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() != 1) { return false; @@ -56,6 +57,7 @@ public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnF return relDataType.getSqlTypeName() == SqlTypeName.BOOLEAN; } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.BOOLEAN); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgNumericFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgNumericFunction.java index f946b6faab..75cf44baef 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgNumericFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OneArgNumericFunction.java @@ -48,6 +48,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(1); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() != 1) { return false; @@ -57,6 +58,7 @@ public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnF return SqlTypeName.NUMERIC_TYPES.contains(relDataType.getSqlTypeName()); } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OptionalArgFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OptionalArgFunction.java index 56768c8574..198ea4183b 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OptionalArgFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OptionalArgFunction.java @@ -71,6 +71,7 @@ private OptionalArgFunction(List operandTypes) { this.operandTypes = operandTypes; } + 
@Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() != operandTypes.size()) { return false; @@ -100,6 +101,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(operandTypes.size()); } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OverloadedFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OverloadedFunction.java index 1392d3f34f..ecde03d679 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OverloadedFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/OverloadedFunction.java @@ -71,6 +71,7 @@ private OverloadedFunction(List operandTypes) { this.operandTypes = operandTypes; } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() != operandTypes.size()) { return false; @@ -100,6 +101,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(operandTypes.size()); } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ParameterResolverTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ParameterResolverTests.java index dbeedeafbe..359b398df3 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ParameterResolverTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ParameterResolverTests.java @@ -98,7 +98,7 @@ public final class ParameterResolverTests { @Test public void tests() { - new GoldenFileTestBuilder<>(ParameterResolverTests::executeTest) + GoldenFileTestBuilder.create(ParameterResolverTests::executeTest) .add("NO FUNCTION", "SELECT ^") .add("COMPLETED FUNCTION", "ZERO_ARG_FUNCTION(^)") .add("NO PARAMETER FUNCTION", "ZERO_ARG_FUNCTION(^") diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/TwoArgNumericFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/TwoArgNumericFunction.java index d0a8b2c48d..4a20fbe80f 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/TwoArgNumericFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/TwoArgNumericFunction.java @@ -47,6 +47,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(2); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() != 2) { return false; @@ -65,6 +66,7 @@ public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnF return true; } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/UnstableReturnTypeFunction.java 
b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/UnstableReturnTypeFunction.java index c6fafd8680..08eb6c2355 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/UnstableReturnTypeFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/UnstableReturnTypeFunction.java @@ -47,10 +47,12 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(1); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { return sqlCallBinding.operands().size() == 1; } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { RelDataType relDataType = opBinding.getOperandType(0); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/VaradicFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/VaradicFunction.java index a98d2c088f..71a67e7868 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/VaradicFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/VaradicFunction.java @@ -47,6 +47,7 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.from(2); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { if (sqlCallBinding.operands().size() < 2) { return false; @@ -63,6 +64,7 @@ public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnF return true; } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ZeroArgFunction.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ZeroArgFunction.java index 85dcf8cc02..1334efd438 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ZeroArgFunction.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/functions/ZeroArgFunction.java @@ -47,10 +47,12 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(0); } + @Override public boolean checkOperandTypes(SqlCallBinding sqlCallBinding, boolean throwOnFailure) { return sqlCallBinding.operands().size() == 0; } + @Override public RelDataType inferReturnType( SqlOperatorBinding opBinding) { return JavaTypeFactoryImpl.INSTANCE.createSqlType(SqlTypeName.DOUBLE); diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/statements/grammar/StatementParserTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/statements/grammar/StatementParserTests.java index f5e60fab8f..d33a23a30e 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/statements/grammar/StatementParserTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/statements/grammar/StatementParserTests.java @@ -29,7 +29,7 @@ public final class StatementParserTests { @Test public void statementListTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add("Empty String", MultiLineString.create("^")) .add("Only semicolons and cursor after one of them", MultiLineString.create(";^;;;")) .add("Ends with semicolon and 
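Reviewer note: the `@Override` additions on these Calcite operator fixtures (OneArgBooleanFunction, OneArgNumericFunction, OptionalArgFunction, OverloadedFunction, TwoArgNumericFunction, UnstableReturnTypeFunction, VaradicFunction, ZeroArgFunction) are pure hygiene: `checkOperandTypes(SqlCallBinding, boolean)` and `inferReturnType(SqlOperatorBinding)` are inherited from Calcite's SqlOperator, and the annotation makes the compiler fail if an upstream signature ever drifts, instead of silently leaving a dead overload. A compiling sketch of the fixture shape (hypothetical class name, and it uses opBinding.getTypeFactory() where the diff's fixtures use JavaTypeFactoryImpl.INSTANCE):

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlCallBinding;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperatorBinding;
import org.apache.calcite.sql.type.SqlTypeName;

public final class OneArgNumericFunctionSketch extends SqlFunction {
  OneArgNumericFunctionSketch() {
    // Null inference/checker arguments are legal here because both hooks
    // are overridden directly below.
    super("ONE_ARG_NUMERIC_SKETCH", SqlKind.OTHER_FUNCTION, null, null, null,
        SqlFunctionCategory.NUMERIC);
  }

  @Override // the compiler now verifies this matches SqlOperator's signature
  public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure) {
    if (callBinding.operands().size() != 1) {
      return false;
    }
    RelDataType operandType = callBinding.getValidator()
        .deriveType(callBinding.getScope(), callBinding.operands().get(0));
    return SqlTypeName.NUMERIC_TYPES.contains(operandType.getSqlTypeName());
  }

  @Override // same guarantee for the return-type hook
  public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
    return opBinding.getTypeFactory().createSqlType(SqlTypeName.DOUBLE);
  }
}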
cursor after", MultiLineString.create(";;;;^")) @@ -45,7 +45,7 @@ public void statementListTests() { @Test public void queryStatementTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add("JUST SELECT", MultiLineString.create("SELECT ^")) .add("JUST SELECT *", MultiLineString.create("SELECT * ^")) .add("JUST SELECT * FROM ", MultiLineString.create("SELECT * FROM ^")) @@ -145,7 +145,7 @@ public void queryStatementTests() { @Test public void fromClauseExtraction() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "NO FROM CLAUSE", GoldenFileTestBuilder.MultiLineString.create("SELECT *^")) @@ -196,7 +196,7 @@ public void fromClauseExtraction() { @Test public void tableReferenceExtraction() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "Basic", GoldenFileTestBuilder.MultiLineString.create("SELECT * FROM EMP^")) @@ -220,7 +220,7 @@ public void tableReferenceExtraction() { @Test public void dropStatementTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add("JUST DROP", MultiLineString.create("DROP^")) .add("DROP INCOMPLETE / UNKNOWN TYPE", MultiLineString.create("DROP BR^")) .add("DROP BRANCH", MultiLineString.create("DROP BRANCH^")) @@ -237,7 +237,7 @@ public void dropStatementTests() { @Test public void deleteStatementTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add("DELETE", MultiLineString.create("DELETE^")) .add("DELETE + PARTIAL FROM", MultiLineString.create("DELETE FR^")) .add("DELETE + FROM", MultiLineString.create("DELETE FROM^")) @@ -252,7 +252,7 @@ public void deleteStatementTests() { @Test public void updateStatementTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "UPDATE", MultiLineString.create("UPDATE ^")) @@ -279,7 +279,7 @@ public void updateStatementTests() { @Test public void dQueryTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add("SET", MultiLineString.create("SET ^")) .add("SET T", @@ -299,7 +299,7 @@ public void dQueryTests() { @Test public void rawReflectionCreateTests() { - GoldenFileTestBuilder builder = new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder builder = GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "USING", MultiLineString.create("ALTER TABLE myTable CREATE RAW REFLECTION myReflection\n" + @@ -412,7 +412,7 @@ public void rawReflectionCreateTests() { @Test public void aggregateReflectionCreateTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "USING", MultiLineString.create("ALTER TABLE myTable CREATE AGGREGATE REFLECTION myReflection\n" + @@ -469,7 +469,7 @@ public void aggregateReflectionCreateTests() { @Test public void externalReflectionCreateTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "USING", MultiLineString.create("ALTER TABLE EMP CREATE EXTERNAL REFLECTION 
myReflection\n" + @@ -484,7 +484,7 @@ public void externalReflectionCreateTests() { @Test public void selectClauseTests() { - new GoldenFileTestBuilder<>(StatementParserTests::executeTest) + GoldenFileTestBuilder.create(StatementParserTests::executeTest) .add( "JUST SELECT", MultiLineString.create("SELECT ")) diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/CursorTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/CursorTests.java index 2568475fbe..5f1024c8b6 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/CursorTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/CursorTests.java @@ -56,7 +56,7 @@ public void testIndexOfTokenWithCursor() { @Test public void testTokenizeWithCursor() { - new GoldenFileTestBuilder<>(CursorTests::testTokenizeWithCursorImplementation) + GoldenFileTestBuilder.create(CursorTests::testTokenizeWithCursorImplementation) .add("EMPTY STRING", new Input("", 0)) .add("END OF TOKEN", new Input("HELLO", "HELLO".length())) .add("START OF TOKEN", new Input("HELLO", 0)) diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/PrefixedTokensSearchTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/PrefixedTokensSearchTests.java index 69f26a7629..bf2233e9c9 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/PrefixedTokensSearchTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/PrefixedTokensSearchTests.java @@ -37,7 +37,7 @@ public final class PrefixedTokensSearchTests { */ @Test public void topLevelStatements() { - new GoldenFileTestBuilder<>(PrefixedTokensSearchTests::executeTest) + GoldenFileTestBuilder.create(PrefixedTokensSearchTests::executeTest) .add("EMPTY", new Input("", 1)) .add("ANALYZE", new Input("ANALYZE", 3)) .add("CALL", new Input("CALL", 3)) @@ -74,7 +74,7 @@ public void topLevelStatements() { @Test public void alterTable() { - new GoldenFileTestBuilder<>(PrefixedTokensSearchTests::executeTest) + GoldenFileTestBuilder.create(PrefixedTokensSearchTests::executeTest) .add("JUST ALTER TABLE", new Input("ALTER TABLE mytable", 1)) .add("ALTER TABLE + ALTER", new Input("ALTER TABLE mytable ALTER", 4)) .add("ALTER TABLE + CHANGE", new Input("ALTER TABLE mytable CHANGE", 4)) diff --git a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/TokenResolverTests.java b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/TokenResolverTests.java index 4d6b740ecd..2b917a124e 100644 --- a/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/TokenResolverTests.java +++ b/services/autocomplete/src/test/java/com/dremio/service/autocomplete/tokens/TokenResolverTests.java @@ -29,7 +29,7 @@ public final class TokenResolverTests { @Test public void tests() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("EMPTY STRING", "") .add("SELECT", "SELECT ") .add("SELECT STAR", "SELECT * ") @@ -44,7 +44,7 @@ public void tests() { @Test public void multiSql() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add( "MULTI SQL", "SELECT * FROM EMP; ^") @@ -62,7 +62,7 @@ public void multiSql() { @Test public void testDDL() { - new 
GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("SHOW", "SHOW ") .add("EXPLAIN", "EXPLAIN ") .add("DROP", "DROP ") @@ -75,7 +75,7 @@ public void testDDL() { @Test public void testSubquery() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("AFTER OPEN PARENS", "SELECT ( ") .add("IN", "SELECT * FROM EMP WHERE EMP.age IN (") .add("NOT IN", "SELECT * FROM EMP WHERE EMP.age NOT IN ( ") @@ -98,7 +98,7 @@ public void testSubquery() { @Test public void testNessie() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("JUST FINISHED TABLE NAME", "SELECT * FROM EMP ") .add("JUST FINISHED TABLE NAME WITH AT", "SELECT * FROM EMP AT ") .add("BRANCH", "SELECT * FROM EMP AT BRANCH ") @@ -110,7 +110,7 @@ public void testNessie() { @Test @Ignore public void comparisonOperators() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("Equals", "SELECT * FROM EMP WHERE EMP.age = ") .add("Not equal", "SELECT * FROM EMP WHERE EMP.age <> ") .add("Not equal", "SELECT * FROM EMP WHERE EMP.age != ") @@ -158,7 +158,7 @@ public void comparisonOperators() { @Test public void logicalOperators() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("OR", "SELECT * FROM EMP WHERE EMP.age = 5 OR ") .add("AND", "SELECT * FROM EMP WHERE EMP.age = 5 AND ") .add("NOT", "SELECT * FROM EMP WHERE NOT ") @@ -175,7 +175,7 @@ public void logicalOperators() { @Test public void arithmeticOperators() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("positive", "SELECT +") .add("negative", "SELECT -") .add("plus", "SELECT EMP.age + ") @@ -188,14 +188,14 @@ public void arithmeticOperators() { @Test public void characterOperators() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("concat", "SELECT 'asdf' || ") .runTests(); } @Test public void reflections() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("ALTER + CREATE", "ALTER DATASET blah CREATE ") // RAW REFLECTIONS .add("RAW REFLECTION", "ALTER DATASET blah CREATE RAW REFLECTION ") @@ -225,7 +225,7 @@ public void reflections() { @Test public void specialSyntaxFunctions() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) .add("CAST", "SELECT CAST(") .add("CAST", "SELECT CAST(myValue ") .add("CAST", "SELECT CAST(myValue AS") @@ -367,7 +367,7 @@ public void specialSyntaxFunctions() { @Test public void collectionFunctions() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) // MULTISET CONSTRUCTION .add("MULTISET CONSTRUCTION", "SELECT MULTISET") .add("MULTISET CONSTRUCTION SUBQUERY", "SELECT MULTISET(") @@ -459,7 +459,7 @@ public void collectionFunctions() { @Test public void periodPredicates() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) // PERIOD CONSTRUCTION .add("(", "SELECT (") .add("(datetime", 
"SELECT (datetime") @@ -487,7 +487,7 @@ public void periodPredicates() { @Test public void valueOperations() { - new GoldenFileTestBuilder<>(TokenResolverTests::executeTest) + GoldenFileTestBuilder.create(TokenResolverTests::executeTest) // Index is not recommended and close bracket is not high enough priority .add("ROW", "SELECT ROW") .add("ROW CONSTRUCTOR", "SELECT ROW(") diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/CursorTests.testTokenizeWithCursor.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/CursorTests.testTokenizeWithCursor.yaml index 53d53883df..539edfbacb 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/CursorTests.testTokenizeWithCursor.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/CursorTests.testTokenizeWithCursor.yaml @@ -23,7 +23,7 @@ output: - image: "\a" - kind: 872 + kind: 874 - description: "END OF TOKEN" input: @@ -32,7 +32,7 @@ output: - image: "HELLO\a" - kind: 868 + kind: 870 - description: "START OF TOKEN" input: @@ -41,10 +41,10 @@ output: - image: "\a" - kind: 872 + kind: 874 - image: "HELLO" - kind: 868 + kind: 870 - description: "Unattached Bell Character" input: @@ -53,7 +53,7 @@ output: - image: "HELLO" - kind: 868 + kind: 870 - image: "\a" - kind: 872 + kind: 874 diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.aggregate.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.aggregate.yaml index af5a4e2d8a..26f0138531 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.aggregate.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.aggregate.yaml @@ -19,7 +19,11 @@ description: "ANY_VALUE AGGREGATE PARTIAL NAME" input: "SELECT ANY_VALU^" output: - completions: [] + completions: + - + insertText: "ANY_VALUE(${1:BOOLEAN})" + kind: "Function" + label: "ANY_VALUE(???)" functionContext: null hasMoreResults: false - @@ -77,7 +81,336 @@ insertText: "EMP.HIREDATE" kind: "Column" label: "HIREDATE" - functionContext: null + functionContext: + function: + description: "" + functionCategories: [] + name: "ANY_VALUE" + signatures: + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - 
+ description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + missingTypes: + - "BOOLEAN" + - "BYTES" + - "CHARACTERS" + - "FLOAT" + - "DECIMAL" + - "DOUBLE" + - "INT" + - "BIGINT" + - "DATE" + - "TIME" + - "TIMESTAMP" + signaturesMatched: + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + suppliedParameterTypes: + afterCursor: [] + beforeCursor: [] hasMoreResults: true - description: "ANY_VALUE AGGREGATE WITH ALL" @@ -134,23 +467,352 @@ insertText: "EMP.HIREDATE" kind: "Column" label: "HIREDATE" - functionContext: null - hasMoreResults: true - - - description: "ANY_VALUE AGGREGATE WITH DISTINCT" - input: "SELECT ANY_VALUE( DISTINCT ^ FROM EMP" - output: - completions: - - - data: - column: - name: "COMM" - type: "INTEGER" - tableAlias: "EMP" - detail: "column (INTEGER) in EMP" - insertText: "EMP.COMM" - kind: "Column" - label: "COMM" + functionContext: + function: + description: "" + functionCategories: [] + 
name: "ANY_VALUE" + signatures: + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + missingTypes: + - "BOOLEAN" + - "BYTES" + - "CHARACTERS" + - "FLOAT" + - "DECIMAL" + - "DOUBLE" + - "INT" + - "BIGINT" + - "DATE" + - "TIME" + - "TIMESTAMP" + signaturesMatched: + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + 
description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + suppliedParameterTypes: + afterCursor: [] + beforeCursor: [] + hasMoreResults: true + - + description: "ANY_VALUE AGGREGATE WITH DISTINCT" + input: "SELECT ANY_VALUE( DISTINCT ^ FROM EMP" + output: + completions: + - + data: + column: + name: "COMM" + type: "INTEGER" + tableAlias: "EMP" + detail: "column (INTEGER) in EMP" + insertText: "EMP.COMM" + kind: "Column" + label: "COMM" - data: column: @@ -191,7 +853,336 @@ insertText: "EMP.HIREDATE" kind: "Column" label: "HIREDATE" - functionContext: null + functionContext: + function: + description: "" + functionCategories: [] + name: "ANY_VALUE" + signatures: + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + missingTypes: + - "BOOLEAN" + - "BYTES" + - "CHARACTERS" + - "FLOAT" + - "DECIMAL" + - "DOUBLE" + - "INT" + - "BIGINT" + - "DATE" + - "TIME" + - "TIMESTAMP" + signaturesMatched: + - + description: "" + parameters: + - + description: "" + 
format: "" + kind: "REGULAR" + name: "" + type: "BOOLEAN" + returnType: "BOOLEAN" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BYTES" + returnType: "BYTES" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "CHARACTERS" + returnType: "CHARACTERS" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "FLOAT" + returnType: "FLOAT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DECIMAL" + returnType: "DECIMAL" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DOUBLE" + returnType: "DOUBLE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "INT" + returnType: "INT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "BIGINT" + returnType: "BIGINT" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "DATE" + returnType: "DATE" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIME" + returnType: "TIME" + sampleCodes: + - + call: "" + result: "" + - + description: "" + parameters: + - + description: "" + format: "" + kind: "REGULAR" + name: "" + type: "TIMESTAMP" + returnType: "TIMESTAMP" + sampleCodes: + - + call: "" + result: "" + suppliedParameterTypes: + afterCursor: [] + beforeCursor: [] hasMoreResults: true - description: "ANY_VALUE AGGREGATE WITH DISTINCT VALUE" @@ -5007,9 +5998,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Returns values concatenated into a string, delimited by separator (default ‘,’)" @@ -5125,9 +6116,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Returns values concatenated into a string, delimited by separator (default ‘,’)" @@ -5243,9 +6234,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Returns values concatenated into a string, delimited by separator (default ‘,’)" diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.commaSeparated.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.commaSeparated.yaml index e57bfe64e4..b9a7da7065 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.commaSeparated.yaml +++ 
b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.commaSeparated.yaml @@ -267,9 +267,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Removes all occurrences of a specified subSTRING and replaces them with another STRING." @@ -388,9 +388,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Removes all occurrences of a specified subSTRING and replaces them with another STRING." @@ -488,9 +488,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Removes all occurrences of a specified subSTRING and replaces them with another STRING." diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.snippetScenarios.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.snippetScenarios.yaml index d2843d45f5..6c31162879 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.snippetScenarios.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.snippetScenarios.yaml @@ -49,9 +49,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Returns the portion of the STRING from the specified base expression starting at the specified characters." diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.substring.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.substring.yaml index d3ba83d536..6eef600b90 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.substring.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/FunctionCompletionTests.substring.yaml @@ -75,9 +75,9 @@ kind: "Function" label: "AES_ENCRYPT(???, ???)" - - insertText: "BIN(${1:INT})" + insertText: "ANY_VALUE(${1:BOOLEAN})" kind: "Function" - label: "BIN(???)" + label: "ANY_VALUE(???)" functionContext: function: description: "Returns the portion of the STRING from the specified base expression starting at the specified characters." diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/OptimizeStatementCompletionTests.tests.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/OptimizeStatementCompletionTests.tests.yaml index bdac369c3d..924d7cec8b 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/OptimizeStatementCompletionTests.tests.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/OptimizeStatementCompletionTests.tests.yaml @@ -86,19 +86,109 @@ kind: "Keyword" label: "." 
- - kind: "Keyword" - label: "(" + kind: "Keyword" + label: "(" - kind: "Keyword" label: "USING" - kind: "Keyword" - label: ";" + label: "FOR" - - kind: "Keyword" - label: "REWRITE" + kind: "Keyword" + label: ";" + functionContext: null + hasMoreResults: true + - + description: "OPTIMIZE + TABLE + TABLE NAME + FOR PARTITIONS" + input: "OPTIMIZE TABLE EMP FOR PARTITIONS ^" + output: + completions: + - + data: + column: + name: "COMM" + type: "INTEGER" + tableAlias: "EMP" + detail: "column (INTEGER) in EMP" + insertText: "EMP.COMM" + kind: "Column" + label: "COMM" + - + data: + column: + name: "DEPTNO" + type: "INTEGER" + tableAlias: "EMP" + detail: "column (INTEGER) in EMP" + insertText: "EMP.DEPTNO" + kind: "Column" + label: "DEPTNO" + - + data: + column: + name: "EMPNO" + type: "INTEGER" + tableAlias: "EMP" + detail: "column (INTEGER) in EMP" + insertText: "EMP.EMPNO" + kind: "Column" + label: "EMPNO" + - + data: + column: + name: "ENAME" + type: "VARCHAR" + tableAlias: "EMP" + detail: "column (VARCHAR) in EMP" + insertText: "EMP.ENAME" + kind: "Column" + label: "ENAME" + - + data: + column: + name: "HIREDATE" + type: "TIMESTAMP" + tableAlias: "EMP" + detail: "column (TIMESTAMP) in EMP" + insertText: "EMP.HIREDATE" + kind: "Column" + label: "HIREDATE" functionContext: null hasMoreResults: true + - + description: "OPTIMIZE + TABLE + TABLE NAME + FOR PARTITIONS + BOOLEAN EXPRESSION" + input: "OPTIMIZE TABLE EMP FOR PARTITIONS NAME != 'Brandon' ^" + output: + completions: + - + kind: "Keyword" + label: "!=" + - + kind: "Keyword" + label: "%" + - + kind: "Keyword" + label: "(" + - + kind: "Keyword" + label: "*" + - + kind: "Keyword" + label: "+" + functionContext: null + hasMoreResults: true + - + description: "OPTIMIZE + TABLE + TABLE NAME + REWRITE" + input: "OPTIMIZE TABLE EMP REWRITE ^" + output: + completions: + - kind: "Keyword" + label: "DATA" + - kind: "Keyword" + label: "MANIFESTS" + functionContext: null + hasMoreResults: false - description: "OPTIMIZE + TABLE + TABLE NAME + PAREN" input: "OPTIMIZE TABLE EMP (^" diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.alterTable.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.alterTable.yaml index 57bbb2de27..90bbf456fc 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.alterTable.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.alterTable.yaml @@ -25,6 +25,7 @@ - "ALTER TABLE mytable " - "ALTER TABLE mytable . 
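Reviewer note: the OptimizeStatementCompletionTests baseline now exercises two branches added to the OPTIMIZE grammar: `FOR PARTITIONS <boolean expression>` (which completes columns first, then operators) and `REWRITE` followed by exactly `DATA` or `MANIFESTS` (note `hasMoreResults: false` there, since that keyword set is closed). As a hedged illustration in the diff's own test style, further cases might look like the following; it assumes the same executeTest harness and MultiLineString helper used throughout, and the accepted syntax is inferred from these completions, not from Dremio docs:

// Sketch in the style of the surrounding tests, not part of this change.
GoldenFileTestBuilder.create(StatementParserTests::executeTest)
    .add("OPTIMIZE + REWRITE DATA",
        MultiLineString.create("OPTIMIZE TABLE EMP REWRITE DATA^"))
    .add("OPTIMIZE + FOR PARTITIONS + PREDICATE",
        MultiLineString.create("OPTIMIZE TABLE EMP FOR PARTITIONS DEPTNO = 10 ^"))
    .runTests();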
" - "ALTER TABLE mytable SET " + - "ALTER TABLE mytable AT " - "ALTER TABLE mytable ALTER " - "ALTER TABLE mytable CHANGE " - "ALTER TABLE mytable CREATE " @@ -36,6 +37,7 @@ - "ALTER TABLE mytable REFRESH " - "ALTER TABLE mytable RESET " - "ALTER TABLE mytable ROUTE " + - "ALTER TABLE mytable UNSET " - "ALTER TABLE mytable [ " pathsWithIdentifierCompletion: [] - @@ -128,7 +130,6 @@ - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE NOT " @@ -285,7 +286,6 @@ - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable ALTER myIdentifier myIdentifier DOUBLE NOT " @@ -449,7 +449,6 @@ - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE NOT " @@ -606,7 +605,6 @@ - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable CHANGE myIdentifier myIdentifier DOUBLE NOT " @@ -869,7 +867,6 @@ - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE NOT " @@ -1026,7 +1023,6 @@ - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DECIMAL NOT " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DECIMAL NULL " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE " - - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE ( " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE ARRAY " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE MULTISET " - "ALTER TABLE mytable MODIFY myIdentifier myIdentifier DOUBLE NOT " @@ -1265,6 +1261,8 @@ - "ALTER TABLE mytable SET myIdentifier . * = " - "ALTER TABLE mytable SET myIdentifier . 
* [ " - "ALTER TABLE mytable SET myIdentifier [ " + - "ALTER TABLE mytable SET TBLPROPERTIES " + - "ALTER TABLE mytable SET TBLPROPERTIES ( " pathsWithIdentifierCompletion: - "ALTER TABLE mytable SET myIdentifier " - "ALTER TABLE mytable SET myIdentifier = " diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.topLevelStatements.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.topLevelStatements.yaml index f78044adae..952371d544 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.topLevelStatements.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/PrefixedTokensSearchTests.topLevelStatements.yaml @@ -244,6 +244,13 @@ - "CREATE BRANCH myIdentifier AT " - "CREATE BRANCH IF " - "CREATE BRANCH IF NOT " + - "CREATE FOLDER " + - "CREATE FOLDER myIdentifier " + - "CREATE FOLDER myIdentifier . " + - "CREATE FOLDER myIdentifier AT " + - "CREATE FOLDER myIdentifier [ " + - "CREATE FOLDER IF " + - "CREATE FOLDER IF NOT " - "CREATE FUNCTION " - "CREATE FUNCTION myIdentifier " - "CREATE FUNCTION myIdentifier . " @@ -267,6 +274,7 @@ - "CREATE TABLE myIdentifier ROW " - "CREATE TABLE myIdentifier STORE " - "CREATE TABLE myIdentifier STRIPED " + - "CREATE TABLE myIdentifier TBLPROPERTIES " - "CREATE TABLE myIdentifier WITH " - "CREATE TABLE myIdentifier [ " - "CREATE TABLE IF " @@ -295,6 +303,10 @@ - "CREATE BRANCH myIdentifier " - "CREATE BRANCH myIdentifier IN " - "CREATE BRANCH myIdentifier AT " + - "CREATE FOLDER myIdentifier " + - "CREATE FOLDER myIdentifier . " + - "CREATE FOLDER myIdentifier AT " + - "CREATE FOLDER myIdentifier [ " - "CREATE FUNCTION myIdentifier " - "CREATE FUNCTION myIdentifier . 
" - "CREATE FUNCTION myIdentifier ( " @@ -312,6 +324,7 @@ - "CREATE TABLE myIdentifier ROW " - "CREATE TABLE myIdentifier STORE " - "CREATE TABLE myIdentifier STRIPED " + - "CREATE TABLE myIdentifier TBLPROPERTIES " - "CREATE TABLE myIdentifier WITH " - "CREATE TABLE myIdentifier [ " - "CREATE TAG myIdentifier " @@ -1144,7 +1157,6 @@ - "EXPLAIN PLAN FOR TRUE " - "EXPLAIN PLAN FOR UNKNOWN " - "EXPLAIN PLAN FOR UPDATE " - - "EXPLAIN PLAN FOR UPSERT " - "EXPLAIN PLAN FOR VALUES " - "EXPLAIN PLAN FOR WITH " - "EXPLAIN PLAN INCLUDING " @@ -1182,8 +1194,6 @@ - "GRANT ALL , CREATE " - "GRANT ALL , CREATE CATALOG " - "GRANT ALL , CREATE CLOUD " - - "GRANT ALL , CREATE EXTERNAL " - - "GRANT ALL , CREATE OAUTH " - "GRANT ALL , CREATE PROJECT " - "GRANT ALL , CREATE ROLE " - "GRANT ALL , CREATE SOURCE " @@ -1286,8 +1296,6 @@ - "GRANT ALTER , CREATE " - "GRANT ALTER , CREATE CATALOG " - "GRANT ALTER , CREATE CLOUD " - - "GRANT ALTER , CREATE EXTERNAL " - - "GRANT ALTER , CREATE OAUTH " - "GRANT ALTER , CREATE PROJECT " - "GRANT ALTER , CREATE ROLE " - "GRANT ALTER , CREATE SOURCE " @@ -1542,13 +1550,6 @@ - "GRANT CREATE CLOUD ON TABLE " - "GRANT CREATE CLOUD ON VDS " - "GRANT CREATE CLOUD ON VIEW " - - "GRANT CREATE EXTERNAL " - - "GRANT CREATE EXTERNAL TOKENS " - - "GRANT CREATE EXTERNAL TOKENS PROVIDER " - - "GRANT CREATE OAUTH " - - "GRANT CREATE OAUTH APPLICATION " - - "GRANT CREATE OAUTH APPLICATION , " - - "GRANT CREATE OAUTH APPLICATION ON " - "GRANT CREATE PROJECT " - "GRANT CREATE PROJECT , " - "GRANT CREATE PROJECT , ALL " @@ -1727,8 +1728,6 @@ - "GRANT DELETE , CREATE " - "GRANT DELETE , CREATE CATALOG " - "GRANT DELETE , CREATE CLOUD " - - "GRANT DELETE , CREATE EXTERNAL " - - "GRANT DELETE , CREATE OAUTH " - "GRANT DELETE , CREATE PROJECT " - "GRANT DELETE , CREATE ROLE " - "GRANT DELETE , CREATE SOURCE " @@ -1831,8 +1830,6 @@ - "GRANT DROP , CREATE " - "GRANT DROP , CREATE CATALOG " - "GRANT DROP , CREATE CLOUD " - - "GRANT DROP , CREATE EXTERNAL " - - "GRANT DROP , CREATE OAUTH " - "GRANT DROP , CREATE PROJECT " - "GRANT DROP , CREATE ROLE " - "GRANT DROP , CREATE SOURCE " @@ -1935,8 +1932,6 @@ - "GRANT EXECUTE , CREATE " - "GRANT EXECUTE , CREATE CATALOG " - "GRANT EXECUTE , CREATE CLOUD " - - "GRANT EXECUTE , CREATE EXTERNAL " - - "GRANT EXECUTE , CREATE OAUTH " - "GRANT EXECUTE , CREATE PROJECT " - "GRANT EXECUTE , CREATE ROLE " - "GRANT EXECUTE , CREATE SOURCE " @@ -2081,8 +2076,6 @@ - "GRANT INSERT , CREATE " - "GRANT INSERT , CREATE CATALOG " - "GRANT INSERT , CREATE CLOUD " - - "GRANT INSERT , CREATE EXTERNAL " - - "GRANT INSERT , CREATE OAUTH " - "GRANT INSERT , CREATE PROJECT " - "GRANT INSERT , CREATE ROLE " - "GRANT INSERT , CREATE SOURCE " @@ -2227,8 +2220,6 @@ - "GRANT MODIFY , CREATE " - "GRANT MODIFY , CREATE CATALOG " - "GRANT MODIFY , CREATE CLOUD " - - "GRANT MODIFY , CREATE EXTERNAL " - - "GRANT MODIFY , CREATE OAUTH " - "GRANT MODIFY , CREATE PROJECT " - "GRANT MODIFY , CREATE ROLE " - "GRANT MODIFY , CREATE SOURCE " @@ -2331,8 +2322,6 @@ - "GRANT MONITOR , CREATE " - "GRANT MONITOR , CREATE CATALOG " - "GRANT MONITOR , CREATE CLOUD " - - "GRANT MONITOR , CREATE EXTERNAL " - - "GRANT MONITOR , CREATE OAUTH " - "GRANT MONITOR , CREATE PROJECT " - "GRANT MONITOR , CREATE ROLE " - "GRANT MONITOR , CREATE SOURCE " @@ -2435,8 +2424,6 @@ - "GRANT OPERATE , CREATE " - "GRANT OPERATE , CREATE CATALOG " - "GRANT OPERATE , CREATE CLOUD " - - "GRANT OPERATE , CREATE EXTERNAL " - - "GRANT OPERATE , CREATE OAUTH " - "GRANT OPERATE , CREATE PROJECT " - "GRANT OPERATE , CREATE ROLE 
" - "GRANT OPERATE , CREATE SOURCE " @@ -2581,8 +2568,6 @@ - "GRANT SELECT , CREATE " - "GRANT SELECT , CREATE CATALOG " - "GRANT SELECT , CREATE CLOUD " - - "GRANT SELECT , CREATE EXTERNAL " - - "GRANT SELECT , CREATE OAUTH " - "GRANT SELECT , CREATE PROJECT " - "GRANT SELECT , CREATE ROLE " - "GRANT SELECT , CREATE SOURCE " @@ -2685,8 +2670,6 @@ - "GRANT UPDATE , CREATE " - "GRANT UPDATE , CREATE CATALOG " - "GRANT UPDATE , CREATE CLOUD " - - "GRANT UPDATE , CREATE EXTERNAL " - - "GRANT UPDATE , CREATE OAUTH " - "GRANT UPDATE , CREATE PROJECT " - "GRANT UPDATE , CREATE ROLE " - "GRANT UPDATE , CREATE SOURCE " @@ -2789,8 +2772,6 @@ - "GRANT UPLOAD , CREATE " - "GRANT UPLOAD , CREATE CATALOG " - "GRANT UPLOAD , CREATE CLOUD " - - "GRANT UPLOAD , CREATE EXTERNAL " - - "GRANT UPLOAD , CREATE OAUTH " - "GRANT UPLOAD , CREATE PROJECT " - "GRANT UPLOAD , CREATE ROLE " - "GRANT UPLOAD , CREATE SOURCE " @@ -2893,8 +2874,6 @@ - "GRANT USAGE , CREATE " - "GRANT USAGE , CREATE CATALOG " - "GRANT USAGE , CREATE CLOUD " - - "GRANT USAGE , CREATE EXTERNAL " - - "GRANT USAGE , CREATE OAUTH " - "GRANT USAGE , CREATE PROJECT " - "GRANT USAGE , CREATE ROLE " - "GRANT USAGE , CREATE SOURCE " @@ -2997,8 +2976,6 @@ - "GRANT VIEW , CREATE " - "GRANT VIEW , CREATE CATALOG " - "GRANT VIEW , CREATE CLOUD " - - "GRANT VIEW , CREATE EXTERNAL " - - "GRANT VIEW , CREATE OAUTH " - "GRANT VIEW , CREATE PROJECT " - "GRANT VIEW , CREATE ROLE " - "GRANT VIEW , CREATE SOURCE " @@ -4332,10 +4309,6 @@ - "REVOKE ALL , CREATE CLOUD " - "REVOKE ALL , CREATE CLOUD , " - "REVOKE ALL , CREATE CLOUD ON " - - "REVOKE ALL , CREATE EXTERNAL " - - "REVOKE ALL , CREATE EXTERNAL TOKENS " - - "REVOKE ALL , CREATE OAUTH " - - "REVOKE ALL , CREATE OAUTH APPLICATION " - "REVOKE ALL , CREATE PROJECT " - "REVOKE ALL , CREATE PROJECT , " - "REVOKE ALL , CREATE PROJECT ON " @@ -5064,10 +5037,6 @@ - "REVOKE ALTER , CREATE CLOUD " - "REVOKE ALTER , CREATE CLOUD , " - "REVOKE ALTER , CREATE CLOUD ON " - - "REVOKE ALTER , CREATE EXTERNAL " - - "REVOKE ALTER , CREATE EXTERNAL TOKENS " - - "REVOKE ALTER , CREATE OAUTH " - - "REVOKE ALTER , CREATE OAUTH APPLICATION " - "REVOKE ALTER , CREATE PROJECT " - "REVOKE ALTER , CREATE PROJECT , " - "REVOKE ALTER , CREATE PROJECT ON " @@ -5712,8 +5681,6 @@ - "REVOKE ALTER REFLECTION , CREATE " - "REVOKE ALTER REFLECTION , CREATE CATALOG " - "REVOKE ALTER REFLECTION , CREATE CLOUD " - - "REVOKE ALTER REFLECTION , CREATE EXTERNAL " - - "REVOKE ALTER REFLECTION , CREATE OAUTH " - "REVOKE ALTER REFLECTION , CREATE PROJECT " - "REVOKE ALTER REFLECTION , CREATE ROLE " - "REVOKE ALTER REFLECTION , CREATE SOURCE " @@ -5817,8 +5784,6 @@ - "REVOKE CONFIGURE SECURITY , CREATE " - "REVOKE CONFIGURE SECURITY , CREATE CATALOG " - "REVOKE CONFIGURE SECURITY , CREATE CLOUD " - - "REVOKE CONFIGURE SECURITY , CREATE EXTERNAL " - - "REVOKE CONFIGURE SECURITY , CREATE OAUTH " - "REVOKE CONFIGURE SECURITY , CREATE PROJECT " - "REVOKE CONFIGURE SECURITY , CREATE ROLE " - "REVOKE CONFIGURE SECURITY , CREATE SOURCE " @@ -5922,8 +5887,6 @@ - "REVOKE CREATE CATALOG , CREATE " - "REVOKE CREATE CATALOG , CREATE CATALOG " - "REVOKE CREATE CATALOG , CREATE CLOUD " - - "REVOKE CREATE CATALOG , CREATE EXTERNAL " - - "REVOKE CREATE CATALOG , CREATE OAUTH " - "REVOKE CREATE CATALOG , CREATE PROJECT " - "REVOKE CREATE CATALOG , CREATE ROLE " - "REVOKE CREATE CATALOG , CREATE SOURCE " @@ -6026,8 +5989,6 @@ - "REVOKE CREATE CLOUD , CREATE " - "REVOKE CREATE CLOUD , CREATE CATALOG " - "REVOKE CREATE CLOUD , CREATE CLOUD " - - "REVOKE CREATE CLOUD , 
CREATE EXTERNAL " - - "REVOKE CREATE CLOUD , CREATE OAUTH " - "REVOKE CREATE CLOUD , CREATE PROJECT " - "REVOKE CREATE CLOUD , CREATE ROLE " - "REVOKE CREATE CLOUD , CREATE SOURCE " @@ -6116,53 +6077,6 @@ - "REVOKE CREATE CLOUD ON VDS myIdentifier " - "REVOKE CREATE CLOUD ON VIEW " - "REVOKE CREATE CLOUD ON VIEW myIdentifier " - - "REVOKE CREATE EXTERNAL " - - "REVOKE CREATE EXTERNAL TOKENS " - - "REVOKE CREATE EXTERNAL TOKENS PROVIDER " - - "REVOKE CREATE EXTERNAL TOKENS PROVIDER , " - - "REVOKE CREATE EXTERNAL TOKENS PROVIDER ON " - - "REVOKE CREATE OAUTH " - - "REVOKE CREATE OAUTH APPLICATION " - - "REVOKE CREATE OAUTH APPLICATION , " - - "REVOKE CREATE OAUTH APPLICATION , ALL " - - "REVOKE CREATE OAUTH APPLICATION , ALTER " - - "REVOKE CREATE OAUTH APPLICATION , CONFIGURE " - - "REVOKE CREATE OAUTH APPLICATION , CREATE " - - "REVOKE CREATE OAUTH APPLICATION , DELETE " - - "REVOKE CREATE OAUTH APPLICATION , DROP " - - "REVOKE CREATE OAUTH APPLICATION , EXECUTE " - - "REVOKE CREATE OAUTH APPLICATION , EXTERNAL " - - "REVOKE CREATE OAUTH APPLICATION , INSERT " - - "REVOKE CREATE OAUTH APPLICATION , MANAGE " - - "REVOKE CREATE OAUTH APPLICATION , MODIFY " - - "REVOKE CREATE OAUTH APPLICATION , MONITOR " - - "REVOKE CREATE OAUTH APPLICATION , OPERATE " - - "REVOKE CREATE OAUTH APPLICATION , OWNERSHIP " - - "REVOKE CREATE OAUTH APPLICATION , SELECT " - - "REVOKE CREATE OAUTH APPLICATION , UPDATE " - - "REVOKE CREATE OAUTH APPLICATION , UPLOAD " - - "REVOKE CREATE OAUTH APPLICATION , USAGE " - - "REVOKE CREATE OAUTH APPLICATION , VIEW " - - "REVOKE CREATE OAUTH APPLICATION ON " - - "REVOKE CREATE OAUTH APPLICATION ON ALL " - - "REVOKE CREATE OAUTH APPLICATION ON CLOUD " - - "REVOKE CREATE OAUTH APPLICATION ON ENGINE " - - "REVOKE CREATE OAUTH APPLICATION ON EXTERNAL " - - "REVOKE CREATE OAUTH APPLICATION ON FOLDER " - - "REVOKE CREATE OAUTH APPLICATION ON FUNCTION " - - "REVOKE CREATE OAUTH APPLICATION ON IDENTITY " - - "REVOKE CREATE OAUTH APPLICATION ON OAUTH " - - "REVOKE CREATE OAUTH APPLICATION ON ORG " - - "REVOKE CREATE OAUTH APPLICATION ON PDS " - - "REVOKE CREATE OAUTH APPLICATION ON PROJECT " - - "REVOKE CREATE OAUTH APPLICATION ON SCHEMA " - - "REVOKE CREATE OAUTH APPLICATION ON SCRIPT " - - "REVOKE CREATE OAUTH APPLICATION ON SOURCE " - - "REVOKE CREATE OAUTH APPLICATION ON SPACE " - - "REVOKE CREATE OAUTH APPLICATION ON SYSTEM " - - "REVOKE CREATE OAUTH APPLICATION ON TABLE " - - "REVOKE CREATE OAUTH APPLICATION ON VDS " - - "REVOKE CREATE OAUTH APPLICATION ON VIEW " - "REVOKE CREATE PROJECT " - "REVOKE CREATE PROJECT , " - "REVOKE CREATE PROJECT , ALL " @@ -6177,8 +6091,6 @@ - "REVOKE CREATE PROJECT , CREATE " - "REVOKE CREATE PROJECT , CREATE CATALOG " - "REVOKE CREATE PROJECT , CREATE CLOUD " - - "REVOKE CREATE PROJECT , CREATE EXTERNAL " - - "REVOKE CREATE PROJECT , CREATE OAUTH " - "REVOKE CREATE PROJECT , CREATE PROJECT " - "REVOKE CREATE PROJECT , CREATE ROLE " - "REVOKE CREATE PROJECT , CREATE SOURCE " @@ -6281,8 +6193,6 @@ - "REVOKE CREATE ROLE , CREATE " - "REVOKE CREATE ROLE , CREATE CATALOG " - "REVOKE CREATE ROLE , CREATE CLOUD " - - "REVOKE CREATE ROLE , CREATE EXTERNAL " - - "REVOKE CREATE ROLE , CREATE OAUTH " - "REVOKE CREATE ROLE , CREATE PROJECT " - "REVOKE CREATE ROLE , CREATE ROLE " - "REVOKE CREATE ROLE , CREATE SOURCE " @@ -6385,8 +6295,6 @@ - "REVOKE CREATE SOURCE , CREATE " - "REVOKE CREATE SOURCE , CREATE CATALOG " - "REVOKE CREATE SOURCE , CREATE CLOUD " - - "REVOKE CREATE SOURCE , CREATE EXTERNAL " - - "REVOKE CREATE SOURCE , CREATE OAUTH " - "REVOKE 
CREATE SOURCE , CREATE PROJECT " - "REVOKE CREATE SOURCE , CREATE ROLE " - "REVOKE CREATE SOURCE , CREATE SOURCE " @@ -6489,8 +6397,6 @@ - "REVOKE CREATE TABLE , CREATE " - "REVOKE CREATE TABLE , CREATE CATALOG " - "REVOKE CREATE TABLE , CREATE CLOUD " - - "REVOKE CREATE TABLE , CREATE EXTERNAL " - - "REVOKE CREATE TABLE , CREATE OAUTH " - "REVOKE CREATE TABLE , CREATE PROJECT " - "REVOKE CREATE TABLE , CREATE ROLE " - "REVOKE CREATE TABLE , CREATE SOURCE " @@ -6677,10 +6583,6 @@ - "REVOKE DELETE , CREATE CLOUD " - "REVOKE DELETE , CREATE CLOUD , " - "REVOKE DELETE , CREATE CLOUD ON " - - "REVOKE DELETE , CREATE EXTERNAL " - - "REVOKE DELETE , CREATE EXTERNAL TOKENS " - - "REVOKE DELETE , CREATE OAUTH " - - "REVOKE DELETE , CREATE OAUTH APPLICATION " - "REVOKE DELETE , CREATE PROJECT " - "REVOKE DELETE , CREATE PROJECT , " - "REVOKE DELETE , CREATE PROJECT ON " @@ -7409,10 +7311,6 @@ - "REVOKE DROP , CREATE CLOUD " - "REVOKE DROP , CREATE CLOUD , " - "REVOKE DROP , CREATE CLOUD ON " - - "REVOKE DROP , CREATE EXTERNAL " - - "REVOKE DROP , CREATE EXTERNAL TOKENS " - - "REVOKE DROP , CREATE OAUTH " - - "REVOKE DROP , CREATE OAUTH APPLICATION " - "REVOKE DROP , CREATE PROJECT " - "REVOKE DROP , CREATE PROJECT , " - "REVOKE DROP , CREATE PROJECT ON " @@ -8141,10 +8039,6 @@ - "REVOKE EXECUTE , CREATE CLOUD " - "REVOKE EXECUTE , CREATE CLOUD , " - "REVOKE EXECUTE , CREATE CLOUD ON " - - "REVOKE EXECUTE , CREATE EXTERNAL " - - "REVOKE EXECUTE , CREATE EXTERNAL TOKENS " - - "REVOKE EXECUTE , CREATE OAUTH " - - "REVOKE EXECUTE , CREATE OAUTH APPLICATION " - "REVOKE EXECUTE , CREATE PROJECT " - "REVOKE EXECUTE , CREATE PROJECT , " - "REVOKE EXECUTE , CREATE PROJECT ON " @@ -8790,8 +8684,6 @@ - "REVOKE EXTERNAL QUERY , CREATE " - "REVOKE EXTERNAL QUERY , CREATE CATALOG " - "REVOKE EXTERNAL QUERY , CREATE CLOUD " - - "REVOKE EXTERNAL QUERY , CREATE EXTERNAL " - - "REVOKE EXTERNAL QUERY , CREATE OAUTH " - "REVOKE EXTERNAL QUERY , CREATE PROJECT " - "REVOKE EXTERNAL QUERY , CREATE ROLE " - "REVOKE EXTERNAL QUERY , CREATE SOURCE " @@ -8978,10 +8870,6 @@ - "REVOKE INSERT , CREATE CLOUD " - "REVOKE INSERT , CREATE CLOUD , " - "REVOKE INSERT , CREATE CLOUD ON " - - "REVOKE INSERT , CREATE EXTERNAL " - - "REVOKE INSERT , CREATE EXTERNAL TOKENS " - - "REVOKE INSERT , CREATE OAUTH " - - "REVOKE INSERT , CREATE OAUTH APPLICATION " - "REVOKE INSERT , CREATE PROJECT " - "REVOKE INSERT , CREATE PROJECT , " - "REVOKE INSERT , CREATE PROJECT ON " @@ -9627,8 +9515,6 @@ - "REVOKE MANAGE GRANTS , CREATE " - "REVOKE MANAGE GRANTS , CREATE CATALOG " - "REVOKE MANAGE GRANTS , CREATE CLOUD " - - "REVOKE MANAGE GRANTS , CREATE EXTERNAL " - - "REVOKE MANAGE GRANTS , CREATE OAUTH " - "REVOKE MANAGE GRANTS , CREATE PROJECT " - "REVOKE MANAGE GRANTS , CREATE ROLE " - "REVOKE MANAGE GRANTS , CREATE SOURCE " @@ -9815,10 +9701,6 @@ - "REVOKE MODIFY , CREATE CLOUD " - "REVOKE MODIFY , CREATE CLOUD , " - "REVOKE MODIFY , CREATE CLOUD ON " - - "REVOKE MODIFY , CREATE EXTERNAL " - - "REVOKE MODIFY , CREATE EXTERNAL TOKENS " - - "REVOKE MODIFY , CREATE OAUTH " - - "REVOKE MODIFY , CREATE OAUTH APPLICATION " - "REVOKE MODIFY , CREATE PROJECT " - "REVOKE MODIFY , CREATE PROJECT , " - "REVOKE MODIFY , CREATE PROJECT ON " @@ -10547,10 +10429,6 @@ - "REVOKE MONITOR , CREATE CLOUD " - "REVOKE MONITOR , CREATE CLOUD , " - "REVOKE MONITOR , CREATE CLOUD ON " - - "REVOKE MONITOR , CREATE EXTERNAL " - - "REVOKE MONITOR , CREATE EXTERNAL TOKENS " - - "REVOKE MONITOR , CREATE OAUTH " - - "REVOKE MONITOR , CREATE OAUTH APPLICATION " - "REVOKE 
MONITOR , CREATE PROJECT " - "REVOKE MONITOR , CREATE PROJECT , " - "REVOKE MONITOR , CREATE PROJECT ON " @@ -11279,10 +11157,6 @@ - "REVOKE OPERATE , CREATE CLOUD " - "REVOKE OPERATE , CREATE CLOUD , " - "REVOKE OPERATE , CREATE CLOUD ON " - - "REVOKE OPERATE , CREATE EXTERNAL " - - "REVOKE OPERATE , CREATE EXTERNAL TOKENS " - - "REVOKE OPERATE , CREATE OAUTH " - - "REVOKE OPERATE , CREATE OAUTH APPLICATION " - "REVOKE OPERATE , CREATE PROJECT " - "REVOKE OPERATE , CREATE PROJECT , " - "REVOKE OPERATE , CREATE PROJECT ON " @@ -12011,10 +11885,6 @@ - "REVOKE OWNERSHIP , CREATE CLOUD " - "REVOKE OWNERSHIP , CREATE CLOUD , " - "REVOKE OWNERSHIP , CREATE CLOUD ON " - - "REVOKE OWNERSHIP , CREATE EXTERNAL " - - "REVOKE OWNERSHIP , CREATE EXTERNAL TOKENS " - - "REVOKE OWNERSHIP , CREATE OAUTH " - - "REVOKE OWNERSHIP , CREATE OAUTH APPLICATION " - "REVOKE OWNERSHIP , CREATE PROJECT " - "REVOKE OWNERSHIP , CREATE PROJECT , " - "REVOKE OWNERSHIP , CREATE PROJECT ON " @@ -12748,10 +12618,6 @@ - "REVOKE SELECT , CREATE CLOUD " - "REVOKE SELECT , CREATE CLOUD , " - "REVOKE SELECT , CREATE CLOUD ON " - - "REVOKE SELECT , CREATE EXTERNAL " - - "REVOKE SELECT , CREATE EXTERNAL TOKENS " - - "REVOKE SELECT , CREATE OAUTH " - - "REVOKE SELECT , CREATE OAUTH APPLICATION " - "REVOKE SELECT , CREATE PROJECT " - "REVOKE SELECT , CREATE PROJECT , " - "REVOKE SELECT , CREATE PROJECT ON " @@ -13480,10 +13346,6 @@ - "REVOKE UPDATE , CREATE CLOUD " - "REVOKE UPDATE , CREATE CLOUD , " - "REVOKE UPDATE , CREATE CLOUD ON " - - "REVOKE UPDATE , CREATE EXTERNAL " - - "REVOKE UPDATE , CREATE EXTERNAL TOKENS " - - "REVOKE UPDATE , CREATE OAUTH " - - "REVOKE UPDATE , CREATE OAUTH APPLICATION " - "REVOKE UPDATE , CREATE PROJECT " - "REVOKE UPDATE , CREATE PROJECT , " - "REVOKE UPDATE , CREATE PROJECT ON " @@ -14212,10 +14074,6 @@ - "REVOKE UPLOAD , CREATE CLOUD " - "REVOKE UPLOAD , CREATE CLOUD , " - "REVOKE UPLOAD , CREATE CLOUD ON " - - "REVOKE UPLOAD , CREATE EXTERNAL " - - "REVOKE UPLOAD , CREATE EXTERNAL TOKENS " - - "REVOKE UPLOAD , CREATE OAUTH " - - "REVOKE UPLOAD , CREATE OAUTH APPLICATION " - "REVOKE UPLOAD , CREATE PROJECT " - "REVOKE UPLOAD , CREATE PROJECT , " - "REVOKE UPLOAD , CREATE PROJECT ON " @@ -14944,10 +14802,6 @@ - "REVOKE USAGE , CREATE CLOUD " - "REVOKE USAGE , CREATE CLOUD , " - "REVOKE USAGE , CREATE CLOUD ON " - - "REVOKE USAGE , CREATE EXTERNAL " - - "REVOKE USAGE , CREATE EXTERNAL TOKENS " - - "REVOKE USAGE , CREATE OAUTH " - - "REVOKE USAGE , CREATE OAUTH APPLICATION " - "REVOKE USAGE , CREATE PROJECT " - "REVOKE USAGE , CREATE PROJECT , " - "REVOKE USAGE , CREATE PROJECT ON " @@ -15676,10 +15530,6 @@ - "REVOKE VIEW , CREATE CLOUD " - "REVOKE VIEW , CREATE CLOUD , " - "REVOKE VIEW , CREATE CLOUD ON " - - "REVOKE VIEW , CREATE EXTERNAL " - - "REVOKE VIEW , CREATE EXTERNAL TOKENS " - - "REVOKE VIEW , CREATE OAUTH " - - "REVOKE VIEW , CREATE OAUTH APPLICATION " - "REVOKE VIEW , CREATE PROJECT " - "REVOKE VIEW , CREATE PROJECT , " - "REVOKE VIEW , CREATE PROJECT ON " @@ -16366,8 +16216,6 @@ - "REVOKE VIEW REFLECTION , CREATE " - "REVOKE VIEW REFLECTION , CREATE CATALOG " - "REVOKE VIEW REFLECTION , CREATE CLOUD " - - "REVOKE VIEW REFLECTION , CREATE EXTERNAL " - - "REVOKE VIEW REFLECTION , CREATE OAUTH " - "REVOKE VIEW REFLECTION , CREATE PROJECT " - "REVOKE VIEW REFLECTION , CREATE ROLE " - "REVOKE VIEW REFLECTION , CREATE SOURCE " @@ -19370,6 +19218,16 @@ output: paths: - "VACUUM " + - "VACUUM CATALOG " + - "VACUUM CATALOG myIdentifier " + - "VACUUM CATALOG myIdentifier . 
" + - "VACUUM CATALOG myIdentifier . myIdentifier " + - "VACUUM CATALOG myIdentifier . myIdentifier . " + - "VACUUM CATALOG myIdentifier . myIdentifier [ " + - "VACUUM CATALOG myIdentifier . * " + - "VACUUM CATALOG myIdentifier . * . " + - "VACUUM CATALOG myIdentifier . * [ " + - "VACUUM CATALOG myIdentifier [ " - "VACUUM TABLE " - "VACUUM TABLE myIdentifier " - "VACUUM TABLE myIdentifier . " @@ -19387,6 +19245,15 @@ - "VACUUM TABLE myIdentifier EXPIRE SNAPSHOTS RETAIN_LAST " - "VACUUM TABLE myIdentifier [ " pathsWithIdentifierCompletion: + - "VACUUM CATALOG myIdentifier " + - "VACUUM CATALOG myIdentifier . " + - "VACUUM CATALOG myIdentifier . myIdentifier " + - "VACUUM CATALOG myIdentifier . myIdentifier . " + - "VACUUM CATALOG myIdentifier . myIdentifier [ " + - "VACUUM CATALOG myIdentifier . * " + - "VACUUM CATALOG myIdentifier . * . " + - "VACUUM CATALOG myIdentifier . * [ " + - "VACUUM CATALOG myIdentifier [ " - "VACUUM TABLE myIdentifier " - "VACUUM TABLE myIdentifier . " - "VACUUM TABLE myIdentifier . myIdentifier " diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/SelectStatementCompletionTests.joinScenarios.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/SelectStatementCompletionTests.joinScenarios.yaml index ff90a01148..a896d468e7 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/SelectStatementCompletionTests.joinScenarios.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/SelectStatementCompletionTests.joinScenarios.yaml @@ -252,7 +252,10 @@ hasMoreResults: true - description: "MULTIPLE JOINS" - input: "SELECT * FROM EMP JOIN DEPT ON EMP.DEPTNO = ^ JOIN SALGRADE ON SALGRADE.GRADE = DEPT.DEPTNO" + input: + - "SELECT * FROM EMP " + - "JOIN DEPT ON EMP.DEPTNO = ^ " + - "JOIN SALGRADE ON SALGRADE.GRADE = DEPT.DEPTNO" output: completions: - diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.specialSyntaxFunctions.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.specialSyntaxFunctions.yaml index c4d05b88f6..d88474b8c4 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.specialSyntaxFunctions.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.specialSyntaxFunctions.yaml @@ -928,8 +928,8 @@ - "DAY" - "FRAC_SECOND" - "MICROSECOND" + - "NANOSECOND" - "SQL_TSI_DAY" - - "SQL_TSI_FRAC_SECOND" - description: "TIMESTAMPADD" input: "SELECT TIMESTAMPADD(DAY" @@ -1008,8 +1008,8 @@ - "DAY" - "FRAC_SECOND" - "MICROSECOND" + - "NANOSECOND" - "SQL_TSI_DAY" - - "SQL_TSI_FRAC_SECOND" - description: "TIMESTAMPDIFF" input: "SELECT TIMESTAMPDIFF(DAY" diff --git a/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.testDDL.yaml b/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.testDDL.yaml index b049e59115..6ed35ebe2d 100644 --- a/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.testDDL.yaml +++ b/services/autocomplete/src/test/resources/goldenfiles/expected/TokenResolverTests.testDDL.yaml @@ -65,10 +65,10 @@ hasMoreResults: true tokens: - "BRANCH" + - "FOLDER" - "FUNCTION" - "OR" - "ROLE" - - "TABLE" - description: "CREATE TABLE" input: "CREATE TABLE " diff --git a/services/base-rpc/pom.xml b/services/base-rpc/pom.xml index 9428c03749..ac004e63c0 100644 --- a/services/base-rpc/pom.xml +++ b/services/base-rpc/pom.xml @@ -21,7 +21,7 @@ com.dremio.services 
dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-base-rpc Services - Base RPC diff --git a/services/base-rpc/src/main/java/com/dremio/exec/rpc/BasicClient.java b/services/base-rpc/src/main/java/com/dremio/exec/rpc/BasicClient.java index 849eb26020..dd0a256743 100644 --- a/services/base-rpc/src/main/java/com/dremio/exec/rpc/BasicClient.java +++ b/services/base-rpc/src/main/java/com/dremio/exec/rpc/BasicClient.java @@ -190,10 +190,10 @@ RpcFuture send(T rpcType, SEND protobufBody, Class clazz, Byte * @param host server hostname * @param port server port */ + @Override protected void connectAsClient(RpcConnectionHandler connectionHandler, HS handshakeValue, String host, int port) { ConnectionMultiListener cml = new ConnectionMultiListener(connectionHandler, handshakeValue, host, port); - b.connect(host, port) - .addListener(cml.establishmentListener); + b.connect(host, port).addListener(cml.establishmentListener); } public boolean isActive() { @@ -206,11 +206,19 @@ public void setAutoRead(boolean enableAutoRead) { connection.setAutoRead(enableAutoRead); } + private String getConnectionName(R connection) { + if (connection != null) { + return connection.getName(); + } else { + return "connection is null"; + } + } + @Override public void close() { - logger.debug("Closing client"); try { if (connection != null) { + logger.debug("Closing client in sync mode {}", getConnectionName(connection)); connection.getChannel().close().sync(); } } catch (final InterruptedException e) { @@ -331,8 +339,7 @@ void addNegotiator(ChannelFuture connectionFuture) throws Exception { final SslHandler sslHandler = new SslHandler(clientEngine); sslHandler.handshakeFuture() .addListener(future -> { - logger.debug("SSL client state '{}' on connection '{}'", future.isSuccess(), - connectionFuture.channel()); + logger.debug("SSL client state '{}' on connection '{}'", future.isSuccess(), connectionFuture.channel()); if (future.isSuccess()) { logger.trace("Adding handshake negotiator on '{}', after SSL succeeded", connectionFuture.channel()); @@ -358,6 +365,10 @@ void addHandshakeRequester(ChannelFuture connectionFuture) { } void sendHandshake() { + if (logger.isDebugEnabled()) { + logger.debug("sendHandshake - channel active {}", getConnectionName(connection)); + } + Preconditions.checkState(connection != null, "connection is not yet initialized"); // send a handshake on the current thread. This is the only time we will send from within the event thread. @@ -376,15 +387,28 @@ private void sendHandshakeAndRemoveSelf(ChannelHandlerContext ctx) { ctx.channel().pipeline().remove(this); } - @Override // channel may already be active + @Override public void handlerAdded(ChannelHandlerContext ctx) throws Exception { + // channel may already be active if (ctx.channel().isActive()) { + if (logger.isDebugEnabled()) { + logger.debug("sendHandshakeAndRemoveSelf - handler added: L {} -> R {}", + ctx.channel().localAddress(), + ctx.channel().remoteAddress() + ); + } sendHandshakeAndRemoveSelf(ctx); } } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { + if (logger.isDebugEnabled()) { + logger.debug("sendHandshakeAndRemoveSelf - channel active: L {} -> R {}", + ctx.channel().localAddress(), + ctx.channel().remoteAddress() + ); + } sendHandshakeAndRemoveSelf(ctx); super.channelActive(ctx); } @@ -397,21 +421,25 @@ private class HandshakeSendListener implements RpcOutcomeListener
{ @Override public void failed(RpcException ex) { - logger.debug("Failure while initiating handshake", ex); + logger.debug("Failure while initiating handshake for connection {}", getConnectionName(connection), ex); connectionHandler.connectionFailed(FailureType.HANDSHAKE_COMMUNICATION, ex); } @Override public void success(HR value, ByteBuf buffer) { - logger.debug("Handshake received on {}", connection); + if (logger.isDebugEnabled()) { + logger.debug("Handshake received on {}", getConnectionName(connection)); + } try { validateHandshake(value); finalizeConnection(value, connection); connectionHandler.connectionSucceeded(connection); - logger.trace("Handshake completed successfully on {}", connection); + if (logger.isTraceEnabled()) { + logger.trace("Handshake completed successfully on {}", getConnectionName(connection)); + } } catch (RpcException ex) { - logger.debug("Failure while validating handshake", ex); + logger.info("Failure while validating handshake for connection {}", getConnectionName(connection), ex); connectionHandler.connectionFailed(FailureType.HANDSHAKE_VALIDATION, ex); } } diff --git a/services/base-rpc/src/main/java/com/dremio/exec/rpc/ReconnectingConnection.java b/services/base-rpc/src/main/java/com/dremio/exec/rpc/ReconnectingConnection.java index 5756dd2eb3..5b8079c9d1 100644 --- a/services/base-rpc/src/main/java/com/dremio/exec/rpc/ReconnectingConnection.java +++ b/services/base-rpc/src/main/java/com/dremio/exec/rpc/ReconnectingConnection.java @@ -65,6 +65,7 @@ public abstract class ReconnectingConnection connectionHolder = new AtomicReference(); private final AtomicBoolean closed = new AtomicBoolean(false); + private final AtomicReference clientConnectedHostname = new AtomicReference(null); private final String host; private final int port; private final OUTBOUND_HANDSHAKE handshake; @@ -94,6 +95,8 @@ public ReconnectingConnection(String name, OUTBOUND_HANDSHAKE handshake, String this.timeBetweenAttemptMS = timeBetweenAttemptMS; } + protected abstract String getLocalAddress(); + protected abstract AbstractClient getNewClient() throws RpcException; /** @@ -130,6 +133,14 @@ public ChannelFutureListener getHandler( } } + private String getConnectionName(CONNECTION_TYPE connection) { + if (connection != null) { + return connection.getName(); + } else { + return "connection is null"; + } + } + /** * Listens for connection closes and clears connection holder. */ @@ -160,8 +171,24 @@ public CloseHandlerCreator getCloseHandlerCreator() { } public void addExternalConnection(CONNECTION_TYPE connection) { - // if the connection holder is not set, set it to this incoming connection. We'll simply ignore if already set. - this.connectionHolder.compareAndSet(null, connection); + // accept this external connection only if this node has not initiated a client connection itself, or if it + // loses the tie-break (the node with the lexicographically greater hostname wins). This keeps a single + // connection between two nodes: when A is talking to B and B is talking to A, only one client connection survives. + String clientHost = clientConnectedHostname.get(); + if (clientHost == null || (host != null && host.compareTo(clientHost) > 0)) { + // if the connection holder is not set, set it to this incoming connection. We'll simply ignore if already set. + final boolean wasSet = this.connectionHolder.compareAndSet(null, connection); + if (logger.isDebugEnabled()) { + if (wasSet) { + logger.debug("Adding external connection - {}", getConnectionName(connection)); + } else { + logger.debug("Ignoring external connection because connection holder is already set. External connection: - {}", + getConnectionName(connection)); + } + } + } else { + logger.debug("Not adding external connection because client has already initiated the connection L {} -> R {}", clientHost, host); + } } @Override @@ -174,6 +201,7 @@ public void close() { CONNECTION_TYPE c = connectionHolder.getAndSet(null); if (c != null) { try { + logger.debug("Closing channel: {}", getConnectionName(c)); c.getChannel().close().sync(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); @@ -206,7 +234,6 @@ private class ConnectionRunner implements Runnable { @Override public void run() { - // try to use the active connection. If someone else is setting, restart with the new value. while(true) { // first, try to get existing connection. @@ -216,9 +243,17 @@ public void run() { return; } + if (logger.isDebugEnabled()) { + logger.debug("connection runner: conn status {}", getConnectionName(conn)); + } + // bad connection, clear it. if(!connectionHolder.compareAndSet(conn, null)) { // if we failed to clear, someone changed the connection, restart the process. + + if (logger.isDebugEnabled()) { + logger.debug("Someone has changed the connection, restarting it: {}", getConnectionName(conn)); + } continue; } @@ -232,10 +267,10 @@ public void run() { return; } - logger.info("[{}]: No connection active, opening new connection to {}:{}.", name, host, port); + logger.info("[{}]: No connection active, opening new connection {} -> {}:{}.", + name, getLocalAddress(), host, port); final long runUntil = System.currentTimeMillis() + connectionSuccessTimeoutMS; - ConnectionResult lastResult = null; // keep attempting a connection until we hit the timeout. while (System.currentTimeMillis() < runUntil) { @@ -281,23 +316,52 @@ public void run() { } private ConnectionResult attempt(long runUntil) throws RpcException { + // record that this node is initiating a client connection, so that a concurrent external connection can be + // detected and tie-broken in addExternalConnection() + clientConnectedHostname.set(getLocalAddress()); + ConnectionHandle future = new ConnectionHandle(); AbstractClient client = getNewClient(); client.connectAsClient(future, handshake, host, port); + logger.debug("Connection attempt - Waiting for connection to be finished: {} -> {}:{}", + getLocalAddress(), host, port); ConnectionResult result = future.waitForFinished(runUntil); + logger.debug("Connection attempt - Connection finished: {} -> {}:{}, result {}|{}", + getLocalAddress(), host, port, result.ok(), result); + if (!result.ok()) { + clientConnectedHostname.set(null); return result; } + if (logger.isDebugEnabled()) { + CONNECTION_TYPE conn = connectionHolder.get(); + logger.debug("Connection attempt - connection holder: client to {}:{}, result {}|{}. ConnectionHolder {}", + host, port, result.ok(), result, + getConnectionName(conn) + ); + } + boolean wasSet = connectionHolder.compareAndSet(null, result.connection); - if(wasSet) { + + if (logger.isDebugEnabled()) { + CONNECTION_TYPE conn = connectionHolder.get(); + logger.debug("Connection attempt - connectionHolder wasSet {}: client to {}:{}, result {}|{}. 
ConnectionHolder {}", + wasSet, host, port, result.ok(), result, + getConnectionName(conn) + ); + } + + if (wasSet) { + clientConnectedHostname.set(null); return result; } // we failed to set the new connection, close it. result.discard(); + clientConnectedHostname.set(null); + CONNECTION_TYPE outsideSet = connectionHolder.get(); if(outsideSet == null) { // unexpected but let's handle. @@ -305,7 +369,6 @@ private ConnectionResult attempt(long runUntil) throws RpcException { } else { return new ConnectionResult(false, outsideSet); } - } /** @@ -321,8 +384,8 @@ public void executeCommand(RpcCommand cm try { // no need to set timing here since the lower layers have the timeouts necessary. ConnectionResult result = futureConnection.get(); - if(result.ok()) { - if(result.hadToConnect()) { + if (result.ok()) { + if (result.hadToConnect()) { cmd.connectionSucceeded(result.connection); } else { cmd.connectionAvailable(result.connection); @@ -330,18 +393,14 @@ public void executeCommand(RpcCommand cm } else { cmd.connectionFailed(result.failure.type, result.failure.throwable); } - } catch (InterruptedException e) { // shouldn't happen cmd.connectionFailed(FailureType.CONNECTION, e); - } catch (ExecutionException e) { + } catch (ExecutionException | IllegalStateException e) { // shouldn't happen cmd.connectionFailed(FailureType.CONNECTION, e.getCause()); } } - - - } /** @@ -368,6 +427,7 @@ public synchronized void connectionSucceeded(CONNECTION_TYPE connection) { // close channel since it took too long to create. try { + logger.debug("Closing channel because took too long: {}", getConnectionName(connection)); connection.getChannel().close().sync(); } catch (InterruptedException e) { // ignore. @@ -434,6 +494,7 @@ public boolean hadToConnect() { public void discard() { if (connection != null) { + logger.debug("Discarding connection. 
Closing channel {}", getConnectionName(connection)); try { connection.getChannel().close().sync(); } catch (InterruptedException e) { @@ -486,6 +547,7 @@ private boolean isStillValid() { } + @Override public String toString() { return String.format("[%s] %s:%d", name, host, port); } diff --git a/services/base-rpc/src/main/java/com/dremio/exec/rpc/RemoteConnection.java b/services/base-rpc/src/main/java/com/dremio/exec/rpc/RemoteConnection.java index 2830b78458..f0ebf8323c 100644 --- a/services/base-rpc/src/main/java/com/dremio/exec/rpc/RemoteConnection.java +++ b/services/base-rpc/src/main/java/com/dremio/exec/rpc/RemoteConnection.java @@ -97,6 +97,7 @@ boolean blockOnNotWritable(RpcOutcomeListener listener) { } } + @Override public void setAutoRead(boolean enableAutoRead) { channel.config().setAutoRead(enableAutoRead); } diff --git a/services/base-rpc/src/main/java/com/dremio/exec/rpc/RpcBus.java b/services/base-rpc/src/main/java/com/dremio/exec/rpc/RpcBus.java index 772d3adfd0..752cf41f65 100644 --- a/services/base-rpc/src/main/java/com/dremio/exec/rpc/RpcBus.java +++ b/services/base-rpc/src/main/java/com/dremio/exec/rpc/RpcBus.java @@ -282,6 +282,7 @@ private void sendOnce() { } } + @Override public void sendFailure(UserRpcException e){ sendFailure(e, true); } @@ -320,7 +321,7 @@ private boolean sendFailure(UserRpcException e, boolean failOnAlreadySent){ } - + @SuppressWarnings("checkstyle:MemberName") private final SecondFailureHandler RESPONSE_FAILURE_FAILURE = new SecondFailureHandler(); private class SecondFailureHandler implements ChannelFutureListener { diff --git a/services/base-rpc/src/test/java/com/dremio/exec/rpc/TestReconnectingConnection.java b/services/base-rpc/src/test/java/com/dremio/exec/rpc/TestReconnectingConnection.java index 1c7167f597..2241b1eb8c 100644 --- a/services/base-rpc/src/test/java/com/dremio/exec/rpc/TestReconnectingConnection.java +++ b/services/base-rpc/src/test/java/com/dremio/exec/rpc/TestReconnectingConnection.java @@ -182,6 +182,11 @@ public TestReConnection(long failFor, long tryFor, long tryEach, Iterator com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-catalog-api Services - Catalog API diff --git a/services/command-pool/pom.xml b/services/command-pool/pom.xml index a490ea5429..68db517ed8 100644 --- a/services/command-pool/pom.xml +++ b/services/command-pool/pom.xml @@ -21,7 +21,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-commandpool diff --git a/services/configuration/pom.xml b/services/configuration/pom.xml index 9122d72a8d..82a8542f79 100644 --- a/services/configuration/pom.xml +++ b/services/configuration/pom.xml @@ -22,7 +22,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 dremio-services-configuration diff --git a/services/coordinator/pom.xml b/services/coordinator/pom.xml index b68155211b..534009617f 100644 --- a/services/coordinator/pom.xml +++ b/services/coordinator/pom.xml @@ -21,7 +21,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-coordinator Services - Coordinator @@ -50,28 +50,11 @@ org.apache.curator curator-x-discovery - - - slf4j-log4j12 - org.slf4j - - - log4j - log4j - - org.apache.curator curator-test - ${curator-test.version} test - - - log4j - log4j - - org.slf4j @@ -89,7 +72,6 @@ 
com.dremio.services dremio-services-telemetry-api ${project.version} - compile
diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterCoordinator.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterCoordinator.java index a7877e1ece..0e50d9101a 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterCoordinator.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterCoordinator.java @@ -122,6 +122,7 @@ public static Roles toEndpointRoles(Set roles) { * @return a provider for a collection of endpoints * @throws NullPointerException if role is {@code null} */ + @Override public abstract ServiceSet getServiceSet(Role role); /** @@ -129,14 +130,23 @@ public static Roles toEndpointRoles(Set roles) { * @param serviceName * @return */ + @Override public abstract ServiceSet getOrCreateServiceSet(String serviceName); + /** + * Delete a {@link ServiceSet} for the given service name + * @param serviceName + */ + @Override + public abstract void deleteServiceSet(String serviceName); + /** * Get the set of service names registered in the ClusterCoordinator ServiceSet. * NOTE: There is no guarantee of return object consistency depending on how Dremio is tracking the registered services. * * @return An Iterable of service names. */ + @Override public abstract Iterable getServiceNames() throws Exception; public abstract DistributedSemaphore getSemaphore(String name, int maximumLeases); @@ -147,5 +157,6 @@ public static Roles toEndpointRoles(Set roles) { * @param name the name of the election * @return a handle to be closed when leaving the election */ + @Override public abstract ElectionRegistrationHandle joinElection(String name, ElectionListener listener); } diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterServiceSetManager.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterServiceSetManager.java index a51faa6b90..918cc8eeb4 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterServiceSetManager.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/ClusterServiceSetManager.java @@ -37,6 +37,12 @@ public interface ClusterServiceSetManager extends Service { */ ServiceSet getOrCreateServiceSet(String serviceName); + /** + * Delete a {@link ServiceSet} for the given service name + * @param serviceName + */ + void deleteServiceSet(String serviceName); + /** * Get the set of service names registered in the ClusterCoordinator ServiceSet. * NOTE: There is no guarantee of return object consistency depending on how Dremio is tracking the registered services. 
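
The deleteServiceSet additions above complete the service-set lifecycle: getOrCreateServiceSet starts tracking a service, and deleteServiceSet tears the set down once it is empty (the ZK-backed implementation further below also removes the now-childless persistent znode). A minimal sketch of how a caller might drive that lifecycle; "my-task-service" is an invented service name, and the sketch assumes a started ClusterServiceSetManager and that RegistrationHandle is closeable, as the handles in this module are:

    import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint;
    import com.dremio.service.coordinator.ClusterServiceSetManager;
    import com.dremio.service.coordinator.RegistrationHandle;
    import com.dremio.service.coordinator.ServiceSet;

    public final class ServiceSetLifecycleSketch {
      static void advertiseWhileWorking(ClusterServiceSetManager coordinator, NodeEndpoint endpoint, Runnable work) {
        // Create (or look up) the named service set and make this node discoverable in it.
        ServiceSet set = coordinator.getOrCreateServiceSet("my-task-service"); // hypothetical service name
        RegistrationHandle handle = set.register(endpoint);
        try {
          work.run(); // this node is visible via getAvailableEndpoints() while the work runs
        } finally {
          handle.close(); // drop this node's registration (assumes the handle is AutoCloseable)
          // New API from this change: close the set; the ZK implementation also deletes the znode once it has no children.
          coordinator.deleteServiceSet("my-task-service");
        }
      }
    }
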
diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/LocalExecutorSetService.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/LocalExecutorSetService.java index 55756f9af2..8aa7d09563 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/LocalExecutorSetService.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/LocalExecutorSetService.java @@ -71,6 +71,7 @@ public synchronized ListenableSet getExecutorSet(EngineManagementProtos.EngineId return executorSet; } + @Override public Collection getAllAvailableEndpoints() { if(executorSet != null) { return executorSet.getAvailableEndpoints(); @@ -125,6 +126,7 @@ public void removeNodeStatusListener(NodeStatusListener inner) { public void close() throws Exception { } + @Override public Map> listAllEnginesExecutors() { if(executorSet != null) { Map> executorsGroupedByReplica = diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/NoOpClusterCoordinator.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/NoOpClusterCoordinator.java index 99b9d073ab..58aa968077 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/NoOpClusterCoordinator.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/NoOpClusterCoordinator.java @@ -46,6 +46,11 @@ public ServiceSet getOrCreateServiceSet(String serviceName) { return NO_OP_SERVICE_SET; } + @Override + public void deleteServiceSet(String serviceName) { + // do nothing + } + @Override public Iterable getServiceNames() throws Exception { return Collections.emptySet(); diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/TaskLeaderElection.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/TaskLeaderElection.java index bef1bf3ff8..f3d4c2e03c 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/TaskLeaderElection.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/TaskLeaderElection.java @@ -366,8 +366,6 @@ void cancel(boolean mayInterruptRunning) { @Override public void close() throws Exception { - logger.info("Stopping TaskLeaderElection for service {}", serviceName); - if (isTaskLeader.compareAndSet(true, false)) { listeners.keySet().forEach(TaskLeaderChangeListener::onLeadershipLost); } @@ -386,6 +384,8 @@ public void close() throws Exception { closeHandles(); AutoCloseables.close(taskLeaderStatusListener); + + logger.info("Stopped TaskLeaderElection for service {}", serviceName); } private synchronized void closeHandles() throws Exception { diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/local/LocalClusterCoordinator.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/local/LocalClusterCoordinator.java index 7ebaea2995..344b3b3f1a 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/local/LocalClusterCoordinator.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/local/LocalClusterCoordinator.java @@ -101,6 +101,18 @@ public ServiceSet getOrCreateServiceSet(final String name) { return serviceSets.computeIfAbsent(name, s -> new LocalServiceSet(s)); } + @Override + public void deleteServiceSet(String name) { + LocalServiceSet localServiceSet = serviceSets.remove(name); + if (localServiceSet != null) { + try { + localServiceSet.close(); + } catch (Exception e) { + logger.error("Unable to close LocalServiceSet {}", name, e); + } 
+ } + } + @Override public Iterable getServiceNames() { return serviceSets.keySet(); diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterClient.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterClient.java index 0de69908f6..33f7730843 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterClient.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterClient.java @@ -16,6 +16,7 @@ package com.dremio.service.coordinator.zk; import java.io.IOException; +import java.util.List; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; @@ -39,7 +40,6 @@ import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; -import org.apache.curator.framework.imps.CuratorFrameworkState; import org.apache.curator.framework.recipes.leader.LeaderLatch; import org.apache.curator.framework.recipes.leader.LeaderLatch.CloseMode; import org.apache.curator.framework.recipes.leader.LeaderLatchListener; @@ -127,19 +127,6 @@ private ZKClusterClient(ZKClusterConfig config, String connect, Provider { - isConnected = newState.isConnected(); - logger.info("ZKClusterClient: new state received[{}] - isConnected: {}", newState, isConnected); - }); - } - } - } - @Override public void start() throws Exception { if(localPortProvider != null){ @@ -215,8 +202,6 @@ public void start() throws Exception { private void runSupervisorCheck() { if (configFeatureProvider != null && configFeatureProvider.isFeatureEnabled(Features.COORDINATOR_ZK_SUPERVISOR.getFeatureName())) { - initializeConnection(); - boolean isProbeSucceeded = false; if (isConnected == null || !isConnected) { logger.error("ZKClusterClient: Not connected to ZK."); @@ -331,8 +316,8 @@ public ElectionRegistrationHandle joinElection(final String name, final Election final String id = UUID.randomUUID().toString(); // In case of multicluster Dremio env. that use the same zookeeper // we need a root per Dremio clusterId - final LeaderLatch leaderLatch = - new LeaderLatch(curator, clusterIdPath + "/leader-latch/" + name, id, CloseMode.SILENT); + final String latchPath = clusterIdPath + "/leader-latch/" + name; + final LeaderLatch leaderLatch = new LeaderLatch(curator, latchPath, id, CloseMode.SILENT); logger.info("joinElection called {} - {}.", id, name); @@ -478,6 +463,7 @@ private synchronized void checkAndNotifyCancelled(long svdLeaderGeneration) { public void close() { try { leaderLatch.close(); + deleteServiceLeaderElectionPath(); } catch (IOException e) { logger.error("Error when closing registration handle for election {}", name, e); } @@ -497,6 +483,28 @@ public int instanceCount() { } return 0; } + + private void deleteServiceLeaderElectionPath() { + try { + boolean isZkConnected = isConnected != null && isConnected.equals(true); + if (isZkConnected && curator.checkExists().forPath(latchPath) != null) { + List allChildren = curator.getChildren().forPath(latchPath); + // every participant in the election has a child node under the election path. When there are no more child + // elements under the election path (latchPath), we can delete it; otherwise it stays in zk, since it is + // a persistent path in zk. + if (allChildren.isEmpty()) { + curator.delete().guaranteed().forPath(latchPath); + logger.info("Closed leader latch. Deleted latch path {}", latchPath); + } else { + logger.info("Closed leader latch. Nothing to do about latch path {}. It has children: {}", latchPath, allChildren.size()); + } + } else if (!isZkConnected) { + logger.warn("Closed leader latch. Nothing to do about latch path {}. Not connected to ZK", latchPath); + } + } catch (Exception e) { + logger.warn("Could not delete latch path {}", latchPath, e); + } + } }; } @@ -504,6 +512,26 @@ public ZKServiceSet newServiceSet(String name) { return new ZKServiceSet(name, discovery); } + public void deleteServiceSetZkNode(String name) { + String zkNodePath = clusterIdPath + "/" + name; + try { + boolean isZkConnected = isConnected != null && isConnected.equals(true); + if (isZkConnected && curator.checkExists().forPath(zkNodePath) != null) { + List allChildren = curator.getChildren().forPath(zkNodePath); + if (allChildren.isEmpty()) { + curator.delete().guaranteed().forPath(zkNodePath); + logger.info("Deleted ZKServiceSet zk node path {}", zkNodePath); + } else { + logger.info("ZKServiceSet zk node path {} was not deleted. It has children: {}", zkNodePath, allChildren.size()); + } + } else if (!isZkConnected) { + logger.warn("ZKServiceSet zk node path {} was not deleted. Not connected to ZK", zkNodePath); + } + } catch (Exception e) { + logger.warn("Could not delete ZKServiceSet zk node path {}", zkNodePath, e); + } + } + private ServiceDiscovery newDiscovery(String clusterId) { return ServiceDiscoveryBuilder .builder(NodeEndpoint.class) @@ -528,9 +556,8 @@ private class ConnectionListener implements ConnectionStateListener { @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { - if (connectionLostHandler.stateLoggingEnabled()) { - logger.info("ZK connection state changed to {}", newState); - } + isConnected = newState.isConnected(); + logger.info("ZKClusterClient: new state received[{}] - isConnected: {}", newState, isConnected); connectionLostHandler.handleConnectionState(newState); } } diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterCoordinator.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterCoordinator.java index 1d29ba07be..bc8ac7d66d 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterCoordinator.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterCoordinator.java @@ -102,6 +102,11 @@ public ServiceSet getOrCreateServiceSet(final String serviceName) { return zkClusterServiceSetManager.getOrCreateServiceSet(serviceName); } + @Override + public void deleteServiceSet(String serviceName) { + zkClusterServiceSetManager.deleteServiceSet(serviceName); + } + // this interface doesn't guarantee the consistency of the registered service names. 
@Override public Iterable getServiceNames() throws Exception { diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterServiceSetManager.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterServiceSetManager.java index d47a120c6b..3d12f4766f 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterServiceSetManager.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKClusterServiceSetManager.java @@ -66,6 +66,7 @@ public ServiceSet getOrCreateServiceSet(String serviceName) { final ZKServiceSet newServiceSet = zkClient.newServiceSet(serviceName); try { newServiceSet.start(); + logger.info("Started zkServiceSet for service {}", serviceName); } catch (Exception e) { throw new RuntimeException(String.format("Unable to start %s service in Zookeeper", serviceName), e); } @@ -73,11 +74,28 @@ public ServiceSet getOrCreateServiceSet(String serviceName) { }); } + @Override + public void deleteServiceSet(String serviceName) { + ZKServiceSet serviceSet = serviceSets.remove(serviceName); + if (serviceSet != null) { + try { + serviceSet.close(); + zkClient.deleteServiceSetZkNode(serviceName); + logger.info("Stopped zkServiceSet for service {}", serviceName); + } catch (Exception e) { + logger.error("Unable to close zkService for service {}", serviceName, e); + } + } else { + logger.info("Nothing to do (delete) for zkServiceSet for service {}", serviceName); + } + } + public ServiceSet getOrCreateServiceSet(String role, String serviceName) { return serviceSets.computeIfAbsent(role, s -> { final ZKServiceSet newServiceSet = zkClient.newServiceSet(serviceName); try { newServiceSet.start(); + logger.info("Started zkServiceSet for service {} and role {}", serviceName, role); } catch (Exception e) { throw new RuntimeException(String.format("Unable to start %s service in Zookeeper", serviceName), e); } diff --git a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKSabotConfig.java b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKSabotConfig.java index cd863ddc2a..ca21607714 100644 --- a/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKSabotConfig.java +++ b/services/coordinator/src/main/java/com/dremio/service/coordinator/zk/ZKSabotConfig.java @@ -48,78 +48,97 @@ public ZKSabotConfig(SabotConfig sabotConfig) { this.config = sabotConfig; } + @Override public int getConnectionTimeoutMilliSecs() { return config.getInt(ZK_TIMEOUT); } + @Override public int getSessionTimeoutMilliSecs() { return config.getInt(ZK_SESSION_TIMEOUT); } + @Override public String getRoot() { return config.getString(ZK_ROOT); } + @Override public int getRetryBaseDelayMilliSecs() { return config.getMilliseconds(ZK_RETRY_BASE_DELAY).intValue(); } + @Override public long getInitialTimeoutMilliSecs() { return config.getLong(ZK_INITIAL_TIMEOUT_MS); } + @Override public int getRetryMaxDelayMilliSecs() { return config.getMilliseconds(ZK_RETRY_MAX_DELAY).intValue(); } + @Override public boolean isRetryUnlimited() { return config.getBoolean(ZK_RETRY_UNLIMITED); } + @Override public boolean isConnectionHandleEnabled() { return config.getBoolean(ZK_CONNECTION_HANDLE_ENABLED); } + @Override public long getRetryLimit() { return config.getLong(ZK_RETRY_LIMIT); } + @Override public long getElectionTimeoutMilliSecs() { return config.getMilliseconds(ZK_ELECTION_TIMEOUT); } + @Override public long getElectionPollingMilliSecs() { return 
config.getMilliseconds(ZK_ELECTION_POLLING); } + @Override public long getElectionDelayForLeaderCallbackMilliSecs() { return config.getMilliseconds(ZK_ELECTION_DELAY_FOR_LEADER_CALLBACK); } + @Override public CoordinatorLostHandle getConnectionLostHandler() { return config.getInstance(ZK_LOST_HANDLER_MODULE_CLASS, CoordinatorLostHandle.class, CoordinatorLostHandle.NO_OP); } + @Override public String getClusterId() { return config.getString(CLUSTER_ID); } + @Override public String getConnection() { return config.getString(ZK_CONNECTION); } + @Override public ConfigFeatureProvider getConfigFeatureProvider() { return null; } + @Override public int getZkSupervisorIntervalMilliSec() { return config.getInt(ZK_SUPERVISOR_INTERVAL_MS); } + @Override public int getZkSupervisorReadTimeoutMilliSec() { return config.getInt(ZK_SUPERVISOR_READ_TIMEOUT_MS); } + @Override public int getZkSupervisorMaxFailures() { return config.getInt(ZK_SUPERVISOR_MAX_FAILURES); } diff --git a/services/coordinator/src/main/protobuf/ProjectConfig.proto b/services/coordinator/src/main/protobuf/ProjectConfig.proto index ed6d1558dc..b8cd429016 100644 --- a/services/coordinator/src/main/protobuf/ProjectConfig.proto +++ b/services/coordinator/src/main/protobuf/ProjectConfig.proto @@ -63,5 +63,6 @@ message ProjectConfig { DistStoreConfig dist_store_config = 1; string org_id = 2; DataCredentials data_credentials = 3; + optional bool resolve_results_from_dist = 4; } diff --git a/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestTaskLeaderElection.java b/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestTaskLeaderElection.java index 4540a3ca20..53308b1b78 100644 --- a/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestTaskLeaderElection.java +++ b/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestTaskLeaderElection.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.List; @@ -29,6 +30,7 @@ import java.util.function.Function; import java.util.stream.Collectors; +import org.apache.zookeeper.data.Stat; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -86,7 +88,6 @@ public void testElectionsWithRegistration() throws Exception { .setRoles(ClusterCoordinator.Role.toEndpointRoles(Sets.newHashSet(ClusterCoordinator.Role.COORDINATOR))) .build(); - TaskLeaderElection taskLeaderElection1 = new TaskLeaderElection( SERVICE_NAME, @@ -143,6 +144,9 @@ public void testElectionsWithRegistration() throws Exception { leader.close(); taskLeaderElectionList.remove(leader); + Stat stat = zooKeeperServer.getZKClient().exists("/dremio/test/test-cluster-id/leader-latch/" + SERVICE_NAME, false); + assertNotNull(stat); + // wait until the leader is removed waitUntilLeaderRemoved(taskLeaderElectionList, leader); @@ -161,6 +165,9 @@ public void testElectionsWithRegistration() throws Exception { secondLeader.close(); taskLeaderElectionList.remove(secondLeader); + stat = zooKeeperServer.getZKClient().exists("/dremio/test/test-cluster-id/leader-latch/" + SERVICE_NAME, false); + assertNotNull(stat); + // wait until second leader is removed waitUntilLeaderRemoved(taskLeaderElectionList, secondLeader); @@ -178,6 +185,9 @@ public void testElectionsWithRegistration() throws Exception { // stop third leader assertNotNull(thirdLeader); 
thirdLeader.close(); + + stat = zooKeeperServer.getZKClient().exists("/dremio/test/test-cluster-id/leader-latch/" + SERVICE_NAME, false); + assertNull(stat); } } @@ -263,6 +273,9 @@ public void testGivingUpLeadership() throws Exception { throw new RuntimeException(e); } }); + + Stat stat = zooKeeperServer.getZKClient().exists("/dremio/test/test-cluster-id/leader-latch/" + SERVICE_NAME, false); + assertNull(stat); } } diff --git a/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestZKClusterClient.java b/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestZKClusterClient.java index b95f54cc4a..c52025cb9e 100644 --- a/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestZKClusterClient.java +++ b/services/coordinator/src/test/java/com/dremio/service/coordinator/zk/TestZKClusterClient.java @@ -40,6 +40,7 @@ import com.dremio.service.coordinator.ClusterCoordinator; import com.dremio.service.coordinator.ElectionListener; import com.dremio.service.coordinator.ElectionRegistrationHandle; +import com.dremio.service.coordinator.RegistrationHandle; import com.dremio.test.DremioTest; import com.typesafe.config.ConfigValueFactory; @@ -138,6 +139,29 @@ public void test3ComponentsConnection() throws Exception { } } + @Test + public void test4ComponentsConnection() throws Exception { + assertNull(zooKeeperServer.getZKClient().exists("/dremio4/test/test-cluster-id", false)); + + try(ZKClusterClient client = new ZKClusterClient( + DEFAULT_ZK_CLUSTER_CONFIG, + String.format("%s/dremio4/test/test-cluster-id", zooKeeperServer.getConnectString())) + ) { + client.start(); + ZKServiceSet serviceSet = client.newServiceSet("coordinator"); + RegistrationHandle registrationHandle = serviceSet.register(NodeEndpoint.newBuilder().setAddress("foo").build()); + + Stat stat = zooKeeperServer.getZKClient().exists("/dremio4/test/test-cluster-id/coordinator", false); + assertNotNull(stat); + assertEquals(1, stat.getNumChildren()); + + serviceSet.unregister((ZKRegistrationHandle)registrationHandle); + client.deleteServiceSetZkNode("coordinator"); + stat = zooKeeperServer.getZKClient().exists("/dremio4/test/test-cluster-id/coordinator", false); + assertNull(stat); + } + } + @Test public void testElection() throws Exception { final CountDownLatch firstElection = new CountDownLatch(1); diff --git a/services/credentials/pom.xml b/services/credentials/pom.xml index abc082b3b2..680569df2e 100644 --- a/services/credentials/pom.xml +++ b/services/credentials/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-credentials Services - Credentials diff --git a/services/datastore/pom.xml b/services/datastore/pom.xml index 4a19862afc..0f007719e6 100644 --- a/services/datastore/pom.xml +++ b/services/datastore/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-datastore Services - Datastore @@ -147,7 +147,6 @@ org.apache.commons commons-math3 - 3.6.1 io.opentracing diff --git a/services/datastore/src/main/java/com/dremio/datastore/CoreBaseTimedStore.java b/services/datastore/src/main/java/com/dremio/datastore/CoreBaseTimedStore.java index 2cbd3fbcf2..f5994fb6b4 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/CoreBaseTimedStore.java +++ b/services/datastore/src/main/java/com/dremio/datastore/CoreBaseTimedStore.java @@ -41,6 +41,7 @@ protected 
CoreKVStore getStore() { return kvStore; } + @Override public String getName() { return name; } diff --git a/services/datastore/src/main/java/com/dremio/datastore/CoreStoreProviderImpl.java b/services/datastore/src/main/java/com/dremio/datastore/CoreStoreProviderImpl.java index 56b3566b16..c464ba1129 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/CoreStoreProviderImpl.java +++ b/services/datastore/src/main/java/com/dremio/datastore/CoreStoreProviderImpl.java @@ -118,6 +118,7 @@ public CoreStoreProviderImpl(String baseDirectory, boolean inMemory, boolean tim break; case MEMORY: inMemory = true; + break; default: // noop } diff --git a/services/datastore/src/main/java/com/dremio/datastore/LocalKVStore.java b/services/datastore/src/main/java/com/dremio/datastore/LocalKVStore.java index 67c5a556d2..61d84334a2 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/LocalKVStore.java +++ b/services/datastore/src/main/java/com/dremio/datastore/LocalKVStore.java @@ -140,6 +140,7 @@ public String getTag() { return input.getTag(); } + @Override public boolean equals(Object o) { if (!(o instanceof Document)) { return false; diff --git a/services/datastore/src/main/java/com/dremio/datastore/SearchQueryUtils.java b/services/datastore/src/main/java/com/dremio/datastore/SearchQueryUtils.java index a2e08193a2..22b29073ed 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/SearchQueryUtils.java +++ b/services/datastore/src/main/java/com/dremio/datastore/SearchQueryUtils.java @@ -81,6 +81,21 @@ public static final SearchQuery newTermQuery(String field, int value) { .build(); } + /** + * Create a term query + * + * @param field the field to scan + * @param value the value to look for + * @return a query instance + * @throws NullPointerException if {@code field} or {@code value} is {@code null} + */ + public static final SearchQuery newTermQuery(String field, boolean value) { + return SearchQuery.newBuilder() + .setType(SearchQuery.Type.TERM_BOOLEAN) + .setTermBoolean(SearchQuery.TermBoolean.newBuilder().setField(field).setValue(value)) + .build(); + } + /** * Create a term query * diff --git a/services/datastore/src/main/java/com/dremio/datastore/TimedKVStore.java b/services/datastore/src/main/java/com/dremio/datastore/TimedKVStore.java index dcf9bebf8c..e6b153d1cb 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/TimedKVStore.java +++ b/services/datastore/src/main/java/com/dremio/datastore/TimedKVStore.java @@ -226,6 +226,13 @@ public void applyForAllTenants(FindByCondition condition, BiConsumer consu } } + @Override + public Iterable> findOnAllTenants(FindByCondition condition, FindOption... options) { + try(final OpTimer ctx = time(Ops.findForAllTenants)) { + return indexedStore.findOnAllTenants(condition, options); + } + } + @Override public Integer version() { return indexedStore.version(); diff --git a/services/datastore/src/main/java/com/dremio/datastore/api/IndexedStore.java b/services/datastore/src/main/java/com/dremio/datastore/api/IndexedStore.java index 7334852385..396ad532d2 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/api/IndexedStore.java +++ b/services/datastore/src/main/java/com/dremio/datastore/api/IndexedStore.java @@ -107,6 +107,20 @@ default void applyForAllTenants(FindByCondition condition, BiConsumer cons throw new UnsupportedOperationException("Only applicable for MultiTenantKVstore"); } + /** + * Creates a lazy iterable over items that match the provided condition, in + * the order requested. 
This is similar to the find operation, but it returns matching + * documents across all tenants. + * + * @param condition the condition to match + * @param options extra configurations for the find operation. + * @return A lazy iterable over the matching Documents. + */ + default Iterable> findOnAllTenants(FindByCondition condition, FindOption... options) { + throw new UnsupportedOperationException("Only applicable for MultiTenantKVstore"); + } + /** * Version for the indices. * diff --git a/services/datastore/src/main/java/com/dremio/datastore/indexed/LuceneQueryConverter.java b/services/datastore/src/main/java/com/dremio/datastore/indexed/LuceneQueryConverter.java index 78a9f91ec4..471b7c5004 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/indexed/LuceneQueryConverter.java +++ b/services/datastore/src/main/java/com/dremio/datastore/indexed/LuceneQueryConverter.java @@ -98,7 +98,10 @@ Query toLuceneQuery(SearchQuery query) { case TERM_DOUBLE: return toTermDoubleQuery(query.getTermDouble()); - case EXISTS: + case TERM_BOOLEAN: + return toTermBooleanQuery(query.getTermBoolean()); + + case EXISTS: return toExistsquery(query.getExists()); case DOES_NOT_EXIST: @@ -269,6 +272,13 @@ private Query toTermDoubleQuery(SearchQuery.TermDouble term) { term.getValue()); } + private Query toTermBooleanQuery(SearchQuery.TermBoolean term) { + // there is no BooleanPoint or any other structure that can carry a boolean query in Lucene + // support for it will be handled in DX-60829 + throw new UnsupportedOperationException("The TermBoolean is not supported for " + LuceneQueryConverter.class.getName() + + ". For more info, see: DX-60829."); + } + private Query toExistsquery(SearchQuery.Exists exists) { return new DocValuesFieldExistsQuery(exists.getField()); } diff --git a/services/datastore/src/main/java/com/dremio/datastore/indexed/SearchTypeVisitor.java b/services/datastore/src/main/java/com/dremio/datastore/indexed/SearchTypeVisitor.java index e9a2faccf8..e3a5fd77ee 100644 --- a/services/datastore/src/main/java/com/dremio/datastore/indexed/SearchTypeVisitor.java +++ b/services/datastore/src/main/java/com/dremio/datastore/indexed/SearchTypeVisitor.java @@ -35,6 +35,8 @@ default R visit(SearchTypes.SearchQuery query) { return visit(query.getTermLong()); case TERM_DOUBLE: return visit(query.getTermDouble()); + case TERM_BOOLEAN: + return visit(query.getTermBoolean()); case CONTAINS: return visit(query.getContainsText()); case BOOLEAN: @@ -90,6 +92,11 @@ default R visit(SearchTypes.SearchQuery query) { */ R visit(SearchTypes.SearchQuery.TermDouble node); + /** + * Method visited when a {@code TermBoolean} is passed + */ + R visit(SearchTypes.SearchQuery.TermBoolean node); + /** * Method visited when a {@code Contains} is passed */ diff --git a/services/datastore/src/main/protobuf/search_types.proto b/services/datastore/src/main/protobuf/search_types.proto index 526bc1ea0a..121e783434 100644 --- a/services/datastore/src/main/protobuf/search_types.proto +++ b/services/datastore/src/main/protobuf/search_types.proto @@ -40,6 +40,7 @@ message SearchQuery { BOOST = 17; CONTAINS = 18; PREFIX = 19; + TERM_BOOLEAN = 20; } enum BooleanOp { @@ -70,6 +71,11 @@ message SearchQuery { required int32 value = 2; } + message TermBoolean { + required string field = 1; + required bool value = 2; + } + message TermLong { required string field = 1; required int64 value = 2; @@ -168,6 +174,7 @@ message SearchQuery { optional Boost boost = 17; optional Contains containsText = 18; optional Prefix 
prefix = 19; + optional TermBoolean termBoolean = 20; } enum SortOrder { diff --git a/services/datastore/src/test/java/com/dremio/datastore/CustomLocalKVStoreProvider.java b/services/datastore/src/test/java/com/dremio/datastore/CustomLocalKVStoreProvider.java index 27d2761176..5cf656bcce 100644 --- a/services/datastore/src/test/java/com/dremio/datastore/CustomLocalKVStoreProvider.java +++ b/services/datastore/src/test/java/com/dremio/datastore/CustomLocalKVStoreProvider.java @@ -46,6 +46,7 @@ public CustomLocalKVStoreProvider(Set> st this.storeCreators = storeCreators; } + @Override protected Supplier>, KVStore>> getStoreProvider(){ return () -> StoreLoader.buildStores(storeCreators, super::newStore); } diff --git a/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryBytes.java b/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryBytes.java index 47c970825f..9632bc4324 100644 --- a/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryBytes.java +++ b/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryBytes.java @@ -24,6 +24,7 @@ */ public class TestByteSerializerFactoryBytes extends AbstractTestByteSerializerFactory { + @Override protected void runCircularTest(Format format, T original) { final Serializer serializer = getSerializer(format); diff --git a/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryJson.java b/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryJson.java index 17e7dcdf0d..99be371782 100644 --- a/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryJson.java +++ b/services/datastore/src/test/java/com/dremio/datastore/TestByteSerializerFactoryJson.java @@ -27,6 +27,7 @@ */ public class TestByteSerializerFactoryJson extends AbstractTestByteSerializerFactory { + @Override protected void runCircularTest(Format format, T original) throws IOException { final Serializer serializer = getSerializer(format); diff --git a/services/exec-selector/pom.xml b/services/exec-selector/pom.xml index e8658d4e2c..aecafc282b 100644 --- a/services/exec-selector/pom.xml +++ b/services/exec-selector/pom.xml @@ -21,7 +21,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-execselector diff --git a/services/exec-selector/src/main/java/com/dremio/service/execselector/ExecutorSelectorProvider.java b/services/exec-selector/src/main/java/com/dremio/service/execselector/ExecutorSelectorProvider.java index 2610fa1a0a..eaf01642e2 100644 --- a/services/exec-selector/src/main/java/com/dremio/service/execselector/ExecutorSelectorProvider.java +++ b/services/exec-selector/src/main/java/com/dremio/service/execselector/ExecutorSelectorProvider.java @@ -31,6 +31,7 @@ void setCurrentSelector(ExecutorSelector selector) { this.currentSelector = selector; } + @Override public ExecutorSelector get() { return currentSelector; } diff --git a/services/exec-selector/src/main/java/com/dremio/service/execselector/QueueProcessor.java b/services/exec-selector/src/main/java/com/dremio/service/execselector/QueueProcessor.java index a04b460056..9265f26a7a 100644 --- a/services/exec-selector/src/main/java/com/dremio/service/execselector/QueueProcessor.java +++ b/services/exec-selector/src/main/java/com/dremio/service/execselector/QueueProcessor.java @@ -113,6 +113,7 @@ public boolean completed() { /** * Reaps the event processing thread. 
The event processing thread might not finish processing the events on the queue */ + @Override public void close() throws Exception { if (!isClosed) { isClosed = true; diff --git a/services/exec-selector/src/test/java/com/dremio/service/execselector/TestExecutorSelectionServiceSet.java b/services/exec-selector/src/test/java/com/dremio/service/execselector/TestExecutorSelectionServiceSet.java index fb3519f2f0..035de13fa8 100644 --- a/services/exec-selector/src/test/java/com/dremio/service/execselector/TestExecutorSelectionServiceSet.java +++ b/services/exec-selector/src/test/java/com/dremio/service/execselector/TestExecutorSelectionServiceSet.java @@ -37,19 +37,23 @@ public class TestExecutorSelectionServiceSet implements ServiceSet { private Set endpoints = new HashSet<>(); private NodeStatusListener listener; + @Override public RegistrationHandle register(NodeEndpoint endpoint) { throw new UnsupportedOperationException("never invoked"); } + @Override public Collection getAvailableEndpoints() { return endpoints; } + @Override public void addNodeStatusListener(NodeStatusListener listener) { assertNull(this.listener); this.listener = listener; } + @Override public void removeNodeStatusListener(NodeStatusListener listener) { assertEquals(this.listener, listener); this.listener = null; diff --git a/services/exec-selector/src/test/java/com/dremio/service/execselector/TestServiceSet.java b/services/exec-selector/src/test/java/com/dremio/service/execselector/TestServiceSet.java index 19fd98d5fd..d9930e2179 100644 --- a/services/exec-selector/src/test/java/com/dremio/service/execselector/TestServiceSet.java +++ b/services/exec-selector/src/test/java/com/dremio/service/execselector/TestServiceSet.java @@ -36,19 +36,23 @@ public class TestServiceSet implements ServiceSet { private Set endpoints = new HashSet<>(); private NodeStatusListener listener; + @Override public RegistrationHandle register(CoordinationProtos.NodeEndpoint endpoint) { throw new UnsupportedOperationException("never invoked"); } + @Override public Collection getAvailableEndpoints() { return endpoints; } + @Override public void addNodeStatusListener(NodeStatusListener listener) { assertNull(this.listener); this.listener = listener; } + @Override public void removeNodeStatusListener(NodeStatusListener listener) { assertEquals(this.listener, listener); this.listener = null; diff --git a/services/executorservice/pom.xml b/services/executorservice/pom.xml index e300257edc..bb6eeea33b 100644 --- a/services/executorservice/pom.xml +++ b/services/executorservice/pom.xml @@ -23,7 +23,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-executorservice Services - Executor Service diff --git a/services/fabric-rpc/pom.xml b/services/fabric-rpc/pom.xml index bb1488e8a1..88e001c808 100644 --- a/services/fabric-rpc/pom.xml +++ b/services/fabric-rpc/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-fabric-rpc Services - Fabric RPC diff --git a/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricConnectionManager.java b/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricConnectionManager.java index 96e48e49a1..5fa9192c19 100644 --- a/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricConnectionManager.java +++ b/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricConnectionManager.java @@ -75,4 
+75,8 @@ public FabricConnectionManager( new CloseHandlerCreator(), engineFactory); } + @Override + protected String getLocalAddress() { + return this.localIdentity.getAddress(); + } } diff --git a/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricServer.java b/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricServer.java index e688475bc4..7457ef1d43 100644 --- a/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricServer.java +++ b/services/fabric-rpc/src/main/java/com/dremio/services/fabric/FabricServer.java @@ -38,6 +38,7 @@ * Fabric server that accepts connection. */ class FabricServer extends BasicServer{ + private final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(this.getClass()); private final FabricMessageHandler handler; private final ConnectionManagerRegistry connectionRegistry; @@ -116,6 +117,10 @@ public MessageLite getHandshakeResponse(FabricHandshake inbound) throws Exceptio // update the close handler. connection.wrapCloseHandler(manager.getCloseHandlerCreator()); + if (logger.isDebugEnabled()) { + logger.debug("Will ask to add an external connection: {}", connection.getName()); + } + // add to the connection manager. manager.addExternalConnection(connection); } diff --git a/services/functions/pom.xml b/services/functions/pom.xml index 33eda1100a..17b111d589 100644 --- a/services/functions/pom.xml +++ b/services/functions/pom.xml @@ -22,18 +22,13 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 dremio-services-functions Services - Functions - - 8 - 8 - - com.dremio @@ -72,10 +67,5 @@ com.google.guava guava - - com.google.errorprone - javac-shaded - 9+181-r4173-1 - diff --git a/services/functions/src/main/java/com/dremio/service/functions/generator/MockCatalog.java b/services/functions/src/main/java/com/dremio/service/functions/generator/MockCatalog.java index f730367844..594e801b21 100644 --- a/services/functions/src/main/java/com/dremio/service/functions/generator/MockCatalog.java +++ b/services/functions/src/main/java/com/dremio/service/functions/generator/MockCatalog.java @@ -79,6 +79,11 @@ public DremioTable getTableForQuery(NamespaceKey key) { return getTable(key); } + @Override + public DremioTable getTableSnapshotForQuery(NamespaceKey key, TableVersionContext context) { + throw new UnsupportedOperationException(); + } + @Override public DremioTable getTableSnapshot(NamespaceKey key, TableVersionContext context) { throw new UnsupportedOperationException(); diff --git a/services/functions/src/main/resources/function_specs/documented/CARDINALITY.yaml b/services/functions/src/main/resources/function_specs/documented/CARDINALITY.yaml new file mode 100644 index 0000000000..38cd0231c8 --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/CARDINALITY.yaml @@ -0,0 +1,41 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
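On the logging change in FabricServer above: SLF4J's {} placeholder already defers message formatting, so wrapping the call in isDebugEnabled() only saves evaluating the arguments themselves; it pays off when an argument (here connection.getName()) is non-trivial to compute. A minimal illustration of the two forms:

    // Unguarded: formatting is deferred, but connection.getName() is still evaluated.
    logger.debug("Will ask to add an external connection: {}", connection.getName());

    // Guarded: argument evaluation is skipped entirely when debug logging is off.
    if (logger.isDebugEnabled()) {
      logger.debug("Will ask to add an external connection: {}", connection.getName());
    }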
+# + +--- +name: "CARDINALITY" +signatures: + - + returnType: "BIGINT" + parameters: + - + kind: "REGULAR" + type: "LIST" + name: "List" + description: "List to count elements from" + - + returnType: "BIGINT" + parameters: + - + kind: "REGULAR" + type: "MAP" + name: "Map" + description: "Map to count elements from" + sampleCodes: + - + call: SELECT CARDINALITY(CONVERT_FROM('[1, 2, 3, 4, 5]', 'json')) + result: 5 +functionCategories: [] +description: "Returns the number of elements in LIST or MAP.\n" diff --git a/services/functions/src/main/resources/function_specs/documented/COL_LIKE.yaml b/services/functions/src/main/resources/function_specs/documented/COL_LIKE.yaml new file mode 100644 index 0000000000..cc2202e6df --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/COL_LIKE.yaml @@ -0,0 +1,38 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +--- +name: "COL_LIKE" +signatures: + - + returnType: "BOOLEAN" + parameters: + - + kind: "REGULAR" + type: "CHARACTERS" + name: "expression_col" + description: "A column containing an expression to compare." + - + kind: "REGULAR" + type: "CHARACTERS" + name: "pattern_col" + description: "A column containing the pattern to compare to the expression." + sampleCodes: + - + call: "select name from $scratch.names where col_like (name, pat);" + result: "john" +functionCategories: [] +description: "Tests whether an expression column matches a pattern column. Comparisons are case-sensitive." diff --git a/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_LIKE.yaml b/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_LIKE.yaml new file mode 100644 index 0000000000..f1dea8171a --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_LIKE.yaml @@ -0,0 +1,42 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +--- +description: Returns true when the specified regular expression matches values in + a column. Otherwise, returns false. Use this function if your regular expression is + a column; if it is a literal, use REGEXP_LIKE +functionCategories: +- CHARACTER +name: REGEXP_COL_LIKE +signatures: +- description: sample signature description + parameters: + - description: The expression to test. + kind: REGULAR + name: input + type: CHARACTERS + - description: The column containing the PCRE regular expression to use for the test. 
+ kind: REGULAR + name: regex + type: CHARACTERS + returnType: BOOLEAN + sampleCodes: + - call: SELECT page_url, regex_pattern, regexp_col_like(page_url, regex_pattern) from regex_col_like + result: "Raw data\n --\ + \ /cat/, .*(product|pdp).*, false\n --\ + \ /cat/, .*(cat).*, true\n --\ + \ /greek.com/, .*(greek|gear).*, true\n --\ + \ /grass/, .*(greek|gear).*, false\n" diff --git a/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_MATCHES.yaml b/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_MATCHES.yaml new file mode 100644 index 0000000000..8cde246eb7 --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/REGEXP_COL_MATCHES.yaml @@ -0,0 +1,42 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +--- +description: Returns true when the specified regular expression matches values in + a column. Otherwise, returns false. Use this function if your regular expression is + a column; if it is a literal, use REGEXP_MATCHES +functionCategories: +- CHARACTER +name: REGEXP_COL_MATCHES +signatures: +- description: sample signature description + parameters: + - description: The expression to test. + kind: REGULAR + name: input + type: CHARACTERS + - description: The column containing the PCRE regular expression to use for the test. + kind: REGULAR + name: regex + type: CHARACTERS + returnType: BOOLEAN + sampleCodes: + - call: SELECT page_url, regex_pattern, regexp_col_matches(page_url, regex_pattern) from regex_col_like + result: "Raw data\n --\ + \ /cat/, .*(product|pdp).*, false\n --\ + \ /cat/, .*(cat).*, true\n --\ + \ /greek.com/, .*(greek|gear).*, true\n --\ + \ /grass/, .*(greek|gear).*, false\n" diff --git a/services/functions/src/main/resources/function_specs/documented/ST_FROMGEOHASH.yaml b/services/functions/src/main/resources/function_specs/documented/ST_FROMGEOHASH.yaml new file mode 100644 index 0000000000..aaffdbe6f2 --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/ST_FROMGEOHASH.yaml @@ -0,0 +1,32 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +--- +description: Returns the latitude and longitude for a given Geohash. +functionCategories: +- GEOSPATIAL +name: ST_FROMGEOHASH +signatures: +- description: null + parameters: + - description: The Geohash string. 
+ kind: REGULAR + name: geohash + type: STRING + returnType: STRUCT + sampleCodes: + - call: SELECT ST_FROMGEOHASH('u20f80') + result: "{\"Latitude\":45.44219970703125,\"Longitude\":12.3101806640625}" diff --git a/services/functions/src/main/resources/function_specs/documented/ST_GEOHASH.yaml b/services/functions/src/main/resources/function_specs/documented/ST_GEOHASH.yaml new file mode 100644 index 0000000000..20ad56962e --- /dev/null +++ b/services/functions/src/main/resources/function_specs/documented/ST_GEOHASH.yaml @@ -0,0 +1,40 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +--- +description: Returns the Geohash value of a point. +functionCategories: +- GEOSPATIAL +name: ST_GEOHASH +signatures: +- description: null + parameters: + - description: The latitude of the source location in degrees. + kind: REGULAR + name: lat + type: DOUBLE + - description: The longitude of the source location in degrees. + kind: REGULAR + name: lon + type: DOUBLE + - description: The precision of the resulting hash. + kind: OPTIONAL + name: precision + type: INT + returnType: STRING + sampleCodes: + - call: SELECT ST_GEOHASH(45.4408, 12.3155, 6) + result: u20f80 diff --git a/services/functions/src/main/resources/function_specs/undocumented/ANY_VALUE.yaml b/services/functions/src/main/resources/function_specs/undocumented/ANY_VALUE.yaml new file mode 100644 index 0000000000..dda4339388 --- /dev/null +++ b/services/functions/src/main/resources/function_specs/undocumented/ANY_VALUE.yaml @@ -0,0 +1,175 @@ +# +# Copyright (C) 2017-2019 Dremio Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
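The ST_GEOHASH and ST_FROMGEOHASH specs above follow the standard geohash scheme: alternately bisect the longitude and latitude ranges, collect one bit per bisection, and emit a base32 character for every five bits. A self-contained sketch of the encoder (not Dremio's implementation) that should reproduce the documented sample, where (45.4408, 12.3155) at precision 6 yields u20f80:

    /** Standard geohash encoding; even bits bisect longitude, odd bits latitude. */
    public final class GeohashSketch {
      private static final String BASE32 = "0123456789bcdefghjkmnpqrstuvwxyz";

      public static String encode(double lat, double lon, int precision) {
        double latMin = -90, latMax = 90, lonMin = -180, lonMax = 180;
        StringBuilder out = new StringBuilder(precision);
        boolean useLon = true; // start with longitude
        int bits = 0;
        int ch = 0;
        while (out.length() < precision) {
          if (useLon) {
            double mid = (lonMin + lonMax) / 2;
            if (lon >= mid) { ch = (ch << 1) | 1; lonMin = mid; } else { ch <<= 1; lonMax = mid; }
          } else {
            double mid = (latMin + latMax) / 2;
            if (lat >= mid) { ch = (ch << 1) | 1; latMin = mid; } else { ch <<= 1; latMax = mid; }
          }
          useLon = !useLon;
          if (++bits == 5) { out.append(BASE32.charAt(ch)); bits = 0; ch = 0; }
        }
        return out.toString();
      }
    }

ST_FROMGEOHASH is the inverse walk: each character restores five bisections, and the midpoint of the final cell gives the Latitude/Longitude struct shown in the sample output.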
+# + +--- +name: "ANY_VALUE" +signatures: + - + returnType: "BOOLEAN" + parameters: + - + kind: "REGULAR" + type: "BOOLEAN" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "BYTES" + parameters: + - + kind: "REGULAR" + type: "BYTES" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "CHARACTERS" + parameters: + - + kind: "REGULAR" + type: "CHARACTERS" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "FLOAT" + parameters: + - + kind: "REGULAR" + type: "FLOAT" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "DECIMAL" + parameters: + - + kind: "REGULAR" + type: "DECIMAL" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "DOUBLE" + parameters: + - + kind: "REGULAR" + type: "DOUBLE" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "INT" + parameters: + - + kind: "REGULAR" + type: "INT" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "BIGINT" + parameters: + - + kind: "REGULAR" + type: "BIGINT" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "DATE" + parameters: + - + kind: "REGULAR" + type: "DATE" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "TIME" + parameters: + - + kind: "REGULAR" + type: "TIME" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" + - + returnType: "TIMESTAMP" + parameters: + - + kind: "REGULAR" + type: "TIMESTAMP" + name: "" + description: "" + format: "" + description: "" + sampleCodes: + - + call: "" + result: "" +functionCategories: [] +description: "" diff --git a/services/functions/src/test/java/com/dremio/service/functions/FunctionListCoverageTest.java b/services/functions/src/test/java/com/dremio/service/functions/FunctionListCoverageTest.java index 33c760e9ef..cc43ad9056 100644 --- a/services/functions/src/test/java/com/dremio/service/functions/FunctionListCoverageTest.java +++ b/services/functions/src/test/java/com/dremio/service/functions/FunctionListCoverageTest.java @@ -110,7 +110,9 @@ public void assertCoverage() throws IOException { Assert.fail("" + "Functions that are neither documented nor undocumented have been detected.\n" + - "Take the yaml files in missing_function_specs and move to either resources/function_specs/documented or resources/function_specs/undocumented.\n" + + "Take the yaml files in services/functions/target/missing_function_specs \n" + + "and move to either services/functions/src/main/resources/function_specs/documented \n" + + "or services/functions/src/main/resources/function_specs/undocumented.\n" + "The following are new to the system: " + String.join("\n", newToSystem.stream().sorted().collect(Collectors.toList()))); } } diff --git a/services/functions/src/test/java/com/dremio/service/functions/generator/FunctionFactoryTests.java b/services/functions/src/test/java/com/dremio/service/functions/generator/FunctionFactoryTests.java index af745d3c02..bc27ddb112 100644 --- 
a/services/functions/src/test/java/com/dremio/service/functions/generator/FunctionFactoryTests.java +++ b/services/functions/src/test/java/com/dremio/service/functions/generator/FunctionFactoryTests.java @@ -57,7 +57,7 @@ public void production() { .sorted() .collect(Collectors.toList()); - new GoldenFileTestBuilder<>((String functionName) -> executeTest(functionName)) + GoldenFileTestBuilder.create((String functionName) -> executeTest(functionName)) .addListByRule(names, (name) -> Pair.of(name,name)) .runTests(); } diff --git a/services/functions/src/test/resources/goldenfiles/expected/FunctionFactoryTests.production.yaml b/services/functions/src/test/resources/goldenfiles/expected/FunctionFactoryTests.production.yaml index 0579d3581a..b88b049747 100644 --- a/services/functions/src/test/resources/goldenfiles/expected/FunctionFactoryTests.production.yaml +++ b/services/functions/src/test/resources/goldenfiles/expected/FunctionFactoryTests.production.yaml @@ -431,6 +431,23 @@ name: "AND" signatures: - "(BOOLEAN, BOOLEAN) -> BOOLEAN" + - + description: "ANY_VALUE" + input: "ANY_VALUE" + output: + name: "ANY_VALUE" + signatures: + - "(BOOLEAN) -> BOOLEAN" + - "(BYTES) -> BYTES" + - "(CHARACTERS) -> CHARACTERS" + - "(FLOAT) -> FLOAT" + - "(DECIMAL) -> DECIMAL" + - "(DOUBLE) -> DOUBLE" + - "(INT) -> INT" + - "(BIGINT) -> BIGINT" + - "(DATE) -> DATE" + - "(TIME) -> TIME" + - "(TIMESTAMP) -> TIMESTAMP" - description: "APPROX_COUNT_DISTINCT" input: "APPROX_COUNT_DISTINCT" @@ -1058,6 +1075,13 @@ output: name: "COLLECT" signatures: [] + - + description: "COL_LIKE" + input: "COL_LIKE" + output: + name: "COL_LIKE" + signatures: + - "(CHARACTERS, CHARACTERS) -> BOOLEAN" - description: "COMPARETYPE" input: "COMPARETYPE" @@ -3987,6 +4011,20 @@ name: "RANK" signatures: - "() -> BIGINT" + - + description: "REGEXP_COL_LIKE" + input: "REGEXP_COL_LIKE" + output: + name: "REGEXP_COL_LIKE" + signatures: + - "(CHARACTERS, CHARACTERS) -> BOOLEAN" + - + description: "REGEXP_COL_MATCHES" + input: "REGEXP_COL_MATCHES" + output: + name: "REGEXP_COL_MATCHES" + signatures: + - "(CHARACTERS, CHARACTERS) -> BOOLEAN" - description: "REGEXP_EXTRACT" input: "REGEXP_EXTRACT" @@ -4049,8 +4087,7 @@ output: name: "REPLACE" signatures: - - "(BYTES, STRING, STRING) -> BYTES" - - "(CHARACTERS, STRING, STRING) -> CHARACTERS" + - "(CHARACTERS, CHARACTERS, CHARACTERS) -> CHARACTERS" - description: "REVERSE" input: "REVERSE" @@ -4373,6 +4410,20 @@ name: "STRPOS" signatures: - "(CHARACTERS, CHARACTERS) -> INT" + - + description: "ST_FROMGEOHASH" + input: "ST_FROMGEOHASH" + output: + name: "ST_FROMGEOHASH" + signatures: + - "(ANY) -> ANY" + - + description: "ST_GEOHASH" + input: "ST_GEOHASH" + output: + name: "ST_GEOHASH" + signatures: + - "(DOUBLE, DOUBLE, BIGINT?) 
-> CHARACTERS" - description: "SUBSTR" input: "SUBSTR" diff --git a/services/grpc/pom.xml b/services/grpc/pom.xml index 62146739a5..f9ca4b033c 100644 --- a/services/grpc/pom.xml +++ b/services/grpc/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-grpc Services - gRPC diff --git a/services/grpc/src/main/java/com/dremio/service/conduit/client/ConduitProviderImpl.java b/services/grpc/src/main/java/com/dremio/service/conduit/client/ConduitProviderImpl.java index a678fec182..bb3c7923ce 100644 --- a/services/grpc/src/main/java/com/dremio/service/conduit/client/ConduitProviderImpl.java +++ b/services/grpc/src/main/java/com/dremio/service/conduit/client/ConduitProviderImpl.java @@ -26,6 +26,7 @@ import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; import com.dremio.service.Service; +import com.dremio.service.grpc.ContextualizedClientInterceptor; import com.dremio.service.grpc.DefaultGrpcServiceConfigProvider; import com.dremio.service.grpc.GrpcChannelBuilderFactory; import com.dremio.ssl.SSLEngineFactory; @@ -84,6 +85,7 @@ public ManagedChannel load(@SuppressWarnings("NullableProblems") NodeEndpoint pe .maxInboundMetadataSize(Integer.MAX_VALUE) .enableRetry() .intercept(TracingClientInterceptor.newBuilder().withTracer(TracerFacade.INSTANCE.getTracer()).build()) + .intercept(ContextualizedClientInterceptor.buildSingleTenantClientInterceptor()) .defaultServiceConfig(DefaultGrpcServiceConfigProvider.getDefaultGrpcServiceConfig(serviceNames)) .maxRetryAttempts(GrpcChannelBuilderFactory.MAX_RETRY); diff --git a/services/grpc/src/main/java/com/dremio/service/conduit/server/ConduitInProcessChannelProvider.java b/services/grpc/src/main/java/com/dremio/service/conduit/server/ConduitInProcessChannelProvider.java index e84c1d2f21..2c30013013 100644 --- a/services/grpc/src/main/java/com/dremio/service/conduit/server/ConduitInProcessChannelProvider.java +++ b/services/grpc/src/main/java/com/dremio/service/conduit/server/ConduitInProcessChannelProvider.java @@ -20,7 +20,7 @@ import com.dremio.context.RequestContext; import com.dremio.service.Service; -import com.dremio.service.grpc.SingleTenantClientInterceptor; +import com.dremio.service.grpc.ContextualizedClientInterceptor; import io.grpc.Channel; import io.grpc.ManagedChannel; @@ -44,7 +44,7 @@ public ConduitInProcessChannelProvider(String inProcessServerName, Provider newInProcessChannelBuilder(String processName) { return newInProcessChannelBuilder(processName, defaultServiceConfigProvider.get()); } + @Override public ManagedChannelBuilder newInProcessChannelBuilder(String processName, Map defaultServiceConfigProvider) { final ManagedChannelBuilder builder = InProcessChannelBuilder.forName(processName); addDefaultBuilderProperties(builder, defaultServiceConfigProvider); diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedClientInterceptor.java b/services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedClientInterceptor.java new file mode 100644 index 0000000000..7fa02441d9 --- /dev/null +++ b/services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedClientInterceptor.java @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.grpc; + +import javax.inject.Provider; + +import com.dremio.context.RequestContext; +import com.dremio.context.SerializableContext; +import com.dremio.context.SupportContext; +import com.dremio.context.TenantContext; +import com.dremio.context.UserContext; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import io.grpc.CallOptions; +import io.grpc.Channel; +import io.grpc.ClientCall; +import io.grpc.ClientInterceptor; +import io.grpc.ForwardingClientCall; +import io.grpc.ForwardingClientCallListener; +import io.grpc.Metadata; +import io.grpc.MethodDescriptor; + +/** + * Interceptor that populates gRPC headers from RequestContext. + */ +public class ContextualizedClientInterceptor implements ClientInterceptor { + public static final class ContextTransferBehavior { + private final RequestContext.Key key; + private final boolean required; + private final Provider fallback; + + /** + * Describes how each context should be transferred: + * @param key Key to the context within RequestContext being transferred + * @param required Whether this key must be present (will raise an exception if not present when required) + * @param fallback When the context is not present, this fallback can be used instead (null for no fallback) + */ + public ContextTransferBehavior( + RequestContext.Key key, + boolean required, + Provider fallback) + { + this.key = key; + this.required = required; + this.fallback = fallback; + } + + public RequestContext.Key getKey() { + return key; + } + + public boolean getRequired() { + return required; + } + + public Provider getFallback() { + return fallback; + } + } + + public static ContextualizedClientInterceptor buildSingleTenantClientInterceptor() + { + return new ContextualizedClientInterceptor(ImmutableList.of( + new ContextTransferBehavior(TenantContext.CTX_KEY, false, null), + new ContextTransferBehavior(UserContext.CTX_KEY, false, null) + // TODO: Copy SupportContext too? + )); + } + + public static ContextualizedClientInterceptor buildSingleTenantClientInterceptorWithDefaults( + Provider defaultRequestContext) + { + return new ContextualizedClientInterceptor(ImmutableList.of( + new ContextTransferBehavior(TenantContext.CTX_KEY, false, () -> defaultRequestContext.get().get(TenantContext.CTX_KEY)), + new ContextTransferBehavior(UserContext.CTX_KEY, false, () -> defaultRequestContext.get().get(UserContext.CTX_KEY)) + // TODO: Copy SupportContext too? 
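To make the transfer behaviors concrete, here is a client-side usage sketch. It assumes RequestContext exposes a run(...) helper for scoping, and the tenantContext, userContext, stub and request variables are placeholders:

    // Channel with the single-tenant interceptor: no fallback, contexts copied only if present.
    ManagedChannel channel = ManagedChannelBuilder.forAddress("localhost", 9000)
        .usePlaintext()
        .intercept(ContextualizedClientInterceptor.buildSingleTenantClientInterceptor())
        .build();

    RequestContext.current()
        .with(TenantContext.CTX_KEY, tenantContext) // placeholder instances
        .with(UserContext.CTX_KEY, userContext)
        .run(() -> stub.someRpc(request));          // start(...) above serializes both into headers

With the multi-tenant builder, the same call would instead fail fast with a RuntimeException if either required context were missing.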
+ )); + } + + public static ContextualizedClientInterceptor buildMultiTenantClientInterceptor() { + return new ContextualizedClientInterceptor(ImmutableList.of( + new ContextTransferBehavior(TenantContext.CTX_KEY, true, null), + new ContextTransferBehavior(UserContext.CTX_KEY, true, null), + new ContextTransferBehavior(SupportContext.CTX_KEY, false, null) + )); + } + + private final ImmutableList actions; + + public ContextualizedClientInterceptor(ImmutableList actions) { + this.actions = actions; + } + + @Override + public ClientCall interceptCall(MethodDescriptor methodDescriptor, CallOptions callOptions, Channel channel) { + return new ForwardingClientCall.SimpleForwardingClientCall(channel.newCall(methodDescriptor, callOptions)) { + @Override + public void start(Listener responseListener, Metadata headers) { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + for (ContextTransferBehavior action : actions) { + final SerializableContext context = RequestContext.current().get(action.getKey()); + if (context != null) { + context.serialize(builder); + } else if (action.getFallback() != null) { + action.getFallback().get().serialize(builder); + } else if (action.getRequired()) { + throw new RuntimeException("RequestContext for " + action.getKey().getName() + " is required but not present"); + } + } + + builder.build().forEach( + (key, value) -> headers.put(Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER), value)); + + super.start(new ForwardingClientCallListener.SimpleForwardingClientCallListener(responseListener) { + @Override + public void onHeaders(Metadata headers) { + super.onHeaders(headers); + } + }, headers); + } + }; + } +} diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantServerInterceptor.java b/services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedServerInterceptor.java similarity index 58% rename from services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantServerInterceptor.java rename to services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedServerInterceptor.java index 4a22ed928b..312bf6f75e 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantServerInterceptor.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/ContextualizedServerInterceptor.java @@ -15,12 +15,15 @@ */ package com.dremio.service.grpc; -import static com.dremio.context.SupportContext.deserializeSupportRoles; +import java.util.Map; +import com.dremio.context.ExecutorToken; import com.dremio.context.RequestContext; +import com.dremio.context.SerializableContextTransformer; import com.dremio.context.SupportContext; import com.dremio.context.TenantContext; import com.dremio.context.UserContext; +import com.google.common.collect.ImmutableList; import io.grpc.ForwardingServerCallListener; import io.grpc.Metadata; @@ -29,30 +32,43 @@ import io.grpc.ServerInterceptor; /** - * Interceptor that populates MT RequestContext entries from gRPC headers. + * Interceptor that populates RequestContext from gRPC headers. */ -public class MultiTenantServerInterceptor implements ServerInterceptor { +public class ContextualizedServerInterceptor implements ServerInterceptor { + /** + * Used to copy ExecutorToken so that it can be used for other gRPC calls in the context of the original API. 
+ */ + public static ContextualizedServerInterceptor buildExecutorTokenCopier() { + return new ContextualizedServerInterceptor(ImmutableList.of( + new ExecutorToken.Transformer() + )); + } + + public static ContextualizedServerInterceptor buildBasicContextualizedServerInterceptor() { + return new ContextualizedServerInterceptor(ImmutableList.of( + new TenantContext.Transformer(), + new UserContext.Transformer(), + new SupportContext.Transformer() + )); + } + + private final ImmutableList transformers; + + public ContextualizedServerInterceptor(ImmutableList transformers) + { + this.transformers = transformers; + } + @Override public ServerCall.Listener interceptCall( ServerCall call, final Metadata requestHeaders, ServerCallHandler next) { try { - RequestContext contextBuilder = RequestContext.empty() - .with(TenantContext.CTX_KEY, - new TenantContext(requestHeaders.get(HeaderKeys.PROJECT_ID_HEADER_KEY), - requestHeaders.get(HeaderKeys.ORG_ID_HEADER_KEY))) - .with(UserContext.CTX_KEY, new UserContext(requestHeaders.get(HeaderKeys.USER_HEADER_KEY))); - - if (requestHeaders.containsKey(HeaderKeys.SUPPORT_TICKET_HEADER_KEY) - && requestHeaders.containsKey(HeaderKeys.SUPPORT_EMAIL_HEADER_KEY) - && requestHeaders.containsKey(HeaderKeys.SUPPORT_ROLES_HEADER_KEY)) - { - contextBuilder = contextBuilder.with(SupportContext.CTX_KEY, - new SupportContext( - requestHeaders.get(HeaderKeys.SUPPORT_TICKET_HEADER_KEY), - requestHeaders.get(HeaderKeys.SUPPORT_EMAIL_HEADER_KEY), - deserializeSupportRoles(requestHeaders.get(HeaderKeys.SUPPORT_ROLES_HEADER_KEY)))); + final Map headers = SerializableContextTransformer.convert(requestHeaders); + RequestContext contextBuilder = RequestContext.current(); + for (SerializableContextTransformer transformer : transformers) { + contextBuilder = transformer.deserialize(headers, contextBuilder); } final RequestContext context = contextBuilder; diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/DrainableByteBufInputStream.java b/services/grpc/src/main/java/com/dremio/service/grpc/DrainableByteBufInputStream.java index 2dcba018a1..9b9ffbc025 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/DrainableByteBufInputStream.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/DrainableByteBufInputStream.java @@ -35,6 +35,7 @@ public DrainableByteBufInputStream(ByteBuf buffer) { this.buf = buffer; } + @Override public int drainTo(OutputStream target) throws IOException { int size = this.buf.readableBytes(); if (!ByteBufToStreamCopier.add(this.buf, target)) { @@ -45,6 +46,7 @@ public int drainTo(OutputStream target) throws IOException { return size; } + @Override public void close() { if (!isClosed) { if (this.buf.refCnt() > 0) { diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/HeaderKeys.java b/services/grpc/src/main/java/com/dremio/service/grpc/HeaderKeys.java index 0ec569b80a..61ae338f28 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/HeaderKeys.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/HeaderKeys.java @@ -20,18 +20,8 @@ /** * grpc header keys used across dremio services. 
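The transformer list is what makes the server interceptor reusable: each SerializableContextTransformer pulls the headers it owns out of the converted map and folds a context into the builder, so new context types ride along without touching the interceptor. A hypothetical transformer for an invented TraceContext (the class, key, and header name below are illustrative, not part of this patch):

    // Hypothetical: restores an invented "x-acme-trace-id" header into the RequestContext.
    public class TraceContextTransformer implements SerializableContextTransformer {
      @Override
      public RequestContext deserialize(Map<String, String> headers, RequestContext builder) {
        final String traceId = headers.get("x-acme-trace-id");
        return traceId == null ? builder : builder.with(TraceContext.CTX_KEY, new TraceContext(traceId));
      }
    }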
*/ -public class HeaderKeys { - public static final Metadata.Key PROJECT_ID_HEADER_KEY = - Metadata.Key.of("x-dremio-project-id-key", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key TOKEN_HEADER_KEY = - Metadata.Key.of("x-dremio-token-key", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key ORG_ID_HEADER_KEY = - Metadata.Key.of("x-dremio-org-id-key", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key USER_HEADER_KEY = - Metadata.Key.of("x-dremio-user-key", Metadata.ASCII_STRING_MARSHALLER); +public final class HeaderKeys { + private HeaderKeys() {} public static final Metadata.Key RELEASE_NAME_HEADER_KEY = Metadata.Key.of("x-dremio-control-plane-service-release-name", @@ -39,14 +29,4 @@ public class HeaderKeys { public static final Metadata.Key SERVICE_NAME_HEADER_KEY = Metadata.Key.of("x-dremio-control-plane-service", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key SUPPORT_TICKET_HEADER_KEY = - Metadata.Key.of("x-dremio-support-ticket-key", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key SUPPORT_EMAIL_HEADER_KEY = - Metadata.Key.of("x-dremio-support-email-key", Metadata.ASCII_STRING_MARSHALLER); - - public static final Metadata.Key SUPPORT_ROLES_HEADER_KEY = - Metadata.Key.of("x-dremio-support-roles-key", Metadata.ASCII_STRING_MARSHALLER); - } diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantClientInterceptor.java b/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantClientInterceptor.java deleted file mode 100644 index 6707b16ca5..0000000000 --- a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantClientInterceptor.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.service.grpc; - -import static com.dremio.context.SupportContext.serializeSupportRoles; - -import com.dremio.context.RequestContext; -import com.dremio.context.SupportContext; -import com.dremio.context.TenantContext; -import com.dremio.context.UserContext; - -import io.grpc.CallOptions; -import io.grpc.Channel; -import io.grpc.ClientCall; -import io.grpc.ClientInterceptor; -import io.grpc.ForwardingClientCall; -import io.grpc.ForwardingClientCallListener; -import io.grpc.Metadata; -import io.grpc.MethodDescriptor; - -/** - * Interceptor that populates MT gRPC headers from RequestContext. 
- */ -public class MultiTenantClientInterceptor implements ClientInterceptor { - @Override - public ClientCall interceptCall(MethodDescriptor methodDescriptor, CallOptions callOptions, Channel channel) { - return new ForwardingClientCall.SimpleForwardingClientCall(channel.newCall(methodDescriptor, callOptions)) { - @Override - public void start(ClientCall.Listener responseListener, Metadata headers) { - - headers.put(HeaderKeys.PROJECT_ID_HEADER_KEY, - RequestContext.current().get(TenantContext.CTX_KEY).getProjectId().toString()); - headers.put(HeaderKeys.ORG_ID_HEADER_KEY, - RequestContext.current().get(TenantContext.CTX_KEY).getOrgId().toString()); - headers.put(HeaderKeys.USER_HEADER_KEY, RequestContext.current().get(UserContext.CTX_KEY).serialize()); - - SupportContext supportContext = RequestContext.current().get(SupportContext.CTX_KEY); - if (supportContext != null) { - headers.put(HeaderKeys.SUPPORT_TICKET_HEADER_KEY, supportContext.getTicket()); - headers.put(HeaderKeys.SUPPORT_EMAIL_HEADER_KEY, supportContext.getEmail()); - headers.put(HeaderKeys.SUPPORT_ROLES_HEADER_KEY, serializeSupportRoles(supportContext.getRoles())); - } - - super.start(new ForwardingClientCallListener.SimpleForwardingClientCallListener(responseListener) { - @Override - public void onHeaders(Metadata headers) { - super.onHeaders(headers); - } - }, headers); - } - }; - } -} diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcChannelBuilderFactory.java b/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcChannelBuilderFactory.java index d5a729315f..fc9e840d17 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcChannelBuilderFactory.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcChannelBuilderFactory.java @@ -28,15 +28,18 @@ * gRPC channel factory with multi-tenancy support. 
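The deletion that follows does not lose behavior: the hardcoded header writes of MultiTenantClientInterceptor are reproduced by the behavior list in buildMultiTenantClientInterceptor() shown earlier, with one deliberate difference — a missing required context now fails with an explicit RuntimeException rather than a NullPointerException from an unconditional get(...) chain:

    // Same contract as the deleted class, expressed as transfer behaviors:
    ClientInterceptor mt = new ContextualizedClientInterceptor(ImmutableList.of(
        new ContextTransferBehavior(TenantContext.CTX_KEY, true, null),   // was: unconditional header writes
        new ContextTransferBehavior(UserContext.CTX_KEY, true, null),     // was: unconditional header write
        new ContextTransferBehavior(SupportContext.CTX_KEY, false, null)  // was: null-checked block
    ));

The wire keys also move: instead of the shared HeaderKeys constants removed above, each context class now owns its own serialization through SerializableContext.serialize(...).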
*/ public final class MultiTenantGrpcChannelBuilderFactory extends BaseGrpcChannelBuilderFactory { - private static final ClientInterceptor mtInterceptor = new MultiTenantClientInterceptor(); + private static final ClientInterceptor mtInterceptor = + ContextualizedClientInterceptor.buildMultiTenantClientInterceptor(); public MultiTenantGrpcChannelBuilderFactory(Tracer tracer, Provider> defaultServiceConfigProvider) { super(tracer, Sets.newHashSet(mtInterceptor), defaultServiceConfigProvider); } - public MultiTenantGrpcChannelBuilderFactory(Tracer tracer, - Provider> defaultServiceConfigProvider, - ClientInterceptor interceptor) { + public MultiTenantGrpcChannelBuilderFactory( + Tracer tracer, + Provider> defaultServiceConfigProvider, + ClientInterceptor interceptor) + { super(tracer, Sets.newHashSet(mtInterceptor, interceptor), defaultServiceConfigProvider); } } diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcServerBuilderFactory.java b/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcServerBuilderFactory.java index 8b9e192077..92a2bf114b 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcServerBuilderFactory.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/MultiTenantGrpcServerBuilderFactory.java @@ -15,11 +15,12 @@ */ package com.dremio.service.grpc; -import java.util.Collections; -import java.util.Set; - +import com.dremio.context.SupportContext; +import com.dremio.context.TenantContext; +import com.dremio.context.UserContext; import com.dremio.telemetry.utils.GrpcTracerFacade; import com.dremio.telemetry.utils.TracerFacade; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import io.grpc.ServerInterceptor; @@ -29,13 +30,14 @@ * gRPC server factory with multi-tenancy support. */ public final class MultiTenantGrpcServerBuilderFactory extends BaseGrpcServerBuilderFactory { - private static final ServerInterceptor mtInterceptor = new MultiTenantServerInterceptor(); + private static final ServerInterceptor mtInterceptor = + new ContextualizedServerInterceptor(ImmutableList.of( + new TenantContext.Transformer(), + new UserContext.Transformer(), + new SupportContext.Transformer() + )); public MultiTenantGrpcServerBuilderFactory(Tracer tracer) { super(new GrpcTracerFacade((TracerFacade) tracer), Sets.newHashSet(mtInterceptor)); } - - public MultiTenantGrpcServerBuilderFactory(GrpcTracerFacade tracer, Set serverInterceptors) { - super(tracer, Sets.union(serverInterceptors, Collections.singleton(mtInterceptor))); - } } diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/SimpleGrpcServerBuilderFactory.java b/services/grpc/src/main/java/com/dremio/service/grpc/SimpleGrpcServerBuilderFactory.java index 3738c2eff0..e3ccd9dd02 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/SimpleGrpcServerBuilderFactory.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/SimpleGrpcServerBuilderFactory.java @@ -18,18 +18,23 @@ import java.util.Set; import com.dremio.telemetry.utils.GrpcTracerFacade; +import com.dremio.telemetry.utils.TracerFacade; import io.grpc.ServerInterceptor; import io.opentracing.Tracer; /** - * Grpc server factory without multi-tenancy support. + * Grpc server factory helper. Most behavior depends on registered interceptors. 
*/ public final class SimpleGrpcServerBuilderFactory extends BaseGrpcServerBuilderFactory { public SimpleGrpcServerBuilderFactory(Tracer tracer) { super(tracer); } + public SimpleGrpcServerBuilderFactory(TracerFacade tracer, Set serverInterceptorSet) { + super(new GrpcTracerFacade(tracer), serverInterceptorSet); + } + public SimpleGrpcServerBuilderFactory(GrpcTracerFacade tracer, Set serverInterceptorSet) { super(tracer, serverInterceptorSet); } diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantClientInterceptor.java b/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantClientInterceptor.java deleted file mode 100644 index 145b1d604e..0000000000 --- a/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantClientInterceptor.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.service.grpc; - -import javax.inject.Provider; - -import com.dremio.context.ExecutorToken; -import com.dremio.context.RequestContext; -import com.dremio.context.TenantContext; -import com.dremio.context.UserContext; -import com.google.common.base.Preconditions; - -import io.grpc.CallOptions; -import io.grpc.Channel; -import io.grpc.ClientCall; -import io.grpc.ClientInterceptor; -import io.grpc.ForwardingClientCall; -import io.grpc.ForwardingClientCallListener; -import io.grpc.Metadata; -import io.grpc.MethodDescriptor; - -/** - * Interceptor for cases where the client is a per-tenant service. Use the defaults unless the caller overrides them. 
- */ -public class SingleTenantClientInterceptor implements ClientInterceptor { - private Provider defaultRequestContext; - - public SingleTenantClientInterceptor(Provider defaultRequestContext) { - this.defaultRequestContext = defaultRequestContext; - } - - @Override - public ClientCall interceptCall(MethodDescriptor methodDescriptor, CallOptions callOptions, Channel channel) { - return new ForwardingClientCall.SimpleForwardingClientCall(channel.newCall(methodDescriptor, callOptions)) { - @Override - public void start(ClientCall.Listener responseListener, Metadata headers) { - - TenantContext tenantContext = RequestContext.current().get(TenantContext.CTX_KEY); - if (tenantContext == null) { - tenantContext = defaultRequestContext.get().get(TenantContext.CTX_KEY); - } - headers.put(HeaderKeys.PROJECT_ID_HEADER_KEY, tenantContext.getProjectId().toString()); - headers.put(HeaderKeys.ORG_ID_HEADER_KEY, tenantContext.getOrgId().toString()); - - UserContext userContext = RequestContext.current().get(UserContext.CTX_KEY); - if (userContext == null) { - userContext = defaultRequestContext.get().get(UserContext.CTX_KEY); - } - headers.put(HeaderKeys.USER_HEADER_KEY, userContext.serialize()); - - ExecutorToken executorToken = RequestContext.current().get(ExecutorToken.CTX_KEY); - Preconditions.checkArgument(executorToken == null, "Executor Token should not be " + - "propagated out of the service"); - - super.start(new ForwardingClientCallListener.SimpleForwardingClientCallListener(responseListener) { - @Override - public void onHeaders(Metadata headers) { - super.onHeaders(headers); - } - }, headers); - } - }; - } -} diff --git a/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantGrpcChannelBuilderFactory.java b/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantGrpcChannelBuilderFactory.java index 377727e4a4..878c1005c4 100644 --- a/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantGrpcChannelBuilderFactory.java +++ b/services/grpc/src/main/java/com/dremio/service/grpc/SingleTenantGrpcChannelBuilderFactory.java @@ -30,14 +30,21 @@ * Channel builder factory where the client is a single-tenant service. 
*/ public class SingleTenantGrpcChannelBuilderFactory extends BaseGrpcChannelBuilderFactory { - public SingleTenantGrpcChannelBuilderFactory(Tracer tracer, Provider defaultContext, - Provider> defaultServiceConfigProvider) { - super(tracer, Sets.newHashSet(new SingleTenantClientInterceptor(defaultContext)), defaultServiceConfigProvider); + public SingleTenantGrpcChannelBuilderFactory( + Tracer tracer, + Provider defaultContext, + Provider> defaultServiceConfigProvider) + { + super(tracer, + Sets.newHashSet(ContextualizedClientInterceptor.buildSingleTenantClientInterceptorWithDefaults(defaultContext)), + defaultServiceConfigProvider); } - public SingleTenantGrpcChannelBuilderFactory(Tracer tracer, - Provider> defaultServiceConfigProvider, - Set interceptors) { + public SingleTenantGrpcChannelBuilderFactory( + Tracer tracer, + Provider> defaultServiceConfigProvider, + Set interceptors) + { super(tracer, interceptors, defaultServiceConfigProvider); } } diff --git a/services/hadoopcredentials/pom.xml b/services/hadoopcredentials/pom.xml index ec19840e04..64639eaeee 100644 --- a/services/hadoopcredentials/pom.xml +++ b/services/hadoopcredentials/pom.xml @@ -22,7 +22,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/services/hadoopcredentials/src/main/java/com/dremio/hadoop/security/alias/DremioCredentialProvider.java b/services/hadoopcredentials/src/main/java/com/dremio/hadoop/security/alias/DremioCredentialProvider.java index dd9376dec4..7d7cc45950 100644 --- a/services/hadoopcredentials/src/main/java/com/dremio/hadoop/security/alias/DremioCredentialProvider.java +++ b/services/hadoopcredentials/src/main/java/com/dremio/hadoop/security/alias/DremioCredentialProvider.java @@ -14,6 +14,7 @@ * limitations under the License. */ package com.dremio.hadoop.security.alias; + import java.io.IOException; import java.net.URI; import java.util.Collections; @@ -38,6 +39,7 @@ public class DremioCredentialProvider extends CredentialProvider { public static final String DREMIO_SCHEME_PREFIX = "dremio+"; + public static final String PROTOCOL_PREFIX = "https://"; private static final Logger logger = LoggerFactory.getLogger(DremioCredentialProvider.class); @@ -57,8 +59,7 @@ public void flush() { /** * @param alias the name of a specific credential * @return a pair of the credential name and its resolved secret. - * Return null if fail to resolve the provided conf value. - * @throws IOException + * @throws IOException if secret resolution fails. */ @Override public CredentialEntry getCredentialEntry(String alias) throws IOException { @@ -66,29 +67,41 @@ public CredentialEntry getCredentialEntry(String alias) throws IOException { if (pattern == null) { return null; } + + // Trim the "dremio+" prefix. If the prefix is absent, return null: the value is not a Dremio-managed secret, so this provider does not handle it. + if (!pattern.toLowerCase(Locale.ROOT).startsWith(DREMIO_SCHEME_PREFIX)) { + return null; + } + final String trimmedPattern = pattern.substring(DREMIO_SCHEME_PREFIX.length()); + + // Convert the input to a URI. final URI secretUri; try { secretUri = CredentialsServiceUtils.safeURICreate(pattern); } catch (IllegalArgumentException e) { - return null; + // If it's not a URI, return the `pattern` as is, without "dremio+". + return new DremioCredentialEntry(alias, trimmedPattern.toCharArray()); } + + // Check if there is a scheme. + // If the scheme does not exist, return the `pattern` as is, without "dremio+". 
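Summarizing the resolution ladder this method now implements (the behavior is pinned down by the tests further below; the key variable is a placeholder):

    Configuration conf = new Configuration();
    conf.set("hadoop.security.credential.provider.path", "dremio:///");

    // conf.getPassword(key) outcomes under the new logic:
    conf.set(key, "dremio+abc123");             // no URI scheme     -> cleartext fallback: "abc123"
    conf.set(key, "dremio+file:///tmp/secret"); // resolvable scheme -> looked up by the credentials service
    conf.set(key, "dremio+data:abc123");        // failing lookup    -> IOException (previously a cleartext fallback)

The last case is the real behavior change: resolution failures now surface as IOExceptions instead of silently degrading to cleartext.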
+ // If scheme exists, continue to Dremio credentials service. final String scheme = secretUri.getScheme(); if (Strings.isNullOrEmpty(scheme)) { - return null; - } - if (!scheme.toLowerCase(Locale.ROOT).startsWith(DREMIO_SCHEME_PREFIX)) { - return null; + return new DremioCredentialEntry(alias, trimmedPattern.toCharArray()); } - final String trimmedPattern = pattern.substring(DREMIO_SCHEME_PREFIX.length()); final char[] secretBytes; try { String secret = credentialsService.lookup(trimmedPattern); secretBytes = secret.toCharArray(); secret = null; return new DremioCredentialEntry(alias, secretBytes); - } catch (CredentialsException | IllegalArgumentException e) { + } catch (IllegalArgumentException e) { logger.error("Failed to resolve {}", alias); - return null; + throw new IOException(String.format("Failed to resolve '%s'", alias), e); + } catch (CredentialsException e) { + logger.error("Failed to resolve {}", alias); + throw new IOException("Failed to resolve credentials: " + e.getMessage(), e); } } diff --git a/services/hadoopcredentials/src/test/java/com/dremio/hadoop/security/alias/TestDremioCredentialProviders.java b/services/hadoopcredentials/src/test/java/com/dremio/hadoop/security/alias/TestDremioCredentialProviders.java index 09173181ad..278ef04cbe 100644 --- a/services/hadoopcredentials/src/test/java/com/dremio/hadoop/security/alias/TestDremioCredentialProviders.java +++ b/services/hadoopcredentials/src/test/java/com/dremio/hadoop/security/alias/TestDremioCredentialProviders.java @@ -20,11 +20,15 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertEquals; +import java.io.BufferedWriter; +import java.io.FileWriter; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import com.dremio.services.credentials.CredentialsService; @@ -36,6 +40,9 @@ public class TestDremioCredentialProviders { private static final String SECRET_KEY = "a.b.c.key"; private CredentialsService credentialsService; + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + @Before public void setUp() throws Exception { credentialsService = CredentialsService.newInstance(DEFAULT_DREMIO_CONFIG, CLASSPATH_SCAN_RESULT); @@ -77,6 +84,15 @@ public void testClearTextPasswordFallback2() throws IOException { assertEquals("abc123", new String(conf.getPassword(SECRET_KEY))); } + @Test + public void testClearTextPasswordFallback3() throws IOException { + Configuration conf = new Configuration(); + conf.set(HADOOP_SECURITY_CREDENTIAL_PROVIDER_PATH, "dremio:///"); + conf.set(SECRET_KEY, "dremio+abc123`?~!@#$%^&*()-_=+[]{}\\|;:'\",./<>"); + + assertEquals("abc123`?~!@#$%^&*()-_=+[]{}\\|;:'\",./<>", new String(conf.getPassword(SECRET_KEY))); + } + @Test public void testSchemeCaseInsensitivity() throws IOException { @@ -99,12 +115,33 @@ public void invalidCredentialProviderScheme() throws IOException { } @Test - public void clearTextPasswordWithDremioScheme() throws IOException { + public void testInvalidSecret() throws IOException { Configuration conf = new Configuration(); conf.set(HADOOP_SECURITY_CREDENTIAL_PROVIDER_PATH, "dremio:///"); conf.set(SECRET_KEY, "dremio+data:abc123"); - assertEquals("If an issue occurred when Dremio is resolving the secret URI, we will fallback to cleartext.", - "dremio+data:abc123", new String(conf.getPassword(SECRET_KEY))); + assertThatThrownBy(() -> conf.getPassword(SECRET_KEY)) + 
.isInstanceOf(IOException.class) + .hasMessageContaining("Configuration problem with provider path."); + } + + @Test + public void fileSecret() throws IOException { + + final String originalString = "abc123"; + final String file = "/test.file"; + + final String fileLoc = tempFolder.newFolder().getAbsolutePath().concat(file); + + // create the password file + try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileLoc))) { + writer.write(originalString); + } + + Configuration conf = new Configuration(); + conf.set(HADOOP_SECURITY_CREDENTIAL_PROVIDER_PATH, "dremio:///"); + conf.set(SECRET_KEY, "dremio+file://".concat(fileLoc)); + + assertEquals(originalString, new String(conf.getPassword(SECRET_KEY))); } } diff --git a/services/jobresults/client/pom.xml b/services/jobresults/client/pom.xml index 41bf95759a..7b27423b78 100644 --- a/services/jobresults/client/pom.xml +++ b/services/jobresults/client/pom.xml @@ -22,12 +22,11 @@ dremio-services-jobresults com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 dremio-services-jobresults-client - 24.0.0-202302100528110223-3a169b7c Services - JobResults - Client jar diff --git a/services/jobresults/common/pom.xml b/services/jobresults/common/pom.xml index f8dbffc1cd..b5c173a73f 100644 --- a/services/jobresults/common/pom.xml +++ b/services/jobresults/common/pom.xml @@ -22,12 +22,11 @@ dremio-services-jobresults com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 dremio-services-jobresults-common - 24.0.0-202302100528110223-3a169b7c Services - JobResults - Common jar diff --git a/services/jobresults/pom.xml b/services/jobresults/pom.xml index 91ba1c0eea..25531c133a 100644 --- a/services/jobresults/pom.xml +++ b/services/jobresults/pom.xml @@ -23,7 +23,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobresults diff --git a/services/jobresults/server/pom.xml b/services/jobresults/server/pom.xml index cd7412600f..47affc5df6 100644 --- a/services/jobresults/server/pom.xml +++ b/services/jobresults/server/pom.xml @@ -23,11 +23,10 @@ dremio-services-jobresults com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobresults-server - 24.0.0-202302100528110223-3a169b7c Services - Job Results - Server jar diff --git a/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsBindableService.java b/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsBindableService.java index aabc336d99..9fbda537cf 100644 --- a/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsBindableService.java +++ b/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsBindableService.java @@ -41,6 +41,7 @@ public JobResultsBindableService(BufferAllocator allocator, JobResultsGrpcServer this.jobResultsGrpcServerFacade = jobResultsGrpcServerFacade; } + @Override public ServerServiceDefinition bindService() { ServerServiceDefinition.Builder serviceBuilder = ServerServiceDefinition.builder(JobResultsServiceGrpc.SERVICE_NAME); diff --git a/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsGrpcServerFacade.java b/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsGrpcServerFacade.java index 34f973e8b1..b9e51105f5 100644 --- 
a/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsGrpcServerFacade.java +++ b/services/jobresults/server/src/main/java/com/dremio/service/jobresults/server/JobResultsGrpcServerFacade.java @@ -68,6 +68,7 @@ protected void runException(Runnable command, Throwable t) { }; } + @Override public StreamObserver jobResults(StreamObserver responseObserver) { return new StreamObserver() { diff --git a/services/jobs/pom.xml b/services/jobs/pom.xml index 4f5265fe45..c2b94aa034 100644 --- a/services/jobs/pom.xml +++ b/services/jobs/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobs Services - Jobs diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/ExtraJobInfoStoreCreator.java b/services/jobs/src/main/java/com/dremio/service/jobs/ExtraJobInfoStoreCreator.java new file mode 100644 index 0000000000..45fe7eb5ce --- /dev/null +++ b/services/jobs/src/main/java/com/dremio/service/jobs/ExtraJobInfoStoreCreator.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.jobs; + +import com.dremio.datastore.api.DocumentConverter; +import com.dremio.datastore.api.DocumentWriter; +import com.dremio.datastore.api.LegacyIndexedStore; +import com.dremio.datastore.api.LegacyIndexedStoreCreationFunction; +import com.dremio.datastore.api.LegacyStoreBuildingFactory; +import com.dremio.datastore.format.Format; +import com.dremio.service.job.proto.ExtraJobInfo; +import com.dremio.service.job.proto.JobId; + +/** + * Creator for the ExtraJobInfo store.
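+ * <p>Holds the full SQL text for jobs whose inline JobInfo.sql was truncated to the ExecConstants.SQL_TEXT_TRUNCATE_LENGTH limit (see LocalJobsService); + * entries are deleted together with their job by deleteOldJobsAndDependencies.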
+ */ +public class ExtraJobInfoStoreCreator implements LegacyIndexedStoreCreationFunction { + public static final String NAME = "extraJobInfo"; + @SuppressWarnings("unchecked") + @Override + public LegacyIndexedStore build(LegacyStoreBuildingFactory factory) { + return factory.newStore().name(NAME).keyFormat(Format.wrapped(JobId.class, JobId::getId, JobId::new, Format.ofString())) + .valueFormat(Format.ofProtostuff(ExtraJobInfo.class)) + .buildIndexed(new ExtraJobInfoConverter()); + } + + private static class ExtraJobInfoConverter implements DocumentConverter { + private Integer version = 0; + + @Override + public void convert(DocumentWriter writer, JobId key, ExtraJobInfo record) { + } + + @Override + public Integer getVersion() { + return version; + } + } +} diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/HybridJobsService.java b/services/jobs/src/main/java/com/dremio/service/jobs/HybridJobsService.java index 3a2e28792c..01196d6be1 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/HybridJobsService.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/HybridJobsService.java @@ -24,8 +24,6 @@ import javax.inject.Provider; import org.apache.arrow.memory.BufferAllocator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.dremio.common.AutoCloseables; import com.dremio.common.exceptions.GrpcExceptionUtil; @@ -65,6 +63,7 @@ import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; /** * This is used by the clients of {@link JobsService}. This service redirects calls to {@link LocalJobsService} over @@ -72,8 +71,6 @@ */ @Deprecated //TODO DX-19547: Remove HJS public class HybridJobsService implements JobsService { - private static final Logger logger = LoggerFactory.getLogger(HybridJobsService.class); - private final GrpcChannelBuilderFactory grpcFactory; private final Provider allocator; @@ -139,6 +136,7 @@ private ChronicleBlockingStub getChronicleBlockingStub() { } @Override + @WithSpan public JobSubmission submitJob(SubmitJobRequest jobRequest, JobStatusListener statusListener) { final JobStatusListenerAdapter adapter = new JobStatusListenerAdapter(statusListener); getAsyncStub().submitJob(jobRequest, adapter); @@ -176,6 +174,7 @@ public JobSummary getJobSummary(JobSummaryRequest request) throws JobNotFoundExc } @Override + @WithSpan public JobCounts getJobCounts(JobCountsRequest request) { try { return getChronicleBlockingStub().getJobCounts(request); @@ -328,7 +327,7 @@ private static void throwSuitableException(StatusRuntimeException sre, JobId job case NOT_FOUND: throw new JobNotFoundException(jobId, sre); case FAILED_PRECONDITION: - throw new JobNotFoundException(jobId, JobNotFoundException.causeOfFailure.CANCEL_FAILED); + throw new JobNotFoundException(jobId, JobNotFoundException.CauseOfFailure.CANCEL_FAILED); case PERMISSION_DENIED: throw new AccessControlException( String.format("Permission denied on user [%s] to access job [%s]", username, jobId)); @@ -349,7 +348,7 @@ private static void throwSuitableExceptionForReflectionJob(StatusRuntimeExceptio case NOT_FOUND: throw new JobNotFoundException(jobId, sre); case FAILED_PRECONDITION: - throw new JobNotFoundException(jobId, JobNotFoundException.causeOfFailure.CANCEL_FAILED); + throw new JobNotFoundException(jobId, JobNotFoundException.CauseOfFailure.CANCEL_FAILED); case PERMISSION_DENIED: throw new AccessControlException( String.format("Permission denied on user [%s] to 
access job for reflection [%s]", username, reflectionId)); diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/Job.java b/services/jobs/src/main/java/com/dremio/service/jobs/Job.java index 2f836a6c89..c125700dec 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/Job.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/Job.java @@ -26,6 +26,7 @@ import com.dremio.service.job.proto.JobAttempt; import com.dremio.service.job.proto.JobId; import com.dremio.service.job.proto.JobResult; +import com.dremio.service.job.proto.QueryType; import com.dremio.service.job.proto.SessionId; import com.google.common.base.Objects; import com.google.common.base.Preconditions; @@ -57,6 +58,14 @@ public Job(JobId jobId, JobAttempt jobAttempt, SessionId sessionId) { attempts.add( checkNotNull(jobAttempt, "jobAttempt is null")); } + public Job(JobId jobId, JobResult jobResult) { + this.jobId = jobId; + this.sessionId = jobResult.getSessionId(); + this.resultsStore = null; + this.completed = jobResult.getCompleted(); + attempts.addAll(jobResult.getAttemptsList()); + } + /** * Create an instance which loads the job results lazily. * @param jobId @@ -94,6 +103,11 @@ public JobAttempt getJobAttempt() { return attempts.get(lastAttempt); } + public QueryType getQueryType() { + Preconditions.checkState(attempts.size() >=1, "There should be at least one attempt in Job"); + return attempts.get(0).getInfo().getQueryType(); + } + void addAttempt(final JobAttempt jobAttempt) { attempts.add(jobAttempt); } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobIndexKeys.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobIndexKeys.java index b319076d45..90c2cb6177 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobIndexKeys.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobIndexKeys.java @@ -18,6 +18,7 @@ import static com.dremio.service.job.proto.QueryType.ACCELERATOR_CREATE; import static com.dremio.service.job.proto.QueryType.ACCELERATOR_DROP; import static com.dremio.service.job.proto.QueryType.ACCELERATOR_EXPLAIN; +import static com.dremio.service.job.proto.QueryType.D2D; import static com.dremio.service.job.proto.QueryType.FLIGHT; import static com.dremio.service.job.proto.QueryType.INTERNAL_ICEBERG_METADATA_DROP; import static com.dremio.service.job.proto.QueryType.JDBC; @@ -65,7 +66,8 @@ private JobIndexKeys() {} SearchQueryUtils.newTermQuery("QUERY_TYPE", ODBC.toString()), SearchQueryUtils.newTermQuery("QUERY_TYPE", JDBC.toString()), SearchQueryUtils.newTermQuery("QUERY_TYPE", REST.toString()), - SearchQueryUtils.newTermQuery("QUERY_TYPE", FLIGHT.toString())); + SearchQueryUtils.newTermQuery("QUERY_TYPE", FLIGHT.toString()), + SearchQueryUtils.newTermQuery("QUERY_TYPE", D2D.toString())); public static final SearchQuery UI_EXTERNAL_JOBS_FILTER = SearchQueryUtils.or( UI_JOBS_FILTER, diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobNotFoundException.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobNotFoundException.java index 602363263f..c285bdedd5 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobNotFoundException.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobNotFoundException.java @@ -15,6 +15,8 @@ */ package com.dremio.service.jobs; +import javax.annotation.Nullable; + import com.dremio.service.job.proto.JobId; /** @@ -23,40 +25,46 @@ public class JobNotFoundException extends JobException { private static final long serialVersionUID = 1L; - public enum 
causeOfFailure{ + public enum CauseOfFailure { NOT_FOUND, - CANCEL_FAILED + CANCEL_FAILED; + + private String buildErrorMessage(JobId jobId) { + if (this == CauseOfFailure.CANCEL_FAILED) { + return "Job " + jobId.getId() + " may have completed and cannot be canceled."; + } + return "Missing job " + jobId.getId(); + } } - private final JobId jobId; - private causeOfFailure errorType = causeOfFailure.NOT_FOUND ; + private final CauseOfFailure errorType; - public JobNotFoundException(JobId jobId, String error) { - super(jobId, error); - this.jobId = jobId; + public JobNotFoundException(JobId jobId) { + this(jobId, CauseOfFailure.NOT_FOUND); } - public JobNotFoundException(JobId jobId, causeOfFailure errorType) { - super(jobId, errorType.equals(causeOfFailure.CANCEL_FAILED)?"Job " + jobId.getId() + " may have completed and cannot be canceled." - :"Missing job " + jobId.getId()); - this.jobId = jobId; - this.errorType = errorType; + public JobNotFoundException(JobId jobId, CauseOfFailure errorType) { + this(jobId, errorType, null); } - public JobNotFoundException(JobId jobId, Exception error) { - super(jobId, "Missing job " + jobId.getId(), error); - this.jobId = jobId; + public JobNotFoundException(JobId jobId, Throwable cause) { + this(jobId, CauseOfFailure.NOT_FOUND, cause); } - public JobNotFoundException(JobId jobId) { - super(jobId, "Missing job " + jobId.getId()); - this.jobId = jobId; + public JobNotFoundException(JobId jobId, CauseOfFailure errorType, Throwable cause) { + this(jobId, errorType, cause, null); } - @Override - public JobId getJobId() { - return jobId; + public JobNotFoundException(JobId jobId, String error) { + this(jobId, CauseOfFailure.NOT_FOUND, null, error); } - public causeOfFailure getErrorType(){return errorType;} + private JobNotFoundException(JobId jobId, CauseOfFailure errorType, @Nullable Throwable cause, @Nullable String message) { + super(jobId, message != null ? message : errorType.buildErrorMessage(jobId), cause); + this.errorType = errorType; + } + + public CauseOfFailure getErrorType() { + return errorType; + } } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobResultToLogEntryConverter.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobResultToLogEntryConverter.java index c6035169a4..05d19176da 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobResultToLogEntryConverter.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobResultToLogEntryConverter.java @@ -51,6 +51,7 @@ public LoggedQuery apply(Job job) { } if (info.getSql() != null) { builder.setQueryText(info.getSql()); + builder.setIsTruncatedQueryText(info.getIsTruncatedSql()); } if (info.getFinishTime() != null) { builder.setFinish(info.getFinishTime()); diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobResultsStore.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobResultsStore.java index 48baf28a30..abf0682a12 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobResultsStore.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobResultsStore.java @@ -265,13 +265,13 @@ protected boolean doesQueryResultsDirExists(Path jobOutputDir, JobId jobId) thro return false; } - Set nodeEndpoints = getNodeEndpoints(jobId); - if (nodeEndpoints == null || nodeEndpoints.isEmpty()) { - logger.debug("There are no nodeEndpoints where query results dir existence need to be checked." 
+ - "For eg: for jdbc queries, results are not stored on executors."); - return false; - } if (dfs.isPdfs()) { + Set nodeEndpoints = getNodeEndpoints(jobId); + if (nodeEndpoints == null || nodeEndpoints.isEmpty()) { + logger.debug("There are no nodeEndpoints where query results dir existence needs to be checked. " + + "E.g., for jdbc queries, results are not stored on executors."); + return false; + } /** * This function borrows the implementation from PseduoDistributedFileSystem().createRemotePath(). diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobsProtoUtil.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobsProtoUtil.java index f85b3ff355..2c1bac6f4d 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobsProtoUtil.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobsProtoUtil.java @@ -617,6 +617,8 @@ public static com.dremio.service.job.QueryType toBuf(com.dremio.service.job.prot return com.dremio.service.job.QueryType.UI_EXPORT; case ODBC: return com.dremio.service.job.QueryType.ODBC; + case D2D: + return com.dremio.service.job.QueryType.D2D; case JDBC: return com.dremio.service.job.QueryType.JDBC; case REST: @@ -792,6 +794,8 @@ public static com.dremio.service.job.proto.QueryType toStuff(QueryType queryType return com.dremio.service.job.proto.QueryType.ACCELERATOR_EXPLAIN; case ODBC: return com.dremio.service.job.proto.QueryType.ODBC; + case D2D: + return com.dremio.service.job.proto.QueryType.D2D; case JDBC: return com.dremio.service.job.proto.QueryType.JDBC; case REST: diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobsRpcUtils.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobsRpcUtils.java index 7a0b8b95ac..673425d8e8 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobsRpcUtils.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobsRpcUtils.java @@ -47,7 +47,7 @@ static Throwable convertToGrpcException(Throwable t) { if (t instanceof UserException) { return GrpcExceptionUtil.toStatusRuntimeException((UserException) t); } else if (t instanceof JobNotFoundException) { - if (((JobNotFoundException) t).getErrorType().equals(JobNotFoundException.causeOfFailure.CANCEL_FAILED)) { + if (((JobNotFoundException) t).getErrorType().equals(JobNotFoundException.CauseOfFailure.CANCEL_FAILED)) { return Status.FAILED_PRECONDITION.withDescription(t.getMessage()).asRuntimeException(); } return io.grpc.Status.NOT_FOUND.asException(); diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JobsServiceUtil.java b/services/jobs/src/main/java/com/dremio/service/jobs/JobsServiceUtil.java index 46175fcd36..c0ca377cd4 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JobsServiceUtil.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JobsServiceUtil.java @@ -36,7 +36,7 @@ import java.util.UUID; import java.util.stream.Collectors; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,6 +62,7 @@ import com.dremio.service.job.JobDetails; import com.dremio.service.job.JobStats; import com.dremio.service.job.JobSummary; +import com.dremio.service.job.RequestType; import com.dremio.service.job.StoreJobResultRequest; import com.dremio.service.job.SubmitJobRequest; import com.dremio.service.job.UsedReflections; @@ -310,7 +311,7 @@ public Void visitWriter(final Writer writer, Void value) throws
RuntimeException static JobFailureInfo toFailureInfo(String verboseError) { // TODO: Would be easier if profile had structured error too String[] lines = verboseError.split("\n"); - if (lines.length < 3) { + if (lines.length < 2) { return null; } final JobFailureInfo.Type type; @@ -365,10 +366,10 @@ static JobFailureInfo toFailureInfo(String verboseError) { List errors; JobFailureInfo.Error error = new JobFailureInfo.Error() .setMessage(message); - if (lines.length > 3) { + if (lines.length > 2) { // Parse all the context lines Map context = new HashMap<>(); - for (int i = 3; i < lines.length; i++) { + for (int i = 2; i < lines.length; i++) { String line = lines[i]; if (line.isEmpty()) { break; @@ -801,8 +802,11 @@ public static JobTypeStats.Types toType(JobStats.Type type) { /** * Creates JobInfo from SubmitJobRequest */ - public static JobInfo createJobInfo(SubmitJobRequest jobRequest, JobId jobId, String inSpace) { - final JobInfo jobInfo = new JobInfo(jobId, jobRequest.getSqlQuery().getSql(), + public static JobInfo createJobInfo(SubmitJobRequest jobRequest, JobId jobId, String inSpace, int sqlTruncateLen) { + boolean isSqlTruncated = jobRequest.getSqlQuery().getSql().length() > sqlTruncateLen; + String sqlText = isSqlTruncated ? jobRequest.getSqlQuery().getSql().substring(0, sqlTruncateLen) : jobRequest.getSqlQuery().getSql(); + + final JobInfo jobInfo = new JobInfo(jobId, sqlText, jobRequest.getVersionedDataset().getVersion(), JobsProtoUtil.toStuff(jobRequest.getQueryType())) .setSpace(inSpace) .setUser(jobRequest.getUsername()) @@ -810,7 +814,8 @@ public static JobInfo createJobInfo(SubmitJobRequest jobRequest, JobId jobId, St .setDatasetPathList(jobRequest.getVersionedDataset().getPathList()) .setResultMetadataList(new ArrayList()) .setContextList(jobRequest.getSqlQuery().getContextList()) - .setQueryLabel(JobsProtoUtil.toStuff(jobRequest.getQueryLabel())); + .setQueryLabel(JobsProtoUtil.toStuff(jobRequest.getQueryLabel())) + .setIsTruncatedSql(isSqlTruncated); if (jobRequest.hasDownloadSettings()) { jobInfo.setDownloadInfo(new DownloadInfo() @@ -895,4 +900,13 @@ static SearchQuery getReflectionIdFilter(String reflectionId, IndexKey indexKey) JobIndexKeys.ACCELERATION_JOBS_FILTER)); return SearchQueryUtils.and(builder.build()); } + + public static String getJobDescription(com.dremio.proto.model.attempts.RequestType requestType, String sql, String desc) { + return getJobDescription(RequestType.valueOf(requestType.toString()), sql, desc); + } + + public static String getJobDescription(RequestType requestType, String sql, String desc) { + // description of a job is same as the sql text for RUN_SQL request types (Ref. UserRequest.java) + return requestType == RequestType.RUN_SQL ? 
sql : desc; + } } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JoinAnalyzer.java b/services/jobs/src/main/java/com/dremio/service/jobs/JoinAnalyzer.java index 9211aaea63..86a2f69b94 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JoinAnalyzer.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JoinAnalyzer.java @@ -30,6 +30,8 @@ import org.apache.calcite.rel.type.RelDataTypeField; import com.dremio.common.SuppressForbidden; +import com.dremio.exec.catalog.TableVersionContext; +import com.dremio.exec.planner.acceleration.substitution.SubstitutionUtils; import com.dremio.exec.proto.UserBitShared.MajorFragmentProfile; import com.dremio.exec.proto.UserBitShared.MinorFragmentProfile; import com.dremio.exec.proto.UserBitShared.OperatorProfile; @@ -172,6 +174,17 @@ private static long findMetric(OperatorProfile operatorProfile, final int id) { return FluentIterable.from(operatorProfile.getMetricList()).firstMatch(input -> input.getMetricId() == id).get().getLongValue(); } + /** + * When a reflection X is accelerated by reflection Y, we need to merge reflection Y's join analysis into + * reflection X so that reflection X's join analysis only contains physical datasets. The join analysis is used + * for snowflake reflections by pruning away tables (from non-expanding joins) not found in the user query. + * + * @param left + * @param right + * @param rightPlan + * @param materializationId + * @return + */ public static JoinAnalysis merge(JoinAnalysis left, JoinAnalysis right, final RelNode rightPlan, final String materializationId) { try { int leftMax = Integer.MIN_VALUE; @@ -186,11 +199,12 @@ public static JoinAnalysis merge(JoinAnalysis left, JoinAnalysis right, final Re JoinAnalysis newRight = remapJoinAnalysis(right, leftMax - rightMin + 1); - final Map, JoinTable> newTableMapping = FluentIterable.from(newRight.getJoinTablesList()) - .uniqueIndex(new Function>() { + final Map newTableMapping = FluentIterable.from(newRight.getJoinTablesList()) + .uniqueIndex(new Function() { @Override - public List apply(JoinTable joinTable) { - return joinTable.getTableSchemaPathList(); + public SubstitutionUtils.VersionedPath apply(JoinTable joinTable) { + return SubstitutionUtils.VersionedPath.of(joinTable.getTableSchemaPathList(), + joinTable.getVersionContext() != null ? 
TableVersionContext.deserialize(joinTable.getVersionContext()) : null ); } }); @@ -239,7 +253,8 @@ public JoinCondition apply(JoinCondition condition) { } RelColumnOrigin columnOrigin = metadataQuery.getColumnOrigin(rightPlan, field.getIndex()); RelOptTable originTable = columnOrigin.getOriginTable(); - newBuildTableId = newTableMapping.get(originTable.getQualifiedName()).getTableId(); + newBuildTableId = newTableMapping.get(SubstitutionUtils.VersionedPath.of(originTable.getQualifiedName(), + SubstitutionUtils.getVersionContext(originTable))).getTableId(); newBuildColumn = originTable.getRowType().getFieldList().get(columnOrigin.getOriginColumnOrdinal()).getName(); } else { newBuildTableId = condition.getBuildSideTableId(); @@ -255,7 +270,8 @@ public JoinCondition apply(JoinCondition condition) { } RelColumnOrigin columnOrigin = metadataQuery.getColumnOrigin(rightPlan, field.getIndex()); RelOptTable originTable = columnOrigin.getOriginTable(); - newProbeTableId = newTableMapping.get(originTable.getQualifiedName()).getTableId(); + newProbeTableId = newTableMapping.get(SubstitutionUtils.VersionedPath.of(originTable.getQualifiedName(), + SubstitutionUtils.getVersionContext(originTable))).getTableId(); newProbeColumn = originTable.getRowType().getFieldList().get(columnOrigin.getOriginColumnOrdinal()).getName(); } else { newProbeTableId = condition.getProbeSideTableId(); @@ -287,7 +303,9 @@ private static JoinAnalysis remapJoinAnalysis(final JoinAnalysis joinAnalysis, f @Override public JoinTable apply(JoinTable joinTable) { int newId = joinTable.getTableId() + offset; - return new JoinTable().setTableId(newId).setTableSchemaPathList(joinTable.getTableSchemaPathList()); + return new JoinTable().setTableId(newId) + .setTableSchemaPathList(joinTable.getTableSchemaPathList()) + .setVersionContext(joinTable.getVersionContext()); } }) .toList(); diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/JoinPreAnalyzer.java b/services/jobs/src/main/java/com/dremio/service/jobs/JoinPreAnalyzer.java index 5720a41b7c..81ddd4bd66 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/JoinPreAnalyzer.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/JoinPreAnalyzer.java @@ -30,7 +30,9 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.util.Pair; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.RoutingShuttle; +import com.dremio.exec.planner.acceleration.substitution.SubstitutionUtils; import com.dremio.exec.planner.common.ContainerRel; import com.dremio.exec.planner.physical.HashJoinPrel; import com.dremio.exec.planner.physical.JoinPrel; @@ -42,6 +44,7 @@ import com.dremio.service.Pointer; import com.dremio.service.job.proto.JoinCondition; import com.dremio.service.job.proto.JoinTable; +import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; /** @@ -50,7 +53,7 @@ public final class JoinPreAnalyzer extends BasePrelVisitor, RuntimeException> { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JoinPreAnalyzer.class); - private Map, Integer> tablesMap; + private Map tablesMap; private List joinTables; private List joinInfos; @@ -71,7 +74,8 @@ public static JoinPreAnalyzer prepare(Prel root) { .map(entry -> new JoinTable() .setTableId(entry.getValue()) - .setTableSchemaPathList(entry.getKey())) + .setTableSchemaPathList(entry.getKey().left) + .setVersionContext(entry.getKey().right != null ? 
entry.getKey().right.serialize() : null)) .collect(Collectors.toList()); // Collect join info. @@ -134,8 +138,10 @@ public Prel visitJoin(JoinPrel prel, Map fullMap) throws RuntimeExce return new JoinCondition() .setBuildSideColumn(rightTable.getRowType().getFieldList().get(rightOrdinal).getName()) .setProbeSideColumn(leftTable.getRowType().getFieldList().get(leftOrdinal).getName()) - .setBuildSideTableId(tablesMap.get(rightTable.getQualifiedName())) - .setProbeSideTableId(tablesMap.get(leftTable.getQualifiedName())); + .setBuildSideTableId(Preconditions.checkNotNull(tablesMap.get(SubstitutionUtils.VersionedPath.of(rightTable.getQualifiedName(), + SubstitutionUtils.getVersionContext(rightTable))))) + .setProbeSideTableId(Preconditions.checkNotNull(tablesMap.get(SubstitutionUtils.VersionedPath.of(leftTable.getQualifiedName(), + SubstitutionUtils.getVersionContext(leftTable))))); }) .collect(Collectors.toList()); } catch (Exception e) { @@ -150,9 +156,9 @@ public Prel visitJoin(JoinPrel prel, Map fullMap) throws RuntimeExce */ public static class TableScanCollector extends RoutingShuttle { private final Pointer counter = new Pointer<>(0); - private Map, Integer> tables = new HashMap<>(); + private Map tables = new HashMap<>(); - public static Map, Integer> collectTableScans(Prel root) { + public static Map collectTableScans(Prel root) { TableScanCollector collector = new TableScanCollector(); collector.visit(root); return collector.tables; @@ -161,7 +167,7 @@ public static Map, Integer> collectTableScans(Prel root) { @Override public RelNode visit(TableScan scan) { List table = scan.getTable().getQualifiedName(); - tables.put(table, counter.value++); + tables.put(SubstitutionUtils.VersionedPath.of(table), counter.value++); return scan; } @@ -174,7 +180,8 @@ public RelNode visit(RelNode other) { TableFunctionPrel tableFunctionPrel = ((TableFunctionPrel) other); if (tableFunctionPrel.getTable() != null) { List table = tableFunctionPrel.getTable().getQualifiedName(); - tables.put(table, counter.value++); + TableVersionContext versionContext = tableFunctionPrel.getTableMetadata().getVersionContext(); + tables.put(SubstitutionUtils.VersionedPath.of(table, versionContext), counter.value++); } } return super.visit(other); diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/LocalJobsService.java b/services/jobs/src/main/java/com/dremio/service/jobs/LocalJobsService.java index 7e06814299..22d9c6faed 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/LocalJobsService.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/LocalJobsService.java @@ -210,10 +210,12 @@ import com.dremio.service.job.JobSummaryRequest; import com.dremio.service.job.JobsWithParentDatasetRequest; import com.dremio.service.job.QueryProfileRequest; +import com.dremio.service.job.QueryResultData; import com.dremio.service.job.ReflectionJobDetailsRequest; import com.dremio.service.job.ReflectionJobEventsRequest; import com.dremio.service.job.ReflectionJobProfileRequest; import com.dremio.service.job.ReflectionJobSummaryRequest; +import com.dremio.service.job.RequestType; import com.dremio.service.job.SearchJobsRequest; import com.dremio.service.job.SearchReflectionJobsRequest; import com.dremio.service.job.SqlQuery; @@ -226,6 +228,7 @@ import com.dremio.service.job.proto.Acceleration; import com.dremio.service.job.proto.ExecutionNode; import com.dremio.service.job.proto.ExtraInfo; +import com.dremio.service.job.proto.ExtraJobInfo; import com.dremio.service.job.proto.JobAttempt; import 
com.dremio.service.job.proto.JobCancellationInfo; import com.dremio.service.job.proto.JobDetails; @@ -275,7 +278,7 @@ import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; import io.opentelemetry.api.trace.Span; -import io.opentelemetry.extension.annotations.WithSpan; +import io.opentelemetry.instrumentation.annotations.WithSpan; import io.protostuff.ByteString; /** @@ -284,35 +287,21 @@ public class LocalJobsService implements Service, JobResultInfoProvider, SimpleJobRunner { private static final Logger logger = LoggerFactory.getLogger(LocalJobsService.class); public static final String QUERY_LOGGER = "query.logger"; - private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(LocalJobsService.class); - private static final int DISABLE_CLEANUP_VALUE = -1; - private static final int DELAY_BEFORE_STARTING_CLEANUP_IN_MINUTES = 5; - private static final long ONE_DAY_IN_MILLIS = TimeUnit.DAYS.toMillis(1); - private static final long ONE_HOUR_IN_MILLIS = TimeUnit.HOURS.toMillis(1); - private static final int MAX_NUMBER_JOBS_TO_FETCH = 20; - public static final String JOBS_NAME = "jobs"; - private static final String LOCAL_TASK_LEADER_NAME = "localjobsclean"; - private static final String LOCAL_ONE_TIME_TASK_LEADER_NAME = "localjobsabandon"; - private static final int SEARCH_JOBS_PAGE_SIZE = 100; - private static final long LOCAL_ABANDONED_JOBS_TASK_SCHEDULE_MILLIS = 1800000; - @VisibleForTesting public static final String INJECTOR_ATTEMPT_COMPLETION_ERROR = "attempt-completion-error"; - @VisibleForTesting public static final String INJECTOR_ATTEMPT_COMPLETION_KV_ERROR = "attempt-completion-kv-error"; - private final Provider queryExecutor; private final Provider kvStoreProvider; private final Provider jobResultsStoreConfig; @@ -337,12 +326,12 @@ public class LocalJobsService implements Service, JobResultInfoProvider, SimpleJ private final ContextMigratingCloseableExecutorService executorService; private final List extraExternalCleaners; private final CloseableExecutorService queryLoggerExecutorService; - private NodeEndpoint identity; private LegacyIndexedStore store; + private LegacyIndexedStore extraJobInfoStore; private NamespaceService namespaceService; private String storageName; - private JobResultsStore jobResultsStore; + private volatile JobResultsStore jobResultsStore; private Cancellable jobResultsCleanupTask; private Cancellable jobDependenciesCleanupTask; private Cancellable abandonLocalJobsTask; @@ -350,7 +339,6 @@ public class LocalJobsService implements Service, JobResultInfoProvider, SimpleJ private QueryObserverFactory queryObserverFactory; private JobTelemetryServiceGrpc.JobTelemetryServiceBlockingStub jobTelemetryServiceStub; private SessionOptionManagerFactory sessionOptionManagerFactory; - private final RemoteJobServiceForwarder forwarder; private static final List DEFAULT_SORTER = ImmutableList.of( @@ -363,8 +351,7 @@ public class LocalJobsService implements Service, JobResultInfoProvider, SimpleJ * @return */ public static StructuredLogger createJobResultLogger() { - return StructuredLogger.get(LoggedQuery.class, QUERY_LOGGER) - .compose(new JobResultToLogEntryConverter()); + return StructuredLogger.get(LoggedQuery.class, QUERY_LOGGER).compose(new JobResultToLogEntryConverter()); } public LocalJobsService( @@ -425,13 +412,12 @@ public QueryObserverFactory getQueryObserverFactory() { @Override public void start() throws IOException, InterruptedException { logger.info("Starting JobsService"); - this.identity = 
JobsServiceUtil.toStuff(nodeEndpointProvider.get()); this.store = kvStoreProvider.get().getStore(JobsStoreCreator.class); + this.extraJobInfoStore = kvStoreProvider.get().getStore(ExtraJobInfoStoreCreator.class); this.namespaceService = namespaceServiceProvider.get(); final JobResultsStoreConfig resultsStoreConfig = jobResultsStoreConfig.get(); this.storageName = resultsStoreConfig.getStorageName(); - this.jobResultsStore = jobResultsStoreProvider.get(); this.jobTelemetryServiceStub = jobTelemetryClientProvider.get().getBlockingStub(); this.sessionOptionManagerFactory = new SessionOptionManagerFactoryImpl(optionValidatorProvider.get()); @@ -440,8 +426,7 @@ public void start() throws IOException, InterruptedException { final Cancellable task = schedulerService.get() .schedule(ScheduleUtils.scheduleToRunOnceNow(LOCAL_ONE_TIME_TASK_LEADER_NAME), () -> { try { - setAbandonedJobsToFailedState(store, - jobServiceInstances.get()); + setAbandonedJobsToFailedState(store, jobServiceInstances.get(), jobResultLogger); } finally { wasRun.countDown(); } @@ -456,7 +441,6 @@ public void start() throws IOException, InterruptedException { final OptionManager optionManager = optionManagerProvider.get(); // job results final long maxJobResultsAgeInDays = optionManager.getOption(ExecConstants.RESULTS_MAX_AGE_IN_DAYS); - // job profiles final long jobProfilesAgeOffsetInMillis = optionManager.getOption(ExecConstants.DEBUG_RESULTS_MAX_AGE_IN_MILLISECONDS); final long maxJobProfilesAgeInDays = optionManager.getOption(ExecConstants.JOB_MAX_AGE_IN_DAYS); @@ -498,7 +482,6 @@ public void start() throws IOException, InterruptedException { final Schedule resultSchedule = Schedule.Builder.everyDays(1, startTime) .withTimeZone(ZoneId.systemDefault()) .build(); - jobResultsCleanupTask = schedulerService.get().schedule(resultSchedule, new JobResultsCleanupTask()); } @@ -512,8 +495,7 @@ public void start() throws IOException, InterruptedException { final Schedule abandonedJobsSchedule = Schedule.Builder.everyMinutes(5) .withTimeZone(ZoneId.systemDefault()) .build(); - abandonLocalJobsTask = schedulerService.get() - .schedule(abandonedJobsSchedule, new AbandonLocalJobsTask()); + abandonLocalJobsTask = schedulerService.get().schedule(abandonedJobsSchedule, new AbandonLocalJobsTask()); } // schedule the task every 30 minutes to set abandoned jobs state to FAILED. 
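Both abandoned-jobs paths in this file, the one-time startup task above and the periodic LocalAbandonedJobsTask and LocalAbandonedJobsHandler below, converge on the same bookkeeping, which this patch extends with a structured query-log entry. A minimal sketch of that shared pattern, assuming the fluent protostuff setters and JobState enum generated under com.dremio.service.job.proto; same-package types (Job, the StructuredLogger wrapper) are used without imports, and the helper name failAbandonedJob is illustrative, not part of the patch:

import java.util.List;

import com.dremio.datastore.api.LegacyIndexedStore;
import com.dremio.service.job.proto.JobAttempt;
import com.dremio.service.job.proto.JobId;
import com.dremio.service.job.proto.JobResult;
import com.dremio.service.job.proto.JobState;

// Sketch: mark the last attempt of an abandoned job FAILED, persist it, and
// emit the query-log entry that this change adds.
static void failAbandonedJob(JobId jobId, JobResult jobResult,
                             LegacyIndexedStore<JobId, JobResult> jobStore,
                             StructuredLogger<Job> jobResultLogger) {
  List<JobAttempt> attempts = jobResult.getAttemptsList();
  int last = attempts.size() - 1;
  // Rewrite the dangling final attempt so the job no longer looks RUNNING
  // on a coordinator that has gone away.
  attempts.set(last, attempts.get(last).setState(JobState.FAILED));
  jobResult.setCompleted(true); // mark the job as completed
  jobStore.put(jobId, jobResult);
  // Abandoned jobs are now also written to the query log, built from the
  // JobResult via the new Job(JobId, JobResult) constructor added in Job.java.
  Job job = new Job(jobId, jobResult);
  jobResultLogger.info(job, "Query: {}; outcome: {}",
      job.getJobId().getId(), job.getJobAttempt().getState());
}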
@@ -525,7 +507,8 @@ public void start() throws IOException, InterruptedException { @VisibleForTesting static void setAbandonedJobsToFailedState(LegacyIndexedStore jobStore, - Collection coordinators) { + Collection coordinators, + StructuredLogger jobResultLogger) { final Set> apparentlyAbandoned = StreamSupport.stream(jobStore.find(new LegacyFindByCondition() .setCondition(JobsServiceUtil.getApparentlyAbandonedQuery())).spliterator(), false) @@ -560,7 +543,9 @@ static void setAbandonedJobsToFailedState(LegacyIndexedStore j attempts.remove(numAttempts - 1); attempts.add(newLastAttempt); jobResult.setCompleted(true); // mark the job as completed - jobStore.put(entry.getKey(), entry.getValue()); + jobStore.put(entry.getKey(), jobResult); + Job job = new Job(entry.getKey(), jobResult); + jobResultLogger.info(job, "Query: {}; outcome: {}", job.getJobId().getId(), job.getJobAttempt().getState()); } } } @@ -609,7 +594,6 @@ void registerListener(JobId jobId, StreamObserver observer) { void registerListenerWithJob(Job job, StreamObserver observer) throws JobNotFoundException { - final QueryListener queryListener = runningJobs.get(job.getJobId()); if (queryListener != null) { queryListener.listeners.register(observer, JobsServiceUtil.toJobSummary(job)); @@ -665,7 +649,8 @@ private void startJob( NameSpaceContainer.Type.SPACE) ? jobRequest.getVersionedDataset().getPath(0) : null; - final JobInfo jobInfo = JobsServiceUtil.createJobInfo(jobRequest, jobId, inSpace); + int sqlTruncateLen = getSqlTruncateLenFromOptionMgr(); + final JobInfo jobInfo = JobsServiceUtil.createJobInfo(jobRequest, jobId, inSpace, sqlTruncateLen); final JobAttempt jobAttempt = new JobAttempt() .setInfo(jobInfo) .setEndpoint(identity) @@ -674,7 +659,7 @@ private void startJob( final Job job = new Job(jobId, jobAttempt, new SessionId().setId(sessionId)); // (2) deduce execution configuration - final QueryType queryType = JobsProtoUtil.toStuff(jobRequest.getQueryType()); + final QueryType queryType = jobInfo.getQueryType(); final boolean enableLeafLimits = QueryTypeUtils.requiresLeafLimits(queryType); final LocalExecutionConfig config = LocalExecutionConfig.newBuilder() @@ -700,6 +685,9 @@ private void startJob( final QueryListener jobObserver = new QueryListener(job, eventObserver, sessionObserver, planTransformationListener, jobRequest.getStreamResultsMode()); storeJob(job); + if (jobInfo.getIsTruncatedSql()) { + extraJobInfoStore.put(jobId, new ExtraJobInfo().setSql(jobRequest.getSqlQuery().getSql())); + } runningJobs.put(jobId, jobObserver); final boolean isPrepare = queryType.equals(QueryType.PREPARE_INTERNAL); @@ -717,8 +705,8 @@ private void startJob( .setSource(SubmissionSource.LOCAL) .setPlan(jobRequest.getSqlQuery().getSql()) .setPriority(QueryPriority.newBuilder() - .setWorkloadClass(workloadClass) - .setWorkloadType(workloadType)) + .setWorkloadClass(workloadClass) + .setWorkloadType(workloadType)) .setQueryLabel(queryLabel) .build(); } @@ -779,7 +767,6 @@ private void startJob( */ @VisibleForTesting static SubmitJobRequest validateJobRequest(SubmitJobRequest submitJobRequest) { - final SubmitJobRequest.Builder submitJobRequestBuilder = SubmitJobRequest.newBuilder(); Preconditions.checkArgument(submitJobRequest.hasSqlQuery(),"sql query not provided"); @@ -933,7 +920,6 @@ void submitJob(SubmitJobRequest jobRequest, StreamObserver eventObserv JobSubmissionHelper getJobSubmissionHelper(SubmitJobRequest jobRequest, StreamObserver eventObserver, PlanTransformationListener planTransformationListener) { CommandPool commandPool 
= commandPoolService.get(); - if (commandPool instanceof ReleasableCommandPool) { ReleasableCommandPool releasableCommandPool = (ReleasableCommandPool) commandPool; // Protecting this code from callers who do not hold the command pool slot @@ -961,7 +947,6 @@ public void runQueryAsJob(String query, String userName, String queryType, Strin .setUsername(userName) .setRunInSameThread(true) .build(); - final CompletionListener completionListener = new CompletionListener(false); final JobStatusListenerAdapter streamObserver = new JobStatusListenerAdapter(completionListener); final JobSubmissionHelper jobSubmissionHelper = getJobSubmissionHelper(jobRequest, streamObserver, PlanTransformationListener.NO_OP); @@ -1016,7 +1001,7 @@ Job getJobFromStore(final JobId jobId) throws JobNotFoundException { } SessionId sessionId = jobResult.getSessionId(); - Job job = new Job(jobId, jobResult, jobResultsStore, sessionId); + Job job = new Job(jobId, jobResult, getJobResultsStore(), sessionId); populateJobDetailsFromFullProfile(job); return job; } @@ -1079,6 +1064,10 @@ JobSummary getJobSummary(JobSummaryRequest jobSummaryRequest) summary = JobsServiceUtil.toJobSummary(job); } } + if (job.getJobAttempt().getInfo().getIsTruncatedSql()) { + String fullSql = extraJobInfoStore.get(job.getJobId()).getSql(); + summary = summary.toBuilder().setSql(fullSql).build(); + } return summary; } @@ -1094,7 +1083,7 @@ com.dremio.service.job.JobDetails getJobDetails(JobDetailsRequest jobDetailsRequ com.dremio.service.job.JobDetails details = null; if (getJobRequest.isFromStore()) { job = getJobFromStore(getJobRequest.getJobId()); - details = JobsServiceUtil.toJobDetails(job, jobDetailsRequest.getProvideResultInfo()); + details = JobsServiceUtil.toJobDetails(job, jobDetailsRequest.getProvideResultInfo()); } else { job = getJob(getJobRequest); if (mustForwardRequest(job)) { @@ -1105,6 +1094,13 @@ com.dremio.service.job.JobDetails getJobDetails(JobDetailsRequest jobDetailsRequ details = JobsServiceUtil.toJobDetails(job, jobDetailsRequest.getProvideResultInfo()); } } + if (job.getJobAttempt().getInfo().getIsTruncatedSql()) { + String fullSql = extraJobInfoStore.get(job.getJobId()).getSql(); + int ai = details.getAttemptsCount() - 1; + com.dremio.service.job.proto.JobProtobuf.JobInfo info = details.getAttempts(ai).getInfo().toBuilder().setSql(fullSql).build(); + com.dremio.service.job.proto.JobProtobuf.JobAttempt lastAttempt = details.getAttempts(ai).toBuilder().setInfo(info).build(); + return details.toBuilder().setAttempts(ai, lastAttempt).build(); + } return details; } @@ -1140,7 +1136,17 @@ public JobDataFragment getJobData(JobId jobId, int offset, int limit) @VisibleForTesting JobResultsStore getJobResultsStore() { - return jobResultsStore; + if (this.jobResultsStore == null) { + // Lazy initialization to allow the late setup on pre-warmed coordinators in the cloud use-cases. + this.jobResultsStore = jobResultsStoreProvider.get(); + } + return this.jobResultsStore; + } + + private int getSqlTruncateLenFromOptionMgr() { + int sqlTruncateLen = (int) optionManagerProvider.get().getOption(ExecConstants.SQL_TEXT_TRUNCATE_LENGTH); + // value of 0 in SQL_TEXT_TRUNCATE_LENGTH is used to disable truncation + return sqlTruncateLen == 0 ? 
Integer.MAX_VALUE : sqlTruncateLen; } private static final ImmutableList> JOBS_STATS_TYPE_TO_SEARCH_QUERY_MAPPING = @@ -1315,7 +1321,6 @@ private static String map(String filterString) { } } - LegacyFindByCondition createCondition(SearchJobsRequest searchJobsRequest) { final LegacyFindByCondition condition = new LegacyFindByCondition(); VersionedDatasetPath versionedDatasetPath = searchJobsRequest.getDataset(); @@ -1342,7 +1347,6 @@ LegacyFindByCondition createCondition(SearchJobsRequest searchJobsRequest) { if (!Strings.isNullOrEmpty(sortColumn)) { condition.addSortings(buildSorter(sortColumn, JobsProtoUtil.toStoreSortOrder(searchJobsRequest.getSortOrder()))); } - return condition; } @@ -1425,7 +1429,7 @@ private Iterable toJobs(final Iterable> entries) { return Iterables.transform(entries, new Function, Job>() { @Override public Job apply(Entry input) { - return new Job(input.getKey(), input.getValue(), jobResultsStore, input.getValue().getSessionId()); + return new Job(input.getKey(), input.getValue(), getJobResultsStore(), input.getValue().getSessionId()); } }); } @@ -1439,7 +1443,7 @@ public java.util.Optional getJobResultInfo(String jobId, String u .build()); if (job.isCompleted()) { final BatchSchema batchSchema = BatchSchema.deserialize(job.getJobAttempt().getInfo().getBatchSchema()); - final List tableName = jobResultsStore.getOutputTablePath(job.getJobId()); + final List tableName = getJobResultsStore().getOutputTablePath(job.getJobId()); return java.util.Optional.of(new JobResultInfo(tableName, batchSchema)); } // else, fall through } catch (JobNotFoundException ignored) { @@ -1572,7 +1576,6 @@ public QueryObserver createNewQueryObserver(ExternalId id, UserSession session, } private final class QueryListener extends AbstractQueryObserver { - private final Job job; private final ExternalId externalId; private final UserResponseHandler responseHandler; @@ -1629,7 +1632,7 @@ private Job getJob(){ private void setupJobData() { final JobLoader jobLoader = (isInternal && !streamResultsMode) ? - new InternalJobLoader(exception, completionLatch, job.getJobId(), jobResultsStore, store) : + new InternalJobLoader(exception, completionLatch, job.getJobId(), getJobResultsStore(), store) : new ExternalJobLoader(completionLatch, exception); JobData jobData = new JobDataImpl(jobLoader, job.getJobId(), job.getSessionId()); job.setData(jobData); @@ -1637,6 +1640,7 @@ private void setupJobData() { @Override public AttemptObserver newAttempt(AttemptId attemptId, AttemptReason reason) { + Span.current().setAttribute("dremio.job.type", job.getQueryType().toString()); // first attempt is already part of the job if (attemptId.getAttemptNum() > 0) { // create a new JobAttempt for the new attempt @@ -1878,7 +1882,6 @@ public void execDataArrived(RpcOutcomeListener outcomeListener, QueryWritab * A query observer for internal queries with streamResultsMode. Delegates the data back to the original grpc connection. 
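* <p>With this change each arriving batch is forwarded as a QueryResultData event
* that carries the batch header next to the serialized buffers, instead of the
* bare result bytes, roughly:
* <pre>{@code
* QueryResultData payload = QueryResultData.newBuilder()
*     .setHeader(data.getHeader())
*     .setResultData(outputStream.toByteString())
*     .build();
* eventObserver.onData(JobEvent.newBuilder().setQueryResultData(payload).build(), outcomeListener);
* }</pre>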
*/ private final class InternalJobResultStreamingListener extends JobResultListener { - private final JobEventCollatingObserver eventObserver; InternalJobResultStreamingListener(AttemptId attemptId, @@ -1904,8 +1907,11 @@ public void execDataArrived(RpcOutcomeListener outcomeListener, QueryWritab data.getBuffers()[i].readBytes(outputStream, data.getBuffers()[i].readableBytes()); data.getBuffers()[i].release(); } - // todo: fix ser/deser overhead (https://dremio.atlassian.net/browse/DX-46512) - eventObserver.onData(JobEvent.newBuilder().setResultData(outputStream.toByteString()).build(), outcomeListener); + QueryResultData.Builder queryResultsBuilder = QueryResultData.newBuilder() + .setHeader(data.getHeader()) + .setResultData(outputStream.toByteString()); + // todo: fix ser/deser overhead (DX-46512) + eventObserver.onData(JobEvent.newBuilder().setQueryResultData(queryResultsBuilder.build()).build(), outcomeListener); } catch (IOException ex) { outcomeListener.failed(new RpcException(ex)); getDeferredException().addException(ex); @@ -1936,7 +1942,6 @@ void recordJobResult(StoreJobResultRequest request) { */ private class JobResultListener extends AbstractAttemptObserver { private final AttemptId attemptId; - private final DeferredException exception = new DeferredException(); private final Job job; private final JobId jobId; @@ -1951,7 +1956,7 @@ private class JobResultListener extends AbstractAttemptObserver { JobResultListener(AttemptId attemptId, Job job, BufferAllocator allocator, JobEventCollatingObserver eventObserver, PlanTransformationListener planTransformationListener, ExternalListenerManager externalListenerManager) { - Preconditions.checkNotNull(jobResultsStore); + Preconditions.checkNotNull(getJobResultsStore()); this.attemptId = attemptId; this.job = job; this.jobId = job.getJobId(); @@ -1995,8 +2000,20 @@ public void recordsOutput(long outputRecords) { @Override public void queryStarted(UserRequest query, String user) { job.getJobAttempt().getInfo().setRequestType(query.getRequestType()); - job.getJobAttempt().getInfo().setSql(query.getSql()); - job.getJobAttempt().getInfo().setDescription(query.getDescription()); + if (job.getJobAttempt().getInfo().getSql() == null || job.getJobAttempt().getInfo().getSql().equals("UNKNOWN")) { + int sqlTruncateLen = getSqlTruncateLenFromOptionMgr(); + String sqlText = query.getSql().length() > sqlTruncateLen ? 
query.getSql().substring(0, sqlTruncateLen) : query.getSql(); + job.getJobAttempt().getInfo().setSql(sqlText); + if (query.getSql().length() > sqlTruncateLen) { + job.getJobAttempt().getInfo().setIsTruncatedSql(true); + extraJobInfoStore.put(jobId, new ExtraJobInfo().setSql(query.getSql())); + } + } + if (RequestType.valueOf(query.getRequestType().toString()) != RequestType.RUN_SQL) { + job.getJobAttempt().getInfo().setDescription(query.getDescription()); + } else { + job.getJobAttempt().getInfo().setDescription("NA"); + } storeJob(job); if (externalListenerManager != null) { externalListenerManager.queryProgressed(JobsServiceUtil.toJobSummary(job)); @@ -2232,7 +2249,7 @@ public void recordExtraInfo(String name, byte[] bytes) { } @Override - public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken) { + public void planRelTransform(PlannerPhase phase, RelOptPlanner planner, RelNode before, RelNode after, long millisTaken, Map timeBreakdownPerRule) { planTransformationListener.onPhaseCompletion(phase, before, after, millisTaken); switch(phase){ case LOGICAL: @@ -2415,7 +2432,6 @@ private void updateJobDetails(Job job, QueryProfile profile) throws IOException }); } }); - jobAttempt.setStats(profileParser.getJobStats()); } @@ -2514,7 +2530,7 @@ void cancel(CancelJobRequest request) throws JobException { void remoteCancel(JobId jobId, ExternalId externalId, NodeEndpoint endpoint, String reason) throws JobException { if(endpoint.equals(identity)){ - throw new JobNotFoundException(jobId, JobNotFoundException.causeOfFailure.CANCEL_FAILED); + throw new JobNotFoundException(jobId, JobNotFoundException.CauseOfFailure.CANCEL_FAILED); } try{ @@ -2524,7 +2540,7 @@ void remoteCancel(JobId jobId, ExternalId externalId, NodeEndpoint endpoint, Str logger.debug("Job cancellation requested on {}.", endpoint.getAddress()); return; } else { - throw new JobNotFoundException(jobId, JobNotFoundException.causeOfFailure.CANCEL_FAILED); + throw new JobNotFoundException(jobId, JobNotFoundException.CauseOfFailure.CANCEL_FAILED); } } catch(TimeoutException | RpcException | RuntimeException e){ logger.info("Unable to cancel remote job for external id: {}", ExternalIdHelper.toString(externalId), e); @@ -2537,7 +2553,6 @@ void remoteCancel(JobId jobId, ExternalId externalId, NodeEndpoint endpoint, Str * Schedule in the background */ class OnlineProfileCleaner extends ExternalCleaner { - @Override public void doGo(JobAttempt jobAttempt) { jobTelemetryServiceStub @@ -2547,7 +2562,6 @@ public void doGo(JobAttempt jobAttempt) { .build() ); } - } /** @@ -2568,6 +2582,7 @@ public static String deleteOldJobsAndDependencies(List external LegacyKVStoreProvider provider, long maxMs) { long jobsDeleted = 0; LegacyIndexedStore jobStore = provider.getStore(JobsStoreCreator.class); + LegacyIndexedStore extraJobInfoStore = provider.getStore(ExtraJobInfoStoreCreator.class); final LegacyFindByCondition oldJobs = getOldJobsCondition(0, System.currentTimeMillis() - maxMs) .setPageSize(MAX_NUMBER_JOBS_TO_FETCH); final ExternalCleanerRunner externalCleanerRunner = new ExternalCleanerRunner(externalCleaners); @@ -2575,6 +2590,7 @@ public static String deleteOldJobsAndDependencies(List external JobResult result = entry.getValue(); externalCleanerRunner.run(result); jobStore.delete(entry.getKey()); + extraJobInfoStore.delete(entry.getKey()); jobsDeleted++; } logger.debug("Job cleanup task completed with [{}] jobs deleted and and [{}] profiles deleted.", jobsDeleted, 0L); @@ -2602,8 
+2618,7 @@ public void run() { } public void manage() { - setAbandonedJobsToFailedState(store, - jobServiceInstances.get()); + setAbandonedJobsToFailedState(store, jobServiceInstances.get(), jobResultLogger); } } @@ -2640,7 +2655,7 @@ private void cleanupJobs(long cutOffTime) { for (Entry entry : store.find(condition)) { logger.debug("JobResultsCleanupTask getting cleaned up for key {}", entry.getKey()); - jobResultsStore.cleanup(entry.getKey()); + getJobResultsStore().cleanup(entry.getKey()); } JOB_RESULTS_PREV_CUTOFF_TIME = cutOffTime - ONE_HOUR_IN_MILLIS; //Decreasing prev time by an hour to cover overlapping jobs } @@ -2695,7 +2710,6 @@ private static final LegacyFindByCondition getOldJobsCondition(long prevCutOffTi * Creator for jobs. */ public static class JobsStoreCreator implements LegacyIndexedStoreCreationFunction { - @SuppressWarnings("unchecked") @Override public LegacyIndexedStore build(LegacyStoreBuildingFactory factory) { @@ -2813,10 +2827,9 @@ UserBitShared.QueryProfile getReflectionJobProfile(ReflectionJobProfileRequest r String getReflectionSearchQuery(String reflectionId) { StringBuilder stringBuilder = new StringBuilder().append("(qt==\"ACCELERATION\");(") - .append("*") - .append("=contains=") + .append("sql==\"*") .append(reflectionId) - .append(")"); + .append("*\")"); return stringBuilder.toString(); } @@ -2843,12 +2856,10 @@ private boolean mustForwardRequest(final Job job) { && jobServiceInstances.get().contains(JobsProtoUtil.toBuf(source))){ return true; } - return false; } class LocalAbandonedJobsHandler implements AutoCloseable { - private ScheduledFuture abandonedJobsTask; private final CloseableSchedulerThreadPool threadPool; @@ -2934,6 +2945,8 @@ private void terminateLocalAbandonedJobs() { attempts.add(newLastAttempt); jobResult.setCompleted(true); // mark the job as completed store.put(entry.getKey(), jobResult); + Job job = new Job(entry.getKey(), jobResult); + jobResultLogger.info(job, "Query: {}; outcome: {}", job.getJobId().getId(), job.getJobAttempt().getState()); } } } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/QueryProfileParser.java b/services/jobs/src/main/java/com/dremio/service/jobs/QueryProfileParser.java index 62ac00cbd0..4c987c950a 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/QueryProfileParser.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/QueryProfileParser.java @@ -24,6 +24,7 @@ import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -136,6 +137,17 @@ private void setOutputStats(List streams) { } } + private void setDmlStats(OperatorProfile operatorProfile) { + if (operatorProfile == null) { + return; + } + long addedFiles = Optional.ofNullable(jobStats.getAddedFiles()).orElse(0L) + operatorProfile.getAddedFiles(); + jobStats.setAddedFiles(addedFiles); + + long removedFiles = Optional.ofNullable(jobStats.getRemovedFiles()).orElse(0L) + operatorProfile.getRemovedFiles(); + jobStats.setRemovedFiles(removedFiles); + } + private long toMillis(long nanos) { return TimeUnit.NANOSECONDS.toMillis(nanos); } @@ -451,6 +463,7 @@ private void parse() throws IOException { case HASH_JOIN: setJoinSpillInfo(operatorType, operatorProfile); + // fall through case MERGE_JOIN: case NESTED_LOOP_JOIN: setOperationStats(OperationType.Join, toMillis(operatorProfile.getProcessNanos() + operatorProfile.getSetupNanos())); @@ -524,6 +537,9 @@ private void parse() throws IOException { case 
DELTALAKE_SUB_SCAN: setOperationStats(OperationType.Reading, toMillis(operatorProfile.getProcessNanos() + operatorProfile.getSetupNanos())); break; + case WRITER_COMMITTER: + setDmlStats(operatorProfile); + break; default: break; } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/QueryTypeUtils.java b/services/jobs/src/main/java/com/dremio/service/jobs/QueryTypeUtils.java index 5b2ccdf751..7e43883f44 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/QueryTypeUtils.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/QueryTypeUtils.java @@ -54,6 +54,7 @@ static boolean isInternal(final QueryType queryType) { case UI_RUN: case UI_PREVIEW: case ODBC: + case D2D: case JDBC: case REST: case UI_INITIAL_PREVIEW: @@ -98,6 +99,7 @@ static WorkloadClass getWorkloadClassFor(final QueryType queryType) { return WorkloadClass.NRT; case UNKNOWN: + case D2D: case JDBC: case ODBC: case REST: @@ -144,6 +146,10 @@ static QueryType getQueryType(final RpcEndpointInfos clientInfos) { } final String name = clientInfos.getName().toLowerCase(Locale.ROOT); + if (name.contains("dremio-to-dremio")) { + return QueryType.D2D; + } + if (name.contains("jdbc") || name.contains("java")) { return QueryType.JDBC; } @@ -172,6 +178,8 @@ public static WorkloadType getWorkloadType(final QueryType queryType) { return WorkloadType.UI_DOWNLOAD; case UI_INTERNAL_RUN: return WorkloadType.INTERNAL_RUN; + case D2D: + return WorkloadType.D2D; case JDBC: return WorkloadType.JDBC; case ODBC: diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/metadata/AncestorsVisitor.java b/services/jobs/src/main/java/com/dremio/service/jobs/metadata/AncestorsVisitor.java index e36da23c6e..a28d6487fe 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/metadata/AncestorsVisitor.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/metadata/AncestorsVisitor.java @@ -107,7 +107,7 @@ public List visit(SqlCall call) { SqlFunction tableFunction = (SqlFunction)((SqlCall)operand).getOperator(); return asList(tableFunction.getSqlIdentifier()); } - // pass through + return Collections.emptyList(); case VALUES: return Collections.emptyList(); default: @@ -128,6 +128,7 @@ public List visit(SqlCall call) { return Collections.emptyList(); } } + // fall through default: throw new UnsupportedOperationException("Unexpected operator in call: " + operator.getKind() + "\n" + SqlNodes.toTreeString(call)); } diff --git a/services/jobs/src/main/java/com/dremio/service/jobs/metadata/QueryMetadata.java b/services/jobs/src/main/java/com/dremio/service/jobs/metadata/QueryMetadata.java index 01cb334389..9d3e19a232 100644 --- a/services/jobs/src/main/java/com/dremio/service/jobs/metadata/QueryMetadata.java +++ b/services/jobs/src/main/java/com/dremio/service/jobs/metadata/QueryMetadata.java @@ -38,9 +38,11 @@ import org.apache.calcite.tools.ValidationException; import com.dremio.common.utils.PathUtils; +import com.dremio.exec.catalog.TableVersionContext; import com.dremio.exec.planner.StatelessRelShuttleImpl; import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.common.ContainerRel; +import com.dremio.exec.planner.common.ScanRelBase; import com.dremio.exec.planner.fragment.PlanningSet; import com.dremio.exec.planner.logical.TableModifyRel; import com.dremio.exec.planner.logical.TableOptimizeRel; @@ -64,9 +66,7 @@ import com.dremio.service.namespace.proto.NameSpaceContainer; import com.dremio.service.namespace.proto.NameSpaceContainer.Type; import 
com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; @@ -322,32 +322,47 @@ public QueryMetadata build() throws ValidationException { Preconditions.checkNotNull(rowType, "The validated row type must be observed before reporting metadata."); final List ancestors = new ArrayList<>(); + final List versionContexts = new ArrayList<>(); + if (expanded != null) { - expanded.accept(new RelShuttleImpl() { - @Override - public RelNode visit(RelNode other) { - List path = null; - if (other instanceof ExpansionNode) { - path = ((ExpansionNode) other).getPath().getPathComponents(); - } else if (other instanceof ExternalQueryRelBase) { - path = ((ExternalQueryRelBase) other).getPath().getPathComponents(); - } - if (path != null) { - ancestors.add(new SqlIdentifier(path, SqlParserPos.ZERO)); - return other; - } - return super.visit(other); - } + expanded.accept( + new RelShuttleImpl() { + @Override + public RelNode visit(RelNode other) { + List path = null; + if (other instanceof ExpansionNode) { + path = ((ExpansionNode) other).getPath().getPathComponents(); + } else if (other instanceof ExternalQueryRelBase) { + path = ((ExternalQueryRelBase) other).getPath().getPathComponents(); + } - @Override - public RelNode visit(TableScan scan) { - ancestors.add(new SqlIdentifier(scan.getTable().getQualifiedName(), SqlParserPos.ZERO)); - return scan; - } - }); + if (path != null) { + ancestors.add(new SqlIdentifier(path, SqlParserPos.ZERO)); + versionContexts.add( + (other instanceof ExpansionNode) + ? ((ExpansionNode) other).getVersionContext() + : null); + + return other; + } + + return super.visit(other); + } + + @Override + public RelNode visit(TableScan scan) { + ancestors.add( + new SqlIdentifier(scan.getTable().getQualifiedName(), SqlParserPos.ZERO)); + versionContexts.add(((ScanRelBase) scan).getTableMetadata().getVersionContext()); + + return scan; + } + }); } else if (sql != null) { - ancestors.addAll(AncestorsVisitor.extractAncestors(sql).stream() - .filter(input -> !RESERVED_PARENT_NAMES.contains(input.toString())).collect(Collectors.toList())); + ancestors.addAll( + AncestorsVisitor.extractAncestors(sql).stream() + .filter(input -> !RESERVED_PARENT_NAMES.contains(input.toString())) + .collect(Collectors.toList())); } List fieldOrigins = null; @@ -365,15 +380,7 @@ public RelNode visit(TableScan scan) { List scanPaths = null; if (logicalAfter != null) { - scanPaths = FluentIterable.from(getScans(logicalAfter)) - .transform(new Function, ScanPath>() { - @Override - public ScanPath apply(List path) { - return new ScanPath().setPathList(path); - } - }) - .toList(); - + scanPaths = getScans(logicalAfter); externalQuerySourceInfo = getExternalQuerySources(logicalAfter); } @@ -386,7 +393,7 @@ public ScanPath apply(List path) { ancestors, // list of parents fieldOrigins, null, - getParentsFromSql(ancestors), // convert parent to ParentDatasetInfo + getParentsFromSql(ancestors, versionContexts), // convert parent to ParentDatasetInfo sql, rowType, getGrandParents(ancestors), // list of all parents to be stored with dataset @@ -468,7 +475,8 @@ private List getGrandParents(List parents) { * Return lists of {@link ParentDatasetInfo} from given list of directly referred tables in the query. 
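* The ancestors and versionContexts parameters are parallel lists: the shuttle in build() above appends to
* both in lockstep (adding a null version context for nodes, such as ExternalQueryRelBase, that carry none),
* so entry i of versionContexts always describes entry i of ancestors. For example (the serialized form is
* illustrative), after expanding a view at branch main and scanning an unversioned table, ancestors is
* [s.folder.view, src.tbl] and versionContexts is ["BRANCH main", null].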
* @return The list of directly referenced virtual or physical datasets */ - private List getParentsFromSql(List ancestors) { + private List getParentsFromSql( + List ancestors, List versionContexts) { if (ancestors == null) { return null; } @@ -478,6 +486,17 @@ private List getParentsFromSql(List ancestors) final NamespaceKey datasetPath = new NamespaceKey(sqlIdentifier.names); result.add(getDataset(datasetPath)); } + + final int versionContextsSize = versionContexts.size(); + for (int index = 0; index < versionContextsSize; ++index) { + final TableVersionContext versionContext = versionContexts.get(index); + if (versionContext == null) { + continue; + } + + result.get(index).setVersionContext(versionContext.serialize()); + } + return result; } catch (Throwable e) { logger.warn( @@ -611,12 +630,17 @@ private static void getSourcesFromParentDatasetForExternalQuery(List> getScans(RelNode logicalPlan) { - final ImmutableList.Builder> builder = ImmutableList.builder(); + public static List getScans(RelNode logicalPlan) { + final ImmutableList.Builder builder = ImmutableList.builder(); logicalPlan.accept(new StatelessRelShuttleImpl() { @Override public RelNode visit(final TableScan scan) { - builder.add(scan.getTable().getQualifiedName()); + ScanPath path = new ScanPath().setPathList(scan.getTable().getQualifiedName()); + TableVersionContext versionContext = ((ScanRelBase)scan).getTableMetadata().getVersionContext(); + if (versionContext != null) { + path.setVersionContext(versionContext.serialize()); + } + builder.add(path); return super.visit(scan); } diff --git a/services/jobs/src/main/proto/job.proto b/services/jobs/src/main/proto/job.proto index 38912cda98..63ce1a339c 100644 --- a/services/jobs/src/main/proto/job.proto +++ b/services/jobs/src/main/proto/job.proto @@ -63,6 +63,7 @@ message JobSubmission { message ParentDatasetInfo { repeated string datasetPath = 1; optional com.dremio.service.namespace.dataset.proto.DatasetType type = 2; + optional string versionContext = 3; } message DownloadInfo { @@ -82,6 +83,7 @@ message MaterializationSummary { message ScanPath { repeated string path = 1; + optional string versionContext = 2; } message JobFailureInfo { @@ -119,7 +121,7 @@ message SpillJobDetails { message JobInfo { required JobId jobId = 1; - required string sql = 2; + required string sql = 2; // potentially truncated sql, is_truncated_sql is true if truncated optional com.dremio.proto.model.attempts.RequestType requestType = 3; optional string client = 4; // client from which job was submitted optional string user = 5; @@ -140,7 +142,7 @@ message JobInfo { // list of all parents of parents. 
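As the comment on the sql field above notes, JobInfo.sql may now hold only a truncated prefix; the is_truncated_sql flag (field 50) and the ExtraJobInfo message, both added further down in this file, carry the truncation signal and the full text respectively. A minimal writer-side sketch of that contract, assuming the protostuff-generated setters follow the usual naming; the helper and the limit are hypothetical, since the actual Dremio threshold and call sites are not part of this diff:

  // Hypothetical limit; the real cutoff used by the jobs service is not shown in this diff.
  private static final int SQL_TEXT_LIMIT = 32_000;

  static void recordSql(JobInfo info, ExtraJobInfo extra, String sql) {
    if (sql.length() > SQL_TEXT_LIMIT) {
      info.setSql(sql.substring(0, SQL_TEXT_LIMIT)); // truncated prefix stays on the hot JobInfo record
      info.setIsTruncatedSql(true);
      extra.setSql(sql);                             // full SQL text kept in the side record
    } else {
      info.setSql(sql);
      info.setIsTruncatedSql(false);
    }
  }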
repeated com.dremio.service.namespace.dataset.proto.ParentDataset grandParents = 19; optional DownloadInfo downloadInfo = 20; // link to download data for UI_EXPORT jobs - optional string description = 21; + optional string description = 21; // it'll be 'NA' in case of RUN_SQL jobrequest types, as same data can be found in sql=2 above optional MaterializationSummary materializationFor = 22; optional double original_cost = 23 [default = 1.0]; repeated string partitions = 24; @@ -170,6 +172,7 @@ message JobInfo { repeated string matched_reflection_ids = 47; repeated string chosen_reflection_ids = 48; optional QueryLabel queryLabel = 49 [default = NONE]; + optional bool is_truncated_sql = 50 [default = false]; } message ResourceSchedulingInfo { @@ -188,6 +191,7 @@ message ResourceSchedulingInfo { message JoinTable { optional int32 tableId = 1; // used to distinguish multiple instances of the same table in the query repeated string tableSchemaPath = 2; + optional string versionContext = 3; } // represents an equality condition @@ -258,6 +262,7 @@ enum QueryType { FLIGHT = 15; // for queries submitted outside of dac using a FlightClient METADATA_REFRESH = 16; // Queries related to metadata refresh INTERNAL_ICEBERG_METADATA_DROP = 17; // Queries related to internal iceberg metadata drop + D2D = 18; } enum QueryLabel { @@ -274,6 +279,8 @@ message JobStats { optional int64 inputRecords = 3; optional int64 outputRecords = 4; optional bool isOutputLimited = 5; // If true, the output was limited based on `planner.output_limit_size` + optional int64 addedFiles = 6; // DML specific stat + optional int64 removedFiles = 7; // DML specific stat } message JobResult { @@ -282,6 +289,10 @@ message JobResult { optional SessionId sessionId = 3; // Store session id as value in the KVStore } +message ExtraJobInfo { + optional string sql = 1; //full SQL text +} + message JobAttempt { optional JobState state = 1; optional JobInfo info = 2; @@ -396,6 +407,7 @@ message DataSet { optional int32 datasetSizeRows = 5; repeated Reflection reflectionsDefined = 6; repeated string datasetPaths = 7; + optional string versionContext = 8; } message ScannedDataset { diff --git a/services/jobs/src/main/protobuf/jobs-service.proto b/services/jobs/src/main/protobuf/jobs-service.proto index 1d49292e03..0a20beb7a6 100644 --- a/services/jobs/src/main/protobuf/jobs-service.proto +++ b/services/jobs/src/main/protobuf/jobs-service.proto @@ -86,10 +86,16 @@ message JobEvent { JobSummary progress_job_summary = 4; JobSummary final_job_summary = 5; com.dremio.service.job.proto.JobSubmission job_submission = 6; - bytes result_data = 7; + bytes result_data = 7 [deprecated = true]; + QueryResultData query_result_data = 8; } } +message QueryResultData { + exec.shared.QueryData header = 1; + bytes result_data = 2; +} + message StoreJobResultRequest { JobState job_state = 1; com.dremio.service.job.proto.JobId job_id = 2; @@ -306,6 +312,7 @@ enum QueryType { FLIGHT = 600; //query submitted by FlightClient METADATA_REFRESH = 700; // Queries related to metadata refresh INTERNAL_ICEBERG_METADATA_DROP = 800; // Queries related to internal iceberg metadata drop + D2D = 900; } enum QueryLabel { diff --git a/services/jobs/src/main/protobuf/logged-query.proto b/services/jobs/src/main/protobuf/logged-query.proto index f6d2361d65..b195189906 100644 --- a/services/jobs/src/main/protobuf/logged-query.proto +++ b/services/jobs/src/main/protobuf/logged-query.proto @@ -31,6 +31,7 @@ message LoggedQuery { JobState outcome = 6; string outcome_reason = 7; string 
username = 8; + bool is_truncated_query_text = 9; } // Copy of Jobstate from JobInfo. - job.proto #JobState diff --git a/services/jobs/src/test/java/com/dremio/service/jobs/TestArrowFileReader.java b/services/jobs/src/test/java/com/dremio/service/jobs/TestArrowFileReader.java index 500b63d645..dd685504da 100644 --- a/services/jobs/src/test/java/com/dremio/service/jobs/TestArrowFileReader.java +++ b/services/jobs/src/test/java/com/dremio/service/jobs/TestArrowFileReader.java @@ -27,11 +27,8 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import java.io.IOException; -import java.net.URI; import java.util.ArrayList; import java.util.List; -import java.util.function.Supplier; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.BitVector; @@ -56,10 +53,7 @@ import org.junit.rules.TemporaryFolder; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import com.dremio.exec.hadoop.DremioHadoopUtils; import com.dremio.exec.hadoop.HadoopFileSystem; import com.dremio.exec.physical.base.OpProps; import com.dremio.exec.proto.ExecProtos.FragmentHandle; @@ -79,7 +73,6 @@ import com.dremio.exec.store.easy.arrow.ArrowFormatPluginConfig; import com.dremio.exec.store.easy.arrow.ArrowRecordWriter; import com.dremio.sabot.exec.context.OperatorContext; -import com.dremio.service.users.SystemUser; import com.dremio.test.AllocatorRule; import com.dremio.test.DremioTest; import com.google.common.collect.Iterators; @@ -525,7 +518,7 @@ private ArrowFileMetadata writeArrowFile(VectorContainer... batches) throws Exce when(writerConf.getProps()).thenReturn(OpProps.prototype()); final EasyFormatPlugin formatPlugin = mock(EasyFormatPlugin.class); - final FileSystemPlugin fsPlugin = getMockedFileSystemPlugin(); + final FileSystemPlugin fsPlugin = mock(FileSystemPlugin.class); when(writerConf.getFormatPlugin()).thenReturn(formatPlugin); when(formatPlugin.getFsPlugin()).thenReturn(fsPlugin); when(fsPlugin.createFS(notNull(), notNull())).thenReturn(HadoopFileSystem.getLocal(FS_CONF)); @@ -585,41 +578,4 @@ public void assertArrowFileMetadata(ArrowFileMetadata arrowFileMetadata) { public List getRecords(ArrowFileReader reader, long start, long limit, BufferAllocator allocator) throws Exception { return reader.read(start, limit); } - - public static FileSystemPlugin getMockedFileSystemPlugin() { - FileSystemPlugin fileSystemPlugin = mock(FileSystemPlugin.class); - when(fileSystemPlugin.getHadoopFsSupplier(any(String.class), any(Configuration.class), any(String.class))). - thenAnswer( - new Answer() { - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - Object[] args = invocation.getArguments(); - Supplier fileSystemSupplier = getFileSystemSupplier(DremioHadoopUtils.toHadoopPath((String) args[0]).toUri(), (Configuration) args[1], (String) args[2]); - return fileSystemSupplier; - } - }); - - when(fileSystemPlugin.getHadoopFsSupplier(any(String.class), any(Configuration.class))). 
- thenAnswer( - new Answer() { - @Override - public Object answer(InvocationOnMock invocation) throws Throwable { - Object[] args = invocation.getArguments(); - Supplier fileSystemSupplier = getFileSystemSupplier(DremioHadoopUtils.toHadoopPath((String) args[0]).toUri(), (Configuration) args[1], SystemUser.SYSTEM_USERNAME); - - return fileSystemSupplier; - } - }); - return fileSystemPlugin; - } - - private static Supplier getFileSystemSupplier(final URI uri, final Configuration conf, String user) { - return () -> { - try { - return FileSystem.get(uri, conf, user); - } catch (IOException | InterruptedException e) { - throw new RuntimeException(); - } - }; - } } diff --git a/services/jobs/src/test/java/com/dremio/service/jobs/TestJobUtils.java b/services/jobs/src/test/java/com/dremio/service/jobs/TestJobUtils.java index 960ef1e656..937b585a12 100644 --- a/services/jobs/src/test/java/com/dremio/service/jobs/TestJobUtils.java +++ b/services/jobs/src/test/java/com/dremio/service/jobs/TestJobUtils.java @@ -59,7 +59,8 @@ public static List parameters() { new Object[] {com.dremio.service.job.proto.QueryType.ACCELERATOR_DROP, QueryType.ACCELERATOR_DROP}, new Object[] {com.dremio.service.job.proto.QueryType.PREPARE_INTERNAL, QueryType.PREPARE_INTERNAL}, new Object[] {com.dremio.service.job.proto.QueryType.UI_INITIAL_PREVIEW, QueryType.UI_INITIAL_PREVIEW}, - new Object[] {com.dremio.service.job.proto.QueryType.FLIGHT, QueryType.FLIGHT}); + new Object[] {com.dremio.service.job.proto.QueryType.FLIGHT, QueryType.FLIGHT}, + new Object[] {com.dremio.service.job.proto.QueryType.D2D, QueryType.D2D}); } public QueryTypeBufStuffMappingTest(com.dremio.service.job.proto.QueryType pb2Type, QueryType pb3Type) { @@ -97,7 +98,8 @@ public static List parameters() { new Object[]{"C++", com.dremio.service.job.proto.QueryType.ODBC}, new Object[]{"c++", com.dremio.service.job.proto.QueryType.ODBC}, new Object[]{"Arrow Flight", com.dremio.service.job.proto.QueryType.FLIGHT}, - new Object[]{"ARROW FLIGHT", com.dremio.service.job.proto.QueryType.FLIGHT} + new Object[]{"ARROW FLIGHT", com.dremio.service.job.proto.QueryType.FLIGHT}, + new Object[]{"Dremio-to-Dremio", com.dremio.service.job.proto.QueryType.D2D} ); } @@ -142,7 +144,8 @@ public static List parameters() { new Object[]{com.dremio.service.job.proto.QueryType.PREPARE_INTERNAL, UserBitShared.WorkloadType.DDL, UserBitShared.WorkloadClass.NRT}, new Object[]{com.dremio.service.job.proto.QueryType.ACCELERATOR_CREATE, UserBitShared.WorkloadType.ACCELERATOR, UserBitShared.WorkloadClass.BACKGROUND}, new Object[]{com.dremio.service.job.proto.QueryType.ACCELERATOR_EXPLAIN, UserBitShared.WorkloadType.ACCELERATOR, UserBitShared.WorkloadClass.BACKGROUND}, - new Object[]{com.dremio.service.job.proto.QueryType.FLIGHT, UserBitShared.WorkloadType.FLIGHT, UserBitShared.WorkloadClass.GENERAL} + new Object[]{com.dremio.service.job.proto.QueryType.FLIGHT, UserBitShared.WorkloadType.FLIGHT, UserBitShared.WorkloadClass.GENERAL}, + new Object[]{com.dremio.service.job.proto.QueryType.D2D, UserBitShared.WorkloadType.D2D, UserBitShared.WorkloadClass.GENERAL} ); } diff --git a/services/jobtelemetry/client/pom.xml b/services/jobtelemetry/client/pom.xml index 5c535008fe..9a534b6e00 100644 --- a/services/jobtelemetry/client/pom.xml +++ b/services/jobtelemetry/client/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-jobtelemetry - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 @@ -30,7 +30,6 @@ dremio-services-jobtelemetry-client jar Services - Job 
Telemetry Client - 24.0.0-202302100528110223-3a169b7c diff --git a/services/jobtelemetry/common/pom.xml b/services/jobtelemetry/common/pom.xml index 0e191037b8..373a93c56f 100644 --- a/services/jobtelemetry/common/pom.xml +++ b/services/jobtelemetry/common/pom.xml @@ -23,11 +23,10 @@ com.dremio.services dremio-services-jobtelemetry - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobtelemetry-common - 24.0.0-202302100528110223-3a169b7c Services - JobTelemetry - Common jar diff --git a/services/jobtelemetry/pom.xml b/services/jobtelemetry/pom.xml index 2a8e0565e9..460b39dafa 100644 --- a/services/jobtelemetry/pom.xml +++ b/services/jobtelemetry/pom.xml @@ -23,7 +23,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobtelemetry diff --git a/services/jobtelemetry/server/pom.xml b/services/jobtelemetry/server/pom.xml index c2218b433b..980c7abc74 100644 --- a/services/jobtelemetry/server/pom.xml +++ b/services/jobtelemetry/server/pom.xml @@ -23,11 +23,10 @@ com.dremio.services dremio-services-jobtelemetry - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-jobtelemetry-server - 24.0.0-202302100528110223-3a169b7c Services - Job Telemetry - Server diff --git a/services/jobtelemetry/server/src/main/java/com/dremio/service/jobtelemetry/server/JobTelemetryServiceImpl.java b/services/jobtelemetry/server/src/main/java/com/dremio/service/jobtelemetry/server/JobTelemetryServiceImpl.java index 0d17c0ea90..f99644a34f 100644 --- a/services/jobtelemetry/server/src/main/java/com/dremio/service/jobtelemetry/server/JobTelemetryServiceImpl.java +++ b/services/jobtelemetry/server/src/main/java/com/dremio/service/jobtelemetry/server/JobTelemetryServiceImpl.java @@ -62,7 +62,7 @@ public class JobTelemetryServiceImpl extends JobTelemetryServiceGrpc.JobTelemetr private final ProgressMetricsPublisher progressMetricsPublisher; private final BackgroundProfileWriter bgProfileWriter; private final boolean saveFullProfileOnQueryTermination; - private Retryer retryer; + private final Retryer retryer; @Inject JobTelemetryServiceImpl(MetricsStore metricsStore, ProfileStore profileStore, GrpcTracerFacade tracer) { @@ -85,7 +85,7 @@ public JobTelemetryServiceImpl(MetricsStore metricsStore, ProfileStore profileSt metricsPublishFrequencyMillis); this.bgProfileWriter = new BackgroundProfileWriter(profileStore, tracer); this.saveFullProfileOnQueryTermination = saveFullProfileOnQueryTermination; - this.retryer = new Retryer.Builder() + this.retryer = Retryer.newBuilder() .retryIfExceptionOfType(DatastoreException.class) .setMaxRetries(MAX_RETRIES) .build(); diff --git a/services/maestro/client/pom.xml b/services/maestro/client/pom.xml index 380228a2b0..e66c49c462 100644 --- a/services/maestro/client/pom.xml +++ b/services/maestro/client/pom.xml @@ -22,7 +22,7 @@ dremio-services-maestro com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 @@ -30,7 +30,6 @@ dremio-services-maestro-client jar Services - Maestro Client - 24.0.0-202302100528110223-3a169b7c diff --git a/services/maestro/common/pom.xml b/services/maestro/common/pom.xml index 9864e18d6d..9a4940851f 100644 --- a/services/maestro/common/pom.xml +++ b/services/maestro/common/pom.xml @@ -22,7 +22,7 @@ dremio-services-maestro com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/services/maestro/pom.xml 
b/services/maestro/pom.xml index 39785c2215..4c68ed3437 100644 --- a/services/maestro/pom.xml +++ b/services/maestro/pom.xml @@ -28,7 +28,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-maestro Services - Maestro Service diff --git a/services/namespace/pom.xml b/services/namespace/pom.xml index 4e4e06b007..f5e671a545 100644 --- a/services/namespace/pom.xml +++ b/services/namespace/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-namespace Services - Namespace @@ -133,6 +133,10 @@ org.xerial.snappy snappy-java + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/DatasetConfigAndEntitiesOnPath.java b/services/namespace/src/main/java/com/dremio/service/namespace/DatasetConfigAndEntitiesOnPath.java new file mode 100644 index 0000000000..239434c6b1 --- /dev/null +++ b/services/namespace/src/main/java/com/dremio/service/namespace/DatasetConfigAndEntitiesOnPath.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.service.namespace; + +import java.util.List; + +import com.dremio.service.namespace.dataset.proto.DatasetConfig; +import com.dremio.service.namespace.proto.NameSpaceContainer; + +/** + * A holder for the DatasetConfig of a dataset and the entities on the dataset path. + * Used as the return value of {@link NamespaceService#getDatasetAndEntitiesOnPath()} to avoid retrieving the + * same dataset from the KV store again.
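+ * + * Illustrative usage (variable names are hypothetical): + *   DatasetConfigAndEntitiesOnPath result = namespaceService.getDatasetAndEntitiesOnPath(datasetKey); + *   DatasetConfig config = result.getDatasetConfig(); + *   List entities = result.getEntitiesOnPath(); // ordered root to leaf, no second KV store read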
+ */ + +public class DatasetConfigAndEntitiesOnPath { + private final DatasetConfig datasetConfig; + private final List entitiesOnPath; + + public DatasetConfigAndEntitiesOnPath(DatasetConfig datasetConfig, List entitiesOnPath) { + this.datasetConfig = datasetConfig; + this.entitiesOnPath = entitiesOnPath; + } + + public DatasetConfig getDatasetConfig() { + return datasetConfig; + } + + public List getEntitiesOnPath() { + return entitiesOnPath; + } + + public static DatasetConfigAndEntitiesOnPath of(DatasetConfig datasetConfig, List entitiesOnPath) { + return new DatasetConfigAndEntitiesOnPath(datasetConfig, entitiesOnPath); + } +} diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceEntity.java b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceEntity.java index 9bb536361d..5c67723fc9 100644 --- a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceEntity.java +++ b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceEntity.java @@ -55,11 +55,10 @@ static NamespaceEntity toEntity( Type type, NamespaceKey path, Object config, - boolean keyNormalization, List attributes ) { final NameSpaceContainer container = new NameSpaceContainer(); - final NamespaceInternalKey namespaceInternalKey = new NamespaceInternalKey(path, keyNormalization); + final NamespaceInternalKey namespaceInternalKey = new NamespaceInternalKey(path); container.setType(type); switch (type) { case DATASET: diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceInternalKey.java b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceInternalKey.java index 9ddd7593fa..30e2a9dc79 100644 --- a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceInternalKey.java +++ b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceInternalKey.java @@ -52,12 +52,6 @@ class NamespaceInternalKey { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NamespaceInternalKey.class); - /** - * Hard coded flag that allows disabling namespace key normalization. - * The purpose is to have one specific build with this set to false - */ - private static final boolean ENABLE_KEY_NORMALIZATION = true; - // TODO: make it independent of SQL // Back ticks are allowed to escape dot/keywords. private static final char QUOTE = SqlUtils.QUOTE; @@ -135,11 +129,7 @@ private static List generatePrefixes() { private final NamespaceKey namespaceKey; NamespaceInternalKey(final NamespaceKey path) { - this(path, ENABLE_KEY_NORMALIZATION); - } - - NamespaceInternalKey(final NamespaceKey path, boolean normalize) { - final List processedPathComponents = processPathComponents(path, normalize); + final List processedPathComponents = processPathComponents(path); this.key = buildKey(processedPathComponents); this.rangeStartKey = buildRangeStartKey(processedPathComponents); this.rangeEndKey = buildRangeEndKey(rangeStartKey); @@ -177,10 +167,9 @@ static String getRootLookupEndKey() { * Returns a list of path components. * * @param path the NamespaceKey to process. - * @param normalize indicates whether path components should be converted to lower case. * @return a list of path components. 
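* For example, the path MySpace.MyFolder.DS now always yields [myspace, myfolder, ds]: with the normalize
* flag removed, every component is unconditionally lower-cased via Locale.ROOT, so logically equal paths
* map to identical store keys.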
*/ - static List processPathComponents(final NamespaceKey path, boolean normalize) { + static List processPathComponents(final NamespaceKey path) { final List processedPathComponents = new ArrayList<>(); final int numPathComponents = path.getPathComponents().size(); @@ -196,7 +185,7 @@ static List processPathComponents(final NamespaceKey path, boolean norma throw UserException.validationError().message("Invalid name space key. Given: %s, Expected format: %s", path.getSchemaPath(), ERROR_MSG_EXPECTED_NAMESPACE_PATH_FORMAT).build(logger); } - processedPathComponents.add((normalize)? component.toLowerCase(Locale.ROOT) : component); + processedPathComponents.add(component.toLowerCase(Locale.ROOT)); }); return processedPathComponents; diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceService.java b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceService.java index 4ab4406105..dd73871a90 100644 --- a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceService.java +++ b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceService.java @@ -122,6 +122,14 @@ interface Factory { */ DatasetConfig getDataset(NamespaceKey datasetPath) throws NamespaceException; + /** + * Returns {@link DatasetConfigAndEntitiesOnPath} corresponding to given path. + * + * @param datasetPath path whose config will be returned + * @throws NamespaceException if a namespace or a dataset cannot be found for the given key + */ + DatasetConfigAndEntitiesOnPath getDatasetAndEntitiesOnPath(NamespaceKey datasetPath) throws NamespaceException; + /** * Get multiple entities of given type * @param lookupKeys namespace keys @@ -300,4 +308,6 @@ interface DeleteCallback { * @return dataset associated with this path or null, if there is no dataset. */ NameSpaceContainer getEntityByPath(NamespaceKey datasetPath) throws NamespaceException; + + default void invalidateNamespaceCache(final NamespaceKey key) {} } diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceServiceImpl.java b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceServiceImpl.java index d7094724f5..ea636c52df 100644 --- a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceServiceImpl.java +++ b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceServiceImpl.java @@ -16,7 +16,7 @@ package com.dremio.service.namespace; import static com.dremio.service.namespace.NamespaceInternalKeyDumpUtil.parseKey; -import static com.dremio.service.namespace.NamespaceUtils.getId; +import static com.dremio.service.namespace.NamespaceUtils.getIdOrNull; import static com.dremio.service.namespace.NamespaceUtils.isListable; import static com.dremio.service.namespace.NamespaceUtils.isPhysicalDataset; import static com.dremio.service.namespace.NamespaceUtils.lastElement; @@ -35,7 +35,6 @@ import java.nio.charset.StandardCharsets; import java.util.AbstractMap; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -102,6 +101,9 @@ import com.google.common.collect.Range; import com.google.protobuf.ByteString; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.instrumentation.annotations.WithSpan; + /** * Namespace management. 
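*
* Tracing note: several methods below are annotated with OpenTelemetry's {@code @WithSpan} (from the
* opentelemetry-instrumentation-annotations dependency added to this module's pom above), which opens a
* span per call; list sizes are attached as span attributes, e.g.
* Span.current().setAttribute("dremio.namespace.getSpaces.numSpaces", spaces.size()).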
*/ @@ -118,7 +120,6 @@ public class NamespaceServiceImpl implements NamespaceService { private final LegacyIndexedStore namespace; private final LegacyIndexedStore partitionChunkStore; private final LegacyKVStore multiSplitStore; - private final boolean keyNormalization; /** * Factory for {@code NamespaceServiceImpl} @@ -146,14 +147,13 @@ public NamespaceService get(NamespaceIdentity identity) { @Inject public NamespaceServiceImpl(final LegacyKVStoreProvider kvStoreProvider) { - this(kvStoreProvider, true); - } - - protected NamespaceServiceImpl(final LegacyKVStoreProvider kvStoreProvider, boolean keyNormalization) { this.namespace = createStore(kvStoreProvider); this.partitionChunkStore = kvStoreProvider.getStore(PartitionChunkCreator.class); this.multiSplitStore = kvStoreProvider.getStore(MultiSplitStoreCreator.class); - this.keyNormalization = keyNormalization; + } + + LegacyIndexedStore getNamespaceStore() { + return namespace; } protected LegacyIndexedStore createStore(final LegacyKVStoreProvider kvStoreProvider) { @@ -299,7 +299,6 @@ public int deleteSplitOrphans(PartitionChunkId.SplitOrphansRetentionPolicy polic continue; default: - continue; } } @@ -307,7 +306,7 @@ public int deleteSplitOrphans(PartitionChunkId.SplitOrphansRetentionPolicy polic Collections.sort(ranges, PARTITION_CHUNK_RANGE_COMPARATOR); // Some explanations: - // ranges is setup to contain the current (exclusive) range of partition chunks for each dataset. + // ranges is set up to contain the current (exclusive) range of partition chunks for each dataset. // The function then iterates over all partition chunks present in the partition chunks store and verify // that the partition chunk belongs to one of the dataset partition chunk ranges. If not, the item is dropped. // The binary search provides the index in ranges where a partition chunk would be inserted if not @@ -335,7 +334,7 @@ public int deleteSplitOrphans(PartitionChunkId.SplitOrphansRetentionPolicy polic sb.delete(0, sb.length()); count = 0; } - sb.append(id.toString()).append(" "); + sb.append(id).append(" "); count++; } else { logger.debug("Deleting partition chunk associated with key {} from the partition chunk store.", e.getKey()); @@ -369,7 +368,7 @@ public int deleteSplitOrphans(PartitionChunkId.SplitOrphansRetentionPolicy polic sb.delete(0, sb.length()); count = 0; } - sb.append(id.toString()).append(" "); + sb.append(id).append(" "); count++; if (partitionChunkStore.contains(id)) { logger.warn("MultiSplit being deleted, but PartitionChunk exists for id {}.", id.getSplitId()); @@ -394,7 +393,7 @@ protected LegacyIndexedStore getStore() { /** * Helper method which creates a new entity or update the existing entity with given entity * - * @param entity + * @param entity - The Namespace Entity * @throws NamespaceException */ private void createOrUpdateEntity(final NamespaceEntity entity, NamespaceAttribute... attributes) throws NamespaceException { @@ -425,7 +424,7 @@ protected void doCreateOrUpdateEntity(final NamespaceEntity entity, List existingPathList = existingContainer.getFullPathList(); throw UserException.concurrentModificationError() - .message("There already exists an entity of type [%s] at given path [%s]", - existingContainer.getType(), newOrUpdatedEntity.getPathKey().getPath()) + .message("The current location already contains a %s named \"%s\". 
Please use a unique name for the new %s.", + existingContainer.getType().toString().toLowerCase(), existingPathList.get(existingPathList.size() - 1), + newOrUpdatedEntity.getContainer().getType().toString().toLowerCase()) .build(logger); } @@ -472,7 +473,8 @@ protected boolean ensureIdExistsTypeMatches(NamespaceEntity newOrUpdatedEntity, } // make sure the id remains the same - final String idInExistingContainer = getId(existingContainer); + final String idInExistingContainer = getIdOrNull(existingContainer); + // TODO: Could throw a null pointer exception if idInExistingContainer == null. if (!idInExistingContainer.equals(idInContainer)) { throw UserException.invalidMetadataError().message("There already exists an entity of type [%s] at given path [%s] with Id %s. Unable to replace with Id %s", existingContainer.getType(), newOrUpdatedEntity.getPathKey().getPath(), idInExistingContainer, idInContainer).buildSilently(); @@ -483,12 +485,12 @@ protected boolean ensureIdExistsTypeMatches(NamespaceEntity newOrUpdatedEntity, @Override public void addOrUpdateSource(NamespaceKey sourcePath, SourceConfig sourceConfig, NamespaceAttribute... attributes) throws NamespaceException { - createOrUpdateEntity(NamespaceEntity.toEntity(SOURCE, sourcePath, sourceConfig, keyNormalization, new ArrayList<>()), attributes); + createOrUpdateEntity(NamespaceEntity.toEntity(SOURCE, sourcePath, sourceConfig, new ArrayList<>()), attributes); } @Override public void addOrUpdateSpace(NamespaceKey spacePath, SpaceConfig spaceConfig, NamespaceAttribute... attributes) throws NamespaceException { - createOrUpdateEntity(NamespaceEntity.toEntity(SPACE, spacePath, spaceConfig, keyNormalization, new ArrayList<>()), attributes); + createOrUpdateEntity(NamespaceEntity.toEntity(SPACE, spacePath, spaceConfig, new ArrayList<>()), attributes); } @Override @@ -497,7 +499,7 @@ public void addOrUpdateFunction(NamespaceKey udfPath, FunctionConfig functionCon functionConfig.setCreatedAt(System.currentTimeMillis()); } functionConfig.setLastModified(System.currentTimeMillis()); - createOrUpdateEntity(NamespaceEntity.toEntity(FUNCTION, udfPath, functionConfig, keyNormalization, new ArrayList<>()), attributes); + createOrUpdateEntity(NamespaceEntity.toEntity(FUNCTION, udfPath, functionConfig, new ArrayList<>()), attributes); } @Override @@ -533,7 +535,7 @@ public void addOrUpdateDataset(NamespaceKey datasetPath, DatasetConfig dataset, break; } - createOrUpdateEntity(NamespaceEntity.toEntity(DATASET, datasetPath, dataset, keyNormalization, new ArrayList<>()), attributes); + createOrUpdateEntity(NamespaceEntity.toEntity(DATASET, datasetPath, dataset, new ArrayList<>()), attributes); } @Override @@ -544,10 +546,7 @@ public boolean hasChildren(NamespaceKey key) { } catch (NamespaceException e) { throw new RuntimeException("failed during dataset listing of sub-tree under: " + key); } - if (FluentIterable.from(children).size() > 0) { - return true; - } - return false; + return FluentIterable.from(children).size() > 0; } /** @@ -567,7 +566,7 @@ private class DatasetMetadataSaverImpl implements DatasetMetadataSaver { private long accumulatedRecordCount; private List accumulatedSplits; private int totalNumSplits; - private boolean datasetMetadataConsistencyValidate; + private final boolean datasetMetadataConsistencyValidate; DatasetMetadataSaverImpl(NamespaceKey datasetPath, EntityId datasetId, long nextDatasetVersion, SplitCompression splitCompression, long maxSinglePartitionChunks, boolean datasetMetadataConsistencyValidate) { this.datasetPath = 
datasetPath; @@ -808,13 +807,15 @@ public void deleteSplits(Iterable splits) { } @Override + @WithSpan public void addOrUpdateFolder(NamespaceKey folderPath, FolderConfig folderConfig, NamespaceAttribute... attributes) throws NamespaceException { - createOrUpdateEntity(NamespaceEntity.toEntity(FOLDER, folderPath, folderConfig, keyNormalization, new ArrayList<>()), attributes); + createOrUpdateEntity(NamespaceEntity.toEntity(FOLDER, folderPath, folderConfig, new ArrayList<>()), attributes); } @Override + @WithSpan public void addOrUpdateHome(NamespaceKey homePath, HomeConfig homeConfig) throws NamespaceException { - createOrUpdateEntity(NamespaceEntity.toEntity(HOME, homePath, homeConfig, keyNormalization, new ArrayList<>())); + createOrUpdateEntity(NamespaceEntity.toEntity(HOME, homePath, homeConfig, new ArrayList<>())); } @Override @@ -839,23 +840,23 @@ public void canSourceConfigBeSaved(SourceConfig newConfig, SourceConfig existing @Override public String getEntityIdByPath(NamespaceKey datasetPath) throws NamespaceNotFoundException { - final List entities = getEntities(Arrays.asList(datasetPath)); + final List entities = getEntities(Collections.singletonList(datasetPath)); NameSpaceContainer entity = entities.get(0); - return entity != null ? NamespaceUtils.getId(entity) : null; + return entity != null ? NamespaceUtils.getIdOrNull(entity) : null; } @Override public NameSpaceContainer getEntityByPath(NamespaceKey datasetPath) throws NamespaceException { - final List entities = getEntities(Arrays.asList(datasetPath)); + final List entities = getEntities(Collections.singletonList(datasetPath)); return entities.get(0); } protected List doGetEntities(List lookupKeys) { final List keys = lookupKeys.stream() - .map(input -> new NamespaceInternalKey(input, keyNormalization).getKey()).collect(Collectors.toList()); + .map(input -> new NamespaceInternalKey(input).getKey()).collect(Collectors.toList()); return namespace.get(keys); } @@ -863,8 +864,9 @@ protected List doGetEntities(List lookupKeys) // GET @Override + @WithSpan public boolean exists(final NamespaceKey key, final Type type) { - final NameSpaceContainer container = namespace.get(new NamespaceInternalKey(key, keyNormalization).getKey()); + final NameSpaceContainer container = namespace.get(new NamespaceInternalKey(key).getKey()); return container != null && container.getType() == type; } @@ -900,13 +902,11 @@ NameSpaceContainer getEntity(final NamespaceKey key) throws NamespaceNotFoundExc throw new NamespaceNotFoundException(key, "not found"); } - return doGetEntity(key, container.getType(), entitiesOnPath); + return doGetEntity(entitiesOnPath); } - protected NameSpaceContainer doGetEntity(final NamespaceKey key, Type type, List entitiesOnPath) { - NameSpaceContainer container = lastElement(entitiesOnPath); - - return container; + protected NameSpaceContainer doGetEntity(List entitiesOnPath) { + return lastElement(entitiesOnPath); } protected NameSpaceContainer getEntityByIndex(IndexKey key, String index, Type type) throws NamespaceException { @@ -920,11 +920,13 @@ protected NameSpaceContainer getEntityByIndex(IndexKey key, String index, Type t } @Override + @WithSpan public SourceConfig getSource(NamespaceKey sourcePath) throws NamespaceException { return getEntity(sourcePath, SOURCE).getSource(); } @Override + @WithSpan public SourceConfig getSourceById(String id) throws NamespaceException { return getEntityByIndex(NamespaceIndexKeys.SOURCE_ID, id, SOURCE).getSource(); } @@ -945,6 +947,7 @@ public SpaceConfig getSpaceById(String id) 
throws NamespaceException { } @Override + @WithSpan public NameSpaceContainer getEntityById(String id) throws NamespaceNotFoundException { final SearchQuery query = SearchQueryUtils.or( SearchQueryUtils.newTermQuery(DatasetIndexKeys.DATASET_UUID, id), @@ -1020,16 +1023,33 @@ public List getEntitiesByIds(List ids) } @Override + @WithSpan public DatasetConfig getDataset(NamespaceKey datasetPath) throws NamespaceException { return getEntity(datasetPath, DATASET).getDataset(); } @Override + public DatasetConfigAndEntitiesOnPath getDatasetAndEntitiesOnPath(NamespaceKey datasetPath) throws NamespaceException { + final List entitiesOnPath = getEntitiesOnPath(datasetPath); + final NameSpaceContainer container = lastElement(entitiesOnPath); + + if (container == null || container.getType() != DATASET) { + throw new NamespaceNotFoundException(datasetPath, "not found"); + } + + final DatasetConfig dataset = doGetEntity(entitiesOnPath).getDataset(); + + return DatasetConfigAndEntitiesOnPath.of(dataset, entitiesOnPath); + } + + @Override + @WithSpan public FolderConfig getFolder(NamespaceKey folderPath) throws NamespaceException { return getEntity(folderPath, FOLDER).getFolder(); } @Override + @WithSpan public HomeConfig getHome(NamespaceKey homePath) throws NamespaceException { return getEntity(homePath, HOME).getHome(); } @@ -1061,8 +1081,9 @@ protected List doGetRootNamespaceContainers(final Type requi } @Override + @WithSpan public List getSpaces() { - return Lists.newArrayList( + final List spaces = Lists.newArrayList( Iterables.transform(doGetRootNamespaceContainers(SPACE), new Function() { @Override public SpaceConfig apply(NameSpaceContainer input) { @@ -1070,6 +1091,8 @@ public SpaceConfig apply(NameSpaceContainer input) { } }) ); + Span.current().setAttribute("dremio.namespace.getSpaces.numSpaces", spaces.size()); + return spaces; } @Override @@ -1085,6 +1108,7 @@ public FunctionConfig apply(NameSpaceContainer input) { } @Override + @WithSpan public List getHomeSpaces() { return Lists.newArrayList( Iterables.transform(doGetRootNamespaceContainers(HOME), new Function() { @@ -1097,8 +1121,9 @@ public HomeConfig apply(NameSpaceContainer input) { } @Override + @WithSpan public List getSources() { - return Lists.newArrayList( + final List sources = Lists.newArrayList( Iterables.transform(doGetRootNamespaceContainers(SOURCE), new Function() { @Override public SourceConfig apply(NameSpaceContainer input) { @@ -1106,8 +1131,11 @@ public SourceConfig apply(NameSpaceContainer input) { } }) ); + Span.current().setAttribute("dremio.namespace.getSources.numSources", sources.size()); + return sources; } + @Override public List getDatasets() { final Iterable> containerEntries; @@ -1131,7 +1159,7 @@ private List listEntity(final NamespaceKey rootKey) throws N // returns the child containers of the given rootKey as an iterable protected Iterable iterateEntity(final NamespaceKey rootKey) throws NamespaceException { - final NamespaceInternalKey rootInternalKey = new NamespaceInternalKey(rootKey, keyNormalization); + final NamespaceInternalKey rootInternalKey = new NamespaceInternalKey(rootKey); final Iterable> entries = namespace.find( new LegacyFindByRange<>(rootInternalKey.getRangeStartKey(), false, rootInternalKey.getRangeEndKey(), false)); return FluentIterable.from(entries).transform(input -> input.getValue()); @@ -1139,7 +1167,7 @@ protected Iterable iterateEntity(final NamespaceKey rootKey) @Override public Iterable getAllDatasets(final NamespaceKey root) throws NamespaceException { - final
NameSpaceContainer rootContainer = namespace.get(new NamespaceInternalKey(root, keyNormalization).getKey()); + final NameSpaceContainer rootContainer = namespace.get(new NamespaceInternalKey(root).getKey()); if (rootContainer == null) { return Collections.emptyList(); } @@ -1152,7 +1180,7 @@ public Iterable getAllDatasets(final NamespaceKey root) throws Nam } /** - * Iterator that lazily loads dataset entries in the sub-tree under the given {@link NamespaceUtils#isListable + * Iterator that lazily loads dataset entries in the subtree under the given {@link NamespaceUtils#isListable * listable} root. This implementation uses depth-first-search algorithm, unlike {@link #traverseEntity} which uses * breadth-first-search algorithm. So this avoids queueing up "dataset" containers. Note that "stack" contains only * listable containers which have a small memory footprint. @@ -1219,7 +1247,7 @@ public NamespaceKey next() { @Override public Iterable getAllDescendants(final NamespaceKey root) { - final NameSpaceContainer rootContainer = namespace.get(new NamespaceInternalKey(root, keyNormalization).getKey()); + final NameSpaceContainer rootContainer = namespace.get(new NamespaceInternalKey(root).getKey()); if (rootContainer == null) { return Collections.emptyList(); } @@ -1232,7 +1260,7 @@ public Iterable getAllDescendants(final NamespaceKey root) { } /** - * Iterator that lazily loads dataset entries in the sub-tree under the given {@link NamespaceUtils#isListable + * Iterator that lazily loads dataset entries in the subtree under the given {@link NamespaceUtils#isListable * listable} root. This implementation uses depth-first-search algorithm, unlike {@link #traverseEntity} which uses * breadth-first-search algorithm. So this avoids queueing up "dataset" containers. Note that "stack" contains only * listable containers which have a small memory footprint. @@ -1309,6 +1337,7 @@ public int getAllDatasetsCount(NamespaceKey parent) throws NamespaceException { } @Override + @WithSpan public BoundedDatasetCount getDatasetCount(NamespaceKey root, long searchTimeLimitMillis, int countLimitToStopSearch) throws NamespaceException { return getDatasetCountHelper(root, searchTimeLimitMillis, countLimitToStopSearch, this::iterateEntity); @@ -1359,7 +1388,9 @@ protected interface FunctionWithNamespaceException { } @Override + @WithSpan public List list(NamespaceKey root) throws NamespaceException { + // TODO: Do we need to get entitiesOnPath? final List entitiesOnPath = getEntitiesOnPath(root); final NameSpaceContainer rootContainer = lastElement(entitiesOnPath); if (rootContainer == null) { @@ -1369,14 +1400,15 @@ public List list(NamespaceKey root) throws NamespaceExceptio if (!isListable(rootContainer.getType())) { throw new NamespaceNotFoundException(root, "no listable entity found"); } - return doList(root, entitiesOnPath); + return doList(root); } - protected List doList(NamespaceKey root, List entitiesOnPath) throws NamespaceException { + protected List doList(NamespaceKey root) throws NamespaceException { return listEntity(root); } @Override + @WithSpan public List getCounts(SearchQuery... 
queries) throws NamespaceException { final List counts = namespace.getCounts(queries); if (counts.size() != queries.length) { @@ -1408,7 +1440,7 @@ private void traverseAndDeleteChildren(final NamespaceInternalKey key, final Nam protected void doTraverseAndDeleteChildren(final NameSpaceContainer child, DeleteCallback callback) throws NamespaceException { final NamespaceInternalKey childKey = - new NamespaceInternalKey(new NamespaceKey(child.getFullPathList()), keyNormalization); + new NamespaceInternalKey(new NamespaceKey(child.getFullPathList())); traverseAndDeleteChildren(childKey, child, callback); switch (child.getType()) { @@ -1421,6 +1453,9 @@ protected void doTraverseAndDeleteChildren(final NameSpaceContainer child, Delet } namespace.delete(childKey.getKey(), child.getDataset().getTag()); break; + case FUNCTION: + namespace.delete(childKey.getKey(), child.getFunction().getTag()); + break; default: // Only leaf level or intermediate namespace container types are expected here. throw new RuntimeException("Unexpected namespace container type: " + child.getType()); @@ -1438,8 +1473,9 @@ NameSpaceContainer deleteEntityWithCallback(final NamespaceKey path, final Type return doDeleteEntity(path, type, version, entitiesOnPath, deleteRoot, callback); } + @WithSpan protected NameSpaceContainer doDeleteEntity(final NamespaceKey path, final Type type, String version, List entitiesOnPath, boolean deleteRoot, DeleteCallback callback) throws NamespaceException { - final NamespaceInternalKey key = new NamespaceInternalKey(path, keyNormalization); + final NamespaceInternalKey key = new NamespaceInternalKey(path); final NameSpaceContainer container = lastElement(entitiesOnPath); traverseAndDeleteChildren(key, container, callback); if (deleteRoot) { @@ -1481,10 +1517,11 @@ public void deleteFunction(NamespaceKey udfPath) throws NamespaceException { @Override public void deleteEntity(NamespaceKey entityPath) throws NamespaceException { - namespace.delete(new NamespaceInternalKey(entityPath, keyNormalization).getKey()); + namespace.delete(new NamespaceInternalKey(entityPath).getKey()); } @Override + @WithSpan public void deleteDataset(final NamespaceKey datasetPath, String version, final NamespaceAttribute... 
attributes) throws NamespaceException { NameSpaceContainer container = deleteEntityWithCallback(datasetPath, DATASET, version, true, null); if (container.getDataset().getType() == PHYSICAL_DATASET_SOURCE_FOLDER) { @@ -1499,6 +1536,7 @@ public void deleteDataset(final NamespaceKey datasetPath, String version, final } @Override + @WithSpan public void deleteFolder(final NamespaceKey folderPath, String version) throws NamespaceException { deleteEntityWithCallback(folderPath, FOLDER, version, true, null); } @@ -1510,7 +1548,7 @@ public DatasetConfig renameDataset(NamespaceKey oldDatasetPath, NamespaceKey new protected DatasetConfig doRenameDataset(NamespaceKey oldDatasetPath, NamespaceKey newDatasetPath) throws NamespaceException { final String newDatasetName = newDatasetPath.getName(); - final NamespaceInternalKey oldKey = new NamespaceInternalKey(oldDatasetPath, keyNormalization); + final NamespaceInternalKey oldKey = new NamespaceInternalKey(oldDatasetPath); final NameSpaceContainer container = getEntity(oldDatasetPath, DATASET); final DatasetConfig datasetConfig = container.getDataset(); @@ -1535,7 +1573,7 @@ protected DatasetConfig doRenameDataset(NamespaceKey oldDatasetPath, NamespaceKe datasetConfig.setVersion(null); datasetConfig.setTag(null); - final NamespaceEntity newValue = NamespaceEntity.toEntity(DATASET, newDatasetPath, datasetConfig, keyNormalization, + final NamespaceEntity newValue = NamespaceEntity.toEntity(DATASET, newDatasetPath, datasetConfig, container.getAttributesList()); namespace.put(newValue.getPathKey().getKey(), newValue.getContainer()); @@ -1552,7 +1590,7 @@ private boolean createSourceFolders(NamespaceKey datasetPath) throws NamespaceEx for (int i = 1; i < components.size() - 1; i++) { List fullPathList = components.subList(0, i + 1); NamespaceKey key = new NamespaceKey(fullPathList); - final NamespaceInternalKey keyInternal = new NamespaceInternalKey(key, keyNormalization); + final NamespaceInternalKey keyInternal = new NamespaceInternalKey(key); NameSpaceContainer folderContainer = namespace.get(keyInternal.getKey()); if (folderContainer == null) { @@ -1581,7 +1619,7 @@ private boolean createSourceFolders(NamespaceKey datasetPath) throws NamespaceEx if (folderContainer.getDataset().getType() == PHYSICAL_DATASET_SOURCE_FOLDER) { continue; } - // Fall-through + // fall through default: return false; } @@ -1591,10 +1629,11 @@ private boolean createSourceFolders(NamespaceKey datasetPath) throws NamespaceEx } @Override + @WithSpan public boolean tryCreatePhysicalDataset(NamespaceKey datasetPath, DatasetConfig datasetConfig, NamespaceAttribute... 
attributes) throws NamespaceException { if (createSourceFolders(datasetPath)) { datasetConfig.setSchemaVersion(DatasetHelper.CURRENT_VERSION); - final NamespaceInternalKey searchKey = new NamespaceInternalKey(datasetPath, keyNormalization); + final NamespaceInternalKey searchKey = new NamespaceInternalKey(datasetPath); NameSpaceContainer existingContainer = namespace.get(searchKey.getKey()); return doTryCreatePhysicalDataset(datasetPath, datasetConfig, searchKey, existingContainer, attributes); } @@ -1787,29 +1826,14 @@ public String dump() { * @param entityPath * @return */ - public List getEntitiesOnPath(NamespaceKey entityPath) throws NamespaceNotFoundException { - - final List keys = Lists.newArrayListWithExpectedSize(entityPath.getPathComponents().size()); + protected List getEntitiesOnPath(NamespaceKey entityPath) throws NamespaceNotFoundException { + List entitiesOnPath = getEntitiesOnPathWithoutValidation(entityPath); - NamespaceKey currentPath = entityPath; - for (int i = 0; i < entityPath.getPathComponents().size(); i++) { - keys.add(new NamespaceInternalKey(currentPath, keyNormalization).getKey()); - - if (currentPath.hasParent()) { - currentPath = currentPath.getParent(); - } - } - - // reverse the keys so that the order of keys is from root to leaf level entity. - Collections.reverse(keys); - - final List entitiesOnPath = namespace.get(keys); for (int i = 0; i < entitiesOnPath.size() - 1; i++) { if (entitiesOnPath.get(i) == null) { throw new NamespaceNotFoundException(entityPath, "one or more elements on the path are not found in namespace"); } } - return entitiesOnPath; } @@ -1825,7 +1849,7 @@ protected List getEntitiesOnPathWithoutValidation(NamespaceK NamespaceKey currentPath = entityPath; for (int i = 0; i < entityPath.getPathComponents().size(); i++) { - keys.add(new NamespaceInternalKey(currentPath, keyNormalization).getKey()); + keys.add(new NamespaceInternalKey(currentPath).getKey()); if (currentPath.hasParent()) { currentPath = currentPath.getParent(); @@ -1835,9 +1859,7 @@ protected List getEntitiesOnPathWithoutValidation(NamespaceK // reverse the keys so that the order of keys is from root to leaf level entity. 
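// (Callers depend on that ordering: index 0 of the result is the root container and lastElement(...) is the
// requested entity, e.g. [src, src.folder, src.folder.ds] for the path src.folder.ds; getEntitiesOnPath,
// above, additionally throws if any element before the leaf is missing.)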
Collections.reverse(keys); - final List entitiesOnPath = namespace.get(keys); - - return entitiesOnPath; + return namespace.get(keys); } /** diff --git a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceUtils.java b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceUtils.java index f9b66b1908..1bc5520fb4 100644 --- a/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceUtils.java +++ b/services/namespace/src/main/java/com/dremio/service/namespace/NamespaceUtils.java @@ -31,7 +31,6 @@ import com.dremio.service.namespace.dataset.proto.PhysicalDataset; import com.dremio.service.namespace.proto.EntityId; import com.dremio.service.namespace.proto.NameSpaceContainer; -import com.dremio.service.namespace.proto.NameSpaceContainer.Type; import com.google.common.base.Preconditions; /** @@ -49,14 +48,10 @@ public static boolean isPhysicalDataset(DatasetType datasetType) { || datasetType == DatasetType.PHYSICAL_DATASET_SOURCE_FOLDER); } - static boolean isRootEntity(Type type) { - return type == HOME || type == SOURCE || type == SPACE; - } - /** * helper method that returns the id of the entity in given container */ - public static String getId(NameSpaceContainer container) { + public static String getIdOrNull(NameSpaceContainer container) { EntityId entityId; switch (container.getType()) { case SOURCE: diff --git a/services/namespace/src/main/proto/dataset-common.proto b/services/namespace/src/main/proto/dataset-common.proto index c00633cfcd..a3a9acf8a5 100644 --- a/services/namespace/src/main/proto/dataset-common.proto +++ b/services/namespace/src/main/proto/dataset-common.proto @@ -117,6 +117,8 @@ message IcebergMetadata { optional bytes partition_specs_json_map = 8; optional ScanStats deleteStats = 9; optional ScanStats deleteManifestStats = 10; + optional ScanStats equalityDeleteStats = 11; + optional int64 partition_stats_file_size = 12; // size of partition stats file } message UserDefinedSchemaSettings { diff --git a/services/namespace/src/main/proto/function.proto b/services/namespace/src/main/proto/function.proto index 1d7dc5229f..b481b19bb6 100644 --- a/services/namespace/src/main/proto/function.proto +++ b/services/namespace/src/main/proto/function.proto @@ -24,6 +24,7 @@ option java_outer_classname = "UdfProtobuf"; message FunctionBody { optional string raw_body = 1; + optional bytes serialized_plan = 2; } message ReturnType { @@ -33,6 +34,7 @@ message ReturnType { message FunctionArg { optional string name = 1; optional bytes raw_data_type = 2; + optional string default_expression = 3; } message FunctionDefinition { diff --git a/services/namespace/src/test/java/com/dremio/service/namespace/AbstractTestNamespaceService.java b/services/namespace/src/test/java/com/dremio/service/namespace/AbstractTestNamespaceService.java index edf176ae79..c5a6d67d7d 100644 --- a/services/namespace/src/test/java/com/dremio/service/namespace/AbstractTestNamespaceService.java +++ b/services/namespace/src/test/java/com/dremio/service/namespace/AbstractTestNamespaceService.java @@ -561,69 +561,6 @@ public void testRename() throws Exception { NamespaceTestUtils.addDS(namespaceService, "L.F.ds"); Map items; - /** - Map items = listFolder(ns, "s.a"); - //System.out.println("s.a--->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s.a.c")); - assertTrue(items.containsKey("s.a.b")); - assertTrue(items.containsKey("s.a.ds1")); - - ns.renameFolder(new NamespaceKey(PathUtils.parseFullPath("s.a.c")), new 
NamespaceKey(PathUtils.parseFullPath("s.a.c1"))); - items = listFolder(ns, "s.a.c1"); - assertEquals(2, items.size()); - assertTrue(items.keySet().toString(), items.containsKey("s.a.c1.ds3")); - assertTrue(items.keySet().toString(), items.containsKey("s.a.c1.file1")); - - items = listFolder(ns, "s.a"); - //System.out.println("s.a--->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s.a.c1")); - assertFalse(items.containsKey("s.a.c")); - assertTrue(items.containsKey("s.a.b")); - assertTrue(items.containsKey("s.a.ds1")); - - ns.renameFolder(new NamespaceKey(PathUtils.parseFullPath("s.a")), new NamespaceKey(PathUtils.parseFullPath("s.a1"))); - items = listFolder(ns, "s"); - //System.out.println("s--->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s.a1")); - assertTrue(items.containsKey("s.b")); - assertTrue(items.containsKey("s.c")); - - items = listFolder(ns, "s.a1"); - //System.out.println("s-->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s.a1.c1")); - assertFalse(items.containsKey("s.a1.c")); - assertTrue(items.containsKey("s.a1.b")); - assertTrue(items.containsKey("s.a1.ds1")); - - - ns.renameSpace(new NamespaceKey("s"), new NamespaceKey("s1")); - items = listFolder(ns, "s1"); - //System.out.println("s1--->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s1.a1")); - assertTrue(items.containsKey("s1.b")); - assertTrue(items.containsKey("s1.c")); - - items = listFolder(ns, "s1.a1"); - //System.out.println("s1-->" + items.keySet()); - assertEquals(3, items.size()); - assertTrue(items.containsKey("s1.a1.c1")); - assertFalse(items.containsKey("s1.a1.c")); - assertTrue(items.containsKey("s1.a1.b")); - assertTrue(items.containsKey("s1.a1.ds1")); - - items = listFolder(ns, "s1.c.c.c.c"); - assertEquals(1, items.size()); - assertTrue(items.containsKey("s1.c.c.c.c.ds4")); - - items = listFolder(ns, "s1.b"); - assertEquals(1, items.size()); - assertTrue(items.containsKey("s1.b.ds2")); - */ final NamespaceKey namespaceKey = new NamespaceKey(PathUtils.parseFullPath("s.b.ds2")); @@ -676,7 +613,7 @@ public void insertingDifferentEntityTypesAtSamePath() throws Exception { try { NamespaceTestUtils.addSource(namespaceService, "a"); } catch(UserException ex) { - assertTrue(ex.getMessage().contains("There already exists an entity of type [SPACE] at given path [a]")); + assertTrue(ex.getMessage().contains("The current location already contains a space named \"a\". Please use a unique name for the new source.")); } NamespaceTestUtils.addFolder(namespaceService, "a.foo"); @@ -686,7 +623,7 @@ public void insertingDifferentEntityTypesAtSamePath() throws Exception { NamespaceTestUtils.addDS(namespaceService, "a.foo"); fail("Expected the above call to fail"); } catch (UserException ex) { - assertTrue(ex.getMessage().contains("There already exists an entity of type [FOLDER] at given path [a.foo]")); + assertTrue(ex.getMessage().contains("The current location already contains a folder named \"foo\". Please use a unique name for the new dataset.")); } // Try to add folder with path "a.foo". 
There already a folder at "a.foo" @@ -694,7 +631,7 @@ public void insertingDifferentEntityTypesAtSamePath() throws Exception { NamespaceTestUtils.addFolder(namespaceService, "a.foo"); fail("Expected the above call to fail"); } catch (UserException ex) { - assertTrue(ex.getMessage().contains("There already exists an entity of type [FOLDER] at given path [a.foo]")); + assertTrue(ex.getMessage().contains("The current location already contains a folder named \"foo\". Please use a unique name for the new folder.")); } } diff --git a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeyCompatibility.java b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeyCompatibility.java index 7ce6a1018f..db7bbef6df 100644 --- a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeyCompatibility.java +++ b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeyCompatibility.java @@ -36,37 +36,31 @@ public class TestNamespaceInternalKeyCompatibility { @Parameterized.Parameters public static Collection input() { return Arrays.asList(new Object[][]{ - {"a.b.c", true}, - {"a.b.c.d", true}, - {"a", true}, - {"a.b", true}, - {"a1.b.c", true}, - {"a.a.a.a", true}, - {"0", true}, - {"0a.b.3", true}, - {"1.2.3", true}, - {"0.0.0.0", true}, - {"Aa.bB.cC.Dd", true}, - {"0.0.0.0", false}, - {"Aa.bB.cC.Dd", false}, - {"A.A.A.A", false}, - {"1A.2b.3C.4d", false}}); + {"a.b.c"}, + {"a.b.c.d"}, + {"a"}, + {"a.b"}, + {"a1.b.c"}, + {"a.a.a.a"}, + {"0"}, + {"0a.b.3"}, + {"1.2.3"}, + {"0.0.0.0"}, + {"Aa.bB.cC.Dd"}}); } private String path; - private boolean normalized; - public TestNamespaceInternalKeyCompatibility(String path, boolean normalized) { + public TestNamespaceInternalKeyCompatibility(String path) { this.path = path; - this.normalized = normalized; } private NamespaceInternalKey newKey(String path) { - return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalized); + return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } private LegacyNamespaceInternalKey newLegacyKey(String path) { - return new LegacyNamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalized); + return new LegacyNamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } private void verifyRangeEndKey(byte[] expected, byte[] actual) { diff --git a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeySortOrder.java b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeySortOrder.java index 0bc35b6139..122db0ce64 100644 --- a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeySortOrder.java +++ b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalKeySortOrder.java @@ -92,22 +92,17 @@ public class TestNamespaceInternalKeySortOrder { @Parameterized.Parameters public static Collection input() { return Arrays.asList(new Object[][]{ - {PATHS, true}, - {NUMBERED_PATHS, true}, - {MIXED_CASING_PATHS, true}, - {PATHS, false}, - {NUMBERED_PATHS, false}, - {MIXED_CASING_PATHS, false} + {PATHS}, + {NUMBERED_PATHS}, + {MIXED_CASING_PATHS} }); } private final List input; - private final boolean normalize; private final List> expected; - public TestNamespaceInternalKeySortOrder(List input, boolean normalize) { + public TestNamespaceInternalKeySortOrder(List input) { this.input = input; - this.normalize = normalize; 
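// NOTE: The parameterized test classes in this region drop the boolean "normalize" flag,
// leaving single-argument JUnit 4 parameterized tests. For reference, a minimal,
// self-contained sketch of the pattern (class name and data are illustrative, not from
// this codebase):

import static org.junit.Assert.assertEquals;

import java.util.Arrays;
import java.util.Collection;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class PathCasingTest {
  // The runner instantiates the class once per Object[] row,
  // passing each row to the constructor.
  @Parameterized.Parameters
  public static Collection<Object[]> input() {
    return Arrays.asList(new Object[][]{{"a.b.c"}, {"Aa.bB.cC.Dd"}, {"0.0.0.0"}});
  }

  private final String path;

  public PathCasingTest(String path) {
    this.path = path;
  }

  @Test
  public void lowercasingIsIdempotent() {
    String once = path.toLowerCase();
    assertEquals(once, once.toLowerCase());
  }
}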
this.expected = generateExpectedResults(); } @@ -201,7 +196,7 @@ private List> generateExpectedResults() * @return NamespaceInternalKey. */ private NamespaceInternalKey newKey(String path) { - return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalize); + return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } /** @@ -211,7 +206,7 @@ private NamespaceInternalKey newKey(String path) { * @return LegacyNamespaceInternalKey. */ private LegacyNamespaceInternalKey newLegacyKey(String path) { - return new LegacyNamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalize); + return new LegacyNamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } /** diff --git a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalStringKey.java b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalStringKey.java index 95552b9f4d..487aac55a2 100644 --- a/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalStringKey.java +++ b/services/namespace/src/test/java/com/dremio/service/namespace/TestNamespaceInternalStringKey.java @@ -74,47 +74,40 @@ public static class TestStringKeys { @Parameterized.Parameters public static Collection input() { return Arrays.asList(new Object[][]{ - //inputPath, expectedKey, expectedRangeStartKey, expectedRangeEndKey, normalize - {"a.b.c", "2.a.1.b.0.c", "3.a.2.b.1.c.0.", "3.a.2.b.1.c.0.", true}, - {"a.b.c.d", "3.a.2.b.1.c.0.d", "4.a.3.b.2.c.1.d.0.", "4.a.3.b.2.c.1.d.0.", true}, - {"a", "0.a", "1.a.0.", "1.a.0.", true}, - {"a.b", "1.a.0.b", "2.a.1.b.0.", "2.a.1.b.0.", true}, - {"a1.b.c", "2.a1.1.b.0.c", "3.a1.2.b.1.c.0.", "3.a1.2.b.1.c.0.", true}, - {"a.a.a.a", "3.a.2.a.1.a.0.a", "4.a.3.a.2.a.1.a.0.", "4.a.3.a.2.a.1.a.0.", true}, - {"0", "0.0", "1.0.0.", "1.0.0.", true}, - {"0a.b.3", "2.0a.1.b.0.3", "3.0a.2.b.1.3.0.", "3.0a.2.b.1.3.0.", true}, - {"1.2.3", "2.1.1.2.0.3", "3.1.2.2.1.3.0.", "3.1.2.2.1.3.0.", true}, - {"0.0.0.0", "3.0.2.0.1.0.0.0", "4.0.3.0.2.0.1.0.0.", "4.0.3.0.2.0.1.0.0.", true}, - {"Aa.bB.cC.Dd", "3.aa.2.bb.1.cc.0.dd", "4.aa.3.bb.2.cc.1.dd.0.", "4.aa.3.bb.2.cc.1.dd.0.", true}, - {"1A.2b.3C.4d", "3.1a.2.2b.1.3c.0.4d", "4.1a.3.2b.2.3c.1.4d.0.", "4.1a.3.2b.2.3c.1.4d.0.", true}, - {"0.0.0.0", "3.0.2.0.1.0.0.0", "4.0.3.0.2.0.1.0.0.", "4.0.3.0.2.0.1.0.0.", false}, - {"Aa.bB.cC.Dd", "3.Aa.2.bB.1.cC.0.Dd", "4.Aa.3.bB.2.cC.1.Dd.0.", "4.Aa.3.bB.2.cC.1.Dd.0.", false}, - {"1A.2b.3C.4d", "3.1A.2.2b.1.3C.0.4d", "4.1A.3.2b.2.3C.1.4d.0.", "4.1A.3.2b.2.3C.1.4d.0.", false}, - {"A.A.A.A", "3.A.2.A.1.A.0.A", "4.A.3.A.2.A.1.A.0.", "4.A.3.A.2.A.1.A.0.", false}}); + //inputPath, expectedKey, expectedRangeStartKey, expectedRangeEndKey + {"a.b.c", "2.a.1.b.0.c", "3.a.2.b.1.c.0.", "3.a.2.b.1.c.0."}, + {"a.b.c.d", "3.a.2.b.1.c.0.d", "4.a.3.b.2.c.1.d.0.", "4.a.3.b.2.c.1.d.0."}, + {"a", "0.a", "1.a.0.", "1.a.0."}, + {"a.b", "1.a.0.b", "2.a.1.b.0.", "2.a.1.b.0."}, + {"a1.b.c", "2.a1.1.b.0.c", "3.a1.2.b.1.c.0.", "3.a1.2.b.1.c.0."}, + {"a.a.a.a", "3.a.2.a.1.a.0.a", "4.a.3.a.2.a.1.a.0.", "4.a.3.a.2.a.1.a.0."}, + {"0", "0.0", "1.0.0.", "1.0.0."}, + {"0a.b.3", "2.0a.1.b.0.3", "3.0a.2.b.1.3.0.", "3.0a.2.b.1.3.0."}, + {"1.2.3", "2.1.1.2.0.3", "3.1.2.2.1.3.0.", "3.1.2.2.1.3.0."}, + {"0.0.0.0", "3.0.2.0.1.0.0.0", "4.0.3.0.2.0.1.0.0.", "4.0.3.0.2.0.1.0.0."}, + {"Aa.bB.cC.Dd", "3.aa.2.bb.1.cc.0.dd", "4.aa.3.bb.2.cc.1.dd.0.", "4.aa.3.bb.2.cc.1.dd.0."}, + {"1A.2b.3C.4d", "3.1a.2.2b.1.3c.0.4d", "4.1a.3.2b.2.3c.1.4d.0.", 
"4.1a.3.2b.2.3c.1.4d.0."}}); } private final String inputPath; private final String expectedKey; private final String expectedRangeStartKey; private final String expectedRangeEndKey; - private final boolean normalize; public TestStringKeys (String inputPath, String expectedKey, - String expectedRangeStartKey, String expectedRangeEndKey, - boolean normalize) { + String expectedRangeStartKey, String expectedRangeEndKey) { this.inputPath = inputPath; this.expectedKey = expectedKey; this.expectedRangeStartKey = expectedRangeStartKey; this.expectedRangeEndKey = expectedRangeEndKey; - this.normalize = normalize; } private NamespaceInternalKey newKey(String path) { - return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalize); + return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } - private NamespaceInternalKey parseKey(byte[] keyBytes, boolean normalize) { + private NamespaceInternalKey parseKey(byte[] keyBytes) { String path = extractKey(keyBytes, false); - return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path)), normalize); + return new NamespaceInternalKey(new NamespaceKey(PathUtils.parseFullPath(path))); } @Test @@ -138,38 +131,38 @@ public void testNamespaceInternalRangeEndKeys() { @Test public void testParsedKeyPath() { final NamespaceInternalKey key = newKey(inputPath); - final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8), normalize); - final NamespaceKey expectedPath = (normalize)? key.getPath().asLowerCase() : key.getPath(); + final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8)); + final NamespaceKey expectedPath = key.getPath().asLowerCase(); assertThat(parsedKey.getPath()).isEqualTo(expectedPath); } @Test public void testParsedKey() { final NamespaceInternalKey key = newKey(inputPath); - final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8), normalize); + final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8)); assertThat(parsedKey.getKey()).isEqualTo(key.getKey()); } @Test public void testParsedRangeStartKey() { final NamespaceInternalKey key = newKey(inputPath); - final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8), normalize); + final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8)); assertThat(parsedKey.getRangeStartKey()).isEqualTo(key.getRangeStartKey()); } @Test public void testParsedRangeEndKey() { final NamespaceInternalKey key = newKey(inputPath); - final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8), normalize); + final NamespaceInternalKey parsedKey = parseKey(key.getKey().getBytes(StandardCharsets.UTF_8)); assertThat(parsedKey.getRangeEndKey()).isEqualTo(key.getRangeEndKey()); } @Test public void testPathProcessing() { - final String paths = (normalize)? 
inputPath.toLowerCase() : inputPath; + final String paths = inputPath.toLowerCase(); final List expectedPaths = Arrays.asList(paths.split("["+ NamespaceInternalKey.PATH_DELIMITER +"]")); final List actualPaths = NamespaceInternalKey.processPathComponents( - new NamespaceKey(PathUtils.parseFullPath(inputPath)), normalize); + new NamespaceKey(PathUtils.parseFullPath(inputPath))); assertThat(actualPaths).isEqualTo(expectedPaths); } } diff --git a/services/nessie-grpc/client/pom.xml b/services/nessie-grpc/client/pom.xml index 72e9023753..ebb2e436fe 100644 --- a/services/nessie-grpc/client/pom.xml +++ b/services/nessie-grpc/client/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-nessie-grpc - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-nessie-grpc-client @@ -36,11 +36,11 @@ true - org.projectnessie + org.projectnessie.nessie nessie-client - org.projectnessie + org.projectnessie.nessie nessie-model @@ -48,6 +48,10 @@ dremio-services-nessie-grpc-common ${project.version} + + org.slf4j + slf4j-api + io.grpc grpc-testing @@ -61,10 +65,5 @@ com.google.guava guava - - org.junit.jupiter - junit-jupiter-api - test - diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcClientBuilder.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcClientBuilder.java index f6ec2fb10c..9f62cf8d6b 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcClientBuilder.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcClientBuilder.java @@ -24,10 +24,10 @@ import org.projectnessie.client.NessieClientBuilder; import org.projectnessie.client.api.NessieApi; -import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; import org.projectnessie.client.auth.NessieAuthentication; -import com.dremio.services.nessie.grpc.client.v1api.GrpcApiV1Impl; +import com.dremio.services.nessie.grpc.client.impl.GrpcApiImpl; import com.google.common.base.Preconditions; import io.grpc.ClientInterceptor; @@ -130,10 +130,10 @@ public API build(Class apiVersion) { Objects.requireNonNull(apiVersion, "API version class must be non-null"); Preconditions.checkArgument(channel != null, "Channel must be configured"); - if (apiVersion.isAssignableFrom(NessieApiV1.class)) { + if (apiVersion.isAssignableFrom(NessieApiV2.class)) { Preconditions.checkArgument(apiVersion.isInterface(), "must not use a concrete class for the apiVersion parameter"); - return (API) new GrpcApiV1Impl(channel, shutdownChannel, clientInterceptors.toArray(new ClientInterceptor[0])); + return (API) new GrpcApiImpl(channel, shutdownChannel, clientInterceptors.toArray(new ClientInterceptor[0])); } throw new IllegalArgumentException( diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcExceptionMapper.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcExceptionMapper.java index 4bda06120b..4faca23535 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcExceptionMapper.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/GrpcExceptionMapper.java @@ -15,10 +15,15 @@ */ package com.dremio.services.nessie.grpc.client; +import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_INVALID_SUBTYPE; +import static 
com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; + +import java.io.IOException; +import java.lang.reflect.UndeclaredThrowableException; import java.security.AccessControlException; +import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; -import java.util.function.Function; import java.util.stream.Collectors; import javax.validation.ConstraintViolation; @@ -31,6 +36,7 @@ import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieContentNotFoundException; import org.projectnessie.error.NessieError; +import org.projectnessie.error.NessieErrorDetails; import org.projectnessie.error.NessieNamespaceAlreadyExistsException; import org.projectnessie.error.NessieNamespaceNotEmptyException; import org.projectnessie.error.NessieNamespaceNotFoundException; @@ -40,6 +46,10 @@ import org.projectnessie.error.NessieReferenceConflictException; import org.projectnessie.error.NessieReferenceNotFoundException; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import io.grpc.Metadata; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; @@ -47,6 +57,20 @@ /** Maps gRPC exceptions to Nessie-specific exceptions and the other way around. */ public final class GrpcExceptionMapper { + private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GrpcExceptionMapper.class); + + /** + * Object mapper that ignores unknown properties and unknown subtypes, so it is able to process + * instances of {@link NessieError} and especially {@link + * org.projectnessie.error.NessieErrorDetails} with added/unknown properties or unknown subtypes + * of the latter. + */ + private static final ObjectMapper MAPPER = + new ObjectMapper().disable(FAIL_ON_UNKNOWN_PROPERTIES).disable(FAIL_ON_INVALID_SUBTYPE); + + private static final Metadata.Key<byte[]> NESSIE_ERROR_KEY = Metadata.Key.of("nessie-error-bin", + Metadata.BINARY_BYTE_MARSHALLER); + private GrpcExceptionMapper() { } @@ -60,27 +84,20 @@ private GrpcExceptionMapper() { */ public static StatusRuntimeException toProto(Exception ex) { if (ex instanceof BaseNessieClientServerException) { - return statusRuntimeExFromNessieEx(ex); + return statusRuntimeExFromNessieEx((BaseNessieClientServerException) ex); } if (ex.getCause() instanceof BaseNessieClientServerException) { - StatusRuntimeException exception = statusRuntimeExFromNessieEx(ex.getCause()); - return exception; + return statusRuntimeExFromNessieEx((BaseNessieClientServerException) ex.getCause()); } if (ex instanceof IllegalArgumentException) { - return Status.INVALID_ARGUMENT - .withDescription(ex.getMessage()) - .withCause(ex) - .asRuntimeException(); + return statusRuntimeExForBadRequest(ex.getMessage(), ex); } if (ex instanceof ConstraintViolationException) { ConstraintViolationException cve = (ConstraintViolationException) ex; Set<ConstraintViolation<?>> violations = cve.getConstraintViolations(); String message = violations.isEmpty() ?
ex.getMessage() : violations.stream().map(v -> v.getPropertyPath() + ": " + v.getMessage()).collect(Collectors.joining(", ")); - return Status.INVALID_ARGUMENT - .withDescription(message) - .withCause(ex) - .asRuntimeException(); + return statusRuntimeExForBadRequest(message, ex); } if (ex instanceof AccessControlException) { return Status.PERMISSION_DENIED @@ -88,11 +105,57 @@ public static StatusRuntimeException toProto(Exception ex) { .withCause(ex) .asRuntimeException(); } + logger.warn("Handling internal exception", ex); return Status.INTERNAL.withDescription(ex.getMessage()).withCause(ex).asRuntimeException(); } - private static StatusRuntimeException statusRuntimeExFromNessieEx(Throwable ex) { - ErrorCode errorCode = ((BaseNessieClientServerException) ex).getErrorCode(); + private static Metadata toProto(NessieError nessieError) { + Metadata metadata = new Metadata(); + try { + byte[] bytes = MAPPER.writerFor(NessieError.class).writeValueAsBytes(nessieError); + metadata.put(NESSIE_ERROR_KEY, bytes); + } catch (JsonProcessingException e) { + logger.error("Unable to serialize NessieError.", e); + } + return metadata; + } + + private static NessieError fromProto(Metadata metadata) { + if (metadata == null) { + return null; + } + + byte[] bytes = metadata.get(NESSIE_ERROR_KEY); + if (bytes == null) { + return null; + } + + try { + return MAPPER.readValue(bytes, NessieError.class); + } catch (IOException e) { + throw new IllegalStateException("Unable to deserialize NessieError: " + e, e); + } + } + + private static StatusRuntimeException statusRuntimeExForBadRequest(String message, Exception cause) { + Status status = Status.INVALID_ARGUMENT; + + Metadata trailers = toProto(ImmutableNessieError.builder() + .status(ErrorCode.BAD_REQUEST.httpStatus()) + .reason("Bad Request") + .message(message) + .errorCode(ErrorCode.BAD_REQUEST) + .build()); + + return status.withDescription(message) + .withCause(cause) + .asRuntimeException(trailers); + } + + private static StatusRuntimeException statusRuntimeExFromNessieEx(BaseNessieClientServerException ex) { + ErrorCode errorCode = ex.getErrorCode(); + NessieErrorDetails errorDetails = ex.getErrorDetails(); + Status status; if (ex instanceof NessieNotFoundException) { status = Status.NOT_FOUND; @@ -101,10 +164,19 @@ private static StatusRuntimeException statusRuntimeExFromNessieEx(Throwable ex) } else { status = Status.INVALID_ARGUMENT; } - return status.withDescription((errorCode != null ? 
errorCode : ErrorCode.UNKNOWN).name()) - .augmentDescription(ex.getMessage()) + + Metadata trailers = toProto(ImmutableNessieError.builder() + .status(errorCode.httpStatus()) + .reason(errorCode.name()) + .message(ex.getMessage()) + .errorCode(errorCode) + .errorDetails(errorDetails) + .build()); + + return status.withDescription(errorCode.name()) + .augmentDescription(ex.getMessage()) // keep old exception data for compatibility with old clients .withCause(ex) - .asRuntimeException(); + .asRuntimeException(trailers); } /** @@ -147,15 +219,10 @@ public static T handle(Callable callable) } catch (Exception e) { if (e instanceof StatusRuntimeException) { StatusRuntimeException sre = (StatusRuntimeException) e; - if (isNotFound(sre)) { - throw GrpcExceptionMapper.toNessieNotFoundException(sre); - } else if (isAlreadyExists(sre)) { - throw GrpcExceptionMapper.toNessieConflictException(sre); - } else if (isInvalidArgument(sre)) { - throw GrpcExceptionMapper.toNessieBadRequestException(sre); - } - throw sre; + throwDeclaredException(sre, NessieNotFoundException.class, NessieConflictException.class); + throw sre; // unreachable } + logger.warn("Handling unknown exception", e); throw Status.UNKNOWN.withCause(e).asRuntimeException(); } } @@ -180,79 +247,10 @@ public static T handleNessieNotFoundEx(Callable callable) throws NessieNo } catch (Exception e) { if (e instanceof StatusRuntimeException) { StatusRuntimeException sre = (StatusRuntimeException) e; - if (isNotFound(sre)) { - throw GrpcExceptionMapper.toNessieNotFoundException(sre); - } else if (isInvalidArgument(sre)) { - throw GrpcExceptionMapper.toNessieBadRequestException(sre); - } - throw sre; - } - throw Status.UNKNOWN.withCause(e).asRuntimeException(); - } - } - - /** - * Executes the given callable and performs additional exception handling/conversion. - * - * @param callable The callable to call - * @param The type of the callable. - * @return The result of the callable - * @throws NessieNamespaceNotFoundException If the callable threw a gRPC exception, where the status - * matches a {@link NessieNamespaceNotFoundException} - * @throws NessieReferenceNotFoundException If the callable threw a gRPC exception, where the status - * matches a {@link NessieReferenceNotFoundException} - * @throws StatusRuntimeException If the underlying exception couldn't be converted to the - * mentioned Nessie-specific exceptions, then a {@link StatusRuntimeException} with {@link - * Status#UNKNOWN} is thrown. - */ - public static T handleNamespaceRetrieval(Callable callable) - throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { - try { - return callable.call(); - } catch (Exception e) { - if (e instanceof StatusRuntimeException) { - StatusRuntimeException sre = (StatusRuntimeException) e; - if (isNamespaceNotFound(sre)) { - throw GrpcExceptionMapper.toNessieNamespaceNotFoundException(sre); - } - if (isReferenceNotFound(sre)) { - throw GrpcExceptionMapper.toNessieReferenceNotFoundException(sre); - } - throw sre; - } - throw Status.UNKNOWN.withCause(e).asRuntimeException(); - } - } - - /** - * Executes the given callable and performs additional exception handling/conversion. - * - * @param callable The callable to call - * @param The type of the callable. 
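// NOTE: The toProto(NessieError)/fromProto(Metadata) pair above transports the full
// NessieError as JSON inside a binary gRPC trailer, while the status description keeps the
// legacy errorCode/message encoding for older clients. A minimal round-trip sketch of the
// mechanism, assuming only grpc-java and jackson-databind; the ErrorPayload POJO below
// stands in for NessieError and is not part of either codebase:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

import io.grpc.Metadata;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;

public class TrailerRoundTrip {
  // Keys ending in "-bin" take the binary marshaller; values are base64-encoded on the wire.
  static final Metadata.Key<byte[]> ERROR_KEY =
      Metadata.Key.of("nessie-error-bin", Metadata.BINARY_BYTE_MARSHALLER);

  // Lenient mapper: unknown fields or subtypes sent by a newer peer are ignored, not fatal.
  static final ObjectMapper MAPPER = new ObjectMapper()
      .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
      .disable(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE);

  public static class ErrorPayload { // stand-in for NessieError
    public int status;
    public String message;
  }

  public static void main(String[] args) throws Exception {
    ErrorPayload payload = new ErrorPayload();
    payload.status = 404;
    payload.message = "reference not found";

    // Server side: attach the serialized payload as a trailer on the failed RPC.
    Metadata trailers = new Metadata();
    trailers.put(ERROR_KEY, MAPPER.writeValueAsBytes(payload));
    StatusRuntimeException sre =
        Status.NOT_FOUND.withDescription("REFERENCE_NOT_FOUND").asRuntimeException(trailers);

    // Client side: prefer the structured trailer; fall back to the description if absent.
    byte[] bytes = sre.getTrailers() == null ? null : sre.getTrailers().get(ERROR_KEY);
    if (bytes != null) {
      ErrorPayload decoded = MAPPER.readValue(bytes, ErrorPayload.class);
      System.out.println(decoded.status + ": " + decoded.message);
    }
  }
}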
- * @return The result of the callable - * @throws NessieNamespaceAlreadyExistsException If the callable threw a gRPC exception, where the status - * matches a {@link NessieNamespaceAlreadyExistsException} - * @throws NessieReferenceNotFoundException If the callable threw a gRPC exception, where the status - * matches a {@link NessieReferenceNotFoundException} - * @throws StatusRuntimeException If the underlying exception couldn't be converted to the - * mentioned Nessie-specific exceptions, then a {@link StatusRuntimeException} with {@link - * Status#UNKNOWN} is thrown. - */ - public static T handleNamespaceCreation(Callable callable) - throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException { - try { - return callable.call(); - } catch (Exception e) { - if (e instanceof StatusRuntimeException) { - StatusRuntimeException sre = (StatusRuntimeException) e; - if (isNamespaceAlreadyExists(sre)) { - throw GrpcExceptionMapper.toNessieNamespaceAlreadyExistsException(sre); - } - if (isReferenceNotFound(sre)) { - throw GrpcExceptionMapper.toNessieReferenceNotFoundException(sre); - } - throw sre; + throwDeclaredException(sre, NessieNotFoundException.class, RuntimeException.class); + throw sre; // unreachable } + logger.warn("Handling unknown exception", e); throw Status.UNKNOWN.withCause(e).asRuntimeException(); } } @@ -263,83 +261,89 @@ public static T handleNamespaceCreation(Callable callable) * @param callable The callable to call * @param The type of the callable. * @return The result of the callable - * @throws NessieNamespaceNotFoundException If the callable threw a gRPC exception, where the status - * matches a {@link NessieNamespaceNotFoundException} - * @throws NessieNamespaceNotEmptyException If the callable threw a gRPC exception, where the status - * matches a {@link NessieNamespaceNotEmptyException} - * @throws NessieReferenceNotFoundException If the callable threw a gRPC exception, where the status - * matches a {@link NessieReferenceNotFoundException} + * @throws NessieBadRequestException If the callable threw a gRPC exception, where the status + * matches a {@link NessieBadRequestException} * @throws StatusRuntimeException If the underlying exception couldn't be converted to the * mentioned Nessie-specific exceptions, then a {@link StatusRuntimeException} with {@link * Status#UNKNOWN} is thrown. 
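// NOTE: The rewritten handle*() methods above funnel every StatusRuntimeException through a
// single generic helper (defined in the next hunk) that rethrows the mapped exception only
// when it matches one of the checked types declared by the caller. Its shape, reduced to a
// self-contained plain-Java sketch:

import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;

public class DeclaredRethrow {
  // Rethrow ex if it is one of the two declared checked types; otherwise let
  // RuntimeExceptions through unchanged and wrap anything else.
  static <E1 extends Exception, E2 extends Exception> void rethrow(
      Exception ex, Class<E1> scope1, Class<E2> scope2) throws E1, E2 {
    if (scope1.isInstance(ex)) {
      throw scope1.cast(ex);
    }
    if (scope2.isInstance(ex)) {
      throw scope2.cast(ex);
    }
    if (ex instanceof RuntimeException) {
      throw (RuntimeException) ex;
    }
    throw new UndeclaredThrowableException(ex);
  }

  public static void main(String[] args) {
    try {
      rethrow(new IOException("boom"), IOException.class, InterruptedException.class);
    } catch (IOException e) {
      System.out.println("declared: " + e.getMessage()); // this branch is taken
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}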
*/ - public static <T> T handleNamespaceDeletion(Callable<T> callable) - throws NessieNamespaceNotFoundException, NessieNamespaceNotEmptyException, NessieReferenceNotFoundException { + public static <T> T handleNessieRuntimeEx(Callable<T> callable) { try { return callable.call(); } catch (Exception e) { if (e instanceof StatusRuntimeException) { StatusRuntimeException sre = (StatusRuntimeException) e; - if (isNamespaceNotFound(sre)) { - throw GrpcExceptionMapper.toNessieNamespaceNotFoundException(sre); - } - if (isNamespaceNotEmpty(sre)) { - throw GrpcExceptionMapper.toNessieNamespaceNotEmptyException(sre); - } - if (isReferenceNotFound(sre)) { - throw GrpcExceptionMapper.toNessieReferenceNotFoundException(sre); - } + throwDeclaredException(sre, RuntimeException.class, RuntimeException.class); throw sre; } + logger.warn("Handling unknown exception", e); throw Status.UNKNOWN.withCause(e).asRuntimeException(); } } - private static boolean isInvalidArgument(StatusRuntimeException sre) { - return Status.INVALID_ARGUMENT.getCode() == sre.getStatus().getCode(); - } + private static <E1 extends Exception, E2 extends Exception> void throwDeclaredException( + StatusRuntimeException e, Class<E1> scope1, Class<E2> scope2) throws E1, E2 { - private static boolean isNotFound(StatusRuntimeException sre) { - return Status.NOT_FOUND.getCode() == sre.getStatus().getCode(); - } + Exception ex = toNessieException(e); - private static boolean isAlreadyExists(StatusRuntimeException sre) { - return Status.ALREADY_EXISTS.getCode() == sre.getStatus().getCode(); - } + if (scope1.isInstance(ex)) { + throw scope1.cast(ex); + } - private static boolean isNamespaceNotFound(StatusRuntimeException sre) { - // cause might not be set, so we need to examine the description - return isNotFound(sre) && null != sre.getStatus().getDescription() && - sre.getStatus().getDescription().contains(ErrorCode.NAMESPACE_NOT_FOUND.name()); - } + if (scope2.isInstance(ex)) { + throw scope2.cast(ex); + } - private static boolean isNamespaceAlreadyExists(StatusRuntimeException sre) { - // cause might not be set, so we need to examine the description - return isAlreadyExists(sre) && null != sre.getStatus().getDescription() && - sre.getStatus().getDescription().contains(ErrorCode.NAMESPACE_ALREADY_EXISTS.name()); - } + if (ex instanceof RuntimeException) { + throw (RuntimeException) ex; + } - private static boolean isNamespaceNotEmpty(StatusRuntimeException sre) { - // cause might not be set, so we need to examine the description - return isAlreadyExists(sre) && null != sre.getStatus().getDescription() && - sre.getStatus().getDescription().contains(ErrorCode.NAMESPACE_NOT_EMPTY.name()); + throw new UndeclaredThrowableException(ex, "Undeclared exception: " + ex.getClass().getName() + ":" + ex + + " (allowed types: " + scope1.getName() + ", " + scope2.getName() + ")"); } - private static boolean isReferenceNotFound(StatusRuntimeException sre) { - // cause might not be set, so we need to examine the description - return isNotFound(sre) && null != sre.getStatus().getDescription() && - sre.getStatus().getDescription().contains(ErrorCode.REFERENCE_NOT_FOUND.name()); - } + private static Exception toNessieException(StatusRuntimeException e) { + NessieError error = fromProto(e.getTrailers()); + if (error != null) { + Optional<Exception> modelException = ErrorCode.asException(error); + if (modelException.isPresent()) { + return modelException.get(); + } + } - private static BaseNessieClientServerException toNessieException(StatusRuntimeException e, ImmutableNessieError.Builder nessieError, Function<NessieError, BaseNessieClientServerException> fallback) { + // Fallback for
older clients + ImmutableNessieError.Builder nessieError = ImmutableNessieError.builder(); String msg = e.getStatus().getDescription(); if (msg != null) { int i = msg.indexOf('\n'); try { - ErrorCode errorCode = ErrorCode.valueOf(i == -1 ? msg : msg.substring(0, i)); + ErrorCode errorCode; + if (i < 0) { + errorCode = ErrorCode.UNKNOWN; + } else { + errorCode = ErrorCode.valueOf(msg.substring(0, i)); + } + + String message = msg.substring(i + 1); nessieError.errorCode(errorCode) .reason(errorCode.name()) - .message(msg.substring(i + 1)); + .message(message); + + switch (e.getStatus().getCode()) { + case NOT_FOUND: + nessieError.status(404); + break; + case ALREADY_EXISTS: + nessieError.status(409); + break; + case INVALID_ARGUMENT: + nessieError.status(400); + break; + default: + nessieError.status(500); + break; + } + switch (errorCode) { case REFERENCE_ALREADY_EXISTS: return new NessieReferenceAlreadyExistsException(nessieError.build()); @@ -351,12 +355,28 @@ private static BaseNessieClientServerException toNessieException(StatusRuntimeEx return new NessieReferenceNotFoundException(nessieError.build()); case REFLOG_NOT_FOUND: return new NessieRefLogNotFoundException(nessieError.build()); + case BAD_REQUEST: + return new NessieBadRequestException(nessieError.build()); case NAMESPACE_ALREADY_EXISTS: return new NessieNamespaceAlreadyExistsException(nessieError.build()); case NAMESPACE_NOT_EMPTY: return new NessieNamespaceNotEmptyException(nessieError.build()); case NAMESPACE_NOT_FOUND: return new NessieNamespaceNotFoundException(nessieError.build()); + case UNKNOWN: + // Generic exceptions without an error code. These exceptions are never thrown by modern Nessie Servers, + // but are handled here for the sake of completeness and compatibility with older servers. 
+ switch (e.getStatus().getCode()) { + case NOT_FOUND: + case ALREADY_EXISTS: + case INVALID_ARGUMENT: + return new NessieBadRequestException(nessieError.errorCode(ErrorCode.BAD_REQUEST) + .reason("Bad Request: " + e.getStatus().getCode().name()) + .build()); + default: + break; + } + break; default: break; // fall through } @@ -367,65 +387,6 @@ private static BaseNessieClientServerException toNessieException(StatusRuntimeEx nessieError.message(e.getMessage()); } - return fallback.apply(nessieError.build()); - } - - private static NessieNotFoundException toNessieNotFoundException(StatusRuntimeException e) { - return (NessieNotFoundException) toNessieException(e, ImmutableNessieError.builder() - .message("Not found") - .status(404), - NessieNotFoundException::new); - } - - private static NessieReferenceNotFoundException toNessieReferenceNotFoundException(StatusRuntimeException e) { - return (NessieReferenceNotFoundException) toNessieException(e, ImmutableNessieError.builder() - .message("Reference not found") - .status(404), - NessieReferenceNotFoundException::new); - } - - private static NessieConflictException toNessieConflictException(StatusRuntimeException e) { - return (NessieConflictException) toNessieException(e, ImmutableNessieError.builder() - .message("Conflict") - .status(409), - NessieConflictException::new); - } - - private static NessieBadRequestException toNessieBadRequestException(StatusRuntimeException e) { - String msg = e.getStatus().getDescription(); - if (msg == null) { - msg = e.getMessage(); - } - - ImmutableNessieError.Builder nessieError = ImmutableNessieError.builder() - .message(msg) - .status(400) - .errorCode(ErrorCode.BAD_REQUEST) - .reason("Bad Request"); - return new NessieBadRequestException(nessieError.build()); - } - - private static NessieNamespaceNotFoundException toNessieNamespaceNotFoundException(StatusRuntimeException e) { - return (NessieNamespaceNotFoundException) toNessieException(e, ImmutableNessieError.builder() - .message("Namespace not found") - .status(ErrorCode.NAMESPACE_NOT_FOUND.httpStatus()) - .errorCode(ErrorCode.NAMESPACE_NOT_FOUND), - NessieNamespaceNotFoundException::new); - } - - private static NessieNamespaceNotEmptyException toNessieNamespaceNotEmptyException(StatusRuntimeException e) { - return (NessieNamespaceNotEmptyException) toNessieException(e, ImmutableNessieError.builder() - .message("Namespace not empty") - .status(ErrorCode.NAMESPACE_NOT_EMPTY.httpStatus()) - .errorCode(ErrorCode.NAMESPACE_NOT_EMPTY), - NessieNamespaceNotEmptyException::new); - } - - private static NessieNamespaceAlreadyExistsException toNessieNamespaceAlreadyExistsException(StatusRuntimeException e) { - return (NessieNamespaceAlreadyExistsException) toNessieException(e, ImmutableNessieError.builder() - .message("Namespace already exists") - .status(ErrorCode.NAMESPACE_ALREADY_EXISTS.httpStatus()) - .errorCode(ErrorCode.NAMESPACE_ALREADY_EXISTS), - NessieNamespaceAlreadyExistsException::new); + return e; } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcApiV1Impl.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcApiImpl.java similarity index 87% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcApiV1Impl.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcApiImpl.java index 09b1e71d3a..598a9cf5d6 100644 --- 
a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcApiV1Impl.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcApiImpl.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; @@ -37,8 +37,14 @@ import org.projectnessie.client.api.GetReferenceBuilder; import org.projectnessie.client.api.MergeReferenceBuilder; import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; import org.projectnessie.client.api.TransplantCommitsBuilder; import org.projectnessie.client.api.UpdateNamespaceBuilder; +import org.projectnessie.client.util.v2api.ClientSideCreateNamespace; +import org.projectnessie.client.util.v2api.ClientSideDeleteNamespace; +import org.projectnessie.client.util.v2api.ClientSideGetMultipleNamespaces; +import org.projectnessie.client.util.v2api.ClientSideGetNamespace; +import org.projectnessie.client.util.v2api.ClientSideUpdateNamespace; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.Branch; import org.projectnessie.model.NessieConfiguration; @@ -50,8 +56,6 @@ import com.dremio.services.nessie.grpc.api.DiffServiceGrpc; import com.dremio.services.nessie.grpc.api.DiffServiceGrpc.DiffServiceBlockingStub; import com.dremio.services.nessie.grpc.api.Empty; -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc; -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; import com.dremio.services.nessie.grpc.api.RefLogServiceGrpc; import com.dremio.services.nessie.grpc.api.RefLogServiceGrpc.RefLogServiceBlockingStub; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc; @@ -63,7 +67,7 @@ /** * gRPC client implementation for {@link NessieApiV1}. */ -public class GrpcApiV1Impl implements NessieApiV1 { +public class GrpcApiImpl implements NessieApiV1, NessieApiV2 { private final ManagedChannel channel; private final boolean shutdownChannel; @@ -72,13 +76,12 @@ public class GrpcApiV1Impl implements NessieApiV1 { private final ContentServiceBlockingStub contentServiceBlockingStub; private final DiffServiceBlockingStub diffServiceBlockingStub; private final RefLogServiceBlockingStub refLogServiceBlockingStub; - private final NamespaceServiceBlockingStub namespaceServiceBlockingStub; - public GrpcApiV1Impl(ManagedChannel channel, boolean shutdownChannel) { + public GrpcApiImpl(ManagedChannel channel, boolean shutdownChannel) { this(channel, shutdownChannel, new ClientInterceptor[0]); } - public GrpcApiV1Impl(ManagedChannel channel, boolean shutdownChannel, ClientInterceptor... clientInterceptors) { + public GrpcApiImpl(ManagedChannel channel, boolean shutdownChannel, ClientInterceptor... 
clientInterceptors) { this.channel = channel; this.shutdownChannel = shutdownChannel; this.configServiceBlockingStub = ConfigServiceGrpc.newBlockingStub(channel).withInterceptors(clientInterceptors); @@ -86,7 +89,6 @@ public GrpcApiV1Impl(ManagedChannel channel, boolean shutdownChannel, ClientInte this.treeServiceBlockingStub = TreeServiceGrpc.newBlockingStub(channel).withInterceptors(clientInterceptors); this.diffServiceBlockingStub = DiffServiceGrpc.newBlockingStub(channel).withInterceptors(clientInterceptors); this.refLogServiceBlockingStub = RefLogServiceGrpc.newBlockingStub(channel).withInterceptors(clientInterceptors); - this.namespaceServiceBlockingStub = NamespaceServiceGrpc.newBlockingStub(channel).withInterceptors(clientInterceptors); } @Override @@ -187,26 +189,26 @@ public GetRefLogBuilder getRefLog() { @Override public GetNamespaceBuilder getNamespace() { - return new GrpcGetNamespace(namespaceServiceBlockingStub); + return new ClientSideGetNamespace(this); } @Override public GetMultipleNamespacesBuilder getMultipleNamespaces() { - return new GrpcGetMultipleNamespaces(namespaceServiceBlockingStub); + return new ClientSideGetMultipleNamespaces(this); } @Override public CreateNamespaceBuilder createNamespace() { - return new GrpcCreateNamespace(namespaceServiceBlockingStub); + return new ClientSideCreateNamespace(this); } @Override public DeleteNamespaceBuilder deleteNamespace() { - return new GrpcDeleteNamespace(namespaceServiceBlockingStub); + return new ClientSideDeleteNamespace(this); } @Override public UpdateNamespaceBuilder updateProperties() { - return new GrpcUpdateNamespace(namespaceServiceBlockingStub); + return new ClientSideUpdateNamespace(this); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignBranch.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignBranch.java similarity index 85% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignBranch.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignBranch.java index 9bf1d36732..fe2818fef9 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignBranch.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignBranch.java @@ -13,11 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import org.projectnessie.client.api.AssignBranchBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Branch; import org.projectnessie.model.Reference; import com.dremio.services.nessie.grpc.api.ReferenceType; @@ -49,6 +50,11 @@ public AssignBranchBuilder hash(String hash) { @Override public void assign() throws NessieNotFoundException, NessieConflictException { - super.assign(ReferenceType.BRANCH); + assignAndGet(); + } + + @Override + public Branch assignAndGet() throws NessieNotFoundException, NessieConflictException { + return (Branch) super.assign(ReferenceType.BRANCH); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignReference.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignReference.java similarity index 83% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignReference.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignReference.java index 87e302b170..bb5f21b6e0 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignReference.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignReference.java @@ -13,8 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; +import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProtoResponse; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; import org.projectnessie.error.NessieConflictException; @@ -26,6 +27,7 @@ import com.dremio.services.nessie.grpc.ProtoUtil; import com.dremio.services.nessie.grpc.api.AssignReferenceRequest; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; import com.dremio.services.nessie.grpc.api.ReferenceType; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; @@ -52,8 +54,8 @@ void setHash(String hash) { this.hash = hash; } - void assign(ReferenceType refType) throws NessieNotFoundException, NessieConflictException { - handle( + Reference assign(ReferenceType refType) throws NessieNotFoundException, NessieConflictException { + return handle( () -> { AssignReferenceRequest.Builder builder = AssignReferenceRequest.newBuilder() .setReferenceType(refType) @@ -66,7 +68,9 @@ void assign(ReferenceType refType) throws NessieNotFoundException, NessieConflic } else if (assignTo instanceof Detached) { builder.setDetached(ProtoUtil.toProto((Detached) assignTo)); } - return stub.assignReference(builder.build()); + + ReferenceResponse response = stub.assignReference(builder.build()); + return refFromProtoResponse(response); }); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignTag.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignTag.java similarity index 85% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignTag.java rename to 
services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignTag.java index 7b33748bc2..5603f5234b 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcAssignTag.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcAssignTag.java @@ -13,12 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import org.projectnessie.client.api.AssignTagBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.Reference; +import org.projectnessie.model.Tag; import com.dremio.services.nessie.grpc.api.ReferenceType; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; @@ -49,6 +50,11 @@ public AssignTagBuilder hash(String hash) { @Override public void assign() throws NessieNotFoundException, NessieConflictException { - super.assign(ReferenceType.TAG); + assignAndGet(); + } + + @Override + public Tag assignAndGet() throws NessieNotFoundException, NessieConflictException { + return (Tag) super.assign(ReferenceType.TAG); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCommitMultipleOperations.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCommitMultipleOperations.java similarity index 76% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCommitMultipleOperations.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCommitMultipleOperations.java index 63d2014948..2426aa85fb 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCommitMultipleOperations.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCommitMultipleOperations.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; @@ -22,17 +22,19 @@ import java.util.List; import org.projectnessie.client.api.CommitMultipleOperationsBuilder; +import org.projectnessie.client.builder.BaseCommitMultipleOperationsBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.Branch; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.CommitResponse; import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.Operation; import com.dremio.services.nessie.grpc.api.CommitRequest; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; -final class GrpcCommitMultipleOperations implements CommitMultipleOperationsBuilder { +final class GrpcCommitMultipleOperations extends BaseCommitMultipleOperationsBuilder { private final TreeServiceBlockingStub stub; private final ImmutableOperations.Builder operations = ImmutableOperations.builder(); @@ -75,14 +77,22 @@ public CommitMultipleOperationsBuilder hash(String hash) { @Override public Branch commit() throws NessieNotFoundException, NessieConflictException { + return commitWithResponse().getTargetBranch(); + } + + @Override + public CommitResponse commitWithResponse() throws NessieNotFoundException, NessieConflictException { return handle( - () -> - fromProto( - stub.commitMultipleOperations( - CommitRequest.newBuilder() - .setBranch(branchName) - .setHash(hash) - .setCommitOperations(toProto(operations.build())) - .build()))); + () -> { + CommitRequest.Builder request = CommitRequest.newBuilder() + .setBranch(branchName) + .setCommitOperations(toProto(operations.build())); + + if (hash != null) { + request.setHash(hash); + } + + return fromProto(stub.commitMultipleOperations(request.build())); + }); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateReference.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCreateReference.java similarity index 76% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateReference.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCreateReference.java index 0524882005..26c7aafcf6 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateReference.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcCreateReference.java @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
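// NOTE: GrpcCommitMultipleOperations above follows a common API-evolution pattern: the new,
// richer commitWithResponse() becomes the real entry point and the legacy commit() merely
// unwraps its result. A minimal sketch of that shape (all types below are illustrative):

public class CommitApiEvolution {
  static class Branch {
    final String name;
    Branch(String name) { this.name = name; }
  }

  static class CommitResponse {
    private final Branch target;
    CommitResponse(Branch target) { this.target = target; }
    Branch getTargetBranch() { return target; }
  }

  interface Committer {
    CommitResponse commitWithResponse(); // new, richer entry point

    default Branch commit() {            // legacy method delegates
      return commitWithResponse().getTargetBranch();
    }
  }

  public static void main(String[] args) {
    Committer c = () -> new CommitResponse(new Branch("main"));
    System.out.println(c.commit().name); // prints "main"
  }
}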
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.refToProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; -import org.projectnessie.client.api.CreateReferenceBuilder; +import org.projectnessie.client.builder.BaseCreateReferenceBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.Reference; @@ -27,28 +27,14 @@ import com.dremio.services.nessie.grpc.api.CreateReferenceRequest; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; -final class GrpcCreateReference implements CreateReferenceBuilder { +final class GrpcCreateReference extends BaseCreateReferenceBuilder { private final TreeServiceBlockingStub stub; - private String sourceRefName; - private Reference reference; public GrpcCreateReference(TreeServiceBlockingStub stub) { this.stub = stub; } - @Override - public CreateReferenceBuilder sourceRefName(String sourceRefName) { - this.sourceRefName = sourceRefName; - return this; - } - - @Override - public CreateReferenceBuilder reference(Reference reference) { - this.reference = reference; - return this; - } - @Override public Reference create() throws NessieNotFoundException, NessieConflictException { return handle( diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteBranch.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteBranch.java similarity index 60% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteBranch.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteBranch.java index 60ecf8e07c..24b2b685d0 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteBranch.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteBranch.java @@ -13,45 +13,48 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; +import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProtoResponse; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; import org.projectnessie.client.api.DeleteBranchBuilder; +import org.projectnessie.client.builder.BaseOnBranchBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Branch; import com.dremio.services.nessie.grpc.api.DeleteReferenceRequest; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; import com.dremio.services.nessie.grpc.api.ReferenceType; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; -final class GrpcDeleteBranch implements DeleteBranchBuilder { +final class GrpcDeleteBranch extends BaseOnBranchBuilder + implements DeleteBranchBuilder { private final TreeServiceBlockingStub stub; - private String branchName; - private String hash; public GrpcDeleteBranch(TreeServiceBlockingStub stub) { this.stub = stub; } @Override - public DeleteBranchBuilder branchName(String branchName) { - this.branchName = branchName; - return this; - } - - @Override - public DeleteBranchBuilder hash(String hash) { - this.hash = hash; - return this; + public Branch getAndDelete() throws NessieConflictException, NessieNotFoundException { + return handle( + () -> + { + ReferenceResponse response = stub.deleteReference( + DeleteReferenceRequest.newBuilder() + .setReferenceType(ReferenceType.BRANCH) + .setNamedRef(branchName) + .setHash(hash) + .build()); + return (Branch) refFromProtoResponse(response); + }); } @Override public void delete() throws NessieConflictException, NessieNotFoundException { - handle( - () -> - stub.deleteReference( - DeleteReferenceRequest.newBuilder().setReferenceType(ReferenceType.BRANCH).setNamedRef(branchName).setHash(hash).build())); + getAndDelete(); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteTag.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteTag.java similarity index 61% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteTag.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteTag.java index 17a6e0b1c9..cf09369e27 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteTag.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcDeleteTag.java @@ -13,45 +13,47 @@ * See the License for the specific language governing permissions and * limitations under the License. 
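// NOTE: GrpcDeleteBranch above (and GrpcDeleteTag below) now inherit their name/hash setters
// from a shared base builder and implement delete() by delegating to the new getAndDelete().
// A self-contained sketch of that structure; the RPC call is replaced by a stand-in string:

public class DeleteBuilderSketch {
  // Recursive generic so the fluent setters return the concrete subclass type.
  abstract static class BaseOnBranchBuilder<B extends BaseOnBranchBuilder<B>> {
    protected String branchName;
    protected String hash;

    @SuppressWarnings("unchecked")
    public B branchName(String branchName) { this.branchName = branchName; return (B) this; }

    @SuppressWarnings("unchecked")
    public B hash(String hash) { this.hash = hash; return (B) this; }
  }

  static final class DeleteBranch extends BaseOnBranchBuilder<DeleteBranch> {
    String getAndDelete() {           // new: returns the deleted reference
      return branchName + "@" + hash; // stand-in for the actual RPC
    }

    void delete() {                   // old entry point delegates
      getAndDelete();
    }
  }

  public static void main(String[] args) {
    System.out.println(new DeleteBranch().branchName("main").hash("abc123").getAndDelete());
  }
}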
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; +import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProtoResponse; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; import org.projectnessie.client.api.DeleteTagBuilder; +import org.projectnessie.client.builder.BaseOnTagBuilder; import org.projectnessie.error.NessieConflictException; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Tag; import com.dremio.services.nessie.grpc.api.DeleteReferenceRequest; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; import com.dremio.services.nessie.grpc.api.ReferenceType; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; -final class GrpcDeleteTag implements DeleteTagBuilder { +final class GrpcDeleteTag extends BaseOnTagBuilder implements DeleteTagBuilder { private final TreeServiceBlockingStub stub; - private String tagName; - private String hash; public GrpcDeleteTag(TreeServiceBlockingStub stub) { this.stub = stub; } @Override - public DeleteTagBuilder tagName(String tagName) { - this.tagName = tagName; - return this; - } - - @Override - public DeleteTagBuilder hash(String hash) { - this.hash = hash; - return this; + public Tag getAndDelete() throws NessieNotFoundException, NessieConflictException { + return handle( + () -> + { + ReferenceResponse response = stub.deleteReference( + DeleteReferenceRequest.newBuilder() + .setReferenceType(ReferenceType.TAG) + .setNamedRef(tagName) + .setHash(hash) + .build()); + return (Tag) refFromProtoResponse(response); + }); } @Override public void delete() throws NessieConflictException, NessieNotFoundException { - handle( - () -> - stub.deleteReference( - DeleteReferenceRequest.newBuilder().setReferenceType(ReferenceType.TAG).setNamedRef(tagName).setHash(hash).build())); + getAndDelete(); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetAllReferences.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetAllReferences.java new file mode 100644 index 0000000000..2ea704566c --- /dev/null +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetAllReferences.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.grpc.client.impl; + +import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieRuntimeEx; + +import org.projectnessie.api.v1.params.ReferencesParams; +import org.projectnessie.client.builder.BaseGetAllReferencesBuilder; +import org.projectnessie.model.ImmutableReferencesResponse; +import org.projectnessie.model.ReferencesResponse; + +import com.dremio.services.nessie.grpc.ProtoUtil; +import com.dremio.services.nessie.grpc.api.GetAllReferencesResponse; +import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; + +final class GrpcGetAllReferences extends BaseGetAllReferencesBuilder { + + private final TreeServiceBlockingStub stub; + + public GrpcGetAllReferences(TreeServiceBlockingStub stub) { + super(ReferencesParams::forNextPage); + this.stub = stub; + } + + @Override + protected ReferencesParams params() { + return ReferencesParams.builder() + .maxRecords(maxRecords) + .fetchOption(fetchOption) + .filter(filter) + .build(); + } + + @Override + protected ReferencesResponse get(ReferencesParams p) { + return handleNessieRuntimeEx( + () -> { + GetAllReferencesResponse response = stub.getAllReferences(ProtoUtil.toProto(p)); + ImmutableReferencesResponse.Builder builder = ReferencesResponse.builder(); + response.getReferenceList() + .stream() + .map(ProtoUtil::refFromProto) + .forEach(builder::addReferences); + builder.isHasMore(response.getHasMore()); + if (response.hasPageToken()) { + builder.token(response.getPageToken()); + } + return builder.build(); + }); + } +} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetCommitLog.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetCommitLog.java new file mode 100644 index 0000000000..34799b499f --- /dev/null +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetCommitLog.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
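// NOTE: GrpcGetAllReferences above (and GrpcGetCommitLog below) only implement params() and
// get(p); the paging loop itself lives in the Nessie Base*Builder superclasses, seeded with a
// "params for next page" function. A rough, self-contained sketch of that division of labor
// (the names and the fetchAll() driver are illustrative, not the Nessie client API):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;

public class PagingSketch {
  static class Page {
    final List<String> items;
    final String nextToken; // null means last page
    Page(List<String> items, String nextToken) { this.items = items; this.nextToken = nextToken; }
  }

  // The base class owns the loop; subclasses describe one request and one response.
  abstract static class BasePagedBuilder<P> {
    private final BiFunction<P, String, P> forNextPage;
    BasePagedBuilder(BiFunction<P, String, P> forNextPage) { this.forNextPage = forNextPage; }

    protected abstract P params();
    protected abstract Page get(P params);

    List<String> fetchAll() {
      List<String> all = new ArrayList<>();
      P p = params();
      while (true) {
        Page page = get(p);
        all.addAll(page.items);
        if (page.nextToken == null) {
          return all;
        }
        p = forNextPage.apply(p, page.nextToken);
      }
    }
  }

  public static void main(String[] args) {
    BasePagedBuilder<String> refs = new BasePagedBuilder<String>((params, token) -> token) {
      @Override protected String params() { return "start"; }
      @Override protected Page get(String params) {
        return params.equals("start")
            ? new Page(Arrays.asList("main", "dev"), "page2")
            : new Page(Arrays.asList("release"), null);
      }
    };
    System.out.println(refs.fetchAll()); // [main, dev, release]
  }
}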
+ */ +package com.dremio.services.nessie.grpc.client.impl; + +import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; +import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; + +import org.projectnessie.api.v1.params.CommitLogParams; +import org.projectnessie.client.builder.BaseGetCommitLogBuilder; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.LogResponse; + +import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; + +final class GrpcGetCommitLog extends BaseGetCommitLogBuilder { + + private final TreeServiceBlockingStub stub; + + public GrpcGetCommitLog(TreeServiceBlockingStub stub) { + super(CommitLogParams::forNextPage); + this.stub = stub; + } + + @Override + protected CommitLogParams params() { + return CommitLogParams.builder() + .filter(filter) + .maxRecords(maxRecords) + .fetchOption(fetchOption) + .startHash(untilHash) + .endHash(hashOnRef) + .build(); + } + + @Override + protected LogResponse get(CommitLogParams p) throws NessieNotFoundException { + return handleNessieNotFoundEx( + () -> fromProto(stub.getCommitLog(toProto(refName, p)))); + } +} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetContent.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetContent.java similarity index 52% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetContent.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetContent.java index 25b781d298..cfbb1b48ec 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetContent.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetContent.java @@ -13,72 +13,61 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import javax.annotation.Nullable; - -import org.projectnessie.client.api.GetContentBuilder; +import org.projectnessie.client.builder.BaseGetContentBuilder; +import org.projectnessie.error.NessieContentNotFoundException; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.Content; import org.projectnessie.model.ContentKey; +import org.projectnessie.model.ContentResponse; import org.projectnessie.model.GetMultipleContentsResponse; -import org.projectnessie.model.GetMultipleContentsResponse.ContentWithKey; -import org.projectnessie.model.ImmutableGetMultipleContentsRequest; import com.dremio.services.nessie.grpc.api.ContentServiceGrpc.ContentServiceBlockingStub; -final class GrpcGetContent implements GetContentBuilder { +final class GrpcGetContent extends BaseGetContentBuilder { private final ContentServiceBlockingStub stub; - private final ImmutableGetMultipleContentsRequest.Builder request = - ImmutableGetMultipleContentsRequest.builder(); - private String refName; - private String hashOnRef; public GrpcGetContent(ContentServiceBlockingStub stub) { this.stub = stub; } @Override - public GetContentBuilder key(ContentKey key) { - request.addRequestedKeys(key); - return this; + public Map get() throws NessieNotFoundException { + return getWithResponse().toContentsMap(); } @Override - public GetContentBuilder keys(List keys) { - request.addAllRequestedKeys(keys); - return this; - } + public ContentResponse getSingle(ContentKey key) throws NessieNotFoundException { + if (!request.build().getRequestedKeys().isEmpty()) { + throw new IllegalStateException( + "Must not use getSingle() with key() or keys(), pass the single key to getSingle()"); + } - @Override - public GetContentBuilder refName(String refName) { - this.refName = refName; - return this; - } + GetMultipleContentsResponse multi = handleNessieNotFoundEx( + () -> fromProto(stub.getMultipleContents( + toProto(refName, hashOnRef, request.build().withRequestedKeys(key))))); - @Override - public GetContentBuilder hashOnRef(@Nullable String hashOnRef) { - this.hashOnRef = hashOnRef; - return this; + Content content = multi.toContentsMap().get(key); + if (content == null) { + throw new NessieContentNotFoundException(key, refName); + } + + return ContentResponse.builder() + .content(content) + .effectiveReference(multi.getEffectiveReference()).build(); } @Override - public Map get() throws NessieNotFoundException { + public GetMultipleContentsResponse getWithResponse() throws NessieNotFoundException { return handleNessieNotFoundEx( - () -> { - GetMultipleContentsResponse response = - fromProto(stub.getMultipleContents(toProto(refName, hashOnRef, request.build()))); - return response.getContents().stream() - .collect(Collectors.toMap(ContentWithKey::getKey, ContentWithKey::getContent)); - }); + () -> fromProto(stub.getMultipleContents(toProto(refName, hashOnRef, request.build())))); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetDiff.java 
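The GrpcGetContent changes add the v2-style single-key path: getSingle(ContentKey) refuses to be mixed with key()/keys(), reuses the multi-content RPC, and converts an absent key into NessieContentNotFoundException. A caller-side sketch (the reference name and key are illustrative):

    ContentKey key = ContentKey.of("db", "table1");
    try {
      ContentResponse resp = api.getContent()
          .refName("main")
          .getSingle(key);
      Content table = resp.getContent();   // non-null if no exception was thrown
    } catch (NessieContentNotFoundException e) {
      // the key does not exist on the requested reference
    }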
b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetDiff.java similarity index 53% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetDiff.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetDiff.java index 41ab383218..02fa0989ee 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetDiff.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetDiff.java @@ -13,58 +13,45 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProtoDiffRequest; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; -import org.projectnessie.api.params.DiffParams; -import org.projectnessie.api.params.DiffParamsBuilder; -import org.projectnessie.client.api.GetDiffBuilder; +import org.projectnessie.client.builder.BaseGetDiffBuilder; import org.projectnessie.error.NessieNotFoundException; import org.projectnessie.model.DiffResponse; +import com.dremio.services.nessie.grpc.api.DiffRequest; import com.dremio.services.nessie.grpc.api.DiffServiceGrpc.DiffServiceBlockingStub; /** * Returns the diff for two given references. */ -public class GrpcGetDiff implements GetDiffBuilder { +public class GrpcGetDiff extends BaseGetDiffBuilder { private final DiffServiceBlockingStub stub; - private final DiffParamsBuilder builder = DiffParams.builder(); public GrpcGetDiff(DiffServiceBlockingStub stub) { + super((request, pageToken) -> { + DiffRequest.Builder builder = request.toBuilder().clearPageToken(); + if (pageToken != null) { + builder.setPageToken(pageToken); + } + return builder.build(); + }); this.stub = stub; } @Override - public GetDiffBuilder fromRefName(String fromRefName) { - builder.fromRef(fromRefName); - return this; + protected DiffRequest params() { + return toProtoDiffRequest(fromRefName, fromHashOnRef, toRefName, toHashOnRef, maxRecords, minKey, maxKey, prefixKey, + keys, filter); } @Override - public GetDiffBuilder fromHashOnRef(String fromHashOnRef) { - builder.fromHashOnRef(fromHashOnRef); - return this; - } - - @Override - public GetDiffBuilder toRefName(String toRefName) { - builder.toRef(toRefName); - return this; - } - - @Override - public GetDiffBuilder toHashOnRef(String toHashOnRef) { - builder.toHashOnRef(toHashOnRef); - return this; - } - - @Override - public DiffResponse get() throws NessieNotFoundException { - return handleNessieNotFoundEx(() -> fromProto(stub.getDiff(toProto(builder.build())))); + protected DiffResponse get(DiffRequest request) throws NessieNotFoundException { + return handleNessieNotFoundEx(() -> fromProto(stub.getDiff(request))); } } diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetEntries.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetEntries.java new file mode 100644 index 0000000000..bb5262c882 --- /dev/null +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetEntries.java @@ -0,0 
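GrpcGetDiff follows the same template as the other paged builders: the constructor lambda splices the next page token into an otherwise frozen DiffRequest (clearPageToken, then setPageToken when a token is present), and params() defers all translation to ProtoUtil.toProtoDiffRequest. Assuming `api` exposes this builder, a minimal diff call looks like:

    // Branch names are placeholders; get() returns the first (or only) page.
    DiffResponse diff = api.getDiff()
        .fromRefName("main")
        .toRefName("feature-x")
        .get();
    diff.getDiffs().forEach(entry ->
        System.out.println(entry.getKey() + ": " + entry.getFrom() + " -> " + entry.getTo()));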
+1,54 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.grpc.client.impl; + +import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProtoEntriesRequest; +import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; + +import org.projectnessie.client.builder.BaseGetEntriesBuilder; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.EntriesResponse; + +import com.dremio.services.nessie.grpc.api.EntriesRequest; +import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; + +final class GrpcGetEntries extends BaseGetEntriesBuilder { + + private final TreeServiceBlockingStub stub; + + public GrpcGetEntries(TreeServiceBlockingStub stub) { + super((request, pageToken) -> { + EntriesRequest.Builder builder = request.toBuilder().clearPageToken(); + if (pageToken != null) { + builder.setPageToken(pageToken); + } + return builder.build(); + }); + this.stub = stub; + } + + @Override + protected EntriesRequest params() { + return toProtoEntriesRequest(refName, hashOnRef, maxRecords, filter, namespaceDepth, withContent, minKey, maxKey, + prefixKey, keys); + } + + @Override + protected EntriesResponse get(EntriesRequest p) throws NessieNotFoundException { + return handleNessieNotFoundEx(() -> fromProto(stub.getEntries(p))); + } +} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetRefLog.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetRefLog.java similarity index 94% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetRefLog.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetRefLog.java index 9defa6fd80..3cda565d29 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetRefLog.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetRefLog.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; @@ -23,8 +23,8 @@ import javax.annotation.Nullable; -import org.projectnessie.api.params.RefLogParams; -import org.projectnessie.api.params.RefLogParamsBuilder; +import org.projectnessie.api.v1.params.RefLogParams; +import org.projectnessie.api.v1.params.RefLogParamsBuilder; import org.projectnessie.client.StreamingUtil; import org.projectnessie.client.api.GetRefLogBuilder; import org.projectnessie.error.NessieNotFoundException; diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetReference.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetReference.java similarity index 89% rename from services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetReference.java rename to services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetReference.java index ddda71b846..4d61d6b12b 100644 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetReference.java +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcGetReference.java @@ -13,17 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.services.nessie.grpc.client.v1api; +package com.dremio.services.nessie.grpc.client.impl; import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; -import org.projectnessie.api.params.FetchOption; -import org.projectnessie.api.params.GetReferenceParams; -import org.projectnessie.api.params.GetReferenceParamsBuilder; +import org.projectnessie.api.v1.params.GetReferenceParams; +import org.projectnessie.api.v1.params.GetReferenceParamsBuilder; import org.projectnessie.client.api.GetReferenceBuilder; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.FetchOption; import org.projectnessie.model.Reference; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcMergeReference.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcMergeReference.java new file mode 100644 index 0000000000..084c235bd2 --- /dev/null +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcMergeReference.java @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.grpc.client.impl; + +import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; +import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; + +import org.projectnessie.api.v1.params.ImmutableMerge; +import org.projectnessie.client.builder.BaseMergeReferenceBuilder; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.MergeResponse; + +import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; + +final class GrpcMergeReference extends BaseMergeReferenceBuilder { + + private final TreeServiceBlockingStub stub; + + public GrpcMergeReference(TreeServiceBlockingStub stub) { + this.stub = stub; + } + + @Override + public MergeResponse merge() throws NessieNotFoundException, NessieConflictException { + ImmutableMerge.Builder merge = + ImmutableMerge.builder() + .fromHash(fromHash) + .fromRefName(fromRefName) + .isDryRun(dryRun) + .isReturnConflictAsResult(returnConflictAsResult) + .isFetchAdditionalInfo(fetchAdditionalInfo) + .keepIndividualCommits(keepIndividualCommits); + + if (defaultMergeMode != null) { + merge.defaultKeyMergeMode(defaultMergeMode); + } + + if (mergeModes != null) { + merge.keyMergeModes(mergeModes.values()); + } + + return handle( + () -> fromProto( + stub.mergeRefIntoBranch( + toProto(branchName, hash, merge.build(), message, commitMeta)))); + } +} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcTransplantCommits.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcTransplantCommits.java new file mode 100644 index 0000000000..62853733f6 --- /dev/null +++ b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/impl/GrpcTransplantCommits.java @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
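Relative to the deleted v1 builder, GrpcMergeReference gains the merge-behavior knobs from BaseMergeReferenceBuilder: a default MergeBehavior plus per-key overrides, each copied into the ImmutableMerge payload only when set. A hedged usage sketch (branch names, hash variables, and the key are invented for illustration):

    MergeResponse result = api.mergeRefIntoBranch()
        .fromRefName("feature-x")
        .fromHash(featureHead)        // tip of the source branch
        .branchName("main")
        .hash(mainHead)               // expected HEAD of the target branch
        .defaultMergeMode(MergeBehavior.NORMAL)
        .mergeMode(ContentKey.of("db", "hot-table"), MergeBehavior.FORCE)
        .returnConflictAsResult(true) // surface conflicts in the response
        .merge();
    if (!result.wasSuccessful()) {
      // inspect result.getDetails() for per-key conflict information
    }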
+ */ +package com.dremio.services.nessie.grpc.client.impl; + +import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; +import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; + +import org.projectnessie.api.v1.params.ImmutableTransplant; +import org.projectnessie.client.api.TransplantCommitsBuilder; +import org.projectnessie.client.builder.BaseTransplantCommitsBuilder; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.MergeResponse; + +import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; + +final class GrpcTransplantCommits extends BaseTransplantCommitsBuilder { + + private final TreeServiceBlockingStub stub; + + public GrpcTransplantCommits(TreeServiceBlockingStub stub) { + this.stub = stub; + } + + @Override + public TransplantCommitsBuilder commitMeta(CommitMeta commitMeta) { + throw new UnsupportedOperationException( + "Setting CommitMeta overrides is not supported for transplants."); + } + + @Override + public MergeResponse transplant() throws NessieNotFoundException, NessieConflictException { + ImmutableTransplant.Builder transplant = + ImmutableTransplant.builder() + .fromRefName(fromRefName) + .hashesToTransplant(hashesToTransplant) + .isDryRun(dryRun) + .isReturnConflictAsResult(returnConflictAsResult) + .isFetchAdditionalInfo(fetchAdditionalInfo) + .keepIndividualCommits(keepIndividualCommits); + + if (defaultMergeMode != null) { + transplant.defaultKeyMergeMode(defaultMergeMode); + } + + if (mergeModes != null) { + transplant.keyMergeModes(mergeModes.values()); + } + + return handle( + () -> fromProto( + stub.transplantCommitsIntoBranch( + toProto(branchName, hash, message, transplant.build())))); + } +} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateNamespace.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateNamespace.java deleted file mode 100644 index 18e3a73ba4..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcCreateNamespace.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceCreation; - -import java.util.HashMap; -import java.util.Map; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.NamespaceParamsBuilder; -import org.projectnessie.client.api.CreateNamespaceBuilder; -import org.projectnessie.error.NessieNamespaceAlreadyExistsException; -import org.projectnessie.error.NessieReferenceNotFoundException; -import org.projectnessie.model.ImmutableNamespace; -import org.projectnessie.model.Namespace; - -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; - -public class GrpcCreateNamespace implements CreateNamespaceBuilder { - - private final NamespaceServiceBlockingStub stub; - private final NamespaceParamsBuilder builder = NamespaceParams.builder(); - private final Map properties = new HashMap<>(); - private Namespace namespace; - - GrpcCreateNamespace(NamespaceServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public CreateNamespaceBuilder namespace(Namespace namespace) { - this.namespace = namespace; - return this; - } - - @Override - public CreateNamespaceBuilder refName(String refName) { - builder.refName(refName); - return this; - } - - @Override - public CreateNamespaceBuilder hashOnRef(@Nullable String hashOnRef) { - builder.hashOnRef(hashOnRef); - return this; - } - - @Override - public CreateNamespaceBuilder properties(Map properties) { - this.properties.putAll(properties); - return this; - } - - @Override - public CreateNamespaceBuilder property(String key, String value) { - this.properties.put(key, value); - return this; - } - - @Override - public Namespace create() - throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException { - return handleNamespaceCreation(() -> { - builder.namespace(ImmutableNamespace.builder() - .from(this.namespace) - .properties(properties) - .build()); - return fromProto(stub.createNamespace(toProto(builder.build()))); - }); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteNamespace.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteNamespace.java deleted file mode 100644 index c36af73202..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcDeleteNamespace.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceDeletion; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.NamespaceParamsBuilder; -import org.projectnessie.client.api.DeleteNamespaceBuilder; -import org.projectnessie.error.NessieNamespaceNotEmptyException; -import org.projectnessie.error.NessieNamespaceNotFoundException; -import org.projectnessie.error.NessieReferenceNotFoundException; -import org.projectnessie.model.Namespace; - -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; - -public class GrpcDeleteNamespace implements DeleteNamespaceBuilder { - - private final NamespaceParamsBuilder builder = NamespaceParams.builder(); - private final NamespaceServiceBlockingStub stub; - - public GrpcDeleteNamespace(NamespaceServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public DeleteNamespaceBuilder namespace(Namespace namespace) { - builder.namespace(namespace); - return this; - } - - @Override - public DeleteNamespaceBuilder refName(String refName) { - builder.refName(refName); - return this; - } - - @Override - public DeleteNamespaceBuilder hashOnRef(@Nullable String hashOnRef) { - builder.hashOnRef(hashOnRef); - return this; - } - - @Override - public void delete() - throws NessieReferenceNotFoundException, NessieNamespaceNotEmptyException, - NessieNamespaceNotFoundException { - handleNamespaceDeletion(() -> stub.deleteNamespace(toProto(builder.build()))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetAllReferences.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetAllReferences.java deleted file mode 100644 index 4f2d030f98..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetAllReferences.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.projectnessie.api.params.FetchOption; -import org.projectnessie.api.params.ReferencesParams; -import org.projectnessie.api.params.ReferencesParamsBuilder; -import org.projectnessie.client.StreamingUtil; -import org.projectnessie.client.api.GetAllReferencesBuilder; -import org.projectnessie.error.NessieNotFoundException; -import org.projectnessie.model.Reference; -import org.projectnessie.model.ReferencesResponse; - -import com.dremio.services.nessie.grpc.ProtoUtil; -import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; - -final class GrpcGetAllReferences implements GetAllReferencesBuilder { - - private final TreeServiceBlockingStub stub; - private final ReferencesParamsBuilder params = ReferencesParams.builder(); - - public GrpcGetAllReferences(TreeServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public GetAllReferencesBuilder maxRecords(int maxRecords) { - params.maxRecords(maxRecords); - return this; - } - - @Override - public GetAllReferencesBuilder pageToken(String pageToken) { - params.pageToken(pageToken); - return this; - } - - public GetAllReferencesBuilder fetch(FetchOption fetchOption) { - params.fetchOption(fetchOption); - return this; - } - - @Override - public GetAllReferencesBuilder filter(String filter) { - params.filter(filter); - return this; - } - - @Override - public ReferencesResponse get() { - return get(params.build()); - } - - private ReferencesResponse get(ReferencesParams p) { - List result = stub.getAllReferences(ProtoUtil.toProto(p)) - .getReferenceList() - .stream() - .map(ProtoUtil::refFromProto) - .collect(Collectors.toList()); - return ReferencesResponse.builder().addAllReferences(result).build(); - } - - @Override - public Stream stream() throws NessieNotFoundException { - ReferencesParams p = params.build(); - return StreamingUtil.generateStream( - ReferencesResponse::getReferences, pageToken -> get(p.forNextPage(pageToken))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetCommitLog.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetCommitLog.java deleted file mode 100644 index 45659a1523..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetCommitLog.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; - -import java.util.stream.Stream; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.CommitLogParams; -import org.projectnessie.api.params.CommitLogParamsBuilder; -import org.projectnessie.api.params.FetchOption; -import org.projectnessie.client.StreamingUtil; -import org.projectnessie.client.api.GetCommitLogBuilder; -import org.projectnessie.error.NessieNotFoundException; -import org.projectnessie.model.LogResponse; - -import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; - -final class GrpcGetCommitLog implements GetCommitLogBuilder { - - private final TreeServiceBlockingStub stub; - private final CommitLogParamsBuilder params = CommitLogParams.builder(); - private String refName; - - public GrpcGetCommitLog(TreeServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public GetCommitLogBuilder fetch(FetchOption fetchOption) { - params.fetchOption(fetchOption); - return this; - } - - @Override - public GetCommitLogBuilder untilHash(@Nullable String untilHash) { - params.startHash(untilHash); - return this; - } - - @Override - public GetCommitLogBuilder refName(String refName) { - this.refName = refName; - return this; - } - - @Override - public GetCommitLogBuilder hashOnRef(@Nullable String hashOnRef) { - params.endHash(hashOnRef); - return this; - } - - @Override - public GetCommitLogBuilder maxRecords(int maxRecords) { - params.maxRecords(maxRecords); - return this; - } - - @Override - public GetCommitLogBuilder pageToken(String pageToken) { - params.pageToken(pageToken); - return this; - } - - @Override - public GetCommitLogBuilder filter(String filter) { - params.filter(filter); - return this; - } - - @Override - public LogResponse get() throws NessieNotFoundException { - return get(params.build()); - } - - private LogResponse get(CommitLogParams p) throws NessieNotFoundException { - return handleNessieNotFoundEx( - () -> fromProto(stub.getCommitLog(toProto(refName, p)))); - } - - @Override - public Stream stream() throws NessieNotFoundException { - CommitLogParams p = params.build(); - return StreamingUtil.generateStream( - LogResponse::getLogEntries, pageToken -> get(p.forNextPage(pageToken))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetEntries.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetEntries.java deleted file mode 100644 index 364d27b826..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetEntries.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; - -import java.util.stream.Stream; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.EntriesParams; -import org.projectnessie.api.params.EntriesParamsBuilder; -import org.projectnessie.client.StreamingUtil; -import org.projectnessie.client.api.GetEntriesBuilder; -import org.projectnessie.error.NessieNotFoundException; -import org.projectnessie.model.EntriesResponse; - -import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; - -final class GrpcGetEntries implements GetEntriesBuilder { - - private final TreeServiceBlockingStub stub; - private final EntriesParamsBuilder params = EntriesParams.builder(); - private String refName; - - public GrpcGetEntries(TreeServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public GetEntriesBuilder namespaceDepth(Integer namespaceDepth) { - params.namespaceDepth(namespaceDepth); - return this; - } - - @Override - public GetEntriesBuilder refName(String refName) { - this.refName = refName; - return this; - } - - @Override - public GetEntriesBuilder hashOnRef(@Nullable String hashOnRef) { - params.hashOnRef(hashOnRef); - return this; - } - - @Override - public GetEntriesBuilder maxRecords(int maxRecords) { - params.maxRecords(maxRecords); - return this; - } - - @Override - public GetEntriesBuilder pageToken(String pageToken) { - params.pageToken(pageToken); - return this; - } - - @Override - public GetEntriesBuilder filter(String filter) { - params.filter(filter); - return this; - } - - @Override - public EntriesResponse get() throws NessieNotFoundException { - return get(params.build()); - } - - private EntriesResponse get(EntriesParams p) throws NessieNotFoundException { - return handleNessieNotFoundEx( - () -> fromProto(stub.getEntries(toProto(refName, p)))); - } - - @Override - public Stream stream() throws NessieNotFoundException { - EntriesParams p = params.build(); - return StreamingUtil.generateStream( - EntriesResponse::getEntries, pageToken -> get(p.forNextPage(pageToken))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetMultipleNamespaces.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetMultipleNamespaces.java deleted file mode 100644 index edef9a30f9..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetMultipleNamespaces.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.MultipleNamespacesParams; -import org.projectnessie.api.params.MultipleNamespacesParamsBuilder; -import org.projectnessie.client.api.GetMultipleNamespacesBuilder; -import org.projectnessie.error.NessieReferenceNotFoundException; -import org.projectnessie.model.GetNamespacesResponse; -import org.projectnessie.model.Namespace; - -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; - -public class GrpcGetMultipleNamespaces implements GetMultipleNamespacesBuilder { - - private final NamespaceServiceBlockingStub stub; - private final MultipleNamespacesParamsBuilder builder = MultipleNamespacesParams.builder(); - - GrpcGetMultipleNamespaces(NamespaceServiceBlockingStub stub) { - this.stub = stub; - } - - /** - * The namespace prefix to search for. - * - * @param namespace The namespace prefix to search for - * @return this - */ - @Override - public GetMultipleNamespacesBuilder namespace(Namespace namespace) { - builder.namespace(namespace); - return this; - } - - @Override - public GetMultipleNamespacesBuilder refName(String refName) { - builder.refName(refName); - return this; - } - - @Override - public GetMultipleNamespacesBuilder hashOnRef(@Nullable String hashOnRef) { - builder.hashOnRef(hashOnRef); - return this; - } - - @Override - public GetNamespacesResponse get() throws NessieReferenceNotFoundException { - return fromProto(stub.getNamespaces(toProto(builder.build()))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetNamespace.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetNamespace.java deleted file mode 100644 index 38656993c2..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcGetNamespace.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceRetrieval; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.NamespaceParamsBuilder; -import org.projectnessie.client.api.GetNamespaceBuilder; -import org.projectnessie.error.NessieNamespaceNotFoundException; -import org.projectnessie.error.NessieReferenceNotFoundException; -import org.projectnessie.model.Namespace; - -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; - -final class GrpcGetNamespace implements GetNamespaceBuilder { - - private final NamespaceServiceBlockingStub stub; - private final NamespaceParamsBuilder builder = NamespaceParams.builder(); - - GrpcGetNamespace(NamespaceServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public GrpcGetNamespace namespace(Namespace namespace) { - builder.namespace(namespace); - return this; - } - - @Override - public GrpcGetNamespace refName(String refName) { - builder.refName(refName); - return this; - } - - @Override - public GetNamespaceBuilder hashOnRef(@Nullable String hashOnRef) { - builder.hashOnRef(hashOnRef); - return this; - } - - @Override - public Namespace get() throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { - return handleNamespaceRetrieval(() -> fromProto(stub.getNamespace(toProto(builder.build())))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcMergeReference.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcMergeReference.java deleted file mode 100644 index d043ea74bd..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcMergeReference.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; - -import org.projectnessie.client.api.MergeReferenceBuilder; -import org.projectnessie.error.NessieConflictException; -import org.projectnessie.error.NessieNotFoundException; -import org.projectnessie.model.ImmutableMerge; -import org.projectnessie.model.MergeResponse; - -import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; - -final class GrpcMergeReference implements MergeReferenceBuilder { - - private final TreeServiceBlockingStub stub; - private String branchName; - private String hash; - private final ImmutableMerge.Builder merge = ImmutableMerge.builder(); - - public GrpcMergeReference(TreeServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public MergeReferenceBuilder fromHash(String fromHash) { - merge.fromHash(fromHash); - return this; - } - - @Override - public MergeReferenceBuilder fromRefName(String fromRefName) { - merge.fromRefName(fromRefName); - return this; - } - - @Override - public MergeReferenceBuilder keepIndividualCommits(boolean keepIndividualCommits) { - merge.keepIndividualCommits(keepIndividualCommits); - return this; - } - - @Override - public MergeReferenceBuilder dryRun(boolean dryRun) { - merge.isDryRun(dryRun); - return this; - } - - @Override - public MergeReferenceBuilder fetchAdditionalInfo(boolean fetchAdditionalInfo) { - merge.isFetchAdditionalInfo(fetchAdditionalInfo); - return this; - } - - @Override - public MergeReferenceBuilder returnConflictAsResult(boolean returnConflictAsResult) { - merge.isReturnConflictAsResult(returnConflictAsResult); - return this; - } - - @Override - public MergeReferenceBuilder branchName(String branchName) { - this.branchName = branchName; - return this; - } - - @Override - public MergeReferenceBuilder hash(String hash) { - this.hash = hash; - return this; - } - - @Override - public MergeResponse merge() throws NessieNotFoundException, NessieConflictException { - return handle( - () -> fromProto( - stub.mergeRefIntoBranch( - toProto(branchName, hash, merge.build())))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcTransplantCommits.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcTransplantCommits.java deleted file mode 100644 index 76ee37bb6c..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcTransplantCommits.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; - -import java.util.List; - -import org.projectnessie.client.api.TransplantCommitsBuilder; -import org.projectnessie.error.NessieConflictException; -import org.projectnessie.error.NessieNotFoundException; -import org.projectnessie.model.ImmutableTransplant; -import org.projectnessie.model.MergeResponse; - -import com.dremio.services.nessie.grpc.api.TreeServiceGrpc.TreeServiceBlockingStub; - -final class GrpcTransplantCommits implements TransplantCommitsBuilder { - - private final TreeServiceBlockingStub stub; - private String branchName; - private String hash; - private String message; - private final ImmutableTransplant.Builder transplant = ImmutableTransplant.builder(); - - public GrpcTransplantCommits(TreeServiceBlockingStub stub) { - this.stub = stub; - } - - @Override - public TransplantCommitsBuilder branchName(String branchName) { - this.branchName = branchName; - return this; - } - - @Override - public TransplantCommitsBuilder hash(String hash) { - this.hash = hash; - return this; - } - - @Override - public TransplantCommitsBuilder message(String message) { - this.message = message; - return this; - } - - @Override - public TransplantCommitsBuilder fromRefName(String fromRefName) { - transplant.fromRefName(fromRefName); - return this; - } - - @Override - public TransplantCommitsBuilder keepIndividualCommits(boolean keepIndividualCommits) { - transplant.keepIndividualCommits(keepIndividualCommits); - return this; - } - - @Override - public TransplantCommitsBuilder dryRun(boolean dryRun) { - transplant.isDryRun(dryRun); - return this; - } - - @Override - public TransplantCommitsBuilder fetchAdditionalInfo(boolean fetchAdditionalInfo) { - transplant.isFetchAdditionalInfo(fetchAdditionalInfo); - return this; - } - - @Override - public TransplantCommitsBuilder returnConflictAsResult(boolean returnConflictAsResult) { - transplant.isReturnConflictAsResult(returnConflictAsResult); - return this; - } - - @Override - public TransplantCommitsBuilder hashesToTransplant(List hashesToTransplant) { - transplant.hashesToTransplant(hashesToTransplant); - return this; - } - - @Override - public MergeResponse transplant() throws NessieNotFoundException, NessieConflictException { - return handle( - () -> fromProto( - stub.transplantCommitsIntoBranch( - toProto(branchName, hash, message, transplant.build())))); - } -} diff --git a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcUpdateNamespace.java b/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcUpdateNamespace.java deleted file mode 100644 index 1600f91049..0000000000 --- a/services/nessie-grpc/client/src/main/java/com/dremio/services/nessie/grpc/client/v1api/GrpcUpdateNamespace.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (C) 2017-2019 Dremio Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.dremio.services.nessie.grpc.client.v1api; - -import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; - -import java.util.Map; -import java.util.Set; - -import javax.annotation.Nullable; - -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.NamespaceParamsBuilder; -import org.projectnessie.client.api.UpdateNamespaceBuilder; -import org.projectnessie.error.NessieNamespaceNotFoundException; -import org.projectnessie.error.NessieReferenceNotFoundException; -import org.projectnessie.model.Namespace; - -import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceBlockingStub; -import com.dremio.services.nessie.grpc.api.NamespaceUpdateRequest; -import com.dremio.services.nessie.grpc.client.GrpcExceptionMapper; - -public class GrpcUpdateNamespace implements UpdateNamespaceBuilder { - private final NamespaceServiceBlockingStub stub; - private final NamespaceParamsBuilder builder = NamespaceParams.builder(); - private final NamespaceUpdateRequest.Builder request = NamespaceUpdateRequest.newBuilder(); - - GrpcUpdateNamespace(NamespaceServiceBlockingStub namespaceServiceBlockingStub) { - stub = namespaceServiceBlockingStub; - } - - @Override - public UpdateNamespaceBuilder namespace(Namespace namespace) { - builder.namespace(namespace); - return this; - } - - @Override - public UpdateNamespaceBuilder refName(String refName) { - builder.refName(refName); - return this; - } - - @Override - public UpdateNamespaceBuilder hashOnRef(@Nullable String hashOnRef) { - builder.hashOnRef(hashOnRef); - return this; - } - - @Override - public UpdateNamespaceBuilder removeProperties(Set propertyRemovals) { - request.addAllPropertyRemovals(propertyRemovals); - return this; - } - - @Override - public UpdateNamespaceBuilder updateProperty(String key, String value) { - request.putPropertyUpdates(key, value); - return this; - } - - @Override - public UpdateNamespaceBuilder removeProperty(String key) { - request.addPropertyRemovals(key); - return this; - } - - @Override - public UpdateNamespaceBuilder updateProperties(Map propertyUpdates) { - request.putAllPropertyUpdates(propertyUpdates); - return this; - } - - @Override - public void update() throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { - GrpcExceptionMapper.handleNamespaceRetrieval(() -> - stub.updateProperties(request.setNamespaceRequest(toProto(builder.build())).build()) - ); - } -} diff --git a/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestGrpcExceptionMapper.java b/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestGrpcExceptionMapper.java index 80a7aada7b..52936679c0 100644 --- a/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestGrpcExceptionMapper.java +++ b/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestGrpcExceptionMapper.java @@ -16,9 +16,6 @@ package com.dremio.services.nessie.grpc.client; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; -import static 
com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceCreation; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceDeletion; -import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNamespaceRetrieval; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handleNessieNotFoundEx; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.toProto; import static org.assertj.core.api.Assertions.assertThat; @@ -31,16 +28,24 @@ import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolationException; +import org.assertj.core.api.InstanceOfAssertFactories; import org.junit.jupiter.api.Test; +import org.projectnessie.error.ContentKeyErrorDetails; import org.projectnessie.error.ErrorCode; import org.projectnessie.error.ImmutableNessieError; +import org.projectnessie.error.ImmutableReferenceConflicts; import org.projectnessie.error.NessieBadRequestException; -import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieContentNotFoundException; import org.projectnessie.error.NessieNamespaceAlreadyExistsException; import org.projectnessie.error.NessieNamespaceNotEmptyException; import org.projectnessie.error.NessieNamespaceNotFoundException; import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.error.NessieReferenceAlreadyExistsException; +import org.projectnessie.error.NessieReferenceConflictException; import org.projectnessie.error.NessieReferenceNotFoundException; +import org.projectnessie.error.ReferenceConflicts; +import org.projectnessie.model.Conflict; +import org.projectnessie.model.ContentKey; import io.grpc.Status; import io.grpc.StatusRuntimeException; @@ -50,6 +55,10 @@ */ public class TestGrpcExceptionMapper { + private static final ContentKey CONTENT_KEY = ContentKey.of("folder1", "folder2"); + private static final ContentKeyErrorDetails CONTENT_ERROR_DETAILS = + ContentKeyErrorDetails.contentKeyErrorDetails(CONTENT_KEY); + @Test public void exceptionToProtoConversion() { assertThat(toProto(new IllegalArgumentException("x"))) @@ -57,12 +66,12 @@ public void exceptionToProtoConversion() { .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.INVALID_ARGUMENT.getCode()); - assertThat(toProto(new NessieNotFoundException("not found"))) + assertThat(toProto(new NessieReferenceNotFoundException("not found"))) .isInstanceOf(StatusRuntimeException.class) .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.NOT_FOUND.getCode()); - assertThat(toProto(new NessieConflictException("conflict"))) + assertThat(toProto(new NessieReferenceAlreadyExistsException("conflict"))) .isInstanceOf(StatusRuntimeException.class) .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.ALREADY_EXISTS.getCode()); @@ -72,19 +81,19 @@ public void exceptionToProtoConversion() { .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.PERMISSION_DENIED.getCode()); - assertThat(toProto(new NessieNamespaceNotFoundException("namespace not found"))) + assertThat(toProto(new NessieNamespaceNotFoundException(CONTENT_ERROR_DETAILS, "namespace not found"))) .isInstanceOf(StatusRuntimeException.class) .hasCauseInstanceOf(NessieNamespaceNotFoundException.class) .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.NOT_FOUND.getCode()); - assertThat(toProto(new NessieNamespaceNotEmptyException("namespace not empty"))) + assertThat(toProto(new NessieNamespaceNotEmptyException(CONTENT_ERROR_DETAILS, "namespace 
not empty"))) .isInstanceOf(StatusRuntimeException.class) .hasCauseInstanceOf(NessieNamespaceNotEmptyException.class) .extracting(e -> e.getStatus().getCode()) .isEqualTo(Status.ALREADY_EXISTS.getCode()); - assertThat(toProto(new NessieNamespaceAlreadyExistsException("namespace already exists"))) + assertThat(toProto(new NessieNamespaceAlreadyExistsException(CONTENT_ERROR_DETAILS, "namespace already exists"))) .isInstanceOf(StatusRuntimeException.class) .hasCauseInstanceOf(NessieNamespaceAlreadyExistsException.class) .extracting(e -> e.getStatus().getCode()) @@ -109,35 +118,83 @@ public Set> getConstraintViolations() { .isEqualTo(Status.INTERNAL.getCode()); } + @Test + public void handlingNessieErrorDetails() { + ReferenceConflicts conflicts = ImmutableReferenceConflicts.builder() + .addConflicts(Conflict.conflict(Conflict.ConflictType.KEY_CONFLICT, ContentKey.of("test1"), "msg1")) + .addConflicts(Conflict.conflict(Conflict.ConflictType.NAMESPACE_NOT_EMPTY, ContentKey.of("test2"), "msg2")) + .build(); + + assertThatThrownBy(() -> handle(() -> { + throw toProto(new NessieReferenceConflictException(conflicts, "exc-msg1", new RuntimeException("test"))); + })) + .isInstanceOf(NessieReferenceConflictException.class) + .hasMessage("exc-msg1") + .asInstanceOf(InstanceOfAssertFactories.type(NessieReferenceConflictException.class)) + .extracting(NessieReferenceConflictException::getErrorDetails) + .isEqualTo(conflicts); + } + + @Test + public void handlingLegacyExceptions() { + // Validate the handling of exception data provided by old clients (without gRPC "trailers") + assertThatThrownBy(() -> handleNessieNotFoundEx(() -> { + throw Status.NOT_FOUND + .withDescription(ErrorCode.NAMESPACE_NOT_FOUND.name()) + .augmentDescription("Namespace ABC not found") + .asRuntimeException(); + })) + .isInstanceOf(NessieNamespaceNotFoundException.class) + .hasMessage("Namespace ABC not found"); + + assertThatThrownBy(() -> handleNessieNotFoundEx(() -> { + throw Status.INVALID_ARGUMENT + .withDescription("test-msg123") + .withCause(new RuntimeException("test-cause")) + .asRuntimeException(); + })) + .isInstanceOf(NessieBadRequestException.class) + .hasMessageContaining("test-msg123"); + } + @Test public void handlingExceptions() { - NessieNotFoundException notFound = new NessieNotFoundException("not found"); + NessieNotFoundException notFound = new NessieReferenceNotFoundException("not found"); assertThatThrownBy(() -> handle(() -> { throw toProto(notFound); })) .isInstanceOf(NessieNotFoundException.class) .hasMessage(notFound.getMessage()); - NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException("not found"); + NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException( + CONTENT_ERROR_DETAILS, "not found"); assertThatThrownBy(() -> handle(() -> { throw toProto(namespaceNotFound); })) .isInstanceOf(NessieNamespaceNotFoundException.class) - .hasMessage(namespaceNotFound.getMessage()); + .hasMessage(namespaceNotFound.getMessage()) + .extracting("ErrorDetails") + .isEqualTo(CONTENT_ERROR_DETAILS); - NessieNamespaceNotEmptyException namespaceNotEmpty = new NessieNamespaceNotEmptyException("not empty"); + NessieNamespaceNotEmptyException namespaceNotEmpty = new NessieNamespaceNotEmptyException( + CONTENT_ERROR_DETAILS, "not empty"); assertThatThrownBy(() -> handle(() -> { throw toProto(namespaceNotEmpty); })) .isInstanceOf(NessieNamespaceNotEmptyException.class) - .hasMessage(namespaceNotEmpty.getMessage()); + 
.hasMessage(namespaceNotEmpty.getMessage()) + .extracting("ErrorDetails") + .isEqualTo(CONTENT_ERROR_DETAILS); - NessieNamespaceAlreadyExistsException namespaceAlreadyExists = new NessieNamespaceAlreadyExistsException("already exists"); + NessieNamespaceAlreadyExistsException namespaceAlreadyExists = new NessieNamespaceAlreadyExistsException( + CONTENT_ERROR_DETAILS, "already exists"); assertThatThrownBy(() -> handle(() -> { throw toProto(namespaceAlreadyExists); })) .isInstanceOf(NessieNamespaceAlreadyExistsException.class) - .hasMessage(namespaceAlreadyExists.getMessage()); + .hasMessage(namespaceAlreadyExists.getMessage()) + .extracting("ErrorDetails") + .isEqualTo(CONTENT_ERROR_DETAILS); IllegalArgumentException iae = new IllegalArgumentException("illegal"); assertThatThrownBy(() -> handle(() -> { @@ -155,19 +212,24 @@ public void handlingExceptions() { @Test public void handlingNotFoundExceptions() { - NessieNotFoundException notFound = new NessieNotFoundException("not found"); + NessieContentNotFoundException notFound = new NessieContentNotFoundException(CONTENT_KEY, "not found"); assertThatThrownBy(() -> handleNessieNotFoundEx(() -> { throw toProto(notFound); })) - .isInstanceOf(NessieNotFoundException.class) - .hasMessage(notFound.getMessage()); + .isInstanceOf(NessieContentNotFoundException.class) + .hasMessage(notFound.getMessage()) + .extracting("ErrorDetails") + .isEqualTo(CONTENT_ERROR_DETAILS); - NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException("not found"); + NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException( + CONTENT_ERROR_DETAILS, "not found"); assertThatThrownBy(() -> handleNessieNotFoundEx(() -> { throw toProto(namespaceNotFound); })) .isInstanceOf(NessieNamespaceNotFoundException.class) - .hasMessage(namespaceNotFound.getMessage()); + .hasMessage(namespaceNotFound.getMessage()) + .extracting("ErrorDetails") + .isEqualTo(CONTENT_ERROR_DETAILS); IllegalArgumentException iae = new IllegalArgumentException("illegal"); assertThatThrownBy(() -> handleNessieNotFoundEx(() -> { @@ -182,78 +244,4 @@ public void handlingNotFoundExceptions() { ErrorCode.BAD_REQUEST).status(400).reason("bad request").build())); })).isInstanceOf(StatusRuntimeException.class); } - - @Test - public void handlingNamespaceCreationExceptions() { - NessieNamespaceAlreadyExistsException exists = new NessieNamespaceAlreadyExistsException("namespace already exists"); - - assertThatThrownBy(() -> handleNamespaceCreation(() -> { - throw toProto(exists); - })) - .isInstanceOf(NessieNamespaceAlreadyExistsException.class) - .hasMessage(exists.getMessage()); - - NessieReferenceNotFoundException refNotFound = new NessieReferenceNotFoundException("ref not found"); - assertThatThrownBy(() -> handleNamespaceCreation(() -> { - throw toProto(refNotFound); - })) - .isInstanceOf(NessieReferenceNotFoundException.class) - .hasMessage(refNotFound.getMessage()); - - // any other exception will result in a StatusRuntimeException - assertThatThrownBy(() -> handleNamespaceCreation(() -> { - throw toProto(new NessieNamespaceNotEmptyException("x")); - })).isInstanceOf(StatusRuntimeException.class); - } - - @Test - public void handlingNamespaceDeletionExceptions() { - NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException("namespace not found"); - assertThatThrownBy(() -> handleNamespaceDeletion(() -> { - throw toProto(namespaceNotFound); - })) - .isInstanceOf(NessieNamespaceNotFoundException.class) - 
.hasMessage(namespaceNotFound.getMessage()); - - NessieNamespaceNotEmptyException namespaceNotEmpty = new NessieNamespaceNotEmptyException("namespace not empty"); - assertThatThrownBy(() -> handleNamespaceDeletion(() -> { - throw toProto(namespaceNotEmpty); - })) - .isInstanceOf(NessieNamespaceNotEmptyException.class) - .hasMessage(namespaceNotEmpty.getMessage()); - - NessieReferenceNotFoundException refNotFound = new NessieReferenceNotFoundException("ref not found"); - assertThatThrownBy(() -> handleNamespaceDeletion(() -> { - throw toProto(refNotFound); - })) - .isInstanceOf(NessieReferenceNotFoundException.class) - .hasMessage(refNotFound.getMessage()); - - // any other exception will result in a StatusRuntimeException - assertThatThrownBy(() -> handleNamespaceDeletion(() -> { - throw toProto(new NessieNamespaceAlreadyExistsException("x")); - })).isInstanceOf(StatusRuntimeException.class); - } - - @Test - public void handlingNamespaceRetrievalExceptions() { - NessieNamespaceNotFoundException namespaceNotFound = new NessieNamespaceNotFoundException("namespace not found"); - assertThatThrownBy(() -> handleNamespaceRetrieval(() -> { - throw toProto(namespaceNotFound); - })) - .isInstanceOf(NessieNamespaceNotFoundException.class) - .hasMessage(namespaceNotFound.getMessage()); - - NessieReferenceNotFoundException refNotFound = new NessieReferenceNotFoundException("ref not found"); - assertThatThrownBy(() -> handleNamespaceRetrieval(() -> { - throw toProto(refNotFound); - })) - .isInstanceOf(NessieReferenceNotFoundException.class) - .hasMessage(refNotFound.getMessage()); - - // any other exception will result in a StatusRuntimeException - assertThatThrownBy(() -> handleNamespaceRetrieval(() -> { - throw toProto(new NessieNamespaceAlreadyExistsException("x")); - })).isInstanceOf(StatusRuntimeException.class); - } } diff --git a/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestNessieGrpcClient.java b/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestNessieGrpcClient.java index a12402ae90..de0b518af7 100644 --- a/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestNessieGrpcClient.java +++ b/services/nessie-grpc/client/src/test/java/com/dremio/services/nessie/grpc/client/TestNessieGrpcClient.java @@ -27,14 +27,17 @@ import org.mockito.Mockito; import org.projectnessie.client.api.NessieApi; import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.NessieApiV2; import org.projectnessie.model.ContentKey; +import com.dremio.services.nessie.grpc.client.impl.GrpcApiImpl; + import io.grpc.ManagedChannel; import io.grpc.Metadata; import io.grpc.stub.MetadataUtils; /** - * Tests for the {@link com.dremio.services.nessie.grpc.client.v1api.GrpcApiV1Impl} + * Tests for the {@link GrpcApiImpl} */ public class TestNessieGrpcClient extends AbstractTestNessieGrpcClient{ interface IncompatibleApiInterface extends NessieApi { @@ -126,6 +129,21 @@ public void testApiCalls() throws IOException { .containsValue(fromProto(ICEBERG_TABLE)); } + @Test + public void testApiV2Calls() throws IOException { + ServiceWithChannel serviceWithChannel = startGrpcServer(); + assertThat(serviceWithChannel.getServer().getServices()).hasSize(3); + NessieApiV2 api = + GrpcClientBuilder.builder() + .withChannel(serviceWithChannel.getChannel()) + .build(NessieApiV2.class); + assertThat(api.getConfig().getDefaultBranch()).isEqualTo(REF_NAME); + 
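+ // exercise the remaining v2 read paths: default branch, reference listing and content lookup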
assertThat(api.getDefaultBranch().getName()).isEqualTo(REF_NAME); + assertThat(api.getAllReferences().get().getReferences()).containsExactly(REF); + assertThat(api.getContent().key(ContentKey.of("test")).refName(REF_NAME).get()) + .containsValue(fromProto(ICEBERG_TABLE)); + } + @Test public void testApiCallsWithInterceptor() throws IOException { ServiceWithChannel serviceWithChannel = startGrpcServer(); diff --git a/services/nessie-grpc/common/pom.xml b/services/nessie-grpc/common/pom.xml index 7ec745a272..5d179f75b5 100644 --- a/services/nessie-grpc/common/pom.xml +++ b/services/nessie-grpc/common/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-nessie-grpc - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-nessie-grpc-common @@ -31,7 +31,7 @@ - org.projectnessie + org.projectnessie.nessie nessie-model @@ -50,11 +50,6 @@ jakarta.annotation jakarta.annotation-api - - org.junit.jupiter - junit-jupiter-api - test - com.fasterxml.jackson.core jackson-databind @@ -66,7 +61,6 @@ org.jboss.jandex jandex-maven-plugin - 1.2.1 make-index diff --git a/services/nessie-grpc/common/src/main/java/com/dremio/services/nessie/grpc/ProtoUtil.java b/services/nessie-grpc/common/src/main/java/com/dremio/services/nessie/grpc/ProtoUtil.java index 7cc4cadbdc..33765f43b0 100644 --- a/services/nessie-grpc/common/src/main/java/com/dremio/services/nessie/grpc/ProtoUtil.java +++ b/services/nessie-grpc/common/src/main/java/com/dremio/services/nessie/grpc/ProtoUtil.java @@ -17,27 +17,34 @@ import java.time.Instant; import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; -import org.projectnessie.api.params.CommitLogParams; -import org.projectnessie.api.params.CommitLogParamsBuilder; -import org.projectnessie.api.params.DiffParams; -import org.projectnessie.api.params.DiffParamsBuilder; -import org.projectnessie.api.params.EntriesParams; -import org.projectnessie.api.params.EntriesParamsBuilder; -import org.projectnessie.api.params.GetReferenceParams; -import org.projectnessie.api.params.GetReferenceParamsBuilder; -import org.projectnessie.api.params.MultipleNamespacesParams; -import org.projectnessie.api.params.MultipleNamespacesParamsBuilder; -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.NamespaceParamsBuilder; -import org.projectnessie.api.params.RefLogParams; -import org.projectnessie.api.params.RefLogParamsBuilder; -import org.projectnessie.api.params.ReferencesParams; -import org.projectnessie.api.params.ReferencesParamsBuilder; -import org.projectnessie.model.BaseMergeTransplant; +import javax.annotation.Nullable; + +import org.projectnessie.api.v1.params.CommitLogParams; +import org.projectnessie.api.v1.params.CommitLogParamsBuilder; +import org.projectnessie.api.v1.params.EntriesParams; +import org.projectnessie.api.v1.params.EntriesParamsBuilder; +import org.projectnessie.api.v1.params.GetReferenceParams; +import org.projectnessie.api.v1.params.GetReferenceParamsBuilder; +import org.projectnessie.api.v1.params.ImmutableMerge; +import org.projectnessie.api.v1.params.ImmutableTransplant; +import org.projectnessie.api.v1.params.Merge; +import org.projectnessie.api.v1.params.MultipleNamespacesParams; +import org.projectnessie.api.v1.params.MultipleNamespacesParamsBuilder; +import org.projectnessie.api.v1.params.NamespaceParams; +import org.projectnessie.api.v1.params.NamespaceParamsBuilder; +import org.projectnessie.api.v1.params.RefLogParams; 
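+// the request-parameter classes above and below moved from org.projectnessie.api.params to the versioned org.projectnessie.api.v1.params package, as the removed imports further down show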
+import org.projectnessie.api.v1.params.RefLogParamsBuilder; +import org.projectnessie.api.v1.params.ReferencesParams; +import org.projectnessie.api.v1.params.ReferencesParamsBuilder; +import org.projectnessie.api.v1.params.Transplant; import org.projectnessie.model.Branch; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.CommitResponse; +import org.projectnessie.model.Conflict; import org.projectnessie.model.ContentKey; import org.projectnessie.model.DeltaLakeTable; import org.projectnessie.model.Detached; @@ -51,8 +58,11 @@ import org.projectnessie.model.GetNamespacesResponse; import org.projectnessie.model.IcebergTable; import org.projectnessie.model.IcebergView; +import org.projectnessie.model.ImmutableAddedContent; import org.projectnessie.model.ImmutableBranch; import org.projectnessie.model.ImmutableCommitMeta; +import org.projectnessie.model.ImmutableCommitResponse; +import org.projectnessie.model.ImmutableConflict; import org.projectnessie.model.ImmutableContentKeyDetails; import org.projectnessie.model.ImmutableDelete; import org.projectnessie.model.ImmutableDeltaLakeTable; @@ -60,25 +70,24 @@ import org.projectnessie.model.ImmutableDiffEntry; import org.projectnessie.model.ImmutableDiffResponse; import org.projectnessie.model.ImmutableEntriesResponse; +import org.projectnessie.model.ImmutableEntry; import org.projectnessie.model.ImmutableGetNamespacesResponse; import org.projectnessie.model.ImmutableIcebergTable; import org.projectnessie.model.ImmutableIcebergView; import org.projectnessie.model.ImmutableLogEntry; import org.projectnessie.model.ImmutableLogResponse; -import org.projectnessie.model.ImmutableMerge; import org.projectnessie.model.ImmutableMergeKeyBehavior; import org.projectnessie.model.ImmutableMergeResponse; +import org.projectnessie.model.ImmutableNamespace; import org.projectnessie.model.ImmutableNessieConfiguration; import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.ImmutablePut; import org.projectnessie.model.ImmutableRefLogResponse; import org.projectnessie.model.ImmutableReferenceMetadata; import org.projectnessie.model.ImmutableTag; -import org.projectnessie.model.ImmutableTransplant; import org.projectnessie.model.ImmutableUnchanged; import org.projectnessie.model.LogResponse; import org.projectnessie.model.LogResponse.LogEntry; -import org.projectnessie.model.Merge; import org.projectnessie.model.MergeResponse; import org.projectnessie.model.Namespace; import org.projectnessie.model.Operation; @@ -92,7 +101,6 @@ import org.projectnessie.model.Reference.ReferenceType; import org.projectnessie.model.ReferenceMetadata; import org.projectnessie.model.Tag; -import org.projectnessie.model.Transplant; import org.projectnessie.model.types.ContentTypes; import com.dremio.services.nessie.grpc.api.CommitLogEntry; @@ -102,6 +110,8 @@ import com.dremio.services.nessie.grpc.api.CommitOps; import com.dremio.services.nessie.grpc.api.Content; import com.dremio.services.nessie.grpc.api.ContentKeyConflict; +import com.dremio.services.nessie.grpc.api.ContentKeyConflictDetails; +import com.dremio.services.nessie.grpc.api.ContentKeyDetails; import com.dremio.services.nessie.grpc.api.ContentRequest; import com.dremio.services.nessie.grpc.api.ContentType; import com.dremio.services.nessie.grpc.api.DeltaLakeTable.Builder; @@ -118,8 +128,11 @@ import com.dremio.services.nessie.grpc.api.MultipleNamespacesResponse; import com.dremio.services.nessie.grpc.api.NamespaceRequest; import 
com.dremio.services.nessie.grpc.api.NessieConfiguration; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; import com.dremio.services.nessie.grpc.api.TransplantRequest; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; import com.google.protobuf.Timestamp; /** @@ -160,6 +173,36 @@ public static com.dremio.services.nessie.grpc.api.Reference refToProto(Reference throw new IllegalArgumentException(String.format("'%s' should be a Branch/Tag/Detached", ref)); } + public static com.dremio.services.nessie.grpc.api.Reference refToProto( + com.dremio.services.nessie.grpc.api.ReferenceType type, String name, String hash) { + Preconditions.checkArgument(null != name, "Reference name must be non-null"); + switch (type) { + case BRANCH: + return com.dremio.services.nessie.grpc.api.Reference.newBuilder() + .setBranch(toProto(Branch.of(name, hash))) + .build(); + + case TAG: + return com.dremio.services.nessie.grpc.api.Reference.newBuilder() + .setTag(toProto(Tag.of(name, hash))) + .build(); + + default: + throw new IllegalArgumentException(String.format("Reference type '%s' should be Branch or Tag", type)); + } + } + + @Nullable + public static Reference refFromProtoResponse(ReferenceResponse response) { + Preconditions.checkArgument(null != response, "Reference response must be non-null"); + + if (!response.hasReference()) { + return null; + } + + return refFromProto(response.getReference()); + } + public static Branch fromProto(com.dremio.services.nessie.grpc.api.Branch branch) { Preconditions.checkArgument(null != branch, "Branch must be non-null"); ImmutableBranch.Builder builder = ImmutableBranch.builder().name(branch.getName()); @@ -187,6 +230,59 @@ public static com.dremio.services.nessie.grpc.api.Branch toProto(Branch branch) return builder.build(); } + public static CommitResponse fromProto(com.dremio.services.nessie.grpc.api.CommitResponse commitResponse) { + Preconditions.checkArgument(null != commitResponse, "CommitResponse must be non-null"); + + if (!commitResponse.hasBranch()) { // legacy response + ImmutableBranch.Builder builder = ImmutableBranch.builder().name(commitResponse.getName()); + + if (commitResponse.hasHash()) { + builder.hash(commitResponse.getHash()); + } + if (commitResponse.hasMetadata()) { + builder.metadata(fromProto(commitResponse.getMetadata())); + } + + return CommitResponse.builder().targetBranch(builder.build()).build(); + } + + ImmutableCommitResponse.Builder builder = CommitResponse.builder(); + builder.targetBranch(fromProto(commitResponse.getBranch())); + + commitResponse.getAddedContentList().forEach(ac -> + builder.addAddedContents(ImmutableAddedContent.builder() + .key(fromProto(ac.getKey())) + .contentId(ac.getContentId()) + .build())); + + return builder.build(); + } + + public static com.dremio.services.nessie.grpc.api.CommitResponse toProto(CommitResponse commitResponse) { + Preconditions.checkArgument(null != commitResponse, "CommitResponse must be non-null"); + com.dremio.services.nessie.grpc.api.CommitResponse.Builder builder = + com.dremio.services.nessie.grpc.api.CommitResponse.newBuilder(); + + com.dremio.services.nessie.grpc.api.Branch branch = toProto(commitResponse.getTargetBranch()); + builder.setBranch(branch); + + if (commitResponse.getAddedContents() != null) { + commitResponse.getAddedContents().forEach(ac -> + builder.addAddedContentBuilder().setKey(toProto(ac.getKey())).setContentId(ac.contentId())); + } + + // Allow older clients to read the 
response as `Branch` - remove these fields with DX-61406 + builder.setName(branch.getName()); + if (branch.hasHash()) { + builder.setHash(branch.getHash()); + } + if (branch.hasMetadata()) { + builder.setMetadata(branch.getMetadata()); + } + + return builder.build(); + } + public static Tag fromProto(com.dremio.services.nessie.grpc.api.Tag tag) { Preconditions.checkArgument(null != tag, "Tag must be non-null"); ImmutableTag.Builder builder = ImmutableTag.builder().name(tag.getName()); @@ -430,10 +526,25 @@ public static com.dremio.services.nessie.grpc.api.IcebergView toProto(IcebergVie public static NessieConfiguration toProto(org.projectnessie.model.NessieConfiguration config) { Preconditions.checkArgument(null != config, "NessieConfiguration must be non-null"); NessieConfiguration.Builder builder = NessieConfiguration.newBuilder() - .setMaxSupportedApiVersion(config.getMaxSupportedApiVersion()); + .setMaxSupportedApiVersion(config.getMaxSupportedApiVersion()) + .setMinSupportedApiVersion(config.getMinSupportedApiVersion()) + .setActualApiVersion(config.getActualApiVersion()); if (null != config.getDefaultBranch()) { builder.setDefaultBranch(config.getDefaultBranch()); } + if (null != config.getSpecVersion()) { + builder.setSpecVersion(config.getSpecVersion()); + } + if (null != config.getNoAncestorHash()) { + builder.setNoAncestorHash(config.getNoAncestorHash()); + } + if (null != config.getRepositoryCreationTimestamp()) { + builder.setRepositoryCreationTimestamp(toProto(config.getRepositoryCreationTimestamp())); + } + if (null != config.getOldestPossibleCommitTimestamp()) { + builder.setOldestPossibleCommitTimestamp(toProto(config.getOldestPossibleCommitTimestamp())); + } + builder.putAllAdditionalProperties(config.getAdditionalProperties()); return builder.build(); } @@ -441,9 +552,28 @@ public static org.projectnessie.model.NessieConfiguration fromProto(NessieConfig Preconditions.checkArgument(null != config, "NessieConfiguration must be non-null"); ImmutableNessieConfiguration.Builder builder = ImmutableNessieConfiguration.builder() .maxSupportedApiVersion(config.getMaxSupportedApiVersion()); + if (config.hasMinSupportedApiVersion()) { + builder.minSupportedApiVersion(config.getMinSupportedApiVersion()); + } + if (config.hasSpecVersion()) { + builder.specVersion(config.getSpecVersion()); + } if (config.hasDefaultBranch()) { builder.defaultBranch(config.getDefaultBranch()); } + if (config.hasNoAncestorHash()) { + builder.noAncestorHash(config.getNoAncestorHash()); + } + if (config.hasRepositoryCreationTimestamp()) { + builder.repositoryCreationTimestamp(fromProto(config.getRepositoryCreationTimestamp())); + } + if (config.hasOldestPossibleCommitTimestamp()) { + builder.oldestPossibleCommitTimestamp(fromProto(config.getOldestPossibleCommitTimestamp())); + } + if (config.hasActualApiVersion()) { + builder.actualApiVersion(config.getActualApiVersion()); + } + builder.additionalProperties(config.getAdditionalPropertiesMap()); return builder.build(); } @@ -459,6 +589,13 @@ public static ContentKey fromProto(com.dremio.services.nessie.grpc.api.ContentKe return ContentKey.of(key.getElementsList()); } + public static List<ContentKey> fromProto(List<com.dremio.services.nessie.grpc.api.ContentKey> keys) { + Preconditions.checkArgument(null != keys, "ContentKey list must be non-null"); + ImmutableList.Builder<ContentKey> list = ImmutableList.builder(); + keys.forEach(k -> list.add(fromProto(k))); + return list.build(); + } + public static ContentWithKey fromProto(com.dremio.services.nessie.grpc.api.ContentWithKey c) { Preconditions.checkArgument(null != c,
"ContentWithKey must be non-null"); return ContentWithKey.of(fromProto(c.getContentKey()), fromProto(c.getContent())); @@ -474,18 +611,30 @@ public static com.dremio.services.nessie.grpc.api.ContentWithKey toProto(Content public static com.dremio.services.nessie.grpc.api.Entry toProto(Entry entry) { Preconditions.checkArgument(null != entry, "Entry must be non-null"); - return com.dremio.services.nessie.grpc.api.Entry.newBuilder() + com.dremio.services.nessie.grpc.api.Entry.Builder builder = com.dremio.services.nessie.grpc.api.Entry.newBuilder() .setContentKey(toProto(entry.getName())) - .setType(ContentType.valueOf(entry.getType().name())) - .build(); + .setType(ContentType.valueOf(entry.getType().name())); + if (null != entry.getContentId()) { + builder.setContentId(entry.getContentId()); + } + if (null != entry.getContent()) { + builder.setContent(toProto(entry.getContent())); + } + return builder.build(); } public static Entry fromProto(com.dremio.services.nessie.grpc.api.Entry entry) { Preconditions.checkArgument(null != entry, "Entry must be non-null"); - return Entry.builder() + ImmutableEntry.Builder builder = Entry.builder() .type(ContentTypes.forName(entry.getType().name())) - .name(fromProto(entry.getContentKey())) - .build(); + .name(fromProto(entry.getContentKey())); + if (entry.hasContentId()) { + builder.contentId(entry.getContentId()); + } + if (entry.hasContent()) { + builder.content(fromProto(entry.getContent())); + } + return builder.build(); } public static com.dremio.services.nessie.grpc.api.CommitMeta toProto(CommitMeta commitMeta) { @@ -513,6 +662,7 @@ public static com.dremio.services.nessie.grpc.api.CommitMeta toProto(CommitMeta builder.setCommitTime(toProto(commitMeta.getCommitTime())); } return builder + .addAllParentHashes(commitMeta.getParentCommitHashes()) .setMessage(commitMeta.getMessage()) .putAllProperties(commitMeta.getProperties()) .build(); @@ -542,6 +692,7 @@ public static CommitMeta fromProto(com.dremio.services.nessie.grpc.api.CommitMet builder.commitTime(fromProto(commitMeta.getCommitTime())); } return builder + .addAllParentCommitHashes(commitMeta.getParentHashesList()) .message(commitMeta.getMessage()) .properties(commitMeta.getPropertiesMap()) .build(); @@ -652,24 +803,42 @@ public static EntriesParams fromProto(EntriesRequest request) { return builder.build(); } - public static EntriesRequest toProto(String refName, EntriesParams params) { - Preconditions.checkArgument(null != refName, "refName must be non-null"); - Preconditions.checkArgument(null != params, "EntriesParams must be non-null"); + public static EntriesRequest toProtoEntriesRequest(@Nullable String refName, + @Nullable String hashOnRef, + @Nullable Integer maxRecords, + @Nullable String filter, + @Nullable Integer namespaceDepth, + boolean withContent, + @Nullable ContentKey minKey, + @Nullable ContentKey maxKey, + @Nullable ContentKey prefixKey, + List keys) { + refName = refNameOrDetached(refName); EntriesRequest.Builder builder = EntriesRequest.newBuilder().setNamedRef(refName); - if (null != params.hashOnRef()) { - builder.setHashOnRef(params.hashOnRef()); + builder.setWithContent(withContent); + if (null != hashOnRef) { + builder.setHashOnRef(hashOnRef); } - if (null != params.maxRecords()) { - builder.setMaxRecords(params.maxRecords()); + if (null != maxRecords) { + builder.setMaxRecords(maxRecords); } - if (null != params.pageToken()) { - builder.setPageToken(params.pageToken()); + if (null != filter) { + builder.setFilter(filter); } - if (null != params.filter()) { - 
builder.setFilter(params.filter()); + if (null != namespaceDepth) { + builder.setNamespaceDepth(namespaceDepth); + } + if (null != minKey) { + builder.setMinKey(toProto(minKey)); } - if (null != params.namespaceDepth()) { - builder.setNamespaceDepth(params.namespaceDepth()); + if (null != maxKey) { + builder.setMaxKey(toProto(maxKey)); + } + if (null != prefixKey) { + builder.setPrefixKey(toProto(prefixKey)); + } + if (null != keys) { + keys.forEach(k -> builder.addKeys(toProto(k))); } return builder.build(); } @@ -693,13 +862,17 @@ public static CommitLogParams fromProto(CommitLogRequest request) { builder.maxRecords(request.getMaxRecords()); } if (request.hasFetchOption()) { - builder.fetchOption(org.projectnessie.api.params.FetchOption.valueOf(request.getFetchOption().name())); + builder.fetchOption(org.projectnessie.model.FetchOption.valueOf(request.getFetchOption().name())); } return builder.build(); } - public static CommitLogRequest toProto(String refName, CommitLogParams params) { - Preconditions.checkArgument(null != refName, "refName must be non-null"); + private static String refNameOrDetached(@Nullable String name) { + return name == null ? Detached.REF_NAME : name; + } + + public static CommitLogRequest toProto(@Nullable String refName, CommitLogParams params) { + refName = refNameOrDetached(refName); Preconditions.checkArgument(null != params, "CommitLogParams must be non-null"); CommitLogRequest.Builder builder = CommitLogRequest.newBuilder().setNamedRef(refName); if (null != params.startHash()) { @@ -790,6 +963,9 @@ public static EntriesResponse fromProto(com.dremio.services.nessie.grpc.api.Entr if (entries.hasToken()) { builder.token(entries.getToken()); } + if (entries.hasEffectiveReference()) { + builder.effectiveReference(refFromProto(entries.getEffectiveReference())); + } return builder.build(); } @@ -802,11 +978,14 @@ public static com.dremio.services.nessie.grpc.api.EntriesResponse toProto( if (null != entries.getToken()) { builder.setToken(entries.getToken()); } + if (null != entries.getEffectiveReference()) { + builder.setEffectiveReference(refToProto(entries.getEffectiveReference())); + } return builder.build(); } - public static ContentRequest toProto(ContentKey key, String ref, String hashOnRef) { - Preconditions.checkArgument(null != ref, "ref must be non-null"); + public static ContentRequest toProto(ContentKey key, @Nullable String ref, String hashOnRef) { + ref = refNameOrDetached(ref); ContentRequest.Builder builder = ContentRequest.newBuilder().setContentKey(toProto(key)).setRef(ref); builder = null != hashOnRef ? 
builder.setHashOnRef(hashOnRef) : builder; @@ -814,10 +993,12 @@ public static ContentRequest toProto(ContentKey key, String ref, String hashOnRe } public static MultipleContentsRequest toProto( - String ref, String hashOnRef, GetMultipleContentsRequest request) { - Preconditions.checkArgument(null != ref, "ref must be non-null"); + @Nullable String ref, String hashOnRef, GetMultipleContentsRequest request) { final MultipleContentsRequest.Builder builder = - MultipleContentsRequest.newBuilder().setRef(ref); + MultipleContentsRequest.newBuilder(); + if (null != ref) { + builder.setRef(ref); + } if (null != hashOnRef) { builder.setHashOnRef(hashOnRef); } @@ -832,15 +1013,23 @@ public static MultipleContentsResponse toProto(GetMultipleContentsResponse respo Preconditions.checkArgument(null != response, "GetMultipleContentsResponse must be non-null"); MultipleContentsResponse.Builder builder = MultipleContentsResponse.newBuilder(); response.getContents().forEach(c -> builder.addContentWithKey(toProto(c))); + if (response.getEffectiveReference() != null) { + builder.setEffectiveReference(refToProto(response.getEffectiveReference())); + } return builder.build(); } public static GetMultipleContentsResponse fromProto(MultipleContentsResponse response) { Preconditions.checkArgument(null != response, "MultipleContentsResponse must be non-null"); + Reference effectiveRef = null; + if (response.hasEffectiveReference()) { + effectiveRef = refFromProto(response.getEffectiveReference()); + } return GetMultipleContentsResponse.of( response.getContentWithKeyList().stream() .map(ProtoUtil::fromProto) - .collect(Collectors.toList())); + .collect(Collectors.toList()), + effectiveRef); } public static ReferencesParams fromProto(GetAllReferencesRequest request) { @@ -856,7 +1045,7 @@ public static ReferencesParams fromProto(GetAllReferencesRequest request) { builder.filter(request.getFilter()); } if (request.hasFetchOption()) { - builder.fetchOption(org.projectnessie.api.params.FetchOption.valueOf(request.getFetchOption().name())); + builder.fetchOption(org.projectnessie.model.FetchOption.valueOf(request.getFetchOption().name())); } return builder.build(); } @@ -884,7 +1073,7 @@ public static GetReferenceParams fromProto(GetReferenceByNameRequest request) { GetReferenceParamsBuilder builder = GetReferenceParams.builder() .refName(request.getNamedRef()); if (request.hasFetchOption()) { - builder.fetchOption(org.projectnessie.api.params.FetchOption.valueOf(request.getFetchOption().name())); + builder.fetchOption(org.projectnessie.model.FetchOption.valueOf(request.getFetchOption().name())); } return builder.build(); } @@ -899,50 +1088,79 @@ public static GetReferenceByNameRequest toProto(GetReferenceParams params) { return builder.build(); } - public static DiffParams fromProto(DiffRequest request) { - Preconditions.checkArgument(null != request, "DiffRequest must be non-null"); - DiffParamsBuilder builder = DiffParams.builder() - .fromRef(request.getFromRefName()) - .toRef(request.getToRefName()); - if (request.hasFromHashOnRef()) { - builder.fromHashOnRef(request.getFromHashOnRef()); + public static DiffRequest toProtoDiffRequest(@Nullable String fromRef, String fromHashOnRef, String toRef, + String toHashOnRef, Integer maxRecords, ContentKey minKey, + ContentKey maxKey, ContentKey prefixKey, List<ContentKey> keys, + String filter) { + fromRef = refNameOrDetached(fromRef); + toRef = refNameOrDetached(toRef); + DiffRequest.Builder builder = DiffRequest.newBuilder() + .setFromRefName(fromRef) + .setToRefName(toRef); + if
(null != fromHashOnRef) { + builder.setFromHashOnRef(fromHashOnRef); } - if (request.hasToHashOnRef()) { - builder.toHashOnRef(request.getToHashOnRef()); + if (null != toHashOnRef) { + builder.setToHashOnRef(toHashOnRef); } - return builder.build(); - } - - public static DiffRequest toProto(DiffParams params) { - Preconditions.checkArgument(null != params, "DiffParams must be non-null"); - DiffRequest.Builder builder = DiffRequest.newBuilder() - .setFromRefName(params.getFromRef()) - .setToRefName(params.getToRef()); - if (null != params.getFromHashOnRef()) { - builder.setFromHashOnRef(params.getFromHashOnRef()); + if (null != maxRecords) { + builder.setMaxRecords(maxRecords); + } + if (null != minKey) { + builder.setMinKey(toProto(minKey)); + } + if (null != maxKey) { + builder.setMaxKey(toProto(maxKey)); + } + if (null != prefixKey) { + builder.setPrefixKey(toProto(prefixKey)); } - if (null != params.getToHashOnRef()) { - builder.setToHashOnRef(params.getToHashOnRef()); + if (null != filter) { + builder.setFilter(filter); + } + if (null != keys) { + keys.forEach(k -> builder.addKeys(toProto(k))); } return builder.build(); } public static DiffResponse fromProto(com.dremio.services.nessie.grpc.api.DiffResponse response) { Preconditions.checkArgument(null != response, "DiffResponse must be non-null"); - return ImmutableDiffResponse.builder() + ImmutableDiffResponse.Builder builder = ImmutableDiffResponse.builder() .addAllDiffs(response.getDiffsList().stream() .map(ProtoUtil::fromProto) .collect(Collectors.toList())) - .build(); + .isHasMore(response.getHasMore()); + if (response.hasEffectiveFromRef()) { + builder.effectiveFromReference(refFromProto(response.getEffectiveFromRef())); + } + if (response.hasEffectiveToRef()) { + builder.effectiveToReference(refFromProto(response.getEffectiveToRef())); + } + if (response.hasPageToken()) { + builder.token(response.getPageToken()); + } + return builder.build(); } public static com.dremio.services.nessie.grpc.api.DiffResponse toProto(DiffResponse response) { Preconditions.checkArgument(null != response, "DiffResponse must be non-null"); - return com.dremio.services.nessie.grpc.api.DiffResponse.newBuilder() - .addAllDiffs(response.getDiffs().stream() - .map(ProtoUtil::toProto) - .collect(Collectors.toList())) - .build(); + com.dremio.services.nessie.grpc.api.DiffResponse.Builder builder = + com.dremio.services.nessie.grpc.api.DiffResponse.newBuilder() + .addAllDiffs(response.getDiffs().stream() + .map(ProtoUtil::toProto) + .collect(Collectors.toList())) + .setHasMore(response.isHasMore()); + if (null != response.getEffectiveFromReference()) { + builder.setEffectiveFromRef(refToProto(response.getEffectiveFromReference())); + } + if (null != response.getEffectiveToReference()) { + builder.setEffectiveToRef(refToProto(response.getEffectiveToReference())); + } + if (null != response.getToken()) { + builder.setPageToken(response.getToken()); + } + return builder.build(); } public static DiffEntry fromProto(com.dremio.services.nessie.grpc.api.DiffEntry diffEntry) { @@ -1063,15 +1281,26 @@ public static RefLogResponseEntry fromProto(com.dremio.services.nessie.grpc.api. 
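// the Namespace converters below also round-trip the content id; an empty proto string is presumably normalized back to a null id by asId()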
public static com.dremio.services.nessie.grpc.api.Namespace toProto(Namespace namespace) { Preconditions.checkArgument(null != namespace, "Namespace must be non-null"); - return com.dremio.services.nessie.grpc.api.Namespace.newBuilder() - .addAllElements(namespace.getElements()) - .putAllProperties(namespace.getProperties()) - .build(); + com.dremio.services.nessie.grpc.api.Namespace.Builder builder = + com.dremio.services.nessie.grpc.api.Namespace.newBuilder() + .addAllElements(namespace.getElements()) + .putAllProperties(namespace.getProperties()); + + // the ID is optional when a new namespace is created - will be assigned on the server side + if (null != namespace.getId()) { + builder.setId(namespace.getId()); + } + + return builder.build(); } public static Namespace fromProto(com.dremio.services.nessie.grpc.api.Namespace namespace) { Preconditions.checkArgument(null != namespace, "Namespace must be non-null"); - return Namespace.of(namespace.getElementsList(), namespace.getPropertiesMap()); + return ImmutableNamespace.builder() + .id(asId(namespace.getId())) + .elements(namespace.getElementsList()) + .properties(namespace.getPropertiesMap()) + .build(); } public static NamespaceRequest toProto(NamespaceParams params) { @@ -1140,11 +1369,11 @@ public static GetNamespacesResponse fromProto(MultipleNamespacesResponse respons .build(); } - private static BaseMergeTransplant.MergeBehavior fromProto(MergeBehavior mergeBehavior) { - return BaseMergeTransplant.MergeBehavior.valueOf(mergeBehavior.name()); + private static org.projectnessie.model.MergeBehavior fromProto(MergeBehavior mergeBehavior) { + return org.projectnessie.model.MergeBehavior.valueOf(mergeBehavior.name()); } - private static MergeBehavior toProto(BaseMergeTransplant.MergeBehavior mergeBehavior) { + private static MergeBehavior toProto(org.projectnessie.model.MergeBehavior mergeBehavior) { return MergeBehavior.valueOf(mergeBehavior.name()); } @@ -1156,6 +1385,45 @@ private static ContentKeyConflict toProto(MergeResponse.ContentKeyConflict confl return ContentKeyConflict.valueOf(conflict.name()); } + private static Conflict fromProto(ContentKeyConflictDetails conflict) { + return ImmutableConflict.builder() + .conflictType(Conflict.ConflictType.parse(conflict.getConflictType())) + .key(fromProto(conflict.getKey())) + .message(conflict.getMessage()) + .build(); + } + + private static ContentKeyConflictDetails toProto(Conflict conflict) { + ContentKeyConflictDetails.Builder builder = ContentKeyConflictDetails.newBuilder(); + builder.setMessage(conflict.message()); + if (null != conflict.conflictType()) { + builder.setConflictType(conflict.conflictType().name()); + } + if (null != conflict.key()) { + builder.setKey(toProto(conflict.key())); + } + return builder.build(); + } + + public static CommitMeta fromProto(Supplier<String> message, Supplier<Boolean> hasCommitMeta, + Supplier<com.dremio.services.nessie.grpc.api.CommitMeta> commitMeta) { + String msg = Strings.emptyToNull(message.get()); + + ImmutableCommitMeta.Builder meta = CommitMeta.builder(); + if (hasCommitMeta.get()) { + com.dremio.services.nessie.grpc.api.CommitMeta requestMeta = commitMeta.get(); + String metaMsg = Strings.emptyToNull(requestMeta.getMessage()); + meta.from(fromProto(requestMeta)); + if (metaMsg == null && msg != null) { + meta.message(msg); + } + } else { + meta.message(msg == null ?
"" : msg); + } + + return meta.build(); + } + public static Merge fromProto(MergeRequest request) { Preconditions.checkArgument(null != request, "MergeRequest must be non-null"); ImmutableMerge.Builder builder = ImmutableMerge.builder() @@ -1184,7 +1452,8 @@ public static Merge fromProto(MergeRequest request) { return builder.build(); } - public static MergeRequest toProto(String branchName, String hash, Merge merge) { + public static MergeRequest toProto(String branchName, String hash, Merge merge, String message, + CommitMeta commitMeta) { Preconditions.checkArgument(null != merge, "Merge must be non-null"); MergeRequest.Builder builder = MergeRequest.newBuilder().setToBranch(branchName).setExpectedHash(hash); @@ -1215,6 +1484,12 @@ public static MergeRequest toProto(String branchName, String hash, Merge merge) .setKey(toProto(m.getKey())) .setMergeBehavior(toProto(m.getMergeBehavior()))); } + if (null != message) { + builder.setMessage(message); + } + if (null != commitMeta) { + builder.setCommitMeta(toProto(commitMeta)); + } return builder.build(); } @@ -1246,6 +1521,10 @@ public static Transplant fromProto(TransplantRequest request) { return builder.build(); } + public static String fromProtoMessage(TransplantRequest request) { + return Strings.emptyToNull(request.getMessage()); + } + public static TransplantRequest toProto(String branchName, String hash, String message, Transplant transplant) { Preconditions.checkArgument(null != transplant, "Transplant must be non-null"); TransplantRequest.Builder builder = TransplantRequest.newBuilder() @@ -1303,13 +1582,20 @@ public static MergeResponse fromProto(com.dremio.services.nessie.grpc.api.MergeR builder.addAllTargetCommits(Collections.emptyList()); // make sure it is not null mergeResponse.getTargetCommits().getEntriesList().forEach(e -> builder.addTargetCommits(fromProto(e))); } - mergeResponse.getDetailsList().forEach(d -> builder.addDetails(ImmutableContentKeyDetails.builder() - .key(fromProto(d.getContentKey())) - .conflictType(fromProto(d.getConflictType())) - .mergeBehavior(fromProto(d.getMergeBehavior())) - .sourceCommits(d.getSourceCommitHashesList()) - .targetCommits(d.getTargetCommitHashesList()) - .build())); + mergeResponse.getDetailsList().forEach(d -> { + ImmutableContentKeyDetails.Builder details = ImmutableContentKeyDetails.builder() + .key(fromProto(d.getContentKey())) + .conflictType(fromProto(d.getConflictType())) + .mergeBehavior(fromProto(d.getMergeBehavior())) + .sourceCommits(d.getSourceCommitHashesList()) + .targetCommits(d.getTargetCommitHashesList()); + + if (d.hasConflict()) { + details.conflict(fromProto(d.getConflict())); + } + + builder.addDetails(details.build()); + }); return builder.build(); } @@ -1337,13 +1623,26 @@ public static com.dremio.services.nessie.grpc.api.MergeResponse toProto(MergeRes builder.getTargetCommitsBuilder(); // this call ensures the field is set even if the list is empty mergeResponse.getTargetCommits().forEach(e -> fieldBuilder.addEntries(toProto(e))); } - mergeResponse.getDetails().forEach(d -> builder - .addDetailsBuilder() - .setContentKey(toProto(d.getKey())) - .setConflictType(toProto(d.getConflictType())) - .addAllSourceCommitHashes(d.getSourceCommits()) - .addAllTargetCommitHashes(d.getTargetCommits()) - .setMergeBehavior(toProto(d.getMergeBehavior()))); + mergeResponse.getDetails().forEach(d -> { + ContentKeyDetails.Builder details = builder + .addDetailsBuilder(); + details.setContentKey(toProto(d.getKey())); + details.setConflictType(toProto(d.getConflictType())); + 
details.addAllSourceCommitHashes(d.getSourceCommits()); + details.addAllTargetCommitHashes(d.getTargetCommits()); + details.setMergeBehavior(toProto(d.getMergeBehavior())); + if (null != d.getConflict()) { + details.setConflict(toProto(d.getConflict())); + } + }); return builder.build(); } + + public static <T> T fromProto(Supplier<Boolean> isPresent, Supplier<T> value) { + if (!isPresent.get()) { + return null; + } + + return value.get(); + } } diff --git a/services/nessie-grpc/common/src/main/proto/config_service.proto b/services/nessie-grpc/common/src/main/proto/config_service.proto index 5243e2d142..e7f8e3db46 100644 --- a/services/nessie-grpc/common/src/main/proto/config_service.proto +++ b/services/nessie-grpc/common/src/main/proto/config_service.proto @@ -22,6 +22,8 @@ option java_outer_classname = "ConfigApiProto"; package com.dremio.services.nessie.grpc.proto; +import "google/protobuf/timestamp.proto"; + service ConfigService { rpc getConfig (Empty) returns (NessieConfiguration) {} } @@ -29,4 +31,11 @@ service ConfigService { message NessieConfiguration { optional string default_branch = 1; int32 maxSupportedApiVersion = 2; + optional int32 minSupportedApiVersion = 3; + optional string specVersion = 4; + optional string noAncestorHash = 5; + optional google.protobuf.Timestamp repositoryCreationTimestamp = 6; + optional google.protobuf.Timestamp oldestPossibleCommitTimestamp = 7; + map<string, string> additionalProperties = 8; + optional int32 actualApiVersion = 9; } diff --git a/services/nessie-grpc/common/src/main/proto/content_service.proto b/services/nessie-grpc/common/src/main/proto/content_service.proto index f9b4d59298..2f8a403e54 100644 --- a/services/nessie-grpc/common/src/main/proto/content_service.proto +++ b/services/nessie-grpc/common/src/main/proto/content_service.proto @@ -21,6 +21,8 @@ option java_outer_classname = "ContentApiProto"; package com.dremio.services.nessie.grpc.proto; +import "util.proto"; + service ContentService { rpc getContent (ContentRequest) returns (Content) {}; rpc getMultipleContents (MultipleContentsRequest) returns (MultipleContentsResponse) {}; @@ -40,6 +42,7 @@ message MultipleContentsRequest { message MultipleContentsResponse { repeated ContentWithKey contentWithKey = 1; + optional Reference effectiveReference = 2; } message Content { @@ -72,7 +75,7 @@ message IcebergTable { message IcebergView { string id = 1; string metadataLocation = 2; - int32 versionId = 3; + int64 versionId = 3; int32 schemaId = 4; string dialect = 5; string sqlText = 6; @@ -88,4 +91,5 @@ message DeltaLakeTable { message Namespace { repeated string elements = 1; map<string, string> properties = 2; + string id = 3; } diff --git a/services/nessie-grpc/common/src/main/proto/diff_service.proto b/services/nessie-grpc/common/src/main/proto/diff_service.proto index a7b8411c38..ae3146d7f6 100644 --- a/services/nessie-grpc/common/src/main/proto/diff_service.proto +++ b/services/nessie-grpc/common/src/main/proto/diff_service.proto @@ -15,6 +15,7 @@ */ syntax = "proto3"; +import "util.proto"; import "content_service.proto"; option java_multiple_files = true; @@ -32,10 +33,21 @@ message DiffRequest { string toRefName = 2; optional string fromHashOnRef = 3; optional string toHashOnRef = 4; + optional int32 maxRecords = 5; + optional string pageToken = 6; + optional ContentKey minKey = 7; + optional ContentKey maxKey = 8; + optional ContentKey prefixKey = 9; + optional string filter = 10; + repeated ContentKey keys = 11; } message DiffResponse { repeated DiffEntry diffs = 1; + optional Reference effectiveFromRef = 2; + optional
Reference effectiveToRef = 3; + bool hasMore = 4; + optional string pageToken = 5; } message DiffEntry { diff --git a/services/nessie-grpc/common/src/main/proto/tree_service.proto b/services/nessie-grpc/common/src/main/proto/tree_service.proto index a99ea4ac3a..1fc0b85b26 100644 --- a/services/nessie-grpc/common/src/main/proto/tree_service.proto +++ b/services/nessie-grpc/common/src/main/proto/tree_service.proto @@ -17,7 +17,6 @@ syntax = "proto3"; package com.dremio.services.nessie.grpc.proto; -import "google/protobuf/timestamp.proto"; import "util.proto"; import "content_service.proto"; @@ -30,13 +29,13 @@ service TreeService { rpc getReferenceByName (GetReferenceByNameRequest) returns (Reference) {} rpc createReference (CreateReferenceRequest) returns (Reference) {} rpc getDefaultBranch (Empty) returns (Reference) {} - rpc assignReference (AssignReferenceRequest) returns (Empty) {} - rpc deleteReference (DeleteReferenceRequest) returns (Empty) {} + rpc assignReference (AssignReferenceRequest) returns (ReferenceResponse) {} + rpc deleteReference (DeleteReferenceRequest) returns (ReferenceResponse) {} rpc getCommitLog (CommitLogRequest) returns (CommitLogResponse) {} rpc getEntries (EntriesRequest) returns (EntriesResponse) {} rpc transplantCommitsIntoBranch (TransplantRequest) returns (MergeResponse) {} rpc mergeRefIntoBranch (MergeRequest) returns (MergeResponse) {} - rpc commitMultipleOperations (CommitRequest) returns (Branch) {} + rpc commitMultipleOperations (CommitRequest) returns (CommitResponse) {} } enum ReferenceType { @@ -44,39 +43,6 @@ enum ReferenceType { TAG = 1; } -message Reference { - oneof type { - Branch branch = 1; - Tag tag = 2; - Detached detached = 3; - } -} - -message Branch { - string name = 1; - optional string hash = 2; - optional ReferenceMetadata metadata = 3; -} - -message Tag { - string name = 1; - optional string hash = 2; - optional ReferenceMetadata metadata = 3; -} - -message Detached { - optional string hash = 2; - optional ReferenceMetadata metadata = 3; -} - -message ReferenceMetadata { - optional int32 numCommitsAhead = 1; - optional int32 numCommitsBehind = 2; - optional CommitMeta commitMetaOfHEAD = 3; - optional string commonAncestorHash = 4; - optional int64 numTotalCommits = 5; -} - message CreateReferenceRequest { string sourceRefName = 1; Reference reference = 2; @@ -91,6 +57,8 @@ message GetAllReferencesRequest { message GetAllReferencesResponse { repeated Reference reference = 1; + bool hasMore = 2; + optional string pageToken = 3; } message GetReferenceByNameRequest { @@ -109,6 +77,10 @@ message AssignReferenceRequest { ReferenceType reference_type = 6; } +message ReferenceResponse { + optional Reference reference = 1; +} + message DeleteReferenceRequest { string namedRef = 1; string hash = 2; @@ -154,6 +126,13 @@ message ContentKeyDetails { ContentKeyConflict conflictType = 3; repeated string sourceCommitHashes = 4; repeated string targetCommitHashes = 5; + optional ContentKeyConflictDetails conflict = 6; +} + +message ContentKeyConflictDetails { + string conflictType = 1; // the canonical enum for this value is the OSS ConflictType java class + ContentKey key = 2; + string message = 3; } enum ContentKeyConflict { @@ -173,17 +152,6 @@ message CommitLogEntry { repeated CommitOperation operations = 3; } -message CommitMeta { - optional string hash = 1; - optional string committer = 2; - optional string author = 3; - optional string signedOffBy = 4; - string message = 5; - optional google.protobuf.Timestamp commitTime = 6; - optional 
google.protobuf.Timestamp authorTime = 7; - map<string, string> properties = 8; -} - message EntriesRequest { string namedRef = 1; optional string hashOnRef = 2; @@ -191,12 +159,18 @@ optional string pageToken = 4; optional string filter = 5; optional int32 namespaceDepth = 6; + optional bool withContent = 7; + optional ContentKey minKey = 8; + optional ContentKey maxKey = 9; + optional ContentKey prefixKey = 10; + repeated ContentKey keys = 11; } message EntriesResponse { repeated Entry entries = 1; bool hasMore = 2; optional string token = 3; + optional Reference effectiveReference = 4; } enum ContentType { @@ -210,6 +184,8 @@ message Entry { ContentType type = 1; ContentKey contentKey = 2; + optional string contentId = 3; + optional Content content = 4; } message TransplantRequest { @@ -237,6 +213,8 @@ message MergeRequest { optional bool fetchAdditionalInfo = 9; optional MergeBehavior defaultKeyMergeMode = 10; repeated MergeKeyBehavior mergeModes = 11; + optional string message = 12; + optional CommitMeta commitMeta = 13; } message MergeKeyBehavior { @@ -250,6 +228,22 @@ message CommitRequest { CommitOps commitOperations = 3; } +message CommitResponse { + // Inlined Branch fields for backward compatibility with older servers (Dremio version < 25.0.0) + optional string name = 1; + optional string hash = 2; + optional ReferenceMetadata metadata = 3; + // End of backward-compatibility fields - remove them with DX-61406 + + Branch branch = 4; + repeated AddedContent addedContent = 5; +} + +message AddedContent { + ContentKey key = 1; + string contentId = 2; +} + message CommitOps { CommitMeta commitMeta = 1; repeated CommitOperation operations = 2; diff --git a/services/nessie-grpc/common/src/main/proto/util.proto b/services/nessie-grpc/common/src/main/proto/util.proto index 7ac45e4105..e08034ce26 100644 --- a/services/nessie-grpc/common/src/main/proto/util.proto +++ b/services/nessie-grpc/common/src/main/proto/util.proto @@ -21,4 +21,51 @@ option java_outer_classname = "Util"; package com.dremio.services.nessie.grpc.proto; +import "google/protobuf/timestamp.proto"; + message Empty {} + +message CommitMeta { + optional string hash = 1; + optional string committer = 2; + optional string author = 3; + optional string signedOffBy = 4; + string message = 5; + optional google.protobuf.Timestamp commitTime = 6; + optional google.protobuf.Timestamp authorTime = 7; + map<string, string> properties = 8; + repeated string parentHashes = 9; +} + +message Reference { + oneof type { + Branch branch = 1; + Tag tag = 2; + Detached detached = 3; + } +} + +message Branch { + string name = 1; + optional string hash = 2; + optional ReferenceMetadata metadata = 3; +} + +message Tag { + string name = 1; + optional string hash = 2; + optional ReferenceMetadata metadata = 3; +} + +message Detached { + optional string hash = 2; + optional ReferenceMetadata metadata = 3; +} + +message ReferenceMetadata { + optional int32 numCommitsAhead = 1; + optional int32 numCommitsBehind = 2; + optional CommitMeta commitMetaOfHEAD = 3; + optional string commonAncestorHash = 4; + optional int64 numTotalCommits = 5; +} diff --git a/services/nessie-grpc/common/src/test/java/com/dremio/services/nessie/grpc/ProtoUtilTest.java b/services/nessie-grpc/common/src/test/java/com/dremio/services/nessie/grpc/ProtoUtilTest.java index 3c05eb29e7..33dddd5ddf 100644 --- a/services/nessie-grpc/common/src/test/java/com/dremio/services/nessie/grpc/ProtoUtilTest.java +++
b/services/nessie-grpc/common/src/test/java/com/dremio/services/nessie/grpc/ProtoUtilTest.java @@ -17,11 +17,17 @@ import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProtoResponse; import static com.dremio.services.nessie.grpc.ProtoUtil.refToProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProtoDiffRequest; +import static com.dremio.services.nessie.grpc.ProtoUtil.toProtoEntriesRequest; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.groups.Tuple.tuple; +import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -29,17 +35,20 @@ import java.util.List; import org.junit.jupiter.api.Test; -import org.projectnessie.api.params.CommitLogParams; -import org.projectnessie.api.params.DiffParams; -import org.projectnessie.api.params.EntriesParams; -import org.projectnessie.api.params.FetchOption; -import org.projectnessie.api.params.GetReferenceParams; -import org.projectnessie.api.params.MultipleNamespacesParams; -import org.projectnessie.api.params.NamespaceParams; -import org.projectnessie.api.params.ReferencesParams; -import org.projectnessie.model.BaseMergeTransplant; +import org.projectnessie.api.v1.params.CommitLogParams; +import org.projectnessie.api.v1.params.EntriesParams; +import org.projectnessie.api.v1.params.GetReferenceParams; +import org.projectnessie.api.v1.params.ImmutableMerge; +import org.projectnessie.api.v1.params.ImmutableTransplant; +import org.projectnessie.api.v1.params.Merge; +import org.projectnessie.api.v1.params.MultipleNamespacesParams; +import org.projectnessie.api.v1.params.NamespaceParams; +import org.projectnessie.api.v1.params.ReferencesParams; +import org.projectnessie.api.v1.params.Transplant; import org.projectnessie.model.Branch; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.CommitResponse; +import org.projectnessie.model.CommitResponse.AddedContent; import org.projectnessie.model.Content; import org.projectnessie.model.Content.Type; import org.projectnessie.model.ContentKey; @@ -47,7 +56,7 @@ import org.projectnessie.model.Detached; import org.projectnessie.model.DiffResponse.DiffEntry; import org.projectnessie.model.EntriesResponse.Entry; -import org.projectnessie.model.GenericMetadata; +import org.projectnessie.model.FetchOption; import org.projectnessie.model.GetMultipleContentsRequest; import org.projectnessie.model.GetMultipleContentsResponse; import org.projectnessie.model.GetMultipleContentsResponse.ContentWithKey; @@ -63,18 +72,16 @@ import org.projectnessie.model.ImmutableGetNamespacesResponse; import org.projectnessie.model.ImmutableLogEntry; import org.projectnessie.model.ImmutableLogResponse; -import org.projectnessie.model.ImmutableMerge; import org.projectnessie.model.ImmutableMergeKeyBehavior; import org.projectnessie.model.ImmutableMergeResponse; +import org.projectnessie.model.ImmutableNamespace; import org.projectnessie.model.ImmutableNessieConfiguration; import org.projectnessie.model.ImmutableOperations; import org.projectnessie.model.ImmutableRefLogResponseEntry; import org.projectnessie.model.ImmutableReferenceMetadata; import org.projectnessie.model.ImmutableTag; -import 
org.projectnessie.model.ImmutableTransplant; import org.projectnessie.model.LogResponse; import org.projectnessie.model.LogResponse.LogEntry; -import org.projectnessie.model.Merge; import org.projectnessie.model.MergeResponse; import org.projectnessie.model.Namespace; import org.projectnessie.model.NessieConfiguration; @@ -87,7 +94,6 @@ import org.projectnessie.model.Reference; import org.projectnessie.model.ReferenceMetadata; import org.projectnessie.model.Tag; -import org.projectnessie.model.Transplant; import com.dremio.services.nessie.grpc.api.CommitLogEntry; import com.dremio.services.nessie.grpc.api.CommitLogRequest; @@ -101,10 +107,13 @@ import com.dremio.services.nessie.grpc.api.EntriesResponse; import com.dremio.services.nessie.grpc.api.GetAllReferencesRequest; import com.dremio.services.nessie.grpc.api.GetReferenceByNameRequest; +import com.dremio.services.nessie.grpc.api.MergeRequest; import com.dremio.services.nessie.grpc.api.MultipleContentsRequest; import com.dremio.services.nessie.grpc.api.MultipleContentsResponse; import com.dremio.services.nessie.grpc.api.RefLogParams; import com.dremio.services.nessie.grpc.api.RefLogResponse; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; +import com.dremio.services.nessie.grpc.api.ReferenceType; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -141,6 +150,46 @@ public void referenceConversion() { assertThat(refFromProto(refToProto(d))).isEqualTo(d); } + @Test + public void refToProtoDecomposed() { + assertThatThrownBy(() -> refToProto(ReferenceType.BRANCH, null, "hash")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Reference name must be non-null"); + + assertThatThrownBy(() -> refToProto(ReferenceType.UNRECOGNIZED, "name", "hash")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Reference type 'UNRECOGNIZED' should be Branch or Tag"); + + assertThat(refToProto(ReferenceType.BRANCH, "name", "1234567890123456")) + .extracting(r -> r.getBranch().getName(), r -> r.getBranch().getHash(), r -> r.getBranch().hasMetadata()) + .containsExactly("name", "1234567890123456", false); + assertThat(refToProto(ReferenceType.BRANCH, "name", null)) + .extracting(r -> r.getBranch().getName(), r -> r.getBranch().hasHash()) + .containsExactly("name", false); + + assertThat(refToProto(ReferenceType.TAG, "name", "1234567890123456")) + .extracting(r -> r.getTag().getName(), r -> r.getTag().getHash(), r -> r.getTag().hasMetadata()) + .containsExactly("name", "1234567890123456", false); + } + + @Test + public void refFromProtoResponseConversion() { + assertThatThrownBy(() -> refFromProtoResponse(null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Reference response must be non-null"); + + assertThat(refFromProtoResponse(ReferenceResponse.getDefaultInstance())).isNull(); + + assertThat(refFromProtoResponse( + ReferenceResponse.newBuilder().setReference( + com.dremio.services.nessie.grpc.api.Reference.newBuilder() + .setBranch(com.dremio.services.nessie.grpc.api.Branch.newBuilder().setName("br1").build()) + .build()) + .build())) + .extracting(r -> r.getType().name(), Reference::getName, Reference::getHash) + .containsExactly("BRANCH", "br1", null); + } + @Test public void detachedConversion() { assertThatThrownBy(() -> toProto((Detached) null)) @@ -241,9 +290,9 @@ public void icebergTableMetadataConversion() throws JsonProcessingException { .sortOrderId(3) .specId(4) 
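// the metadata payload set below is intentionally dropped by the converters (see DX-57058)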
.metadataLocation("file") - .metadata(GenericMetadata.of("test", json)) + .metadata(ImmutableMap.of("test", json)) .build(); - // DX-57058: GenericMetadata should be null + // DX-57058: metadata should be null assertThat(fromProto(toProto(icebergTable)).getMetadata()).isNull(); } @@ -274,9 +323,9 @@ public void icebergViewMetadataConversion() throws JsonProcessingException { .dialect("test-dialect") .sqlText("SELECT 1") .metadataLocation("file") - .metadata(GenericMetadata.of("test", json)) + .metadata(ImmutableMap.of("test", json)) .build(); - // DX-57058: GenericMetadata should be null + // DX-57058: metadata should be null assertThat(fromProto(toProto(icebergView)).getMetadata()).isNull(); } @@ -319,6 +368,20 @@ public void nessieConfigurationConversion() { NessieConfiguration config = ImmutableNessieConfiguration.builder().maxSupportedApiVersion(42).defaultBranch("main").build(); assertThat(fromProto(toProto(config))).isEqualTo(config); + + NessieConfiguration config2 = + ImmutableNessieConfiguration.builder() + .maxSupportedApiVersion(42) + .minSupportedApiVersion(24) + .actualApiVersion(99) + .specVersion("spec-test") + .defaultBranch("mymain") + .noAncestorHash("myhash") + .repositoryCreationTimestamp(Instant.now()) + .oldestPossibleCommitTimestamp(Instant.now()) + .additionalProperties(ImmutableMap.of("foo", "bar")) + .build(); + assertThat(fromProto(toProto(config2))).isEqualTo(config2); } @Test @@ -377,6 +440,10 @@ public void entryConversion() { Entry entry = ImmutableEntry.builder().name(ContentKey.of("a.b.c.txt")).type(Type.ICEBERG_TABLE).build(); assertThat(fromProto(toProto(entry))).isEqualTo(entry); + + Entry entryWithContent = ImmutableEntry.builder().from(entry) + .contentId("id").content(IcebergTable.of("loc", 1, 2, 3, 4, "id")).build(); + assertThat(fromProto(toProto(entryWithContent))).isEqualTo(entryWithContent); } @Test @@ -403,6 +470,14 @@ public void commitMetaConversion() { CommitMeta minimalCommitMeta = CommitMeta.builder().message("commit msg").properties(ImmutableMap.of("a", "b")).build(); assertThat(fromProto(toProto(minimalCommitMeta))).isEqualTo(minimalCommitMeta); + + CommitMeta commitMetaWithParents = CommitMeta.builder() + .from(minimalCommitMeta) + .addParentCommitHashes("1122334455667700") + .addParentCommitHashes("1122334455667701") + .addParentCommitHashes("1122334455667702") + .build(); + assertThat(fromProto(toProto(commitMetaWithParents))).isEqualTo(commitMetaWithParents); } @Test @@ -483,25 +558,47 @@ public void entriesRequestConversion() { .isInstanceOf(IllegalArgumentException.class) .hasMessage("EntriesRequest must be non-null"); - assertThatThrownBy(() -> toProto(null, EntriesParams.empty())) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("refName must be non-null"); - - assertThatThrownBy(() -> toProto("main", (EntriesParams) null)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("EntriesParams must be non-null"); - EntriesParams params = EntriesParams.builder() .filter("a > b") .hashOnRef("123") .maxRecords(23) - .pageToken("abc") .build(); - assertThat(fromProto(toProto("main", params))).isEqualTo(params); - - EntriesParams empty = EntriesParams.empty(); - assertThat(fromProto(toProto("main", empty))).isEqualTo(empty); + assertThat(fromProto(toProtoEntriesRequest("main", "123", 23, "a > b", null, false, null, null, null, null))) + .isEqualTo(params); + assertThat(fromProto(toProtoEntriesRequest(null, "123", 23, "a > b", null, false, null, null, null, null))) + .isEqualTo(params); + 
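+ // Null ref names are encoded as Detached.REF_NAME, so hash-only (detached)
+ // reads still carry a syntactically valid named ref, as asserted below.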
assertThat(toProtoEntriesRequest(null, "1", 1, "", null, false, null, null, null, null).getNamedRef()) + .isEqualTo(Detached.REF_NAME); + assertThat(toProtoEntriesRequest("main", "1", 1, "", null, false, null, null, null, null).getNamedRef()) + .isEqualTo("main"); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, false, null, null, null, null).getWithContent()) + .isFalse(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, null).getWithContent()) + .isTrue(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, null).hasMinKey()) + .isFalse(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, null).hasMaxKey()) + .isFalse(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, null).hasPrefixKey()) + .isFalse(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, null).getKeysList()) + .isEmpty(); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, ContentKey.of("min"), null, null, null) + .getMinKey().getElementsList()).containsExactly("min"); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, ContentKey.of("max"), null, null) + .getMaxKey().getElementsList()).containsExactly("max"); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, ContentKey.of("prefix"), null) + .getPrefixKey().getElementsList()).containsExactly("prefix"); + assertThat(toProtoEntriesRequest(null, "1", 1, "1", null, true, null, null, null, + ImmutableList.of(ContentKey.of("k1"), ContentKey.of("k2"))).getKeysList()) + .map(ProtoUtil::fromProto) + .containsExactly(ContentKey.of("k1"), ContentKey.of("k2")); + + assertThat(fromProto( + toProtoEntriesRequest("main", "123", 23, "a > b", null, false, null, null, null, null).toBuilder() + .setPageToken("token1").build())) + .isEqualTo(params.forNextPage("token1")); } @Test @@ -510,10 +607,6 @@ public void commitLogRequestConversion() { .isInstanceOf(IllegalArgumentException.class) .hasMessage("CommitLogRequest must be non-null"); - assertThatThrownBy(() -> toProto(null, CommitLogParams.empty())) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("refName must be non-null"); - assertThatThrownBy(() -> toProto("main", (CommitLogParams) null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("CommitLogParams must be non-null"); @@ -528,6 +621,8 @@ public void commitLogRequestConversion() { .fetchOption(FetchOption.ALL) .build(); assertThat(fromProto(toProto("main", params))).isEqualTo(params); + assertThat(fromProto(toProto(null, params))).isEqualTo(params); + assertThat(toProto(null, params).getNamedRef()).isEqualTo(Detached.REF_NAME); CommitLogParams empty = CommitLogParams.empty(); assertThat(fromProto(toProto("main", empty))).isEqualTo(empty); @@ -561,6 +656,13 @@ public void entriesResponseConversion() { org.projectnessie.model.EntriesResponse responseWithToken = org.projectnessie.model.EntriesResponse.builder().entries(entries).token("abc").build(); assertThat(fromProto(toProto(responseWithToken))).isEqualTo(responseWithToken); + + org.projectnessie.model.EntriesResponse responseWithRef = + org.projectnessie.model.EntriesResponse.builder() + .entries(entries) + .effectiveReference(Branch.of("ref", null)) + .build(); + assertThat(fromProto(toProto(responseWithRef))).isEqualTo(responseWithRef); } @Test @@ -653,10 +755,6 @@ public void contentRequestConversion() { .hasMessage("ContentKey must be non-null"); ContentKey key = 
ContentKey.of("test.me.txt"); - assertThatThrownBy(() -> toProto(key, null, null)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("ref must be non-null"); - String ref = "main"; String hashOnRef = "x"; ContentRequest request = toProto(key, ref, null); @@ -668,14 +766,15 @@ public void contentRequestConversion() { assertThat(request.getContentKey()).isEqualTo(toProto(key)); assertThat(request.getRef()).isEqualTo(ref); assertThat(request.getHashOnRef()).isEqualTo(hashOnRef); + + request = toProto(key, null, hashOnRef); + assertThat(request.getContentKey()).isEqualTo(toProto(key)); + assertThat(request.getRef()).isEqualTo(Detached.REF_NAME); + assertThat(request.getHashOnRef()).isEqualTo(hashOnRef); } @Test public void multipleContentsRequestConversion() { - assertThatThrownBy(() -> toProto(null, null, (GetMultipleContentsRequest) null)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("ref must be non-null"); - String ref = "main"; String hashOnRef = "x"; ContentKey key = ContentKey.of("test.me.txt"); @@ -688,6 +787,11 @@ public void multipleContentsRequestConversion() { assertThat(request.getRef()).isEqualTo(ref); assertThat(request.getHashOnRef()).isEqualTo(hashOnRef); assertThat(request.getRequestedKeysList()).containsExactly(toProto(key)); + + request = toProto(null, hashOnRef, GetMultipleContentsRequest.of(key)); + assertThat(request.getRef()).isEqualTo(""); + assertThat(request.getHashOnRef()).isEqualTo(hashOnRef); + assertThat(request.getRequestedKeysList()).containsExactly(toProto(key)); } @Test @@ -704,7 +808,10 @@ public void multipleContentsResponseConversion() { IcebergTable icebergTable = IcebergTable.of("test.me.txt", 42L, 42, 42, 42); ContentWithKey c = ContentWithKey.of(key, icebergTable); - GetMultipleContentsResponse response = GetMultipleContentsResponse.of(Collections.singletonList(c)); + GetMultipleContentsResponse response = GetMultipleContentsResponse.of(Collections.singletonList(c), null); + assertThat(fromProto(toProto(response))).isEqualTo(response); + + response = GetMultipleContentsResponse.of(Collections.singletonList(c), Branch.of("test", null)); assertThat(fromProto(toProto(response))).isEqualTo(response); } @@ -769,29 +876,32 @@ public void diffEntryConversion() { @Test public void diffRequestConversion() { - assertThatThrownBy(() -> fromProto((DiffRequest) null)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("DiffRequest must be non-null"); - - assertThatThrownBy(() -> toProto((DiffParams) null)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("DiffParams must be non-null"); - - DiffParams params = DiffParams.builder().fromRef("x").toRef("y").build(); - assertThat(fromProto(toProto(params))).isEqualTo(params); - - params = DiffParams.builder().fromRef("x").toRef("y") - .fromHashOnRef("1234567890123456789012345678901234567890").build(); - assertThat(fromProto(toProto(params))).isEqualTo(params); - - params = DiffParams.builder().fromRef("x").toRef("y") - .toHashOnRef("1234567890123456789012345678901234567890").build(); - assertThat(fromProto(toProto(params))).isEqualTo(params); - - params = DiffParams.builder().fromRef("x").toRef("y") - .fromHashOnRef("1234567890123456789012345678901234567890") - .toHashOnRef("aabbccddeeffaabbccddeeffaabbccddeeffaabb").build(); - assertThat(fromProto(toProto(params))).isEqualTo(params); + DiffRequest request = toProtoDiffRequest("from", "fromHash", "to", "toHash", null, null, null, null, null, null); + assertThat(request.getFromRefName()).isEqualTo("from"); + 
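+ // Absent parameters are left unset on the proto rather than defaulted,
+ // which the has*() checks below rely on to tell "absent" from "empty".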
assertThat(request.getFromHashOnRef()).isEqualTo("fromHash"); + assertThat(request.getToRefName()).isEqualTo("to"); + assertThat(request.getToHashOnRef()).isEqualTo("toHash"); + assertThat(request.hasMaxRecords()).isFalse(); + assertThat(request.hasPageToken()).isFalse(); + assertThat(request.hasMinKey()).isFalse(); + assertThat(request.hasMaxKey()).isFalse(); + assertThat(request.hasPrefixKey()).isFalse(); + assertThat(request.hasFilter()).isFalse(); + assertThat(request.getKeysList()).isEmpty(); + + request = toProtoDiffRequest(null, null, null, null, 42, ContentKey.of("min"), ContentKey.of("max"), + ContentKey.of("prefix"), ImmutableList.of(ContentKey.of("k1"), ContentKey.of("k2")), "filter"); + assertThat(request.getFromRefName()).isEqualTo(Detached.REF_NAME); + assertThat(request.hasFromHashOnRef()).isFalse(); + assertThat(request.getToRefName()).isEqualTo(Detached.REF_NAME); + assertThat(request.hasToHashOnRef()).isFalse(); + assertThat(request.getMaxRecords()).isEqualTo(42); + assertThat(request.hasPageToken()).isFalse(); + assertThat(request.getMinKey().getElementsList()).containsExactly("min"); + assertThat(request.getMaxKey().getElementsList()).containsExactly("max"); + assertThat(request.getPrefixKey().getElementsList()).containsExactly("prefix"); + assertThat(request.getKeysList()).containsExactly(toProto(ContentKey.of("k1")), toProto(ContentKey.of("k2"))); + assertThat(request.getFilter()).isEqualTo("filter"); } @Test @@ -817,6 +927,24 @@ public void diffResponseConversion() { ImmutableDiffResponse diffResponse = ImmutableDiffResponse.builder().addAllDiffs(diffs).build(); assertThat(fromProto(toProto(diffResponse))).isEqualTo(diffResponse); + + ImmutableDiffResponse diffResponseWithMore = ImmutableDiffResponse.builder().isHasMore(true).build(); + assertThat(fromProto(toProto(diffResponseWithMore))).isEqualTo(diffResponseWithMore); + + ImmutableDiffResponse diffResponseWithToken = ImmutableDiffResponse.builder() + .isHasMore(true) + .token("token123") + .build(); + assertThat(fromProto(toProto(diffResponseWithToken))).isEqualTo(diffResponseWithToken); + + Branch from = Branch.of("from", null); + Tag to = Tag.of("from", "1234567890123456"); + ImmutableDiffResponse diffResponseWithRefs = ImmutableDiffResponse.builder() + .addAllDiffs(diffs) + .effectiveFromReference(from) + .effectiveToReference(to) + .build(); + assertThat(fromProto(toProto(diffResponseWithRefs))).isEqualTo(diffResponseWithRefs); } @Test @@ -825,12 +953,12 @@ public void refLogParamsConversion() { .isInstanceOf(IllegalArgumentException.class) .hasMessage("RefLogParams must be non-null"); - assertThatThrownBy(() -> toProto((org.projectnessie.api.params.RefLogParams) null)) + assertThatThrownBy(() -> toProto((org.projectnessie.api.v1.params.RefLogParams) null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("RefLogParams must be non-null"); - org.projectnessie.api.params.RefLogParams params = - org.projectnessie.api.params.RefLogParams.builder() + org.projectnessie.api.v1.params.RefLogParams params = + org.projectnessie.api.v1.params.RefLogParams.builder() .startHash("foo") .endHash("bar") .maxRecords(23) @@ -838,7 +966,7 @@ public void refLogParamsConversion() { .build(); assertThat(fromProto(toProto(params))).isEqualTo(params); - org.projectnessie.api.params.RefLogParams empty = org.projectnessie.api.params.RefLogParams.empty(); + org.projectnessie.api.v1.params.RefLogParams empty = org.projectnessie.api.v1.params.RefLogParams.empty(); assertThat(fromProto(toProto(empty))).isEqualTo(empty); } @@ 
-909,6 +1037,9 @@ public void namespaceConversion() { Namespace namespace = Namespace.of("a", "b", "c"); assertThat(fromProto(toProto(namespace))).isEqualTo(namespace); + Namespace namespaceWithId = ImmutableNamespace.builder().from(namespace).id("id1").build(); + assertThat(fromProto(toProto(namespaceWithId))).isEqualTo(namespaceWithId); + assertThat(fromProto(toProto(Namespace.EMPTY))).isEqualTo(Namespace.EMPTY); Namespace namespaceWithProperties = Namespace.of(ImmutableMap.of("key1", "prop1"), "a", "b", "c"); assertThat(fromProto(toProto(namespaceWithProperties))).isEqualTo(namespaceWithProperties); @@ -986,41 +1117,53 @@ public void multipleNamespaceResponseConversion() { assertThat(fromProto(toProto(response))).isEqualTo(response); } + private CommitMeta toCommitMeta(MergeRequest request) { + return ProtoUtil.fromProto(request::getMessage, request::hasCommitMeta, request::getCommitMeta); + } + @Test public void mergeConversion() { assertThatThrownBy(() -> fromProto((com.dremio.services.nessie.grpc.api.MergeRequest) null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("MergeRequest must be non-null"); - assertThatThrownBy(() -> toProto("main", "x", (Merge) null)) + assertThatThrownBy(() -> toProto("main", "x", (Merge) null, null, null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Merge must be non-null"); String hash = "1234567890123456"; Merge merge = ImmutableMerge.builder().fromRefName("main").fromHash(hash).build(); - assertThat(fromProto(toProto("y", "z", merge))).isEqualTo(merge); + assertThat(fromProto(toProto("y", "z", merge, null, null))).isEqualTo(merge); + assertThat(fromProto(toProto("y", "z", merge, "msg", null))).isEqualTo(merge); + assertThat(toCommitMeta(toProto("y", "z", merge, null, null)).getMessage()).isEmpty(); + assertThat(toCommitMeta(toProto("y", "z", merge, "m1", null)).getMessage()).isEqualTo("m1"); + assertThat(toCommitMeta(toProto("y", "z", merge, "m1", CommitMeta.fromMessage(""))).getMessage()).isEqualTo("m1"); + assertThat(toCommitMeta(toProto("y", "z", merge, "m1", CommitMeta.fromMessage("m2"))).getMessage()).isEqualTo("m2"); + assertThat(toCommitMeta(toProto("y", "z", merge, "", CommitMeta.fromMessage("m2"))).getMessage()).isEqualTo("m2"); + assertThat(toCommitMeta(toProto("y", "z", merge, "", CommitMeta.builder().author("a2").message("").build())) + .getAuthor()).isEqualTo("a2"); Merge mergeWithKeepingCommits = ImmutableMerge.builder() .keepIndividualCommits(true) .fromRefName("main") .fromHash(hash) .build(); - assertThat(fromProto(toProto("y", "z", mergeWithKeepingCommits))).isEqualTo(mergeWithKeepingCommits); + assertThat(fromProto(toProto("y", "z", mergeWithKeepingCommits, null, null))).isEqualTo(mergeWithKeepingCommits); Merge mergeWithExtraInfo = ImmutableMerge.builder() .from(mergeWithKeepingCommits) .isReturnConflictAsResult(true) .isFetchAdditionalInfo(true) - .defaultKeyMergeMode(BaseMergeTransplant.MergeBehavior.FORCE) + .defaultKeyMergeMode(org.projectnessie.model.MergeBehavior.FORCE) .addKeyMergeModes(ImmutableMergeKeyBehavior.builder() - .mergeBehavior(BaseMergeTransplant.MergeBehavior.DROP) + .mergeBehavior(org.projectnessie.model.MergeBehavior.DROP) .key(ContentKey.of("test", "key")) .build()) .isDryRun(true) .isReturnConflictAsResult(true) .build(); - assertThat(fromProto(toProto("y", "z", mergeWithExtraInfo))).isEqualTo(mergeWithExtraInfo); + assertThat(fromProto(toProto("y", "z", mergeWithExtraInfo, null, null))).isEqualTo(mergeWithExtraInfo); } @Test @@ -1052,9 +1195,9 @@ public void transplant() { 
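// Transplant exercises the same type move as mergeConversion() above:
// MergeBehavior now lives at org.projectnessie.model.MergeBehavior rather
// than on BaseMergeTransplant.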
.from(transplantWithKeepingCommits) .isReturnConflictAsResult(true) .isFetchAdditionalInfo(true) - .defaultKeyMergeMode(BaseMergeTransplant.MergeBehavior.FORCE) + .defaultKeyMergeMode(org.projectnessie.model.MergeBehavior.FORCE) .addKeyMergeModes(ImmutableMergeKeyBehavior.builder() - .mergeBehavior(BaseMergeTransplant.MergeBehavior.DROP) + .mergeBehavior(org.projectnessie.model.MergeBehavior.DROP) .key(ContentKey.of("test", "key")) .build()) .isDryRun(true) @@ -1130,7 +1273,7 @@ public void mergeResponse() { .details(ImmutableList.of( ImmutableContentKeyDetails.builder() .key(ContentKey.of("test", "key")) - .mergeBehavior(BaseMergeTransplant.MergeBehavior.FORCE) + .mergeBehavior(org.projectnessie.model.MergeBehavior.FORCE) .conflictType(MergeResponse.ContentKeyConflict.UNRESOLVABLE) .sourceCommits(ImmutableList.of("a", "b")) .targetCommits(ImmutableList.of("c", "d")) @@ -1138,4 +1281,69 @@ public void mergeResponse() { .build(); assertThat(fromProto(toProto(mergeResponse))).isEqualTo(mergeResponse); } + + @Test + public void legacyCommitResponse() throws IOException { + ReferenceMetadata meta = ImmutableReferenceMetadata.builder().numCommitsAhead(1).numCommitsBehind(2).build(); + Branch branch = Branch.builder().name("name").hash("1122334455667788").metadata(meta).build(); + + // Legacy servers return a Branch from commitMultipleOperations() + com.dremio.services.nessie.grpc.api.Branch protoBranch = toProto(branch); + ByteArrayOutputStream responsePayload = new ByteArrayOutputStream(); + protoBranch.writeTo(responsePayload); + + CommitResponse commitResponse = fromProto( + com.dremio.services.nessie.grpc.api.CommitResponse.parseFrom(responsePayload.toByteArray())); + assertThat(commitResponse.getTargetBranch()).isEqualTo(branch); + assertThat(commitResponse.getAddedContents()).isNull(); + } + + @Test + public void legacyCommitResponseReader() throws IOException { + ReferenceMetadata meta = ImmutableReferenceMetadata.builder().numCommitsAhead(1).numCommitsBehind(2).build(); + Branch branch = Branch.builder().name("name").hash("1122334455667788").metadata(meta).build(); + + // Legacy clients should be able to read responses from new commitMultipleOperations() implementations as `Branch` + CommitResponse commitResponse = CommitResponse.builder() + .targetBranch(branch) + .build(); + ByteArrayOutputStream responsePayload = new ByteArrayOutputStream(); + toProto(commitResponse).writeTo(responsePayload); + + Branch branchResponse = fromProto( + com.dremio.services.nessie.grpc.api.Branch.parseFrom(responsePayload.toByteArray())); + assertThat(branchResponse).isEqualTo(branch); + } + + @Test + public void commitResponse() { + assertThatThrownBy(() -> toProto((CommitResponse) null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("CommitResponse must be non-null"); + + assertThatThrownBy(() -> fromProto((com.dremio.services.nessie.grpc.api.CommitResponse) null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("CommitResponse must be non-null"); + + ReferenceMetadata meta = ImmutableReferenceMetadata.builder().numCommitsAhead(1).numCommitsBehind(2).build(); + Branch branch = Branch.builder().name("name").hash("1122334455667788").metadata(meta).build(); + + CommitResponse commitResponse = CommitResponse.builder() + .targetBranch(branch) + .build(); + assertThat(fromProto(toProto(commitResponse)).getTargetBranch()).isEqualTo(branch); + assertThat(fromProto(toProto(commitResponse)).getAddedContents()).isNull(); + + ContentKey key1 = ContentKey.of("test1"); + ContentKey key2 
= ContentKey.of("test3"); + commitResponse = CommitResponse.builder() + .targetBranch(branch) + .addAddedContents(AddedContent.addedContent(key1, "abc")) + .addAddedContents(AddedContent.addedContent(key2, "def")) + .build(); + assertThat(fromProto(toProto(commitResponse)).getTargetBranch()).isEqualTo(branch); + assertThat(fromProto(toProto(commitResponse)).getAddedContents()) + .extracting(AddedContent::getKey, AddedContent::contentId) + .containsExactly(tuple(key1, "abc"), tuple(key2, "def")); + } } diff --git a/services/nessie-grpc/pom.xml b/services/nessie-grpc/pom.xml index 2e88abe33f..5cd0db8c97 100644 --- a/services/nessie-grpc/pom.xml +++ b/services/nessie-grpc/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-nessie-grpc diff --git a/services/nessie-grpc/server/pom.xml b/services/nessie-grpc/server/pom.xml index d96bdedad6..126b6e9f3c 100644 --- a/services/nessie-grpc/server/pom.xml +++ b/services/nessie-grpc/server/pom.xml @@ -22,7 +22,7 @@ dremio-services-nessie-grpc com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 @@ -40,6 +40,14 @@ dremio-services-nessie-grpc-client ${project.version} + + org.projectnessie.nessie + nessie-services + + + org.projectnessie.nessie + nessie-versioned-spi + @@ -47,7 +55,6 @@ org.jboss.jandex jandex-maven-plugin - 1.2.1 make-index diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ConfigService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ConfigService.java index e2a5b4871c..fea854982c 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ConfigService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ConfigService.java @@ -20,8 +20,6 @@ import java.util.function.Supplier; -import org.projectnessie.api.ConfigApi; - import com.dremio.services.nessie.grpc.api.ConfigServiceGrpc; import com.dremio.services.nessie.grpc.api.Empty; import com.dremio.services.nessie.grpc.api.NessieConfiguration; @@ -33,9 +31,9 @@ */ public class ConfigService extends ConfigServiceGrpc.ConfigServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public ConfigService(Supplier bridge) { + public ConfigService(Supplier bridge) { this.bridge = bridge; } diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ContentService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ContentService.java index ec65e007de..1c322fff8b 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ContentService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/ContentService.java @@ -23,9 +23,7 @@ import java.util.List; import java.util.function.Supplier; -import org.projectnessie.api.ContentApi; import org.projectnessie.model.ContentKey; -import org.projectnessie.model.GetMultipleContentsRequest; import com.dremio.services.nessie.grpc.api.Content; import com.dremio.services.nessie.grpc.api.ContentRequest; @@ -40,9 +38,9 @@ */ public class ContentService extends ContentServiceGrpc.ContentServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public ContentService(Supplier bridge) { + public ContentService(Supplier bridge) { this.bridge = bridge; } @@ -53,8 +51,10 
@@ public void getContent(ContentRequest request, StreamObserver observer) toProto( bridge.get().getContent( fromProto(request.getContentKey()), - request.getRef(), - getHashOnRefFromProtoRequest(request.getHashOnRef()))), + getRefFromProtoRequest(request.getRef()), + getHashOnRefFromProtoRequest(request.getHashOnRef()), + false // TODO: support withDocumentation + ).getContent()), observer); } @@ -67,9 +67,12 @@ public void getMultipleContents( request.getRequestedKeysList().forEach(k -> requestedKeys.add(fromProto(k))); return toProto( bridge.get().getMultipleContents( - request.getRef(), + getRefFromProtoRequest(request.getRef()), getHashOnRefFromProtoRequest(request.getHashOnRef()), - GetMultipleContentsRequest.of(requestedKeys))); + requestedKeys, + false // TODO: support withDocumentation + ) + ); }, observer); } @@ -77,4 +80,8 @@ public void getMultipleContents( private String getHashOnRefFromProtoRequest(String hashOnRef) { return "".equals(hashOnRef) ? null : hashOnRef; } + + private String getRefFromProtoRequest(String ref) { + return "".equals(ref) ? null : ref; + } } diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/DiffService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/DiffService.java index a59a35b03d..bacd081ac5 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/DiffService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/DiffService.java @@ -16,12 +16,15 @@ package com.dremio.services.nessie.grpc.server; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.refToProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; +import static org.projectnessie.services.impl.RefUtil.toReference; import java.util.function.Supplier; -import org.projectnessie.api.DiffApi; +import org.projectnessie.model.DiffResponse.DiffEntry; +import org.projectnessie.services.spi.PagedCountingResponseHandler; import com.dremio.services.nessie.grpc.api.DiffRequest; import com.dremio.services.nessie.grpc.api.DiffResponse; @@ -34,14 +37,51 @@ */ public class DiffService extends DiffServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public DiffService(Supplier bridge) { + public DiffService(Supplier bridge) { this.bridge = bridge; } @Override public void getDiff(DiffRequest request, StreamObserver observer) { - handle(() -> toProto(bridge.get().getDiff(fromProto(request))), observer); + + handle(() -> { + DiffResponse.Builder response = DiffResponse.newBuilder(); + return bridge.get().getDiff( + request.getFromRefName(), + fromProto(request::hasFromHashOnRef, request::getFromHashOnRef), + request.getToRefName(), + fromProto(request::hasToHashOnRef, request::getToHashOnRef), + fromProto(request::hasPageToken, request::getPageToken), + new PagedCountingResponseHandler( + fromProto(request::hasMaxRecords, request::getMaxRecords)) { + + @Override + protected boolean doAddEntry(DiffEntry entry) { + response.addDiffs(toProto(entry)); + return true; + } + + @Override + public DiffResponse build() { + return response.build(); + } + + @Override + public void hasMore(String pagingToken) { + response.setHasMore(true).setPageToken(pagingToken); + } + }, + fromReference -> response.setEffectiveFromRef(refToProto(toReference(fromReference))), + 
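+ // The to-side callback mirrors the from-side one above: the service reports
+ // the references it actually resolved, and both get echoed in the response.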
toReference -> response.setEffectiveToRef(refToProto(toReference(toReference))), + fromProto(request::hasMinKey, () -> fromProto(request.getMinKey())), + fromProto(request::hasMaxKey, () -> fromProto(request.getMaxKey())), + fromProto(request::hasPrefixKey, () -> fromProto(request.getPrefixKey())), + fromProto(request.getKeysList()), + fromProto(request::hasFilter, request::getFilter) + ); + }, + observer); } } diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/NamespaceService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/NamespaceService.java index 44cf289afa..741c33d01e 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/NamespaceService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/NamespaceService.java @@ -21,9 +21,6 @@ import java.util.function.Supplier; -import org.projectnessie.api.NamespaceApi; -import org.projectnessie.api.params.ImmutableNamespaceUpdate; - import com.dremio.services.nessie.grpc.api.Empty; import com.dremio.services.nessie.grpc.api.MultipleNamespacesRequest; import com.dremio.services.nessie.grpc.api.MultipleNamespacesResponse; @@ -31,6 +28,7 @@ import com.dremio.services.nessie.grpc.api.NamespaceRequest; import com.dremio.services.nessie.grpc.api.NamespaceServiceGrpc.NamespaceServiceImplBase; import com.dremio.services.nessie.grpc.api.NamespaceUpdateRequest; +import com.google.common.collect.ImmutableSet; import io.grpc.stub.StreamObserver; @@ -39,44 +37,51 @@ */ public class NamespaceService extends NamespaceServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public NamespaceService(Supplier bridge) { + public NamespaceService(Supplier bridge) { this.bridge = bridge; } @Override public void createNamespace(NamespaceRequest request, StreamObserver observer) { - handle(() -> toProto(bridge.get().createNamespace(fromProto(request), fromProto(request.getNamespace()))), observer); + handle(() -> toProto(bridge.get().createNamespace(request.getNamedRef(), fromProto(request.getNamespace()))), + observer); } @Override public void deleteNamespace(NamespaceRequest request, StreamObserver observer) { handle(() -> { - bridge.get().deleteNamespace(fromProto(request)); + bridge.get().deleteNamespace(request.getNamedRef(), fromProto(request.getNamespace())); return Empty.getDefaultInstance(); }, observer); } @Override public void getNamespace(NamespaceRequest request, StreamObserver observer) { - handle(() -> toProto(bridge.get().getNamespace(fromProto(request))), observer); + handle(() -> toProto(bridge.get().getNamespace( + request.getNamedRef(), + fromProto(request::hasHashOnRef, request::getHashOnRef), + fromProto(request.getNamespace()))), observer); } @Override public void getNamespaces(MultipleNamespacesRequest request, StreamObserver observer) { - handle(() -> toProto(bridge.get().getNamespaces(fromProto(request))), observer); + handle(() -> toProto(bridge.get().getNamespaces( + request.getNamedRef(), + fromProto(request::hasHashOnRef, request::getHashOnRef), + fromProto(request.getNamespace()))), observer); } @Override public void updateProperties(NamespaceUpdateRequest request, StreamObserver observer) { handle(() -> { - bridge.get().updateProperties(fromProto(request.getNamespaceRequest()), - ImmutableNamespaceUpdate.builder() - .propertyUpdates(request.getPropertyUpdatesMap()) - .propertyRemovals(request.getPropertyRemovalsList()) - .build()); + 
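+ // The service-level API now takes the updates map and the removal set
+ // directly, replacing the removed ImmutableNamespaceUpdate wrapper: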
bridge.get().updateProperties( + request.getNamespaceRequest().getNamedRef(), + fromProto(request.getNamespaceRequest().getNamespace()), + request.getPropertyUpdatesMap(), + ImmutableSet.builder().addAll(request.getPropertyRemovalsList()).build()); return Empty.getDefaultInstance(); }, observer); } diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/RefLogService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/RefLogService.java index ebd12aa646..e6531f12f4 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/RefLogService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/RefLogService.java @@ -21,8 +21,6 @@ import java.util.function.Supplier; -import org.projectnessie.api.RefLogApi; - import com.dremio.services.nessie.grpc.api.RefLogParams; import com.dremio.services.nessie.grpc.api.RefLogResponse; import com.dremio.services.nessie.grpc.api.RefLogServiceGrpc.RefLogServiceImplBase; @@ -34,14 +32,20 @@ */ public class RefLogService extends RefLogServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public RefLogService(Supplier bridge) { + public RefLogService(Supplier bridge) { this.bridge = bridge; } @Override public void getRefLog(RefLogParams params, StreamObserver observer) { - handle(() -> toProto(bridge.get().getRefLog(fromProto(params))), observer); + handle(() -> toProto(bridge.get().getRefLog( + fromProto(params::hasStartHash, params::getStartHash), + fromProto(params::hasEndHash, params::getEndHash), + fromProto(params::hasFilter, params::getFilter), + fromProto(params::hasMaxRecords, params::getMaxRecords), + fromProto(params::hasPageToken, params::getPageToken)) + ), observer); } } diff --git a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/TreeService.java b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/TreeService.java index 9a25c7c19e..41fd28fd6d 100644 --- a/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/TreeService.java +++ b/services/nessie-grpc/server/src/main/java/com/dremio/services/nessie/grpc/server/TreeService.java @@ -16,24 +16,34 @@ package com.dremio.services.nessie.grpc.server; import static com.dremio.services.nessie.grpc.ProtoUtil.fromProto; +import static com.dremio.services.nessie.grpc.ProtoUtil.fromProtoMessage; import static com.dremio.services.nessie.grpc.ProtoUtil.refFromProto; import static com.dremio.services.nessie.grpc.ProtoUtil.refToProto; import static com.dremio.services.nessie.grpc.ProtoUtil.toProto; import static com.dremio.services.nessie.grpc.client.GrpcExceptionMapper.handle; +import static org.projectnessie.services.impl.RefUtil.toReference; +import static org.projectnessie.services.spi.TreeService.MAX_COMMIT_LOG_ENTRIES; import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.projectnessie.api.TreeApi; +import org.projectnessie.api.v1.params.CommitLogParams; +import org.projectnessie.api.v1.params.EntriesParams; +import org.projectnessie.api.v1.params.GetReferenceParams; +import org.projectnessie.api.v1.params.Merge; +import org.projectnessie.api.v1.params.ReferencesParams; +import org.projectnessie.api.v1.params.Transplant; import org.projectnessie.model.Branch; +import org.projectnessie.model.CommitMeta; import org.projectnessie.model.Detached; +import org.projectnessie.model.LogResponse; import 
org.projectnessie.model.Tag; +import org.projectnessie.services.spi.PagedCountingResponseHandler; -import com.dremio.services.nessie.grpc.ProtoUtil; import com.dremio.services.nessie.grpc.api.AssignReferenceRequest; import com.dremio.services.nessie.grpc.api.CommitLogRequest; import com.dremio.services.nessie.grpc.api.CommitLogResponse; import com.dremio.services.nessie.grpc.api.CommitRequest; +import com.dremio.services.nessie.grpc.api.CommitResponse; import com.dremio.services.nessie.grpc.api.CreateReferenceRequest; import com.dremio.services.nessie.grpc.api.DeleteReferenceRequest; import com.dremio.services.nessie.grpc.api.Empty; @@ -45,8 +55,10 @@ import com.dremio.services.nessie.grpc.api.MergeRequest; import com.dremio.services.nessie.grpc.api.MergeResponse; import com.dremio.services.nessie.grpc.api.Reference; +import com.dremio.services.nessie.grpc.api.ReferenceResponse; import com.dremio.services.nessie.grpc.api.TransplantRequest; import com.dremio.services.nessie.grpc.api.TreeServiceGrpc; +import com.google.common.base.Strings; import io.grpc.stub.StreamObserver; @@ -55,9 +67,9 @@ */ public class TreeService extends TreeServiceGrpc.TreeServiceImplBase { - private final Supplier bridge; + private final Supplier bridge; - public TreeService(Supplier bridge) { + public TreeService(Supplier bridge) { this.bridge = bridge; } @@ -66,29 +78,60 @@ public void getAllReferences(GetAllReferencesRequest request, StreamObserver observer) { handle( () -> - GetAllReferencesResponse.newBuilder() - .addAllReference( - bridge.get().getAllReferences(fromProto(request)).getReferences().stream() - .map(ProtoUtil::refToProto) - .collect(Collectors.toList())) - .build(), + { + ReferencesParams params = fromProto(request); + return bridge.get().getAllReferences( + params.fetchOption(), + params.filter(), + params.pageToken(), + new PagedCountingResponseHandler( + params.maxRecords()) { + + private final GetAllReferencesResponse.Builder response = GetAllReferencesResponse.newBuilder(); + + @Override + protected boolean doAddEntry(org.projectnessie.model.Reference entry) { + response.addReference(refToProto(entry)); + return true; + } + + @Override + public GetAllReferencesResponse build() { + return response.build(); + } + + @Override + public void hasMore(String pagingToken) { + response.setHasMore(true).setPageToken(pagingToken); + } + } + ); + }, observer); } @Override public void getReferenceByName( GetReferenceByNameRequest request, StreamObserver observer) { - handle(() -> refToProto(bridge.get().getReferenceByName(fromProto(request))), observer); + handle(() -> { + GetReferenceParams params = fromProto(request); + return refToProto(bridge.get().getReferenceByName(params.getRefName(), params.fetchOption())); + }, observer); } @Override public void createReference(CreateReferenceRequest request, StreamObserver observer) { handle( () -> - refToProto( + { + org.projectnessie.model.Reference ref = refFromProto(request.getReference()); + return refToProto( bridge.get().createReference( - "".equals(request.getSourceRefName()) ? null : request.getSourceRefName(), - refFromProto(request.getReference()))), + ref.getName(), + ref.getType(), + ref.getHash(), + "".equals(request.getSourceRefName()) ? 
null : request.getSourceRefName())); + }, observer); } @@ -98,7 +141,7 @@ public void getDefaultBranch(Empty request, StreamObserver observer) } @Override - public void assignReference(AssignReferenceRequest request, StreamObserver observer) { + public void assignReference(AssignReferenceRequest request, StreamObserver observer) { handle( () -> { org.projectnessie.model.Reference ref; @@ -111,18 +154,29 @@ public void assignReference(AssignReferenceRequest request, StreamObserver observer) { + public void deleteReference(DeleteReferenceRequest request, StreamObserver observer) { handle( () -> { - bridge.get().deleteReference(fromProto(request.getReferenceType()), request.getNamedRef(), request.getHash()); - return Empty.getDefaultInstance(); + String refName = request.getNamedRef(); + String refHash = request.getHash(); + bridge.get().deleteReference(fromProto(request.getReferenceType()), refName, refHash); + // The backend service allows deleting a reference only when the expected hash is equal + // to the current HEAD of the reference. Therefore, we can construct the response object + // using input parameters in the successful case. + return ReferenceResponse.newBuilder() + .setReference(refToProto(request.getReferenceType(), refName, refHash)) + .build(); }, observer); } @@ -130,49 +184,137 @@ public void deleteReference(DeleteReferenceRequest request, StreamObserver observer) { handle( - () -> toProto(bridge.get().getCommitLog(request.getNamedRef(), fromProto(request))), + () -> { + CommitLogParams params = fromProto(request); + return bridge.get().getCommitLog( + request.getNamedRef(), + params.fetchOption(), + params.startHash(), + params.endHash(), + params.filter(), + params.pageToken(), + new PagedCountingResponseHandler( + params.maxRecords(), MAX_COMMIT_LOG_ENTRIES) { + + private final CommitLogResponse.Builder response = CommitLogResponse.newBuilder(); + + @Override + protected boolean doAddEntry(LogResponse.LogEntry entry) { + response.addLogEntries(toProto(entry)); + return true; + } + + @Override + public CommitLogResponse build() { + return response.build(); + } + + @Override + public void hasMore(String pagingToken) { + response.setHasMore(true).setToken(pagingToken); + } + } + ); + }, observer); } @Override public void getEntries(EntriesRequest request, StreamObserver observer) { handle( - () -> toProto(bridge.get().getEntries(request.getNamedRef(), fromProto(request))), + () -> { + EntriesParams params = fromProto(request); + EntriesResponse.Builder response = EntriesResponse.newBuilder(); + return bridge.get().getEntries( + request.getNamedRef(), + params.hashOnRef(), + params.namespaceDepth(), + params.filter(), + params.pageToken(), + request.getWithContent(), + new PagedCountingResponseHandler( + params.maxRecords()) { + + @Override + protected boolean doAddEntry(org.projectnessie.model.EntriesResponse.Entry entry) { + response.addEntries(toProto(entry)); + return true; + } + + @Override + public EntriesResponse build() { + return response.build(); + } + + @Override + public void hasMore(String pagingToken) { + response.setHasMore(true).setToken(pagingToken); + } + }, + effectiveRef -> response.setEffectiveReference(refToProto(toReference(effectiveRef))), + fromProto(request::hasMinKey, () -> fromProto(request.getMinKey())), + fromProto(request::hasMaxKey, () -> fromProto(request.getMaxKey())), + fromProto(request::hasPrefixKey, () -> fromProto(request.getPrefixKey())), + fromProto(request.getKeysList()) + ); + }, observer); } @Override public void 
transplantCommitsIntoBranch(TransplantRequest request, StreamObserver observer) { handle( - () -> toProto( - bridge.get().transplantCommitsIntoBranch( - request.getBranchName(), - request.getHash(), - request.getMessage(), - fromProto(request))), + () -> { + String msg = fromProtoMessage(request); + CommitMeta meta = CommitMeta.fromMessage(msg == null ? "" : msg); + Transplant transplant = fromProto(request); + return toProto( + bridge.get().transplantCommitsIntoBranch( + request.getBranchName(), + request.getHash(), + meta, + transplant.getHashesToTransplant(), + transplant.getFromRefName(), + transplant.keepIndividualCommits(), + transplant.getKeyMergeModes(), + transplant.getDefaultKeyMergeMode(), + transplant.isDryRun(), + transplant.isFetchAdditionalInfo(), + transplant.isReturnConflictAsResult())); + }, observer); } @Override public void mergeRefIntoBranch(MergeRequest request, StreamObserver observer) { handle( - () -> toProto( - bridge.get().mergeRefIntoBranch( - request.getToBranch(), - request.getExpectedHash(), - fromProto(request))), + () -> { + Merge merge = fromProto(request); + return toProto( + bridge.get().mergeRefIntoBranch( + request.getToBranch(), + request.getExpectedHash(), + merge.getFromRefName(), + merge.getFromHash(), + merge.keepIndividualCommits(), + fromProto(request::getMessage, request::hasCommitMeta, request::getCommitMeta), + merge.getKeyMergeModes(), + merge.getDefaultKeyMergeMode(), + merge.isDryRun(), + merge.isFetchAdditionalInfo(), + merge.isReturnConflictAsResult())); + }, observer); } @Override - public void commitMultipleOperations( - CommitRequest request, StreamObserver observer) { + public void commitMultipleOperations(CommitRequest request, StreamObserver observer) { handle( () -> toProto( bridge.get().commitMultipleOperations( request.getBranch(), - request.getHash(), + Strings.emptyToNull(request.getHash()), fromProto(request.getCommitOperations()))), observer); } diff --git a/services/nessie-proxy/pom.xml b/services/nessie-proxy/pom.xml new file mode 100644 index 0000000000..53ed2c394f --- /dev/null +++ b/services/nessie-proxy/pom.xml @@ -0,0 +1,166 @@ + + + 4.0.0 + + + com.dremio.services + dremio-services-parent + 24.1.0-202306130653310132-d30779f6 + + + com.dremio.services + dremio-services-nessie-proxy + jar + Services - Nessie - Proxy + + + + com.dremio + dremio-common + + + com.dremio + dremio-common + tests + test-jar + test + + + com.dremio.services + dremio-services-options + + + org.projectnessie.nessie + nessie-model + + + org.projectnessie.nessie + nessie-rest-services + + + org.projectnessie.nessie + nessie-services + + + org.projectnessie.nessie + nessie-client + + + org.mockito + mockito-junit-jupiter + test + + + javax.enterprise + cdi-api + 2.0 + compile + + + org.hamcrest + hamcrest + 2.1 + test + + + org.projectnessie.nessie + nessie-jaxrs-tests + test + + + org.projectnessie.nessie + nessie-jaxrs-testextension + test + + + javax.ws.rs + javax.ws.rs-api + ${javax.ws.rs-api.version} + + + + + + jdk11-or-higher + + [11,) + + + + + + maven-failsafe-plugin + + + default-integration-test + + integration-test + verify + + + + + ${nessie.server.url} + + + + + + + + org.projectnessie + nessie-apprunner-maven-plugin + 0.29.0 + + ${skipTests} + + org.projectnessie.nessie:nessie-quarkus:jar:runner:${nessie.version} + + + + IN_MEMORY + + + + nessie.server.http.port + + nessie.server.url + + + + + start + pre-integration-test + start + + + + stop + post-integration-test + stop + + + + + + + + diff --git 
a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyConfigResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyConfigResource.java new file mode 100644 index 0000000000..c4aadadfad --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyConfigResource.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpConfigApi; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.model.NessieConfiguration; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** Nessie config-API REST endpoint that forwards via gRPC. */ +@RequestScoped +public class ProxyConfigResource implements HttpConfigApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject NessieApiV1 api; + + public ProxyConfigResource() { + } + + @Override + @JsonView(Views.V1.class) + public NessieConfiguration getConfig() { + return api.getConfig(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyContentResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyContentResource.java new file mode 100644 index 0000000000..5db1e7c7f2 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyContentResource.java @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.proxy; + +import java.util.Map; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpContentApi; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.error.NessieContentNotFoundException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Content; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.GetMultipleContentsRequest; +import org.projectnessie.model.GetMultipleContentsResponse; +import org.projectnessie.model.GetMultipleContentsResponse.ContentWithKey; +import org.projectnessie.model.ImmutableGetMultipleContentsResponse; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** Nessie content-API REST endpoint that forwards via gRPC. */ +@RequestScoped +public class ProxyContentResource implements HttpContentApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject NessieApiV1 api; + + public ProxyContentResource() { + } + + @Override + @JsonView(Views.V1.class) + public Content getContent(ContentKey key, + String ref, String hashOnRef) throws NessieNotFoundException { + Content content = api.getContent().refName(ref).hashOnRef(hashOnRef).key(key).get().get(key); + if (content == null) { + throw new NessieContentNotFoundException(key, ref); + } + return content; + } + + @Override + @JsonView(Views.V1.class) + public GetMultipleContentsResponse getMultipleContents(String ref, + String hashOnRef, GetMultipleContentsRequest request) throws NessieNotFoundException { + Map contents = api.getContent().refName(ref).hashOnRef(hashOnRef) + .keys(request.getRequestedKeys()).get(); + ImmutableGetMultipleContentsResponse.Builder resp = ImmutableGetMultipleContentsResponse.builder(); + contents.forEach((k, v) -> resp.addContents(ContentWithKey.of(k, v))); + return resp.build(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyDiffResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyDiffResource.java new file mode 100644 index 0000000000..4de5353685 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyDiffResource.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.proxy; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpDiffApi; +import org.projectnessie.api.v1.params.DiffParams; +import org.projectnessie.client.api.GetDiffBuilder; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.DiffResponse; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** + * Nessie diff-API REST endpoint that forwards via gRPC. + */ +@RequestScoped +public class ProxyDiffResource implements HttpDiffApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject + NessieApiV1 api; + + public ProxyDiffResource() { + } + + @Override + @JsonView(Views.V1.class) + public DiffResponse getDiff(DiffParams params) throws NessieNotFoundException { + GetDiffBuilder diff = api.getDiff().fromRefName(params.getFromRef()).toRefName(params.getToRef()); + if (params.getFromHashOnRef() != null) { + diff.fromHashOnRef(params.getFromHashOnRef()); + } + if (params.getToHashOnRef() != null) { + diff.toHashOnRef(params.getToHashOnRef()); + } + return diff.get(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyExceptionMapper.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyExceptionMapper.java new file mode 100644 index 0000000000..21c44c16ba --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyExceptionMapper.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.ResponseBuilder; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; + +import org.projectnessie.client.rest.NessieServiceException; +import org.projectnessie.error.NessieError; + +/** + * Maps the client-side {@link NessieServiceException} thrown by the {@code DremioNessieApiV1} back to + * HTTP responses. + */ +@Provider +public class ProxyExceptionMapper implements ExceptionMapper { + + // Do not move this class without adjusting c.d.d.a.atlantisservice.server.nessie.processing.NessieServiceResourceConfig ! 
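+ // The injected NessieApiV1 talks to the backing Nessie service; failures come
+ // back as NessieServiceException carrying the server's original NessieError,
+ // so replaying its status, reason and JSON body preserves upstream semantics.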
+ + @Override + public Response toResponse(NessieServiceException e) { + NessieError error = e.getError(); + ResponseBuilder responseBuilder = + Response.status(error.getStatus(), error.getReason()).entity(error) + .type(MediaType.APPLICATION_JSON_TYPE); + return responseBuilder.build(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNamespaceResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNamespaceResource.java new file mode 100644 index 0000000000..4a23646d94 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNamespaceResource.java @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpNamespaceApi; +import org.projectnessie.api.v1.params.MultipleNamespacesParams; +import org.projectnessie.api.v1.params.NamespaceParams; +import org.projectnessie.api.v1.params.NamespaceUpdate; +import org.projectnessie.client.api.CreateNamespaceBuilder; +import org.projectnessie.client.api.DeleteNamespaceBuilder; +import org.projectnessie.client.api.GetMultipleNamespacesBuilder; +import org.projectnessie.client.api.GetNamespaceBuilder; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.UpdateNamespaceBuilder; +import org.projectnessie.error.NessieNamespaceAlreadyExistsException; +import org.projectnessie.error.NessieNamespaceNotEmptyException; +import org.projectnessie.error.NessieNamespaceNotFoundException; +import org.projectnessie.error.NessieReferenceNotFoundException; +import org.projectnessie.model.GetNamespacesResponse; +import org.projectnessie.model.Namespace; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** + * Nessie Namespace-API REST endpoint that forwards via gRPC. 
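+ * <p>Each method maps the v1 REST params onto the matching {@link NessieApiV1}
+ * builder, setting {@code hashOnRef} only when the caller supplied one.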
+ */ +@RequestScoped +public class ProxyNamespaceResource implements HttpNamespaceApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject + NessieApiV1 api; + + public ProxyNamespaceResource() { + } + + @Override + @JsonView(Views.V1.class) + public Namespace createNamespace(NamespaceParams params, Namespace namespace) + throws NessieNamespaceAlreadyExistsException, NessieReferenceNotFoundException { + CreateNamespaceBuilder builder = api.createNamespace().refName(params.getRefName()) + .namespace(params.getNamespace()) + .properties(namespace.getProperties()); + if (null != params.getHashOnRef()) { + builder.hashOnRef(params.getHashOnRef()); + } + return builder.create(); + } + + @Override + @JsonView(Views.V1.class) + public void deleteNamespace(NamespaceParams params) + throws NessieReferenceNotFoundException, NessieNamespaceNotEmptyException, NessieNamespaceNotFoundException { + DeleteNamespaceBuilder builder = api.deleteNamespace().refName(params.getRefName()) + .namespace(params.getNamespace()); + if (null != params.getHashOnRef()) { + builder.hashOnRef(params.getHashOnRef()); + } + builder.delete(); + } + + @Override + @JsonView(Views.V1.class) + public Namespace getNamespace(NamespaceParams params) + throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { + GetNamespaceBuilder builder = api.getNamespace().refName(params.getRefName()) + .namespace(params.getNamespace()); + if (null != params.getHashOnRef()) { + builder.hashOnRef(params.getHashOnRef()); + } + return builder.get(); + } + + @Override + @JsonView(Views.V1.class) + public GetNamespacesResponse getNamespaces(MultipleNamespacesParams params) + throws NessieReferenceNotFoundException { + GetMultipleNamespacesBuilder builder = api.getMultipleNamespaces() + .refName(params.getRefName()); + if (null != params.getNamespace()) { + builder.namespace(params.getNamespace()); + } + if (null != params.getHashOnRef()) { + builder.hashOnRef(params.getHashOnRef()); + } + return builder.get(); + } + + @Override + @JsonView(Views.V1.class) + public void updateProperties(NamespaceParams params, NamespaceUpdate namespaceUpdate) + throws NessieNamespaceNotFoundException, NessieReferenceNotFoundException { + UpdateNamespaceBuilder builder = api.updateProperties().refName(params.getRefName()) + .namespace(params.getNamespace()); + + if (namespaceUpdate.getPropertyRemovals() != null) { + builder.removeProperties(namespaceUpdate.getPropertyRemovals()); + } + + if (namespaceUpdate.getPropertyUpdates() != null) { + builder.updateProperties(namespaceUpdate.getPropertyUpdates()); + } + + if (null != params.getHashOnRef()) { + builder.hashOnRef(params.getHashOnRef()); + } + builder.update(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNessieConfig.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNessieConfig.java new file mode 100644 index 0000000000..5d1dc950a0 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyNessieConfig.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import org.projectnessie.services.config.ServerConfig; + +public class ProxyNessieConfig implements ServerConfig { + + private static final String NESSIE_DEFAULT_BRANCH = "main"; + + @Override + public String getDefaultBranch() { + return NESSIE_DEFAULT_BRANCH; + } + + @Override + public boolean sendStacktraceToClient() { + return false; + } + +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRefLogResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRefLogResource.java new file mode 100644 index 0000000000..d86e9e47aa --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRefLogResource.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import static com.dremio.services.nessie.proxy.ProxyUtil.paging; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpRefLogApi; +import org.projectnessie.api.v1.params.RefLogParams; +import org.projectnessie.client.api.GetRefLogBuilder; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.RefLogResponse; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** + * Nessie RefLog-API REST endpoint that forwards via gRPC. 
+ */ +@RequestScoped +public class ProxyRefLogResource implements HttpRefLogApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject + NessieApiV1 api; + + public ProxyRefLogResource() { + } + + @Override + @JsonView(Views.V1.class) + public RefLogResponse getRefLog(RefLogParams params) throws NessieNotFoundException { + GetRefLogBuilder req = api.getRefLog(); + if (params.endHash() != null) { + req.fromHash(params.endHash()); + } + if (params.startHash() != null) { + req.untilHash(params.startHash()); + } + if (params.filter() != null) { + req.filter(params.filter()); + } + return paging(req, params.pageToken(), params.maxRecords()).get(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRuntimeExceptionMapper.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRuntimeExceptionMapper.java new file mode 100644 index 0000000000..7112325387 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyRuntimeExceptionMapper.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.ResponseBuilder; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; + +import org.projectnessie.error.NessieError; +import org.projectnessie.error.NessieRuntimeException; + +/** + * Maps the client-side {@link NessieRuntimeException} thrown by the {@code DremioNessieApiV1} back to + * HTTP responses. + */ +@Provider +public class ProxyRuntimeExceptionMapper implements ExceptionMapper<NessieRuntimeException> { + + // Do not move this class without adjusting c.d.d.a.atlantisservice.server.nessie.processing.NessieServiceResourceConfig ! + + @Override + public Response toResponse(NessieRuntimeException e) { + NessieError error = e.getError(); + ResponseBuilder responseBuilder = + Response.status(error.getStatus(), error.getReason()).entity(error) + .type(MediaType.APPLICATION_JSON_TYPE); + return responseBuilder.build(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyTreeResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyTreeResource.java new file mode 100644 index 0000000000..3656a37531 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyTreeResource.java @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import static com.dremio.services.nessie.proxy.ProxyUtil.paging; + +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.projectnessie.api.v1.http.HttpTreeApi; +import org.projectnessie.api.v1.params.CommitLogParams; +import org.projectnessie.api.v1.params.EntriesParams; +import org.projectnessie.api.v1.params.GetReferenceParams; +import org.projectnessie.api.v1.params.Merge; +import org.projectnessie.api.v1.params.ReferencesParams; +import org.projectnessie.api.v1.params.Transplant; +import org.projectnessie.client.api.GetAllReferencesBuilder; +import org.projectnessie.client.api.GetCommitLogBuilder; +import org.projectnessie.client.api.GetEntriesBuilder; +import org.projectnessie.client.api.GetReferenceBuilder; +import org.projectnessie.client.api.MergeReferenceBuilder; +import org.projectnessie.client.api.NessieApiV1; +import org.projectnessie.client.api.OnBranchBuilder; +import org.projectnessie.client.api.OnReferenceBuilder; +import org.projectnessie.client.api.OnTagBuilder; +import org.projectnessie.client.api.TransplantCommitsBuilder; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.Branch; +import org.projectnessie.model.EntriesResponse; +import org.projectnessie.model.LogResponse; +import org.projectnessie.model.MergeResponse; +import org.projectnessie.model.Operations; +import org.projectnessie.model.Reference; +import org.projectnessie.model.ReferencesResponse; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +/** Nessie tree-API REST endpoint that forwards via gRPC. 
*/ +@RequestScoped +public class ProxyTreeResource implements HttpTreeApi { + + @SuppressWarnings("checkstyle:visibilityModifier") + @Inject + NessieApiV1 api; + + public ProxyTreeResource() { + } + + public ProxyTreeResource(NessieApiV1 api) { + this.api = api; + } + + @JsonView(Views.V1.class) + @Override + public ReferencesResponse getAllReferences(ReferencesParams params) { + GetAllReferencesBuilder allReferences = api.getAllReferences(); + if (null != params.pageToken()) { + allReferences.pageToken(params.pageToken()); + } + if (null != params.maxRecords()) { + allReferences.maxRecords(params.maxRecords()); + } + if (null != params.filter()) { + allReferences.filter(params.filter()); + } + if (null != params.fetchOption()) { + allReferences.fetch(params.fetchOption()); + } + return allReferences.get(); + } + + @JsonView(Views.V1.class) + @Override + public Branch getDefaultBranch() throws NessieNotFoundException { + return api.getDefaultBranch(); + } + + @JsonView(Views.V1.class) + @Override + public Reference createReference(String sourceRefName, Reference reference) + throws NessieNotFoundException, NessieConflictException { + return api.createReference().sourceRefName(sourceRefName).reference(reference).create(); + } + + @JsonView(Views.V1.class) + @Override + public Reference getReferenceByName(GetReferenceParams params) throws NessieNotFoundException { + GetReferenceBuilder builder = api.getReference().refName(params.getRefName()); + if (null != params.fetchOption()) { + builder.fetch(params.fetchOption()); + } + return builder.get(); + } + + @JsonView(Views.V1.class) + @Override + public EntriesResponse getEntries(String refName, EntriesParams params) throws NessieNotFoundException { + GetEntriesBuilder req = onReference(api.getEntries(), refName, params.hashOnRef()); + paging(req, params.pageToken(), params.maxRecords()); + if (params.namespaceDepth() != null) { + req.namespaceDepth(params.namespaceDepth()); + } + if (params.filter() != null) { + req.filter(params.filter()); + } + return req.get(); + } + + @JsonView(Views.V1.class) + @Override + public LogResponse getCommitLog(String ref, CommitLogParams params) throws NessieNotFoundException { + GetCommitLogBuilder req = api.getCommitLog(); + if (ref != null) { + req.refName(ref); + } + if (params.endHash() != null) { + req.hashOnRef(params.endHash()); + } + if (params.startHash() != null) { + req.untilHash(params.startHash()); + } + if (params.filter() != null) { + req.filter(params.filter()); + } + if (null != params.fetchOption()) { + req.fetch(params.fetchOption()); + } + return paging(req, params.pageToken(), params.maxRecords()).get(); + } + + @JsonView(Views.V1.class) + @Override + public void assignReference(Reference.ReferenceType referenceType, String referenceName, + String oldHash, Reference assignTo) throws NessieNotFoundException, NessieConflictException { + switch (referenceType) { + case BRANCH: + onBranch(api.assignBranch(), referenceName, oldHash).assignTo(assignTo).assign(); + break; + case TAG: + onTag(api.assignTag(), referenceName, oldHash).assignTo(assignTo).assign(); + break; + default: + throw new IllegalArgumentException("Invalid reference type " + referenceType); + } + } + + @JsonView(Views.V1.class) + @Override + public void deleteReference(Reference.ReferenceType referenceType, String referenceName, + String hash) throws NessieConflictException, NessieNotFoundException { + switch (referenceType) { + case BRANCH: + onBranch(api.deleteBranch(), referenceName, hash).delete(); + break; + case TAG: + 
onTag(api.deleteTag(), referenceName, hash).delete(); + break; + default: + throw new IllegalArgumentException("Invalid reference type " + referenceType); + } + } + + @JsonView(Views.V1.class) + @Override + public MergeResponse transplantCommitsIntoBranch(String branchName, String hash, String message, + Transplant transplant) throws NessieNotFoundException, NessieConflictException { + TransplantCommitsBuilder req = onBranch(api.transplantCommitsIntoBranch(), branchName, hash); + ProxyUtil.applyBaseMergeTransplant(req, transplant); + if (message != null) { + req.message(message); + } + return req.fromRefName(transplant.getFromRefName()) + .hashesToTransplant(transplant.getHashesToTransplant()) + .transplant(); + } + + @JsonView(Views.V1.class) + @Override + public MergeResponse mergeRefIntoBranch(String branchName, String hash, Merge merge) + throws NessieNotFoundException, NessieConflictException { + MergeReferenceBuilder req = onBranch(api.mergeRefIntoBranch(), branchName, hash) + .fromRefName(merge.getFromRefName()); + ProxyUtil.applyBaseMergeTransplant(req, merge); + if (merge.getFromHash() != null) { + req.fromHash(merge.getFromHash()); + } + if (merge.keepIndividualCommits() != null) { + req.keepIndividualCommits(merge.keepIndividualCommits()); + } + return req.merge(); + } + + @JsonView(Views.V1.class) + @Override + public Branch commitMultipleOperations(String branchName, String hash, + Operations operations) throws NessieNotFoundException, NessieConflictException { + return onBranch(api.commitMultipleOperations(), branchName, hash) + .commitMeta(operations.getCommitMeta()) + .operations(operations.getOperations()) + .commit(); + } + + private static <B extends OnReferenceBuilder<B>> B onReference(B builder, String refName, String hashOnRef) { + if (refName != null) { + builder.refName(refName); + } + if (hashOnRef != null) { + builder.hashOnRef(hashOnRef); + } + return builder; + } + + private static <B extends OnTagBuilder<B>> B onTag(B builder, String tagName, String hash) { + if (tagName != null) { + builder.tagName(tagName); + } + if (hash != null) { + builder.hash(hash); + } + return builder; + } + + private static <B extends OnBranchBuilder<B>> B onBranch(B builder, String branchName, String hash) { + if (branchName != null) { + builder.branchName(branchName); + } + if (hash != null) { + builder.hash(hash); + } + return builder; + } + +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyUtil.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyUtil.java new file mode 100644 index 0000000000..a07a5103db --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyUtil.java @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.services.nessie.proxy; + +import org.projectnessie.api.v1.params.BaseMergeTransplant; +import org.projectnessie.client.api.MergeTransplantBuilder; +import org.projectnessie.client.api.PagingBuilder; +import org.projectnessie.model.Branch; +import org.projectnessie.model.Detached; +import org.projectnessie.model.Reference; +import org.projectnessie.model.Tag; + +final class ProxyUtil { + private ProxyUtil() {} + + static <B extends PagingBuilder<B, RESP, ENTRY>, RESP, ENTRY> B paging(B builder, String pageToken, + Integer maxRecords) { + if (pageToken != null) { + builder.pageToken(pageToken); + } + if (maxRecords != null) { + builder.maxRecords(maxRecords); + } + return builder; + } + + static <B extends MergeTransplantBuilder<B>> MergeTransplantBuilder<B> applyBaseMergeTransplant( + MergeTransplantBuilder<B> builder, BaseMergeTransplant base) { + if (base.getKeyMergeModes() != null) { + base.getKeyMergeModes().forEach( + keyBehavior -> builder.mergeMode(keyBehavior.getKey(), keyBehavior.getMergeBehavior())); + } + if (base.getDefaultKeyMergeMode() != null) { + builder.defaultMergeMode(base.getDefaultKeyMergeMode()); + } + if (base.keepIndividualCommits() != null) { + builder.keepIndividualCommits(base.keepIndividualCommits()); + } + if (base.isDryRun() != null) { + builder.dryRun(base.isDryRun()); + } + if (base.isFetchAdditionalInfo() != null) { + builder.fetchAdditionalInfo(base.isFetchAdditionalInfo()); + } + if (base.isReturnConflictAsResult() != null) { + builder.returnConflictAsResult(base.isReturnConflictAsResult()); + } + return builder; + } + + public static Reference toReference(String name, Reference.ReferenceType type, String hash) { + if (name == null) { + return Detached.of(hash); + } + + switch (type) { + case BRANCH: + return Branch.of(name, hash); + case TAG: + return Tag.of(name, hash); + default: + throw new IllegalArgumentException("Unsupported reference type: " + type); + } + } + +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2ConfigResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2ConfigResource.java new file mode 100644 index 0000000000..f55e39a3a5 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2ConfigResource.java @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.services.nessie.proxy; + +import javax.inject.Inject; + +import org.projectnessie.api.v2.http.HttpConfigApi; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.model.ImmutableNessieConfiguration; +import org.projectnessie.model.NessieConfiguration; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +public class ProxyV2ConfigResource implements HttpConfigApi { + private final NessieApiV2 api; + + @Inject + public ProxyV2ConfigResource(NessieApiV2 api) { + this.api = api; + } + + @Override + @JsonView(Views.V2.class) + public NessieConfiguration getConfig() { + return ImmutableNessieConfiguration.builder().from(api.getConfig()).actualApiVersion(2).build(); + } +} diff --git a/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2TreeResource.java b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2TreeResource.java new file mode 100644 index 0000000000..66233f4296 --- /dev/null +++ b/services/nessie-proxy/src/main/java/com/dremio/services/nessie/proxy/ProxyV2TreeResource.java @@ -0,0 +1,345 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import static com.dremio.services.nessie.proxy.ProxyUtil.toReference; +import static org.projectnessie.api.v2.params.ReferenceResolver.resolveReferencePathElement; +import static org.projectnessie.api.v2.params.ReferenceResolver.resolveReferencePathElementWithDefaultBranch; + +import java.util.List; + +import javax.inject.Inject; + +import org.projectnessie.api.v2.http.HttpTreeApi; +import org.projectnessie.api.v2.params.BaseMergeTransplant; +import org.projectnessie.api.v2.params.CommitLogParams; +import org.projectnessie.api.v2.params.DiffParams; +import org.projectnessie.api.v2.params.EntriesParams; +import org.projectnessie.api.v2.params.GetReferenceParams; +import org.projectnessie.api.v2.params.Merge; +import org.projectnessie.api.v2.params.ParsedReference; +import org.projectnessie.api.v2.params.ReferencesParams; +import org.projectnessie.api.v2.params.Transplant; +import org.projectnessie.client.api.GetAllReferencesBuilder; +import org.projectnessie.client.api.GetCommitLogBuilder; +import org.projectnessie.client.api.GetDiffBuilder; +import org.projectnessie.client.api.GetEntriesBuilder; +import org.projectnessie.client.api.MergeReferenceBuilder; +import org.projectnessie.client.api.MergeTransplantBuilder; +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.api.TransplantCommitsBuilder; +import org.projectnessie.error.NessieConflictException; +import org.projectnessie.error.NessieNotFoundException; +import org.projectnessie.model.CommitResponse; +import org.projectnessie.model.ContentKey; +import org.projectnessie.model.ContentResponse; +import org.projectnessie.model.DiffResponse; +import org.projectnessie.model.EntriesResponse; +import 
org.projectnessie.model.GetMultipleContentsRequest; +import org.projectnessie.model.GetMultipleContentsResponse; +import org.projectnessie.model.ImmutableGetMultipleContentsRequest; +import org.projectnessie.model.LogResponse; +import org.projectnessie.model.MergeResponse; +import org.projectnessie.model.Operations; +import org.projectnessie.model.Reference; +import org.projectnessie.model.ReferencesResponse; +import org.projectnessie.model.SingleReferenceResponse; +import org.projectnessie.model.ser.Views; + +import com.fasterxml.jackson.annotation.JsonView; + +public class ProxyV2TreeResource implements HttpTreeApi { + private final NessieApiV2 api; + + @Inject + public ProxyV2TreeResource(NessieApiV2 api) { + this.api = api; + } + private ParsedReference resolveRef(String refPathString) { + return resolveReferencePathElementWithDefaultBranch(refPathString, + () -> api.getConfig().getDefaultBranch()); + } + + @Override + @JsonView(Views.V2.class) + public ReferencesResponse getAllReferences(ReferencesParams params) { + GetAllReferencesBuilder request = api.getAllReferences() + .fetch(params.fetchOption()) + .filter(params.filter()) + .pageToken(params.pageToken()); + + Integer maxRecords = params.maxRecords(); + if (maxRecords != null) { + request.maxRecords(maxRecords); + } + + return request.get(); + } + + @Override + @JsonView(Views.V2.class) + public SingleReferenceResponse createReference(String name, Reference.ReferenceType type, Reference reference) + throws NessieNotFoundException, NessieConflictException { + + String fromRefName = null; + String fromHash = null; + if (reference != null) { + fromRefName = reference.getName(); + fromHash = reference.getHash(); + } + + Reference toCreate = toReference(name, type, fromHash); + Reference created = api.createReference().sourceRefName(fromRefName).reference(toCreate).create(); + return SingleReferenceResponse.builder().reference(created).build(); + } + + @Override + @JsonView(Views.V2.class) + public SingleReferenceResponse getReferenceByName(GetReferenceParams params) throws NessieNotFoundException { + ParsedReference ref = resolveRef(params.getRef()); + Reference result = api.getReference().refName(ref.name()).fetch(params.fetchOption()).get(); + return SingleReferenceResponse.builder().reference(result).build(); + } + + @Override + @JsonView(Views.V2.class) + public EntriesResponse getEntries(String ref, EntriesParams params) throws NessieNotFoundException { + ParsedReference reference = resolveRef(ref); + GetEntriesBuilder request = api.getEntries() + .refName(reference.name()) + .hashOnRef(reference.hash()) + .withContent(params.withContent()) + .filter(params.filter()) + .minKey(params.minKey()) + .maxKey(params.maxKey()) + .prefixKey(params.prefixKey()) + .pageToken(params.pageToken()); + + Integer maxRecords = params.maxRecords(); + if (maxRecords != null) { + request.maxRecords(maxRecords); + } + + List<ContentKey> requestedKeys = params.getRequestedKeys(); + if (requestedKeys != null) { + requestedKeys.forEach(request::key); + } + + return request.get(); + } + + @Override + @JsonView(Views.V2.class) + public LogResponse getCommitLog(String ref, CommitLogParams params) throws NessieNotFoundException { + ParsedReference reference = resolveRef(ref); + GetCommitLogBuilder request = api.getCommitLog() + .refName(reference.name()) + .hashOnRef(reference.hash()) + .untilHash(params.startHash()) + .filter(params.filter()) + .fetch(params.fetchOption()) + .pageToken(params.pageToken()); + + Integer maxRecords = params.maxRecords(); + if 
(maxRecords != null) { + request.maxRecords(maxRecords); + } + + return request.get(); + } + + @Override + @JsonView(Views.V2.class) + public DiffResponse getDiff(DiffParams params) throws NessieNotFoundException { + ParsedReference from = resolveRef(params.getFromRef()); + ParsedReference to = resolveRef(params.getToRef()); + + GetDiffBuilder request = api.getDiff() + .fromRefName(from.name()) + .fromHashOnRef(from.hash()) + .toRefName(to.name()) + .toHashOnRef(to.hash()) + .minKey(params.minKey()) + .maxKey(params.maxKey()) + .prefixKey(params.prefixKey()) + .filter(params.getFilter()) + .pageToken(params.pageToken()); + + Integer maxRecords = params.maxRecords(); + if (maxRecords != null) { + request.maxRecords(maxRecords); + } + + List<ContentKey> requestedKeys = params.getRequestedKeys(); + if (requestedKeys != null) { + requestedKeys.forEach(request::key); + } + + return request.get(); + } + + @Override + @JsonView(Views.V2.class) + public SingleReferenceResponse assignReference(Reference.ReferenceType type, String ref, Reference assignTo) + throws NessieNotFoundException, NessieConflictException { + + ParsedReference reference = resolveReferencePathElement(ref, type); + Reference result; + switch (type) { + case BRANCH: + result = api.assignBranch() + .branchName(reference.name()) + .hash(reference.hash()) + .assignTo(assignTo) + .assignAndGet(); + break; + case TAG: + result = api.assignTag() + .tagName(reference.name()) + .hash(reference.hash()) + .assignTo(assignTo) + .assignAndGet(); + break; + default: + throw new IllegalArgumentException("Unsupported reference type: " + type); + } + + return SingleReferenceResponse.builder().reference(result).build(); + } + + @Override + @JsonView(Views.V2.class) + public SingleReferenceResponse deleteReference(Reference.ReferenceType type, String ref) + throws NessieConflictException, NessieNotFoundException { + + ParsedReference reference = resolveReferencePathElement(ref, type); + Reference result; + switch (type) { + case BRANCH: + result = api.deleteBranch().branchName(reference.name()).hash(reference.hash()).getAndDelete(); + break; + case TAG: + result = api.deleteTag().tagName(reference.name()).hash(reference.hash()).getAndDelete(); + break; + default: + throw new IllegalArgumentException("Unsupported reference type: " + type); + } + return SingleReferenceResponse.builder().reference(result).build(); + } + + @Override + @JsonView(Views.V2.class) + public ContentResponse getContent(ContentKey key, String ref, boolean withDocumentation) throws NessieNotFoundException { + ParsedReference reference = resolveRef(ref); + return api.getContent().refName(reference.name()).hashOnRef(reference.hash()).getSingle(key); + } + + @Override + @JsonView(Views.V2.class) + public GetMultipleContentsResponse getSeveralContents(String ref, List<String> keys, boolean withDocumentation) throws NessieNotFoundException { + ImmutableGetMultipleContentsRequest.Builder request = GetMultipleContentsRequest.builder(); + keys.forEach(k -> request.addRequestedKeys(ContentKey.fromPathString(k))); + return getMultipleContents(ref, request.build(), withDocumentation); + } + + @Override + @JsonView(Views.V2.class) + public GetMultipleContentsResponse getMultipleContents(String ref, GetMultipleContentsRequest request, boolean withDocumentation) + throws NessieNotFoundException { + + ParsedReference reference = resolveRef(ref); + return api.getContent() + .refName(reference.name()) + .hashOnRef(reference.hash()) + .keys(request.getRequestedKeys()) + .getWithResponse(); + } + + @Override + 
@JsonView(Views.V2.class) + public MergeResponse transplantCommitsIntoBranch(String branch, Transplant transplant) + throws NessieNotFoundException, NessieConflictException { + + ParsedReference targetRef = resolveRef(branch); + TransplantCommitsBuilder request = api.transplantCommitsIntoBranch() + .message(transplant.getMessage()) + .fromRefName(transplant.getFromRefName()) + .hashesToTransplant(transplant.getHashesToTransplant()); + + mergeTransplantAttr(request, targetRef, transplant); + + return request.transplant(); + } + + @Override + @JsonView(Views.V2.class) + public MergeResponse mergeRefIntoBranch(String branch, Merge merge) + throws NessieNotFoundException, NessieConflictException { + + ParsedReference targetRef = resolveRef(branch); + MergeReferenceBuilder request = api.mergeRefIntoBranch() + .commitMeta(merge.getCommitMeta()) + .message(merge.getMessage()) + .fromRefName(merge.getFromRefName()) + .fromHash(merge.getFromHash()); + + mergeTransplantAttr(request, targetRef, merge); + + return request.merge(); + } + + private <B extends MergeTransplantBuilder<B>> void mergeTransplantAttr(MergeTransplantBuilder<B> request, + ParsedReference targetRef, + BaseMergeTransplant base) { + request.branchName(targetRef.name()); + request.hash(targetRef.hash()); + + request.defaultMergeMode(base.getDefaultKeyMergeMode()); + if (base.getKeyMergeModes() != null) { + base.getKeyMergeModes().forEach(kmt -> request.mergeMode(kmt.getKey(), kmt.getMergeBehavior())); + } + + Boolean dryRun = base.isDryRun(); + if (dryRun != null) { + request.dryRun(dryRun); + } + + Boolean fetchAdditionalInfo = base.isFetchAdditionalInfo(); + if (fetchAdditionalInfo != null) { + request.fetchAdditionalInfo(fetchAdditionalInfo); + } + + Boolean returnConflictAsResult = base.isReturnConflictAsResult(); + if (returnConflictAsResult != null) { + request.returnConflictAsResult(returnConflictAsResult); + } + } + + + @Override + @JsonView(Views.V2.class) + public CommitResponse commitMultipleOperations(String branch, Operations operations) + throws NessieNotFoundException, NessieConflictException { + + ParsedReference reference = resolveRef(branch); + return api.commitMultipleOperations() + .branchName(reference.name()) + .hash(reference.hash()) + .commitMeta(operations.getCommitMeta()) + .operations(operations.getOperations()) + .commitWithResponse(); + } +} diff --git a/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ITProxyRestOverRest.java b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ITProxyRestOverRest.java new file mode 100644 index 0000000000..43fee7535f --- /dev/null +++ b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ITProxyRestOverRest.java @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.dremio.services.nessie.proxy; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assumptions.assumeThat; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.projectnessie.client.ext.NessieApiVersion; +import org.projectnessie.client.ext.NessieApiVersions; +import org.projectnessie.client.ext.NessieClientFactory; +import org.projectnessie.jaxrs.tests.BaseTestNessieRest; + +/** + * Verifies the Nessie REST API proxy implementation by running API tests against the proxy + * configured with a Nessie REST client as the backend. + */ +public class ITProxyRestOverRest extends BaseTestNessieRest { + + @RegisterExtension + private static NessieProxyJaxRsExtension proxy = new NessieProxyJaxRsExtension(RestClientProducer.class); + + @Override + protected boolean fullPagingSupport() { + return true; + } + + @SuppressWarnings("JUnitMalformedDeclaration") + @BeforeEach + void checkApiVersion(NessieClientFactory clientFactory) { + // REST-over-REST proxy use cases are supported only for API v2 + assumeThat(clientFactory.apiVersion()).isEqualTo(NessieApiVersion.V2); + } + + @Test + @NessieApiVersions(versions = NessieApiVersion.V2) + void testActualApiVersion() { + assertThat(api().getConfig().getActualApiVersion()).isEqualTo(2); + } +} diff --git a/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/NessieProxyJaxRsExtension.java b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/NessieProxyJaxRsExtension.java new file mode 100644 index 0000000000..8a3bd7c75d --- /dev/null +++ b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/NessieProxyJaxRsExtension.java @@ -0,0 +1,200 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.proxy; + +import java.net.URI; + +import javax.ws.rs.core.Application; + +import org.glassfish.jersey.message.DeflateEncoder; +import org.glassfish.jersey.message.GZipEncoder; +import org.glassfish.jersey.server.ResourceConfig; +import org.glassfish.jersey.server.filter.EncodingFilter; +import org.glassfish.jersey.test.JerseyTest; +import org.glassfish.jersey.test.TestProperties; +import org.jboss.weld.environment.se.Weld; +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.projectnessie.client.ext.NessieClientResolver; +import org.projectnessie.services.authz.AbstractBatchAccessChecker; +import org.projectnessie.services.authz.AccessContext; +import org.projectnessie.services.authz.AuthorizerExtension; +import org.projectnessie.services.authz.BatchAccessChecker; +import org.projectnessie.services.restjavax.ConstraintViolationExceptionMapper; +import org.projectnessie.services.restjavax.ContentKeyParamConverterProvider; +import org.projectnessie.services.restjavax.NamespaceParamConverterProvider; +import org.projectnessie.services.restjavax.NessieExceptionMapper; +import org.projectnessie.services.restjavax.NessieJaxRsJsonMappingExceptionMapper; +import org.projectnessie.services.restjavax.NessieJaxRsJsonParseExceptionMapper; +import org.projectnessie.services.restjavax.ReferenceTypeParamConverterProvider; +import org.projectnessie.services.restjavax.ValidationExceptionMapper; + +/** A JUnit 5 extension that starts up Weld/JerseyTest. */ +public class NessieProxyJaxRsExtension extends NessieClientResolver + implements BeforeAllCallback, BeforeEachCallback, AfterEachCallback, ParameterResolver { + private static final ExtensionContext.Namespace NAMESPACE = + ExtensionContext.Namespace.create(NessieProxyJaxRsExtension.class); + private Class<?> clientProducer; + + public NessieProxyJaxRsExtension(final Class<?> clientProducer) { + this.clientProducer = clientProducer; + } + + @Override + protected URI getBaseUri(ExtensionContext extensionContext) { + EnvHolder env = extensionContext.getStore(NAMESPACE).get(EnvHolder.class, EnvHolder.class); + if (env == null) { + throw new ParameterResolutionException( + "Nessie JaxRs env. is not initialized in " + extensionContext.getUniqueId()); + } + return env.jerseyTest.target().getUri(); + } + + @Override + public void beforeAll(ExtensionContext extensionContext) { + // Put EnvHolder into the top-most context handled by this extension. Nested contexts will reuse + // the same value to minimize Jersey restarts. EnvHolder will initialize on first use and close + // when its owner context is destroyed. + // Note: we also use EnvHolder.class as a key to the map of stored values.
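The lifecycle described in the comment above is the standard JUnit 5 `ExtensionContext.Store` idiom: a value created with `getOrComputeIfAbsent` that implements `CloseableResource` is closed automatically when the owning context's store is destroyed, so the expensive Weld/Jersey startup runs once per test run. A stripped-down sketch of the same idiom (class names are hypothetical, not taken from this change):

import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource;

public class SharedEnvExtension implements BeforeAllCallback {
  private static final ExtensionContext.Namespace NS =
      ExtensionContext.Namespace.create(SharedEnvExtension.class);

  @Override
  public void beforeAll(ExtensionContext context) {
    // Created at most once, for the outermost context that reaches this call;
    // nested test classes and methods observe the same instance.
    context.getStore(NS).getOrComputeIfAbsent(Env.class, key -> new Env(), Env.class);
  }

  static class Env implements CloseableResource {
    Env() { /* expensive startup happens here, once */ }

    @Override
    public void close() { /* invoked when the owning store is destroyed */ }
  }
}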
+ extensionContext + .getStore(NAMESPACE) + .getOrComputeIfAbsent( + EnvHolder.class, + key -> { + try { + return new EnvHolder(clientProducer); + } catch (Exception e) { + throw new IllegalStateException(e); + } + }); + } + + @Override + public void afterEach(ExtensionContext extensionContext) { + EnvHolder env = extensionContext.getStore(NAMESPACE).get(EnvHolder.class, EnvHolder.class); + env.reset(); + } + + @Override + public void beforeEach(ExtensionContext extensionContext) { + EnvHolder env = extensionContext.getStore(NAMESPACE).get(EnvHolder.class, EnvHolder.class); + env.reset(); + } + + @Override + public boolean supportsParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return super.supportsParameter(parameterContext, extensionContext) || + parameterContext.isAnnotated(ProxyUri.class); + } + + @Override + public Object resolveParameter( + ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + if (super.supportsParameter(parameterContext, extensionContext)) { + return super.resolveParameter(parameterContext, extensionContext); + } + + EnvHolder env = extensionContext.getStore(NAMESPACE).get(EnvHolder.class, EnvHolder.class); + if (env == null) { + throw new ParameterResolutionException( + "Nessie JaxRs env. is not initialized in " + extensionContext.getUniqueId()); + } + + if (parameterContext.isAnnotated(ProxyUri.class)) { + return env.jerseyTest.target().getUri(); + } + + throw new ParameterResolutionException( + "Unsupported annotation on parameter " + + parameterContext.getParameter() + + " on " + + parameterContext.getTarget()); + } + + private static class EnvHolder implements CloseableResource { + private final Weld weld; + private final JerseyTest jerseyTest; + + void reset() { + } + + public EnvHolder(final Class<?> clientProducer) throws Exception { + weld = new Weld(); + weld.addBeanClass(clientProducer); + // Let Weld scan all the resources to discover injection points and dependencies + weld.addPackages(true, ProxyConfigResource.class); + // Inject external beans + weld.addExtension(new AuthorizerExtension().setAccessCheckerSupplier(this::createNewChecker)); + weld.initialize(); + + jerseyTest = + new JerseyTest() { + @Override + protected Application configure() { + ResourceConfig config = new ResourceConfig(); + config.register(ProxyV2ConfigResource.class); + config.register(ProxyV2TreeResource.class); + config.register(ProxyTreeResource.class); + config.register(ProxyDiffResource.class); + config.register(ProxyContentResource.class); + config.register(ProxyConfigResource.class); + config.register(ProxyRefLogResource.class); + config.register(ProxyNamespaceResource.class); + config.register(ContentKeyParamConverterProvider.class); + config.register(NamespaceParamConverterProvider.class); + config.register(ReferenceTypeParamConverterProvider.class); + config.register(ProxyExceptionMapper.class, 10); + config.register(ProxyRuntimeExceptionMapper.class, 10); + config.register(ConstraintViolationExceptionMapper.class, 10); + config.register(ValidationExceptionMapper.class, 10); + config.register(NessieExceptionMapper.class); + config.register(NessieJaxRsJsonParseExceptionMapper.class, 10); + config.register(NessieJaxRsJsonMappingExceptionMapper.class, 10); + config.register(EncodingFilter.class); + config.register(GZipEncoder.class); + config.register(DeflateEncoder.class); + + // Use a dynamically allocated port, not a static default (80/443)
or statically + // configured port. + set(TestProperties.CONTAINER_PORT, "0"); + + return config; + } + }; + + jerseyTest.setUp(); + } + + private BatchAccessChecker createNewChecker(AccessContext context) { + return AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + } + + @Override + public void close() throws Throwable { + jerseyTest.tearDown(); + weld.shutdown(); + } + } +} diff --git a/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ProxyUri.java b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ProxyUri.java new file mode 100644 index 0000000000..057e3d26ef --- /dev/null +++ b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/ProxyUri.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dremio.services.nessie.proxy; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation for JUnit5 method parameters that need a URI to the Nessie server managed by {@link + * NessieProxyJaxRsExtension}. + */ +@Target({ElementType.FIELD, ElementType.PARAMETER}) +@Retention(RetentionPolicy.RUNTIME) +@Inherited +public @interface ProxyUri {} diff --git a/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/RestClientProducer.java b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/RestClientProducer.java new file mode 100644 index 0000000000..ecbae047e8 --- /dev/null +++ b/services/nessie-proxy/src/test/java/com/dremio/services/nessie/proxy/RestClientProducer.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2017-2019 Dremio Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.dremio.services.nessie.proxy; + +import static org.projectnessie.client.NessieConfigConstants.CONF_FORCE_URL_CONNECTION_CLIENT; + +import java.util.Collections; + +import javax.enterprise.inject.Produces; +import javax.inject.Singleton; + +import org.projectnessie.client.api.NessieApiV2; +import org.projectnessie.client.http.HttpClientBuilder; + +public class RestClientProducer { + + @Produces + @Singleton + public NessieApiV2 createClient() { + return HttpClientBuilder.builder() + .fromConfig(Collections.singletonMap(CONF_FORCE_URL_CONNECTION_CLIENT, "true")::get) + .withUri(createNessieURIString()) + .build(NessieApiV2.class); + } + + private static String createNessieURIString() { + return System.getProperty("nessie.server.url") + "/api/v2"; + } +} diff --git a/services/nessie-storage-upgrade/pom.xml b/services/nessie-storage-upgrade/pom.xml index 107560c5ca..fd8e7159f6 100644 --- a/services/nessie-storage-upgrade/pom.xml +++ b/services/nessie-storage-upgrade/pom.xml @@ -22,7 +22,7 @@ <parent> <groupId>com.dremio.services</groupId> <artifactId>dremio-services-parent</artifactId> - <version>24.0.0-202302100528110223-3a169b7c</version> + <version>24.1.0-202306130653310132-d30779f6</version> </parent> <artifactId>dremio-services-nessie-storage-upgrade</artifactId> @@ -35,16 +35,6 @@ <artifactId>dremio-dac-daemon</artifactId> <version>${project.version}</version> </dependency> - <dependency> - <groupId>org.junit.jupiter</groupId> - <artifactId>junit-jupiter</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.mockito</groupId> - <artifactId>mockito-junit-jupiter</artifactId> - <scope>test</scope> - </dependency> </dependencies> diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/maintenance/NessieRepoMaintenanceCommand.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/maintenance/NessieRepoMaintenanceCommand.java index c47755bb65..c018265362 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/maintenance/NessieRepoMaintenanceCommand.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/maintenance/NessieRepoMaintenanceCommand.java @@ -19,6 +19,7 @@ import java.util.Optional; import java.util.stream.Stream; +import org.projectnessie.nessie.relocated.protobuf.ByteString; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.persist.adapter.DatabaseAdapter; @@ -45,7 +46,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; /** * Admin CLI tool for invoking Embedded Nessie maintenance operations and performing basic repository sanity checks.
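For context on how the producer above gets exercised: any caller can stand up a v2 client against the proxy the same way `RestClientProducer` does. A hedged sketch, assuming the proxy is reachable at a hypothetical local address (the URI and try-with-resources usage are illustrative, not taken from this change):

import static org.projectnessie.client.NessieConfigConstants.CONF_FORCE_URL_CONNECTION_CLIENT;

import java.util.Collections;

import org.projectnessie.client.api.NessieApiV2;
import org.projectnessie.client.http.HttpClientBuilder;

public class ProxyClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical proxy address; the tests read it from the nessie.server.url property instead.
    try (NessieApiV2 api = HttpClientBuilder.builder()
        .fromConfig(Collections.singletonMap(CONF_FORCE_URL_CONNECTION_CLIENT, "true")::get)
        .withUri("http://localhost:19120/api/v2")
        .build(NessieApiV2.class)) {
      System.out.println("Default branch: " + api.getConfig().getDefaultBranch());
    }
  }
}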
@@ -128,6 +128,7 @@ public static void main(String[] args) throws Exception { static String execute(LocalKVStoreProvider kvStore, Options options) throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .build(); NessieDatastoreInstance store = new NessieDatastoreInstance(); store.configure(new ImmutableDatastoreDbConfig.Builder().setStoreProvider(() -> kvStore).build()); diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateIcebergMetadataPointer.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateIcebergMetadataPointer.java index bc7641eb9b..380685786d 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateIcebergMetadataPointer.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateIcebergMetadataPointer.java @@ -31,12 +31,12 @@ import java.util.stream.Stream; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; import org.projectnessie.server.store.proto.ObjectTypes; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; import org.projectnessie.versioned.TagName; @@ -51,6 +51,7 @@ import org.projectnessie.versioned.persist.nontx.ImmutableAdjustableNonTransactionalDatabaseAdapterConfig; import org.projectnessie.versioned.store.DefaultStoreWorker; +import com.dremio.common.SuppressForbidden; import com.dremio.dac.cmd.AdminLogger; import com.dremio.dac.cmd.upgrade.UpgradeContext; import com.dremio.dac.cmd.upgrade.UpgradeTask; @@ -59,8 +60,6 @@ import com.dremio.service.nessie.ImmutableDatastoreDbConfig; import com.dremio.service.nessie.NessieDatastoreInstance; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; /** * Migrates legacy on-reference state entries using the {@code ICEBERG_METADATA_POINTER} type to current format. @@ -94,6 +93,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String branchName) throws Exceptio DatabaseAdapter adapter = new DatastoreDatabaseAdapterFactory().newBuilder() .withConnector(store) .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder() + .validateNamespaces(false) // Suppress periodic key list generation by the DatabaseAdapter. We'll do that once at the end of the upgrade. .keyListDistance(Integer.MAX_VALUE) .build()) @@ -104,7 +104,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String branchName) throws Exceptio adapter.initializeRepo("main"); // Legacy data was stored only on `main`, so migrate data only on this branch. - ReferenceInfo main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); + ReferenceInfo main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); // The upgrade is performed in two phases: // 1) Scan the commit log to find ICEBERG_METADATA_POINTER entries. 
Those entries can only exist in @@ -128,13 +128,13 @@ void upgrade(KVStoreProvider kvStoreProvider, String branchName) throws Exceptio BranchName upgradeBranch = BranchName.of(branchName); Hash upgradeStartHash = adapter.noAncestorHash(); try { - ReferenceInfo branchInfo = adapter.namedRef(branchName, GetNamedRefsParams.DEFAULT); + ReferenceInfo branchInfo = adapter.namedRef(branchName, GetNamedRefsParams.DEFAULT); AdminLogger.log("Resetting old upgrade branch: " + branchInfo); adapter.assign(upgradeBranch, Optional.empty(), upgradeStartHash); } catch (Exception e1) { // Create a new upgrade branch try { - upgradeStartHash = adapter.create(upgradeBranch, upgradeStartHash); + upgradeStartHash = adapter.create(upgradeBranch, upgradeStartHash).getHash(); } catch (Exception e2) { IllegalStateException ex = new IllegalStateException("Unable to create upgrade branch: " + branchName, e2); ex.addSuppressed(e1); @@ -165,7 +165,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String branchName) throws Exceptio .build(); // Make an empty commit to force key list computation in the adapter - Hash upgradedHead = adapter1.commit(keyListCommit); + Hash upgradedHead = adapter1.commit(keyListCommit).getCommitHash(); AdminLogger.log("Committed post-upgrade key list as {}", upgradedHead); // Tag old `main` branch @@ -183,7 +183,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String branchName) throws Exceptio private static final class Converter { private final DatabaseAdapter adapter; private final Hash sourceBranch; - private final Set<Key> activeKeys = new HashSet<>(); + private final Set<ContentKey> activeKeys = new HashSet<>(); private BranchName targetBranch; private ImmutableCommitParams.Builder commit; @@ -227,7 +227,7 @@ private void drain() { } try { - head = adapter.commit(commit.build()); + head = adapter.commit(commit.build()).getCommitHash(); numCommits++; } catch (Exception e) { throw new IllegalStateException(e); @@ -240,7 +240,7 @@ private void drain() { } private void processValues(BiConsumer<CommitLogEntry, KeyWithBytes> action) { - Set<Key> keysToProcess = new HashSet<>(activeKeys); + Set<ContentKey> keysToProcess = new HashSet<>(activeKeys); try (Stream<CommitLogEntry> log = adapter.commitLog(sourceBranch); Stream<CommitLogEntry> commitLog = takeUntilExcludeLast(log, k -> keysToProcess.isEmpty())) { commitLog.forEach(entry -> entry.getPuts().forEach(put -> { @@ -258,7 +258,7 @@ private boolean upgradeRequired() { AtomicLong current = new AtomicLong(); processValues((logEntry, keyWithBytes) -> { - ObjectTypes.Content.ObjectTypeCase refType = parseContent(keyWithBytes.getValue()).getObjectTypeCase(); + ObjectTypes.Content.ObjectTypeCase refType = parseContent(keyWithBytes).getObjectTypeCase(); if (refType == ObjectTypes.Content.ObjectTypeCase.ICEBERG_METADATA_POINTER) { legacy.incrementAndGet(); @@ -286,16 +286,17 @@ private void upgrade(BranchName upgradeBranch, Hash upgradeStartHash) { AdminLogger.log("Processed {} entries.", totalEntries); } - private ObjectTypes.Content parseContent(ByteString content) { + @SuppressForbidden // This method has to use Nessie's relocated ByteString in method parameters.
+ private ObjectTypes.Content parseContent(KeyWithBytes kb) { try { - return ObjectTypes.Content.parseFrom(content); - } catch (InvalidProtocolBufferException e) { + return ObjectTypes.Content.parseFrom(kb.getValue()); + } catch (Exception e) { throw new IllegalStateException(e); } } private void upgradeValue(CommitLogEntry logEntry, KeyWithBytes kb) { - ObjectTypes.Content refState = parseContent(kb.getValue()); + ObjectTypes.Content refState = parseContent(kb); ObjectTypes.Content.ObjectTypeCase refType = refState.getObjectTypeCase(); if (refType == ObjectTypes.Content.ObjectTypeCase.ICEBERG_METADATA_POINTER) { @@ -312,8 +313,8 @@ private void upgradeValue(CommitLogEntry logEntry, KeyWithBytes kb) { KeyWithBytes.of( kb.getKey(), contentId, - DefaultStoreWorker.payloadForContent(table), - DefaultStoreWorker.instance().toStoreOnReferenceState(table, commit::addAttachments))); + (byte) DefaultStoreWorker.payloadForContent(table), + DefaultStoreWorker.instance().toStoreOnReferenceState(table))); } else { // This case is not expected during actual upgrades. It is handled only for the sake of completeness. diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateToNessieAdapter.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateToNessieAdapter.java index 32c410f763..14fd7c6154 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateToNessieAdapter.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/MigrateToNessieAdapter.java @@ -26,11 +26,12 @@ import java.util.concurrent.atomic.AtomicReference; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; +import org.projectnessie.nessie.relocated.protobuf.ByteString; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; @@ -54,7 +55,6 @@ import com.dremio.service.nessie.upgrade.version040.MetadataReader; import com.dremio.service.nessie.upgrade.version040.MetadataReader040; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; /** * Migrates legacy Nessie data stored in custom format in the KVStore to OSS format managed by @@ -110,6 +110,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String upgradeBranchName, Metadata DatabaseAdapter adapter = new DatastoreDatabaseAdapterFactory().newBuilder().withConnector(store) .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder() + .validateNamespaces(false) .keyListDistance(Integer.MAX_VALUE) .build()) .build(); @@ -160,7 +161,7 @@ void upgrade(KVStoreProvider kvStoreProvider, String upgradeBranchName, Metadata ))); } - Key key = Key.of(contentKey.toArray(new String[0])); + ContentKey key = ContentKey.of(contentKey.toArray(new String[0])); AdminLogger.log("Migrating key: " + key + ", location: " + location); @@ -177,8 +178,8 @@ void upgrade(KVStoreProvider kvStoreProvider, String upgradeBranchName, Metadata KeyWithBytes.of( key, contentId, - DefaultStoreWorker.payloadForContent(table), - DefaultStoreWorker.instance().toStoreOnReferenceState(table, a -> 
commit.get().addAttachments(a)))); + (byte) DefaultStoreWorker.payloadForContent(table), + DefaultStoreWorker.instance().toStoreOnReferenceState(table))); if (numEntries.incrementAndGet() >= MAX_ENTRIES_PER_COMMIT) { commit(adapter, commit.get(), numEntries.get(), totalEntries); @@ -217,6 +218,7 @@ private void commitKeyList(NessieDatastoreInstance store, DatabaseAdapter adapter = new DatastoreDatabaseAdapterFactory().newBuilder() .withConnector(store) .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder() + .validateNamespaces(false) .keyListDistance(1) .build()) .build(); @@ -234,7 +236,7 @@ private void commitKeyList(NessieDatastoreInstance store, .build(); // Make an empty commit to force key list computation in the adapter - Hash hash = adapter.commit(emptyCommit); + Hash hash = adapter.commit(emptyCommit).getCommitHash(); AdminLogger.log("Committed post-upgrade key list ({} entries) as {}", totalEntries.get(), hash); } @@ -244,7 +246,7 @@ private void commit(DatabaseAdapter adapter, ImmutableCommitParams.Builder commi } try { - Hash hash = adapter.commit(commit.build()); + Hash hash = adapter.commit(commit.build()).getCommitHash(); total.addAndGet(numEntries); AdminLogger.log("Committed {} (total {}) migrated tables as {}", numEntries, total.get(), hash); } catch (Exception e) { diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/PurgeObsoleteKeyLists.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/PurgeObsoleteKeyLists.java index 163feca407..3384a2589d 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/PurgeObsoleteKeyLists.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/PurgeObsoleteKeyLists.java @@ -78,7 +78,9 @@ String upgrade(KVStoreProvider storeProvider) throws Exception { DatabaseAdapter adapter = new DatastoreDatabaseAdapterFactory().newBuilder() .withConnector(store) - .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder().build()) + .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder() + .validateNamespaces(false) + .build()) .build(); Map<String, Map<String, String>> result = adapter.repoMaintenance(params);
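The storage-upgrade tasks above all adapt to the same Nessie database-adapter API changes: DatabaseAdapter.commit() no longer returns a bare Hash but a commit result that is unwrapped via getCommitHash(), DefaultStoreWorker.payloadForContent() returns an int that has to be narrowed to the byte accepted by KeyWithBytes.of(), and toStoreOnReferenceState() no longer takes an attachment consumer. A minimal sketch of the resulting call shape, assuming a DatabaseAdapter adapter, a ContentKey key, a ContentId contentId and an IcebergTable table are in scope, and with METADATA_SERIALIZER being the statically imported serializer seen elsewhere in this patch (the variable names are illustrative, not taken from the patch):

ImmutableCommitParams.Builder commit = ImmutableCommitParams.builder()
    .toBranch(BranchName.of("main"))
    .commitMetaSerialized(METADATA_SERIALIZER.toBytes(CommitMeta.fromMessage("example")));
// payloadForContent(...) is narrowed to the byte that KeyWithBytes.of(...) expects,
// and toStoreOnReferenceState(...) no longer takes an attachment consumer.
commit.addPuts(KeyWithBytes.of(key, contentId,
    (byte) DefaultStoreWorker.payloadForContent(table),
    DefaultStoreWorker.instance().toStoreOnReferenceState(table)));
// commit(...) now returns a result object; the hash is unwrapped explicitly.
Hash head = adapter.commit(commit.build()).getCommitHash();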
diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/RebuildKeyList.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/RebuildKeyList.java index d7def00d79..af950a3e2b 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/RebuildKeyList.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/storage/RebuildKeyList.java @@ -25,9 +25,9 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; +import org.projectnessie.model.ContentKey; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.persist.adapter.CommitLogEntry; import org.projectnessie.versioned.persist.adapter.ImmutableKeyList; @@ -38,6 +38,7 @@ import org.projectnessie.versioned.persist.adapter.spi.DatabaseAdapterUtil; import org.projectnessie.versioned.persist.nontx.ImmutableAdjustableNonTransactionalDatabaseAdapterConfig; +import com.dremio.common.SuppressForbidden; import com.dremio.dac.cmd.AdminLogger; import com.dremio.dac.cmd.upgrade.UpgradeContext; import com.dremio.dac.cmd.upgrade.UpgradeTask; @@ -48,7 +49,6 @@ import com.dremio.service.nessie.ImmutableDatastoreDbConfig; import com.dremio.service.nessie.NessieDatastoreInstance; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; /** * This upgrade task rebuilds the most recent key list to add missing commit IDs to key list entries. @@ -87,11 +87,13 @@ void upgrade(KVStoreProvider storeProvider) throws Exception { store.initialize(); DatastoreDatabaseAdapter adapter = (DatastoreDatabaseAdapter) new DatastoreDatabaseAdapterFactory().newBuilder() .withConnector(store) - .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder().build()) + .withConfig(ImmutableAdjustableNonTransactionalDatabaseAdapterConfig.builder() + .validateNamespaces(false) + .build()) .build(); // Only process main. Other branches in the Embedded Nessie are not utilized during runtime. - ReferenceInfo<ByteString> main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); + ReferenceInfo main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); // Find the most recent key list AtomicReference<List<Hash>> keyListIds = new AtomicReference<>(); try (Stream<CommitLogEntry> log = adapter.commitLog(main.getHash())) { @@ -111,7 +113,7 @@ void upgrade(KVStoreProvider storeProvider) throws Exception { // Process all key list entities together. This will use memory, but will speed up the upgrade process. // Testing with some larger data sets of 300K keys, ~170 key list entities shown that default // upgrade process memory settings were sufficient. - Set<Key> activeKeys = new HashSet<>(); + Set<ContentKey> activeKeys = new HashSet<>(); Map<Hash, KeyList> keyLists = new HashMap<>(); keyListIds.get().forEach(hash -> { Document doc = store.getKeyList().get(adapter.dbKey(hash)); @@ -126,7 +128,7 @@ void upgrade(KVStoreProvider storeProvider) throws Exception { }); // Find relevant commit IDs for active keys. - Map<Key, Hash> activePuts = new HashMap<>(); + Map<ContentKey, Hash> activePuts = new HashMap<>(); AtomicInteger numCommits = new AtomicInteger(); try (Stream<CommitLogEntry> log = adapter.commitLog(main.getHash())) { DatabaseAdapterUtil.takeUntilExcludeLast(log, c -> activeKeys.isEmpty()) @@ -150,7 +152,7 @@ void upgrade(KVStoreProvider storeProvider) throws Exception { if (keyListEntry == null) { updated.addKeys((KeyListEntry) null); } else { - Key key = keyListEntry.getKey(); + ContentKey key = keyListEntry.getKey(); Hash commitHash = activePuts.get(key); if (commitHash == null) { throw new IllegalStateException("Put not found for key: " + key); @@ -162,9 +164,14 @@ void upgrade(KVStoreProvider storeProvider) throws Exception { }); // Overwrite the key list entity at the same hash. - store.getKeyList().put(adapter.dbKey(keyListHash), ProtoSerialization.toProto(updated.build()).toByteArray()); + store.getKeyList().put(adapter.dbKey(keyListHash), toBytes(updated.build())); AdminLogger.log("Updated key list {}", keyListHash); }); } } + + @SuppressForbidden // This method has to use Nessie's relocated ByteString in method parameters. 
+ private static byte[] toBytes(KeyList keyList) { + return ProtoSerialization.toProto(keyList).toByteArray(); + } } diff --git a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/version040/NessieRefKVStoreBuilder.java b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/version040/NessieRefKVStoreBuilder.java index a87f927f65..6c51a0b26c 100644 --- a/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/version040/NessieRefKVStoreBuilder.java +++ b/services/nessie-storage-upgrade/src/main/java/com/dremio/service/nessie/upgrade/version040/NessieRefKVStoreBuilder.java @@ -47,6 +47,7 @@ public BranchRef(String name) { super(name); } + @Override public String encode() { return String.format("B|%s", getName()); } @@ -57,6 +58,7 @@ public TagRef(String name) { super(name); } + @Override public String encode() { return String.format("T|%s", getName()); } diff --git a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/maintenance/TestNessieRepoMaintenanceCommand.java b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/maintenance/TestNessieRepoMaintenanceCommand.java index 13c0a44c21..c1f92f684b 100644 --- a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/maintenance/TestNessieRepoMaintenanceCommand.java +++ b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/maintenance/TestNessieRepoMaintenanceCommand.java @@ -45,6 +45,7 @@ void createKVStore() throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .build(); NessieDatastoreInstance store = new NessieDatastoreInstance(); store.configure(new ImmutableDatastoreDbConfig.Builder().setStoreProvider(() -> storeProvider).build()); diff --git a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateIcebergMetadataPointer.java b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateIcebergMetadataPointer.java index 87937cd6ad..229689af03 100644 --- a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateIcebergMetadataPointer.java +++ b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateIcebergMetadataPointer.java @@ -35,12 +35,13 @@ import org.junit.jupiter.params.provider.ValueSource; import org.projectnessie.model.CommitMeta; import org.projectnessie.model.Content; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; +import org.projectnessie.nessie.relocated.protobuf.ByteString; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; import org.projectnessie.versioned.ImmutablePut; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; @@ -54,11 +55,11 @@ import org.projectnessie.versioned.persist.nontx.NonTransactionalDatabaseAdapterConfig; import org.projectnessie.versioned.persist.store.PersistVersionStore; +import com.dremio.common.SuppressForbidden; import com.dremio.datastore.LocalKVStoreProvider; import com.dremio.service.nessie.DatastoreDatabaseAdapterFactory; import 
com.dremio.service.nessie.ImmutableDatastoreDbConfig; import com.dremio.service.nessie.NessieDatastoreInstance; -import com.google.protobuf.ByteString; /** * Unit tests for {@link MigrateIcebergMetadataPointer}. @@ -88,6 +89,7 @@ void createKVStore() throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .build(); adapter = new DatastoreDatabaseAdapterFactory().newBuilder() .withConfig(adapterCfg) @@ -102,7 +104,8 @@ void stopKVStore() throws Exception { } } - private void commitLegacyData(Key key, ContentId contentId) + @SuppressForbidden // This method has to use Nessie's relocated ByteString to interface with Nessie Database Adapters. + private void commitLegacyData(ContentKey key, ContentId contentId) throws ReferenceNotFoundException, ReferenceConflictException { ByteString refState = ByteString.copyFrom(Base64.getDecoder().decode(LEGACY_REF_STATE_BASE64)); @@ -124,8 +127,8 @@ private void commitLegacyData(Key key, ContentId contentId) void testUpgrade(int numExtraTables) throws Exception { adapter.initializeRepo("main"); // Load a legacy entry into the adapter - List<Key> keys = new ArrayList<>(); - Key key1 = Key.of("test", "table", "11111"); + List<ContentKey> keys = new ArrayList<>(); + ContentKey key1 = ContentKey.of("test", "table", "11111"); ContentId contentId1 = ContentId.of("test-content-id"); commitLegacyData(key1, contentId1); keys.add(key1); @@ -134,11 +137,13 @@ void testUpgrade(int numExtraTables) throws Exception { // Create some extra Iceberg tables in current Nessie format for (int i = 0; i < numExtraTables; i++) { - Key extraKey = Key.of("test", "table", "current-" + i); + ContentKey extraKey = ContentKey.of("test", "table", "current-" + i); + IcebergTable table = IcebergTable.of("test-metadata-location", 1, 2, 3, 4, + "extra-content-id-" + i); versionStore.commit(BranchName.of("main"), Optional.empty(), CommitMeta.fromMessage("test"), Collections.singletonList(ImmutablePut.builder() .key(extraKey) - .value(IcebergTable.of("test-metadata-location", 1, 2, 3, 4, "extra-content-id-" + i)) + .valueSupplier(() -> table) .build())); keys.add(extraKey); } @@ -150,9 +155,9 @@ void testUpgrade(int numExtraTables) throws Exception { assertThat(refs).noneMatch(r -> r.getNamedRef().getName().equals(UPGRADE_BRANCH_NAME)); } - Map<Key, Content> tables = versionStore.getValues(BranchName.of("main"), keys); + Map<ContentKey, Content> tables = versionStore.getValues(BranchName.of("main"), keys); - assertThat(tables.keySet()).containsExactlyInAnyOrder(keys.toArray(new Key[0])); + assertThat(tables.keySet()).containsExactlyInAnyOrder(keys.toArray(new ContentKey[0])); assertThat(tables).allSatisfy((k, v) -> { assertThat(v).isInstanceOf(IcebergTable.class) .extracting("metadataLocation") @@ -200,9 +205,10 @@ void testUnnecessaryUpgrade() throws Exception { VersionStore versionStore = new PersistVersionStore(adapter); Hash head = versionStore.commit(BranchName.of("main"), Optional.empty(), CommitMeta.fromMessage("test"), Collections.singletonList(ImmutablePut.builder() - .key(Key.of("test-key")) - .value(table) - .build())); + .key(ContentKey.of("test-key")) + .valueSupplier(() -> table) + .build())) + .getCommitHash(); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME); @@ -214,7 +220,7 @@ void testUnnecessaryUpgrade() throws Exception { void testUnnecessaryUpgradeOfDeletedEntry() throws Exception { adapter.initializeRepo("main"); // Load a legacy entry into the adapter - Key key1 = Key.of("test", "table", "11111"); + ContentKey 
key1 = ContentKey.of("test", "table", "11111"); ContentId contentId1 = ContentId.of("test-content-id"); commitLegacyData(key1, contentId1); @@ -223,7 +229,8 @@ void testUnnecessaryUpgradeOfDeletedEntry() throws Exception { .toBranch(BranchName.of("main")) .commitMetaSerialized(METADATA_SERIALIZER.toBytes(CommitMeta.fromMessage("test delete"))) .addDeletes(key1) - .build()); + .build()) + .getCommitHash(); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME); @@ -235,7 +242,7 @@ void testUnnecessaryUpgradeOfDeletedEntry() throws Exception { void testUnnecessaryUpgradeOfReplacedEntry() throws Exception { adapter.initializeRepo("main"); // Load a legacy entry into the adapter - Key key1 = Key.of("test", "table", "11111"); + ContentKey key1 = ContentKey.of("test", "table", "11111"); ContentId contentId1 = ContentId.of("test-content-id"); commitLegacyData(key1, contentId1); @@ -245,8 +252,9 @@ void testUnnecessaryUpgradeOfReplacedEntry() throws Exception { Hash head = versionStore.commit(BranchName.of("main"), Optional.empty(), CommitMeta.fromMessage("test"), Collections.singletonList(ImmutablePut.builder() .key(key1) - .value(table) - .build())); + .valueSupplier(() -> table) + .build())) + .getCommitHash(); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME); @@ -257,7 +265,7 @@ void testUnnecessaryUpgradeOfReplacedEntry() throws Exception { @Test void testUpgradeBranchReset() throws Exception { adapter.initializeRepo("main"); - commitLegacyData(Key.of("test1"), ContentId.of("test-cid")); + commitLegacyData(ContentKey.of("test1"), ContentId.of("test-cid")); adapter.create(BranchName.of(UPGRADE_BRANCH_NAME), adapter.noAncestorHash()); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME);
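The test changes in this patch repeat two mechanical rewrites: org.projectnessie.versioned.Key becomes org.projectnessie.model.ContentKey, and ImmutablePut now supplies its value lazily through valueSupplier() instead of an eager value(). A hedged sketch of the rewritten call shape, assuming a VersionStore versionStore is in scope (names and literals are illustrative):

ContentKey key = ContentKey.of("test", "table", "11111"); // was: Key.of(...)
IcebergTable table = IcebergTable.of("test-metadata-location", 1, 2, 3, 4, "content-id");
versionStore.commit(BranchName.of("main"), Optional.empty(),
    CommitMeta.fromMessage("test"),
    Collections.singletonList(ImmutablePut.builder()
        .key(key)                   // ContentKey, no longer versioned.Key
        .valueSupplier(() -> table) // was: .value(table)
        .build()));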
diff --git a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateToNessieAdapter.java b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateToNessieAdapter.java index 6daf1dc65d..23192b50a9 100644 --- a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateToNessieAdapter.java +++ b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestMigrateToNessieAdapter.java @@ -40,11 +40,12 @@ import org.junit.jupiter.params.provider.ValueSource; import org.projectnessie.model.CommitMeta; import org.projectnessie.model.Content; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; +import org.projectnessie.nessie.relocated.protobuf.ByteString; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.ImmutablePut; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.VersionStore; import org.projectnessie.versioned.persist.adapter.CommitLogEntry; @@ -62,7 +63,6 @@ import com.dremio.service.nessie.ImmutableDatastoreDbConfig; import com.dremio.service.nessie.NessieDatastoreInstance; import com.google.common.collect.ImmutableList; -import com.google.protobuf.ByteString; /** * Unit tests for {@link MigrateToNessieAdapter}. @@ -89,6 +89,7 @@ void createKVStore() throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .build(); adapter = new DatastoreDatabaseAdapterFactory().newBuilder() .withConfig(adapterCfg) @@ -128,11 +129,11 @@ void testUpgradeWithPreviousHistory() throws Exception { VersionStore versionStore = new PersistVersionStore(adapter); - Key extraKey = Key.of("existing", "table", "abc"); + ContentKey extraKey = ContentKey.of("existing", "table", "abc"); versionStore.commit(BranchName.of("main"), Optional.empty(), CommitMeta.fromMessage("test"), Collections.singletonList(ImmutablePut.builder() .key(extraKey) - .value(IcebergTable.of("test-metadata-location", 1, 2, 3, 4, "extra-content-id")) + .valueSupplier(() -> IcebergTable.of("test-metadata-location", 1, 2, 3, 4, "extra-content-id")) .build())); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME, c -> {}); @@ -159,7 +160,7 @@ void testUpgradeBranchReset() throws Exception { @ParameterizedTest @ValueSource(ints = {1, 2, 40, 99, 100, 101, 499, 500, 501}) void testUpgrade(int numCommits) throws Exception { - List<Key> keys = new ArrayList<>(); + List<ContentKey> keys = new ArrayList<>(); List<String> testEntries = new ArrayList<>(); task.upgrade(storeProvider, UPGRADE_BRANCH_NAME, c -> { @@ -169,7 +170,7 @@ void testUpgrade(int numCommits) throws Exception { c.migrateCommit("main", key, location); - Key nessieKey = Key.of(key.toArray(new String[0])); + ContentKey nessieKey = ContentKey.of(key.toArray(new String[0])); keys.add(nessieKey); testEntries.add(location + "|" + nessieKey); } @@ -177,10 +178,10 @@ void testUpgrade(int numCommits) throws Exception { VersionStore versionStore = new PersistVersionStore(adapter); - Map<Key, Content> tables = versionStore.getValues(BranchName.of("main"), keys); + Map<ContentKey, Content> tables = versionStore.getValues(BranchName.of("main"), keys); assertThat(tables.entrySet().stream().map(e -> { - Key key = e.getKey(); + ContentKey key = e.getKey(); IcebergTable table = (IcebergTable) e.getValue(); return table.getMetadataLocation() + "|" + key; })).containsExactlyInAnyOrder(testEntries.toArray(new String[0])); diff --git a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestPurgeObsoleteKeyLists.java b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestPurgeObsoleteKeyLists.java index 3ee07745cd..9e0ac642ae 100644 --- a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestPurgeObsoleteKeyLists.java +++ b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestPurgeObsoleteKeyLists.java @@ -46,6 +46,7 @@ void createKVStore() throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .build(); new DatastoreDatabaseAdapterFactory().newBuilder() .withConfig(adapterCfg) diff --git a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestRebuildKeyList.java b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestRebuildKeyList.java index 103a01abba..b488e591fb 100644 --- a/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestRebuildKeyList.java +++ b/services/nessie-storage-upgrade/src/test/java/com/dremio/service/nessie/upgrade/storage/TestRebuildKeyList.java @@ -30,10 +30,10 @@ import 
org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; @@ -48,11 +48,11 @@ import org.projectnessie.versioned.persist.nontx.NonTransactionalDatabaseAdapterConfig; import org.projectnessie.versioned.store.DefaultStoreWorker; +import com.dremio.common.SuppressForbidden; import com.dremio.datastore.LocalKVStoreProvider; import com.dremio.service.nessie.DatastoreDatabaseAdapterFactory; import com.dremio.service.nessie.ImmutableDatastoreDbConfig; import com.dremio.service.nessie.NessieDatastoreInstance; -import com.google.protobuf.ByteString; class TestRebuildKeyList extends AbstractNessieUpgradeTest { @@ -73,6 +73,7 @@ void createKVStore() throws Exception { NonTransactionalDatabaseAdapterConfig adapterCfg = ImmutableAdjustableNonTransactionalDatabaseAdapterConfig .builder() + .validateNamespaces(false) .keyListDistance(10) // build key lists every 10 commits .maxKeyListSize(0) // force key list entities to be used even for small keys (i.e. prevent in-commit key lists) .maxKeyListEntitySize(0) @@ -91,7 +92,7 @@ void stopKVStore() throws Exception { } } - private void put(Key key) throws ReferenceNotFoundException, ReferenceConflictException { + private void put(ContentKey key) throws ReferenceNotFoundException, ReferenceConflictException { IcebergTable table = IcebergTable.of(key.toString() + "-loc", 1, 2, 3, 4, UUID.randomUUID().toString()); ContentId contentId = ContentId.of(UUID.randomUUID().toString()); @@ -99,25 +100,25 @@ private void put(Key key) throws ReferenceNotFoundException, ReferenceConflictEx adapter.commit(commit .toBranch(BranchName.of("main")) .commitMetaSerialized(METADATA_SERIALIZER.toBytes(CommitMeta.fromMessage("test-" + key))) - .addPuts(KeyWithBytes.of(key, contentId, DefaultStoreWorker.payloadForContent(table), - DefaultStoreWorker.instance().toStoreOnReferenceState(table, commit::addAttachments))) + .addPuts(KeyWithBytes.of(key, contentId, (byte) DefaultStoreWorker.payloadForContent(table), + DefaultStoreWorker.instance().toStoreOnReferenceState(table))) .build()); } - private void validateActiveKeys(Collection<Key> activeKeys) throws ReferenceNotFoundException { - ReferenceInfo<ByteString> main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); + @SuppressForbidden // This method has to use Nessie's relocated ByteString in method parameters. 
+ private void validateActiveKeys(Collection<ContentKey> activeKeys) throws ReferenceNotFoundException { + ReferenceInfo main = adapter.namedRef("main", GetNamedRefsParams.DEFAULT); try (Stream<KeyListEntry> keys = adapter.keys(main.getHash(), KeyFilterPredicate.ALLOW_ALL)) { assertThat(keys.map(KeyListEntry::getKey)).containsExactlyInAnyOrderElementsOf(activeKeys); } - Map<Key, ContentAndState> values = adapter.values(main.getHash(), activeKeys, KeyFilterPredicate.ALLOW_ALL); + Map<ContentKey, ContentAndState> values = adapter.values(main.getHash(), activeKeys, KeyFilterPredicate.ALLOW_ALL); assertThat(values).hasSize(activeKeys.size()); activeKeys.forEach(k -> { ContentAndState value = values.get(k); - ByteString refState = value.getRefState(); IcebergTable table = (IcebergTable) DefaultStoreWorker.instance().valueFromStore( - value.getPayload(), refState, () -> null, keys -> Stream.empty()); + value.getPayload(), value.getRefState(), () -> null); assertThat(table.getMetadataLocation()).isEqualTo(k.toString() + "-loc"); }); } @@ -125,9 +126,9 @@ private void validateActiveKeys(Collection<Key> activeKeys) throws ReferenceNotF @ParameterizedTest @ValueSource(ints = {0, 1, 2, 3, 5, 9, 10, 11, 99, 100, 101, 200, 1000}) void testUpgrade(int numKeys) throws Exception { - List<Key> keys = new ArrayList<>(); + List<ContentKey> keys = new ArrayList<>(); for (int i = 0; i < numKeys; i++) { - Key key = Key.of("test", "key-" + i); + ContentKey key = ContentKey.of("test", "key-" + i); put(key); keys.add(key); } diff --git a/services/nessie/pom.xml b/services/nessie/pom.xml index 272b2c931d..844801f268 100644 --- a/services/nessie/pom.xml +++ b/services/nessie/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.services @@ -51,48 +51,48 @@ dremio-services-options - org.projectnessie + org.projectnessie.nessie nessie-rest-services - org.projectnessie + org.projectnessie.nessie nessie-server-store - org.projectnessie + org.projectnessie.nessie nessie-services - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-store - org.projectnessie + org.projectnessie.nessie nessie-versioned-spi - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-adapter - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-serialize - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-non-transactional - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-in-memory - org.projectnessie + org.projectnessie.nessie nessie-versioned-tests test - org.projectnessie + org.projectnessie.nessie nessie-versioned-persist-tests test @@ -110,11 +110,6 @@ com.dremio.services dremio-services-scheduler - - org.mockito - mockito-junit-jupiter - test - diff --git a/services/nessie/src/main/java/com/dremio/service/nessie/DatastoreDatabaseAdapter.java b/services/nessie/src/main/java/com/dremio/service/nessie/DatastoreDatabaseAdapter.java index d32f4ab961..4f8880c31c 100644 --- a/services/nessie/src/main/java/com/dremio/service/nessie/DatastoreDatabaseAdapter.java +++ b/services/nessie/src/main/java/com/dremio/service/nessie/DatastoreDatabaseAdapter.java @@ -30,7 +30,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -40,6 +39,8 @@ import java.util.stream.IntStream; import java.util.stream.Stream; +import org.projectnessie.nessie.relocated.protobuf.ByteString; +import 
org.projectnessie.nessie.relocated.protobuf.InvalidProtocolBufferException; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; import org.projectnessie.versioned.NamedRef; @@ -65,21 +66,19 @@ import org.projectnessie.versioned.persist.serialize.AdapterTypes.GlobalStateLogEntry; import org.projectnessie.versioned.persist.serialize.AdapterTypes.GlobalStatePointer; import org.projectnessie.versioned.persist.serialize.AdapterTypes.RefLogEntry; -import org.rocksdb.RocksDBException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.dremio.common.SuppressForbidden; import com.dremio.datastore.api.Document; import com.dremio.datastore.api.KVStore; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.collect.Streams; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; /** * Datastore Database Adapter for Embedded Nessie. */ +@SuppressForbidden // This impl. of a Nessie Database Adapter has to use Nessie's relocated protobuf classes. public class DatastoreDatabaseAdapter extends NonTransactionalDatabaseAdapter { private static final Logger logger = LoggerFactory.getLogger(DatastoreDatabaseAdapter.class); @@ -441,8 +440,6 @@ protected void doEraseRepo() { db.getGlobalPointer(), db.getGlobalLog(), db.getCommitLog(), - db.getAttachments(), - db.getAttachmentKeys(), db.getNamedRefHeads(), db.getRefNames(), db.getRepoDescription(), @@ -689,158 +686,6 @@ protected List doFetchReferenceNames(NonTransaction } } - @Override - protected void writeAttachments(Stream> attachments) { - Lock lock = db.getLock().writeLock(); - lock.lock(); - try { - attachments.forEach(b -> { - storeAttachmentKey(b.getKey()); - db.getAttachments().put(dbKey(b.getKey().getAttachmentId()), b.getValue().toByteArray()); - }); - } finally { - lock.unlock(); - } - } - - @Override - protected boolean consistentWriteAttachment(AdapterTypes.AttachmentKey key, AdapterTypes.AttachmentValue value, - Optional expectedVersion) { - Lock lock = db.getLock().writeLock(); - lock.lock(); - try { - String dbKey = dbKey(key.getAttachmentId()); - Document current = db.getAttachments().get(dbKey); - if (expectedVersion.isPresent()) { - try { - if (current == null) { - return false; - } - AdapterTypes.AttachmentValue val = AdapterTypes.AttachmentValue.parseFrom(current.getValue()); - if (!val.hasVersion() || !val.getVersion().equals(expectedVersion.get())) { - return false; - } - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - } else { - if (current != null) { - return false; - } - storeAttachmentKey(key); - } - db.getAttachments().put(dbKey, value.toByteArray()); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - lock.unlock(); - } - } - - private void storeAttachmentKey(AdapterTypes.AttachmentKey attachmentKey) { - String dbKey = dbKey(attachmentKey.getContentId().getId()); - Document old = db.getAttachmentKeys().get(dbKey); - AdapterTypes.AttachmentKeyList.Builder keyList; - if (old == null) { - keyList = AdapterTypes.AttachmentKeyList.newBuilder().addKeys(attachmentKey); - } else { - try { - keyList = AdapterTypes.AttachmentKeyList.newBuilder().mergeFrom(old.getValue()); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - if (!keyList.getKeysList().contains(attachmentKey)) { - keyList.addKeys(attachmentKey); - } - } - db.getAttachmentKeys().put(dbKey, 
keyList.build().toByteArray()); - } - - @Override - protected Stream fetchAttachmentKeys(String contentId) { - try { - String dbKey = dbKey(contentId); - Document attachmentKeys = db.getAttachmentKeys().get(dbKey); - if (attachmentKeys == null) { - return Stream.empty(); - } - AdapterTypes.AttachmentKeyList keyList; - try { - keyList = AdapterTypes.AttachmentKeyList.parseFrom(attachmentKeys.getValue()); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - return keyList.getKeysList().stream(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - protected Stream> fetchAttachments( - Stream keys) { - try { - return keys.map(k -> { - Document entry = db.getAttachments().get(dbKey(k.getAttachmentId())); - if (entry == null) { - return null; - } - - try { - return Maps.immutableEntry(k, AdapterTypes.AttachmentValue.parseFrom(entry.getValue())); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - }) - .filter(Objects::nonNull); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - protected void purgeAttachments(Stream keys) { - Lock lock = db.getLock().writeLock(); - lock.lock(); - try { - keys.forEach( - k -> { - try { - db.getAttachments().delete(dbKey(k.getAttachmentId())); - removeAttachmentKey(k); - } catch (RocksDBException e) { - throw new RuntimeException(e); - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - lock.unlock(); - } - } - - private void removeAttachmentKey(AdapterTypes.AttachmentKey attachmentKey) throws RocksDBException { - String dbKey = attachmentKey.getContentId().getId(); - Document old = db.getAttachmentKeys().get(dbKey); - if (old == null) { - return; - } - AdapterTypes.AttachmentKeyList.Builder keyList; - try { - keyList = AdapterTypes.AttachmentKeyList.newBuilder().mergeFrom(old.getValue()); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - for (int i = 0; i < keyList.getKeysList().size(); i++) { - if (keyList.getKeys(i).equals(attachmentKey)) { - keyList.removeKeys(i); - break; - } - } - db.getAttachmentKeys().put(dbKey, keyList.build().toByteArray()); - } - @Override public Map> repoMaintenance(RepoMaintenanceParams params) { ImmutableMap.Builder> results = ImmutableMap.builder(); diff --git a/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatabaseAdapterConfig.java b/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatabaseAdapterConfig.java index 6f80a70d3c..4ebb2b0d8c 100644 --- a/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatabaseAdapterConfig.java +++ b/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatabaseAdapterConfig.java @@ -68,6 +68,11 @@ public NessieDatabaseAdapterConfig(Provider optionManager) { this.optionManager = optionManager; } + @Override + public boolean validateNamespaces() { + return false; + } + @Override public int getCommitRetries() { return (int) optionManager.get().getOption(COMMIT_RETRIES); diff --git a/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatastoreInstance.java b/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatastoreInstance.java index 719a335b49..49547c0535 100644 --- a/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatastoreInstance.java +++ b/services/nessie/src/main/java/com/dremio/service/nessie/NessieDatastoreInstance.java @@ -36,8 +36,6 @@ public class NessieDatastoreInstance implements DatabaseConnectionProvider 
refNames; private KVStore globalLog; private KVStore commitLog; - private KVStore attachments; - private KVStore attachmentKeys; private KVStore keyList; private DatastoreDbConfig config; @@ -61,8 +59,6 @@ public void initialize() { refNames = kvStoreProvider.get().getStore(NessieRefNamesStoreBuilder.class); globalLog = kvStoreProvider.get().getStore(NessieGlobalLogStoreBuilder.class); commitLog = kvStoreProvider.get().getStore(NessieCommitLogStoreBuilder.class); - attachments = kvStoreProvider.get().getStore(NessieAttachmentsStoreBuilder.class); - attachmentKeys = kvStoreProvider.get().getStore(NessieAttachmentKeysStoreBuilder.class); keyList = kvStoreProvider.get().getStore(NessieKeyListStoreBuilder.class); } @@ -86,14 +82,6 @@ public KVStore getGlobalPointer() { return globalPointer; } - public KVStore getAttachments() { - return attachments; - } - - public KVStore getAttachmentKeys() { - return attachmentKeys; - } - public KVStore getNamedRefHeads() { return namedRefHeads; } diff --git a/services/nessie/src/main/java/com/dremio/service/nessie/NessieService.java b/services/nessie/src/main/java/com/dremio/service/nessie/NessieService.java index 99d0bff247..c610f9a2f4 100644 --- a/services/nessie/src/main/java/com/dremio/service/nessie/NessieService.java +++ b/services/nessie/src/main/java/com/dremio/service/nessie/NessieService.java @@ -15,12 +15,16 @@ */ package com.dremio.service.nessie; +import static org.projectnessie.services.authz.AbstractBatchAccessChecker.NOOP_ACCESS_CHECKER; + +import java.security.Principal; import java.util.List; import java.util.function.Supplier; import javax.inject.Provider; import org.projectnessie.server.store.TableCommitMetaStoreWorker; +import org.projectnessie.services.authz.Authorizer; import org.projectnessie.services.impl.ConfigApiImpl; import org.projectnessie.services.impl.ContentApiImpl; import org.projectnessie.services.impl.TreeApiImpl; @@ -53,12 +57,12 @@ public class NessieService implements Service { private final NessieConfig serverConfig; private final Supplier adapter; private final Supplier versionStoreSupplier; - private final Supplier treeApi; private final TreeService treeService; private final ContentService contentService; private final ConfigService configService; private final Provider optionManagerProvider; private final Supplier isMaster; + private final Principal nessieCommitter = () -> "Embedded Nessie Service"; public NessieService(Provider kvStoreProvider, Provider optionManagerProvider, @@ -74,10 +78,14 @@ public NessieService(Provider kvStoreProvider, this.adapter = Suppliers.memoize(() -> createAdapter(inMemoryBackend)); this.versionStoreSupplier = Suppliers.memoize(() -> new PersistVersionStore(adapter.get(), worker)); - this.treeApi = Suppliers.memoize(() -> new TreeApiImpl(serverConfig, versionStoreSupplier.get(), null, null)); - this.treeService = new TreeService(treeApi); - this.contentService = new ContentService(Suppliers.memoize(() -> new ContentApiImpl(serverConfig, versionStoreSupplier.get(), null, null))); - this.configService = new ConfigService(Suppliers.memoize(() -> new ConfigApiImpl(serverConfig))); + Supplier principalSupplier = () -> nessieCommitter; + Authorizer authorizer = context -> NOOP_ACCESS_CHECKER; + this.treeService = new TreeService(Suppliers.memoize(() -> + new TreeApiImpl(serverConfig, versionStoreSupplier.get(), authorizer, principalSupplier))); + this.contentService = new ContentService(Suppliers.memoize(() -> + new ContentApiImpl(serverConfig, versionStoreSupplier.get(), authorizer, 
principalSupplier))); + this.configService = new ConfigService(Suppliers.memoize(() -> + new ConfigApiImpl(serverConfig, versionStoreSupplier.get(), 2))); this.isMaster = isMaster; } diff --git a/services/nessie/src/test/java/com/dremio/service/nessie/ITCommitLogMaintenance.java b/services/nessie/src/test/java/com/dremio/service/nessie/ITCommitLogMaintenance.java index 3d237e1fb6..a8882551e5 100644 --- a/services/nessie/src/test/java/com/dremio/service/nessie/ITCommitLogMaintenance.java +++ b/services/nessie/src/test/java/com/dremio/service/nessie/ITCommitLogMaintenance.java @@ -17,7 +17,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.projectnessie.versioned.CommitMetaSerializer.METADATA_SERIALIZER; -import static org.projectnessie.versioned.persist.tests.DatabaseAdapterTestUtils.ALWAYS_THROWING_ATTACHMENT_CONSUMER; import java.time.Clock; import java.time.Instant; @@ -36,12 +35,12 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.projectnessie.model.CommitMeta; +import org.projectnessie.model.ContentKey; import org.projectnessie.model.IcebergTable; import org.projectnessie.server.store.TableCommitMetaStoreWorker; import org.projectnessie.versioned.BranchName; import org.projectnessie.versioned.GetNamedRefsParams; import org.projectnessie.versioned.Hash; -import org.projectnessie.versioned.Key; import org.projectnessie.versioned.ReferenceConflictException; import org.projectnessie.versioned.ReferenceInfo; import org.projectnessie.versioned.ReferenceNotFoundException; @@ -61,8 +60,8 @@ import org.projectnessie.versioned.persist.tests.extension.NessieExternalDatabase; import org.projectnessie.versioned.store.DefaultStoreWorker; +import com.dremio.common.SuppressForbidden; import com.google.common.collect.ImmutableMap; -import com.google.protobuf.ByteString; @ExtendWith({DatabaseAdapterExtension.class}) @NessieDbAdapterName(DatastoreDatabaseAdapterFactory.NAME) @@ -111,7 +110,7 @@ private DatastoreDatabaseAdapter adapter() { return (DatastoreDatabaseAdapter) databaseAdapter; } - private void put(Key key, long timestamp) throws ReferenceNotFoundException, ReferenceConflictException { + private void put(ContentKey key, long timestamp) throws ReferenceNotFoundException, ReferenceConflictException { IcebergTable table = IcebergTable.of(key.toString() + "-loc", 1, 2, 3, 4, UUID.randomUUID().toString()); clockMillis.set(timestamp); @@ -119,12 +118,12 @@ private void put(Key key, long timestamp) throws ReferenceNotFoundException, Ref adapter().commit(ImmutableCommitParams.builder() .toBranch(BranchName.of("main")) .commitMetaSerialized(METADATA_SERIALIZER.toBytes(CommitMeta.fromMessage("test-" + key))) - .addPuts(KeyWithBytes.of(key, contentId, DefaultStoreWorker.payloadForContent(table), - DefaultStoreWorker.instance().toStoreOnReferenceState(table, ALWAYS_THROWING_ATTACHMENT_CONSUMER))) + .addPuts(KeyWithBytes.of(key, contentId, (byte) DefaultStoreWorker.payloadForContent(table), + DefaultStoreWorker.instance().toStoreOnReferenceState(table))) .build()); } - private void delete(Key key, long timestamp) throws ReferenceNotFoundException, ReferenceConflictException { + private void delete(ContentKey key, long timestamp) throws ReferenceNotFoundException, ReferenceConflictException { clockMillis.set(timestamp); adapter().commit(ImmutableCommitParams.builder() .toBranch(BranchName.of("main")) @@ -135,23 +134,24 @@ private void delete(Key key, long timestamp) throws ReferenceNotFoundException, @BeforeEach void 
resetRepo() throws ReferenceNotFoundException, ReferenceConflictException { - ReferenceInfo<ByteString> main = adapter().namedRef("main", GetNamedRefsParams.DEFAULT); + ReferenceInfo main = adapter().namedRef("main", GetNamedRefsParams.DEFAULT); adapter().assign(main.getNamedRef(), Optional.empty(), adapter().noAncestorHash()); } - private void validateActiveKeys(Collection<Key> activeKeys) throws ReferenceNotFoundException { - ReferenceInfo<ByteString> main = adapter().namedRef("main", GetNamedRefsParams.DEFAULT); + @SuppressForbidden // This method has to use Nessie's relocated ByteString in method parameters. + private void validateActiveKeys(Collection<ContentKey> activeKeys) throws ReferenceNotFoundException { + ReferenceInfo main = adapter().namedRef("main", GetNamedRefsParams.DEFAULT); try (Stream<KeyListEntry> keys = adapter().keys(main.getHash(), KeyFilterPredicate.ALLOW_ALL)) { assertThat(keys.map(KeyListEntry::getKey)).containsExactlyInAnyOrderElementsOf(activeKeys); } - Map<Key, ContentAndState> values = adapter() + Map<ContentKey, ContentAndState> values = adapter() .values(main.getHash(), activeKeys, KeyFilterPredicate.ALLOW_ALL); activeKeys.forEach(k -> { ContentAndState value = values.get(k); IcebergTable table = (IcebergTable) worker.valueFromStore(value.getPayload(), value.getRefState(), - value::getGlobalState, keys -> Stream.empty()); + value::getGlobalState); assertThat(table.getMetadataLocation()).isEqualTo(k.toString() + "-loc"); }); assertThat(values).hasSize(activeKeys.size()); @@ -159,11 +159,11 @@ private void validateActiveKeys(Collection<Key> activeKeys) throws ReferenceNotF @Test void testKeyListPurge() throws ReferenceNotFoundException, ReferenceConflictException { - Set<Key> activeKeys = new HashSet<>(); + Set<ContentKey> activeKeys = new HashSet<>(); // Generate commits to have one key list for (int i = 0; i < 20; i++) { - Key key = Key.of("test-" + i); + ContentKey key = ContentKey.of("test-" + i); put(key, 100); activeKeys.add(key); } @@ -187,7 +187,7 @@ void testKeyListPurge() throws ReferenceNotFoundException, ReferenceConflictExce // Delete some old keys, but do not cause a new key list to be generated for (int i = 0; i < 8; i++) { - Key key = Key.of("test-" + i); + ContentKey key = ContentKey.of("test-" + i); delete(key, 200); activeKeys.remove(key); } @@ -208,7 +208,7 @@ void testKeyListPurge() throws ReferenceNotFoundException, ReferenceConflictExce // Add more keys to cause a new key list to be generated for (int i = 0; i < 12; i++) { - Key key = Key.of("test2-" + i); + ContentKey key = ContentKey.of("test2-" + i); put(key, 300); activeKeys.add(key); } @@ -230,7 +230,7 @@ void testKeyListPurge() throws ReferenceNotFoundException, ReferenceConflictExce // Delete all keys from the first key list (some of them for the second time) for (int i = 0; i < 20; i++) { - Key key = Key.of("test-" + i); + ContentKey key = ContentKey.of("test-" + i); delete(key, 400); activeKeys.remove(key); } @@ -254,7 +254,7 @@ void testKeyListPurge() throws ReferenceNotFoundException, ReferenceConflictExce void testDryRun() throws ReferenceNotFoundException, ReferenceConflictException { // Generate commits to have two key lists for (int i = 0; i < 40; i++) { - Key key = Key.of("test-" + i); + ContentKey key = ContentKey.of("test-" + i); put(key, 100); } clockMillis.set(150); @@ -280,7 +280,7 @@ void testDryRun() throws ReferenceNotFoundException, ReferenceConflictException void testProgress() throws ReferenceNotFoundException, ReferenceConflictException { // Generate commits to have three key lists for (int i = 0; i < 60; i++) { - Key key = Key.of("test-" + i); + ContentKey key = ContentKey.of("test-" 
+ i); put(key, 100); } clockMillis.set(150); diff --git a/services/options/pom.xml b/services/options/pom.xml index de5dff7498..0c6443831e 100644 --- a/services/options/pom.xml +++ b/services/options/pom.xml @@ -23,7 +23,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-options diff --git a/services/orphanage/pom.xml b/services/orphanage/pom.xml index fd0b2dd141..34083dc5b1 100644 --- a/services/orphanage/pom.xml +++ b/services/orphanage/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-orphanage diff --git a/services/orphanagecleaner/pom.xml b/services/orphanagecleaner/pom.xml index 5f5af1a0e6..5953c16e89 100644 --- a/services/orphanagecleaner/pom.xml +++ b/services/orphanagecleaner/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-orphanagecleaner diff --git a/services/pom.xml b/services/pom.xml index bca5e2ccab..c269171171 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -22,7 +22,7 @@ com.dremio dremio-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 com.dremio.services @@ -50,6 +50,7 @@ maestro namespace nessie-grpc + nessie-proxy nessie nessie-storage-upgrade options diff --git a/services/reindexer/pom.xml b/services/reindexer/pom.xml index de394d176d..5141f93eb9 100644 --- a/services/reindexer/pom.xml +++ b/services/reindexer/pom.xml @@ -22,7 +22,7 @@ dremio-services-parent com.dremio.services - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 4.0.0 diff --git a/services/resourcescheduler/pom.xml b/services/resourcescheduler/pom.xml index c854243a91..f7e801bef3 100644 --- a/services/resourcescheduler/pom.xml +++ b/services/resourcescheduler/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-resourcescheduler Services - Resource Scheduler diff --git a/services/resourcescheduler/src/main/java/com/dremio/resource/QueryCancelTool.java b/services/resourcescheduler/src/main/java/com/dremio/resource/QueryCancelTool.java index 9578cc0271..59066ab62e 100644 --- a/services/resourcescheduler/src/main/java/com/dremio/resource/QueryCancelTool.java +++ b/services/resourcescheduler/src/main/java/com/dremio/resource/QueryCancelTool.java @@ -19,6 +19,8 @@ /** * Interface to be able to cancel running query + * + * Note that this interface is not used when query cancellation is triggered by APIs */ public interface QueryCancelTool { @@ -28,11 +30,11 @@ public interface QueryCancelTool { * @param reason - reason for cancellation * @return */ - boolean cancel(UserBitShared.ExternalId id, String reason); + boolean cancel(UserBitShared.ExternalId id, String reason, boolean runTimeExceeded); QueryCancelTool NO_OP = new QueryCancelTool() { @Override - public boolean cancel(UserBitShared.ExternalId id, String reason) { + public boolean cancel(UserBitShared.ExternalId id, String reason, boolean runTimeExceeded) { return false; } }; diff --git a/services/resourcescheduler/src/main/java/com/dremio/resource/ResourceAllocator.java b/services/resourcescheduler/src/main/java/com/dremio/resource/ResourceAllocator.java index a7f11d591a..c17506f7bf 100644 --- a/services/resourcescheduler/src/main/java/com/dremio/resource/ResourceAllocator.java +++ 
b/services/resourcescheduler/src/main/java/com/dremio/resource/ResourceAllocator.java @@ -53,6 +53,8 @@ ResourceSchedulingResult allocate(final ResourceSchedulingContext queryContext, final ResourceSchedulingObserver resourceSchedulingObserver, final Consumer resourceDecisionConsumer); + void cancel(final ResourceSchedulingContext queryContext); + /** * Get group resource information, used for planning. * @@ -82,6 +84,11 @@ public ResourceSchedulingResult allocate(ResourceSchedulingContext queryContext, throw new UnsupportedOperationException(); } + @Override + public void cancel(ResourceSchedulingContext queryContext) { + throw new UnsupportedOperationException(); + } + @Override public GroupResourceInformation getGroupResourceInformation(OptionManager optionManager, ResourceSchedulingProperties resourceSchedulingProperties) { diff --git a/services/resourcescheduler/src/main/java/com/dremio/resource/basic/BasicResourceAllocator.java b/services/resourcescheduler/src/main/java/com/dremio/resource/basic/BasicResourceAllocator.java index 67ca428c8f..5655d34177 100644 --- a/services/resourcescheduler/src/main/java/com/dremio/resource/basic/BasicResourceAllocator.java +++ b/services/resourcescheduler/src/main/java/com/dremio/resource/basic/BasicResourceAllocator.java @@ -125,6 +125,10 @@ public void onFailure(Throwable throwable) { return resourceSchedulingResult; } + @Override + public void cancel(ResourceSchedulingContext queryContext) { + } + protected QueueType getQueueNameFromSchedulingProperties(final ResourceSchedulingContext queryContext, final ResourceSchedulingProperties resourceSchedulingProperties) { final Double cost = resourceSchedulingProperties.getQueryCost(); diff --git a/services/resourcescheduler/src/main/java/com/dremio/resource/common/ReflectionRoutingManager.java b/services/resourcescheduler/src/main/java/com/dremio/resource/common/ReflectionRoutingManager.java index 70ff2f7e53..b677872acf 100644 --- a/services/resourcescheduler/src/main/java/com/dremio/resource/common/ReflectionRoutingManager.java +++ b/services/resourcescheduler/src/main/java/com/dremio/resource/common/ReflectionRoutingManager.java @@ -20,7 +20,7 @@ */ public interface ReflectionRoutingManager { - default boolean checkQueueExists(String queueName) throws Exception { + default boolean checkQueueExists(String queueNameOrId) throws Exception { throw new UnsupportedOperationException(); } @@ -28,7 +28,7 @@ default boolean checkEngineExists(String engineName) throws Exception { throw new UnsupportedOperationException(); } - default String getQueueIdByName(String queueName) throws Exception { + default String getQueueId(String queueNameOrId) throws Exception { throw new UnsupportedOperationException(); } diff --git a/services/scheduler/pom.xml b/services/scheduler/pom.xml index 7e523edcdc..6398017d6b 100644 --- a/services/scheduler/pom.xml +++ b/services/scheduler/pom.xml @@ -22,7 +22,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-scheduler Services - Task Scheduler diff --git a/services/scheduler/src/main/java/com/dremio/service/scheduler/ModifiableThreadPoolExecutor.java b/services/scheduler/src/main/java/com/dremio/service/scheduler/ModifiableThreadPoolExecutor.java index d428a44cda..0493802d72 100644 --- a/services/scheduler/src/main/java/com/dremio/service/scheduler/ModifiableThreadPoolExecutor.java +++ b/services/scheduler/src/main/java/com/dremio/service/scheduler/ModifiableThreadPoolExecutor.java @@ -42,6 +42,7 
@@ public ModifiableThreadPoolExecutor(ThreadPoolExecutor threadPoolExecutor, Posit this.optionManager = optionManager; } + @Override public synchronized void onChange() { int newPoolSize = (int) optionManager.get().getOption(option); if (currentPoolSize == newPoolSize) { diff --git a/services/script/pom.xml b/services/script/pom.xml index 66e2f3d931..c73cadddba 100644 --- a/services/script/pom.xml +++ b/services/script/pom.xml @@ -23,7 +23,7 @@ com.dremio.services dremio-services-parent - 24.0.0-202302100528110223-3a169b7c + 24.1.0-202306130653310132-d30779f6 dremio-services-script @@ -53,6 +53,10 @@ com.dremio.services dremio-services-users + + io.opentelemetry.instrumentation + opentelemetry-instrumentation-annotations + diff --git a/tools/mongo-test-runner/src/main/java/com/dremio/test/mongo/ImportOptions.java b/services/script/src/main/java/com/dremio/service/script/MaxScriptsLimitReachedException.java similarity index 61% rename from tools/mongo-test-runner/src/main/java/com/dremio/test/mongo/ImportOptions.java rename to services/script/src/main/java/com/dremio/service/script/MaxScriptsLimitReachedException.java index bf0d959d8b..562df9afa5 100644 --- a/tools/mongo-test-runner/src/main/java/com/dremio/test/mongo/ImportOptions.java +++ b/services/script/src/main/java/com/dremio/service/script/MaxScriptsLimitReachedException.java @@ -13,24 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.dremio.test.mongo; + +package com.dremio.service.script; /** - * Standard import options + * MaxScriptsException class */ -public enum ImportOptions implements ImportOption { - /** - * To be set if the data is represented as a JSON array - */ - JSON_ARRAY, - - /** - * To be set if the collection should be dropped before data is imported - */ - DROP_COLLECTION, - - /** - * To be set if documents should be replaced if already present - */ - UPSERT_DOCUMENTS +public class MaxScriptsLimitReachedException extends Exception { + public MaxScriptsLimitReachedException(Long maxNumberOfScriptsPerUser, Long countOfScriptsByCurrentUser) { + super(String.format("Maximum scripts limit per user is reached. Limit %s; Current %s.", maxNumberOfScriptsPerUser, + countOfScriptsByCurrentUser)); + } } diff --git a/services/script/src/main/java/com/dremio/service/script/ScriptService.java b/services/script/src/main/java/com/dremio/service/script/ScriptService.java index 7aab67ddac..2e5685d9eb 100644 --- a/services/script/src/main/java/com/dremio/service/script/ScriptService.java +++ b/services/script/src/main/java/com/dremio/service/script/ScriptService.java @@ -50,8 +50,9 @@ List
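One pattern recurs across this whole patch and is easy to miss in the noise: Nessie's protobuf classes now ship relocated under org.projectnessie.nessie.relocated.protobuf, and every Dremio method whose signature or body must mention them carries @SuppressForbidden to satisfy the forbidden-APIs check. A minimal sketch of that shape, with a hypothetical helper name that is not taken from the patch:

import org.projectnessie.nessie.relocated.protobuf.ByteString;

import com.dremio.common.SuppressForbidden;

@SuppressForbidden // has to use Nessie's relocated ByteString in the method signature
private static ByteString refStateOf(byte[] raw) {
  return ByteString.copyFrom(raw); // relocated class, not com.google.protobuf.ByteString
}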